forked from lix-project/hydra
commit 4cabb37ebd
default.nix (new file, +3)
@@ -0,0 +1,3 @@
(import (fetchTarball https://github.com/edolstra/flake-compat/archive/master.tar.gz) {
  src = builtins.fetchGit ./.;
}).defaultNix
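Note: flake-compat bridges flakes to pre-flake tooling, so plain nix-build keeps working on this repository (and nix-shell, via the shellNix attribute used in shell.nix below). A minimal hardened sketch, assuming you want to pin flake-compat rather than track master; the sha256 below is a placeholder, not a real hash:

    (import (fetchTarball {
      url = "https://github.com/edolstra/flake-compat/archive/master.tar.gz";
      # placeholder; substitute the real hash (e.g. from nix-prefetch-url --unpack)
      sha256 = "0000000000000000000000000000000000000000000000000000";
    }) {
      src = builtins.fetchGit ./.;
    }).defaultNix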
flake.lock (new file, +65)
@@ -0,0 +1,65 @@
{
  "nodes": {
    "nix": {
      "info": {
        "lastModified": 1585573619,
        "narHash": "sha256-QbAxdnDkVvSVgkCNRbt3fUPUSNbeq9y3u7Vek/7Ib44="
      },
      "inputs": {
        "nixpkgs": "nixpkgs"
      },
      "locked": {
        "owner": "NixOS",
        "repo": "nix",
        "rev": "3e7aab81ce6787e01ea4ced5af1fc6a84e523762",
        "type": "github"
      },
      "original": {
        "id": "nix",
        "type": "indirect"
      }
    },
    "nixpkgs": {
      "info": {
        "lastModified": 1585405475,
        "narHash": "sha256-bESW0n4KgPmZ0luxvwJ+UyATrC6iIltVCsGdLiphVeE="
      },
      "locked": {
        "owner": "NixOS",
        "repo": "nixpkgs",
        "rev": "b88ff468e9850410070d4e0ccd68c7011f15b2be",
        "type": "github"
      },
      "original": {
        "id": "nixpkgs",
        "ref": "nixos-20.03-small",
        "type": "indirect"
      }
    },
    "nixpkgs_2": {
      "info": {
        "lastModified": 1585388205,
        "narHash": "sha256-lOXYmCE6FSikoOHr1HFIkNLnA0qdYhe8CxB8rE6+SnE="
      },
      "locked": {
        "owner": "NixOS",
        "repo": "nixpkgs",
        "rev": "b0c285807d6a9f1b7562ec417c24fa1a30ecc31a",
        "type": "github"
      },
      "original": {
        "id": "nixpkgs",
        "ref": "nixos-20.03",
        "type": "indirect"
      }
    },
    "root": {
      "inputs": {
        "nix": "nix",
        "nixpkgs": "nixpkgs_2"
      }
    }
  },
  "root": "root",
  "version": 5
}
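Both the nix and nixpkgs inputs are locked as "type": "indirect", i.e. flake registry aliases rather than explicit URLs; nixpkgs_2 is the nixos-20.03 pin used by Hydra itself, while nix's own nixpkgs input stays on nixos-20.03-small. For illustration, the locked nixpkgs_2 entry is equivalent to declaring the input explicitly:

    inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-20.03";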
flake.nix (new file, +345)
@@ -0,0 +1,345 @@
{
  description = "A Nix-based continuous build system";

  edition = 201909;

  inputs.nixpkgs.url = "nixpkgs/nixos-20.03";

  outputs = { self, nixpkgs, nix }:
    let

      version = "${builtins.readFile ./version}.${builtins.substring 0 8 self.lastModified}.${self.shortRev or "DIRTY"}";

      pkgs = import nixpkgs {
        system = "x86_64-linux";
        overlays = [ self.overlay nix.overlay ];
      };

      # NixOS configuration used for VM tests.
      hydraServer =
        { config, pkgs, ... }:
        { imports = [ self.nixosModules.hydraTest ];

          virtualisation.memorySize = 1024;
          virtualisation.writableStore = true;

          environment.systemPackages = [ pkgs.perlPackages.LWP pkgs.perlPackages.JSON ];

          nix = {
            # Without this nix tries to fetch packages from the default
            # cache.nixos.org which is not reachable from this sandboxed NixOS test.
            binaryCaches = [];
          };
        };

    in rec {

      # A Nixpkgs overlay that provides a 'hydra' package.
      overlay = final: prev: {

        hydra = with final; let

          perlDeps = buildEnv {
            name = "hydra-perl-deps";
            paths = with perlPackages; lib.closePropagation
              [ ModulePluggable
                CatalystActionREST
                CatalystAuthenticationStoreDBIxClass
                CatalystDevel
                CatalystDispatchTypeRegex
                CatalystPluginAccessLog
                CatalystPluginAuthorizationRoles
                CatalystPluginCaptcha
                CatalystPluginSessionStateCookie
                CatalystPluginSessionStoreFastMmap
                CatalystPluginStackTrace
                CatalystPluginUnicodeEncoding
                CatalystTraitForRequestProxyBase
                CatalystViewDownload
                CatalystViewJSON
                CatalystViewTT
                CatalystXScriptServerStarman
                CatalystXRoleApplicator
                CryptRandPasswd
                DBDPg
                DBDSQLite
                DataDump
                DateTime
                DigestSHA1
                EmailMIME
                EmailSender
                FileSlurp
                IOCompress
                IPCRun
                JSON
                JSONAny
                JSONXS
                LWP
                LWPProtocolHttps
                NetAmazonS3
                NetPrometheus
                NetStatsd
                PadWalker
                Readonly
                SQLSplitStatement
                SetScalar
                Starman
                SysHostnameLong
                TermSizeAny
                TestMore
                TextDiff
                TextTable
                XMLSimple
                final.nix
                final.nix.perl-bindings
                git
              ];
          };

        in stdenv.mkDerivation {

          name = "hydra-${version}";

          src = self;

          buildInputs =
            [ makeWrapper autoconf automake libtool unzip nukeReferences pkgconfig sqlite libpqxx
              gitAndTools.topGit mercurial darcs subversion bazaar openssl bzip2 libxslt
              perlDeps perl final.nix
              postgresql95 # for running the tests
              boost
              (if lib.versionAtLeast lib.version "20.03pre"
               then nlohmann_json
               else nlohmann_json.override { multipleHeaders = true; })
            ];

          hydraPath = lib.makeBinPath (
            [ sqlite subversion openssh final.nix coreutils findutils pixz
              gzip bzip2 lzma gnutar unzip git gitAndTools.topGit mercurial darcs gnused bazaar
            ] ++ lib.optionals stdenv.isLinux [ rpm dpkg cdrkit ] );

          configureFlags = [ "--with-docbook-xsl=${docbook_xsl}/xml/xsl/docbook" ];

          shellHook = ''
            PATH=$(pwd)/src/hydra-evaluator:$(pwd)/src/script:$(pwd)/src/hydra-eval-jobs:$(pwd)/src/hydra-queue-runner:$PATH
            PERL5LIB=$(pwd)/src/lib:$PERL5LIB
          '';

          preConfigure = "autoreconf -vfi";

          NIX_LDFLAGS = [ "-lpthread" ];

          enableParallelBuilding = true;

          doCheck = true;

          preCheck = ''
            patchShebangs .
            export LOGNAME=''${LOGNAME:-foo}
          '';

          postInstall = ''
            mkdir -p $out/nix-support

            for i in $out/bin/*; do
                read -n 4 chars < $i
                if [[ $chars =~ ELF ]]; then continue; fi
                wrapProgram $i \
                    --prefix PERL5LIB ':' $out/libexec/hydra/lib:$PERL5LIB \
                    --prefix PATH ':' $out/bin:$hydraPath \
                    --set HYDRA_RELEASE ${version} \
                    --set HYDRA_HOME $out/libexec/hydra \
                    --set NIX_RELEASE ${final.nix.name or "unknown"}
            done
          '';

          dontStrip = true;

          meta.description = "Build of Hydra on ${system}";
          passthru.perlDeps = perlDeps;
        };
      };

      hydraJobs = {

        build.x86_64-linux = packages.x86_64-linux.hydra;

        manual =
          pkgs.runCommand "hydra-manual-${version}" {}
            ''
              mkdir -p $out/share
              cp -prvd ${pkgs.hydra}/share/doc $out/share/

              mkdir $out/nix-support
              echo "doc manual $out/share/doc/hydra" >> $out/nix-support/hydra-build-products
            '';

        tests.install.x86_64-linux =
          with import (nixpkgs + "/nixos/lib/testing.nix") { system = "x86_64-linux"; };
          simpleTest {
            machine = hydraServer;
            testScript =
              ''
                $machine->waitForJob("hydra-init");
                $machine->waitForJob("hydra-server");
                $machine->waitForJob("hydra-evaluator");
                $machine->waitForJob("hydra-queue-runner");
                $machine->waitForOpenPort("3000");
                $machine->succeed("curl --fail http://localhost:3000/");
              '';
          };

        tests.api.x86_64-linux =
          with import (nixpkgs + "/nixos/lib/testing.nix") { system = "x86_64-linux"; };
          simpleTest {
            machine = hydraServer;
            testScript =
              let dbi = "dbi:Pg:dbname=hydra;user=root;"; in
              ''
                $machine->waitForJob("hydra-init");

                # Create an admin account and some other state.
                $machine->succeed
                    ( "su - hydra -c \"hydra-create-user root --email-address 'alice\@example.org' --password foobar --role admin\""
                    , "mkdir /run/jobset /tmp/nix"
                    , "chmod 755 /run/jobset /tmp/nix"
                    , "cp ${./tests/api-test.nix} /run/jobset/default.nix"
                    , "chmod 644 /run/jobset/default.nix"
                    , "chown -R hydra /run/jobset /tmp/nix"
                    );

                $machine->succeed("systemctl stop hydra-evaluator hydra-queue-runner");
                $machine->waitForJob("hydra-server");
                $machine->waitForOpenPort("3000");

                # Run the API tests.
                $machine->mustSucceed("su - hydra -c 'perl -I ${pkgs.hydra.perlDeps}/lib/perl5/site_perl ${./tests/api-test.pl}' >&2");
              '';
          };

        tests.notifications.x86_64-linux =
          with import (nixpkgs + "/nixos/lib/testing.nix") { system = "x86_64-linux"; };
          simpleTest {
            machine = { pkgs, ... }: {
              imports = [ hydraServer ];
              services.hydra-dev.extraConfig = ''
                <influxdb>
                  url = http://127.0.0.1:8086
                  db = hydra
                </influxdb>
              '';
              services.influxdb.enable = true;
            };
            testScript = ''
              $machine->waitForJob("hydra-init");

              # Create an admin account and some other state.
              $machine->succeed
                  ( "su - hydra -c \"hydra-create-user root --email-address 'alice\@example.org' --password foobar --role admin\""
                  , "mkdir /run/jobset"
                  , "chmod 755 /run/jobset"
                  , "cp ${./tests/api-test.nix} /run/jobset/default.nix"
                  , "chmod 644 /run/jobset/default.nix"
                  , "chown -R hydra /run/jobset"
                  );

              # Wait until InfluxDB can receive web requests
              $machine->waitForJob("influxdb");
              $machine->waitForOpenPort("8086");

              # Create an InfluxDB database where hydra will write to
              $machine->succeed(
                "curl -XPOST 'http://127.0.0.1:8086/query' \\
                --data-urlencode 'q=CREATE DATABASE hydra'");

              # Wait until hydra-server can receive HTTP requests
              $machine->waitForJob("hydra-server");
              $machine->waitForOpenPort("3000");

              # Setup the project and jobset
              $machine->mustSucceed(
                "su - hydra -c 'perl -I ${pkgs.hydra.perlDeps}/lib/perl5/site_perl ${./tests/setup-notifications-jobset.pl}' >&2");

              # Wait until hydra has built the job and the InfluxDBNotification
              # plugin has uploaded its notification to InfluxDB
              $machine->waitUntilSucceeds(
                "curl -s -H 'Accept: application/csv' \\
                -G 'http://127.0.0.1:8086/query?db=hydra' \\
                --data-urlencode 'q=SELECT * FROM hydra_build_status' | grep success");
            '';
          };

        container = nixosConfigurations.container.config.system.build.toplevel;
      };

      checks.x86_64-linux.build = hydraJobs.build.x86_64-linux;
      checks.x86_64-linux.install = hydraJobs.tests.install.x86_64-linux;

      packages.x86_64-linux.hydra = pkgs.hydra;
      defaultPackage.x86_64-linux = pkgs.hydra;

      nixosModules.hydra = {
        imports = [ ./hydra-module.nix ];
        nixpkgs.overlays = [ self.overlay nix.overlay ];
      };

      nixosModules.hydraTest = {
        imports = [ self.nixosModules.hydra ];

        services.hydra-dev.enable = true;
        services.hydra-dev.hydraURL = "http://hydra.example.org";
        services.hydra-dev.notificationSender = "admin@hydra.example.org";

        systemd.services.hydra-send-stats.enable = false;

        services.postgresql.enable = true;
        services.postgresql.package = pkgs.postgresql95;

        # The following is to work around the following error from hydra-server:
        #   [error] Caught exception in engine "Cannot determine local time zone"
        time.timeZone = "UTC";

        nix.extraOptions = ''
          allowed-uris = https://github.com/
        '';
      };

      nixosModules.hydraProxy = {
        services.httpd = {
          enable = true;
          adminAddr = "hydra-admin@example.org";
          extraConfig = ''
            <Proxy *>
              Order deny,allow
              Allow from all
            </Proxy>

            ProxyRequests Off
            ProxyPreserveHost On
            ProxyPass /apache-errors !
            ErrorDocument 503 /apache-errors/503.html
            ProxyPass / http://127.0.0.1:3000/ retry=5 disablereuse=on
            ProxyPassReverse / http://127.0.0.1:3000/
          '';
        };
      };

      nixosConfigurations.container = nixpkgs.lib.nixosSystem {
        system = "x86_64-linux";
        modules =
          [ self.nixosModules.hydraTest
            self.nixosModules.hydraProxy
            { system.configurationRevision = self.rev;

              boot.isContainer = true;
              networking.useDHCP = false;
              networking.firewall.allowedTCPPorts = [ 80 ];
              networking.hostName = "hydra";

              services.hydra-dev.useSubstitutes = true;
            }
          ];
      };

    };
}
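A sketch of how a downstream configuration could consume these outputs (the input label hydra, the flake URL, and the configuration name ci are illustrative, not part of this commit):

    {
      inputs.nixpkgs.url = "nixpkgs/nixos-20.03";
      inputs.hydra.url = "github:NixOS/hydra";

      outputs = { self, nixpkgs, hydra }: {
        nixosConfigurations.ci = nixpkgs.lib.nixosSystem {
          system = "x86_64-linux";
          modules = [
            hydra.nixosModules.hydra   # imports hydra-module.nix and applies the overlays
            { services.hydra-dev.enable = true; }
          ];
        };
      };
    }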
hydra-module.nix (modified)
@@ -64,7 +64,7 @@ in

     package = mkOption {
       type = types.path;
-      #default = pkgs.hydra;
+      default = pkgs.hydra;
       description = "The Hydra package.";
     };

@@ -218,8 +218,6 @@ in

     nix.trustedUsers = [ "hydra-queue-runner" ];

-    services.hydra-dev.package = mkDefault ((import ./release.nix {}).build.x86_64-linux);
-
     services.hydra-dev.extraConfig =
       ''
         using_frontend_proxy = 1
release.nix (deleted file, -333)
@@ -1,333 +0,0 @@
{ hydraSrc ? builtins.fetchGit ./.
, nixpkgs ? builtins.fetchTarball https://github.com/NixOS/nixpkgs/archive/release-19.09.tar.gz
, officialRelease ? false
, shell ? false
}:

with import (nixpkgs + "/lib");

let

  pkgs = import nixpkgs {};

  genAttrs' = genAttrs [ "x86_64-linux" /* "i686-linux" */ ];

  hydraServer = hydraPkg:
    { config, pkgs, ... }:
    { imports = [ ./hydra-module.nix ];

      virtualisation.memorySize = 1024;
      virtualisation.writableStore = true;

      services.hydra-dev.enable = true;
      services.hydra-dev.package = hydraPkg;
      services.hydra-dev.hydraURL = "http://hydra.example.org";
      services.hydra-dev.notificationSender = "admin@hydra.example.org";

      services.postgresql.enable = true;
      services.postgresql.package = pkgs.postgresql95;

      environment.systemPackages = [ pkgs.perlPackages.LWP pkgs.perlPackages.JSON ];

      # The following is to work around the following error from hydra-server:
      #   [error] Caught exception in engine "Cannot determine local time zone"
      time.timeZone = "UTC";

      nix = {
        # The following is to work around: https://github.com/NixOS/hydra/pull/432
        buildMachines = [
          { hostName = "localhost";
            system = "x86_64-linux";
          }
        ];
        # Without this nix tries to fetch packages from the default
        # cache.nixos.org which is not reachable from this sandboxed NixOS test.
        binaryCaches = [];
      };
    };

  version = builtins.readFile ./version + "." + toString hydraSrc.revCount + "." + hydraSrc.rev;

in

rec {

  build = genAttrs' (system:
    let pkgs = import nixpkgs { inherit system; }; in

    with pkgs;

    let

      nix = pkgs.nixUnstable or pkgs.nix;

      perlDeps = buildEnv {
        name = "hydra-perl-deps";
        paths = with perlPackages;
          [ ModulePluggable
            CatalystActionREST
            CatalystAuthenticationStoreDBIxClass
            CatalystDevel
            CatalystDispatchTypeRegex
            CatalystPluginAccessLog
            CatalystPluginAuthorizationRoles
            CatalystPluginCaptcha
            CatalystPluginSessionStateCookie
            CatalystPluginSessionStoreFastMmap
            CatalystPluginStackTrace
            CatalystPluginUnicodeEncoding
            CatalystTraitForRequestProxyBase
            CatalystViewDownload
            CatalystViewJSON
            CatalystViewTT
            CatalystXScriptServerStarman
            CatalystXRoleApplicator
            CryptRandPasswd
            DBDPg
            DBDSQLite
            DataDump
            DateTime
            DigestSHA1
            EmailMIME
            EmailSender
            FileSlurp
            IOCompress
            IPCRun
            JSON
            JSONAny
            JSONXS
            LWP
            LWPProtocolHttps
            NetAmazonS3
            NetPrometheus
            NetStatsd
            PadWalker
            Readonly
            SQLSplitStatement
            SetScalar
            Starman
            SysHostnameLong
            TermSizeAny
            TestMore
            TextDiff
            TextTable
            XMLSimple
            nix
            nix.perl-bindings
            git
            boehmgc
          ];
      };

    in

    releaseTools.nixBuild {
      name = "hydra-${version}";

      src = if shell then null else hydraSrc;

      buildInputs =
        [ makeWrapper autoconf automake libtool unzip nukeReferences pkgconfig sqlite libpqxx
          gitAndTools.topGit mercurial darcs subversion bazaar openssl bzip2 libxslt
          perlDeps perl nix
          postgresql95 # for running the tests
          boost
          (nlohmann_json.override { multipleHeaders = true; })
        ];

      hydraPath = lib.makeBinPath (
        [ sqlite subversion openssh nix coreutils findutils pixz
          gzip bzip2 lzma gnutar unzip git gitAndTools.topGit mercurial darcs gnused bazaar
        ] ++ lib.optionals stdenv.isLinux [ rpm dpkg cdrkit ] );

      postUnpack = optionalString (!shell) ''
        # Clean up when building from a working tree.
        (cd $sourceRoot && (git ls-files -o --directory | xargs -r rm -rfv)) || true
      '';

      configureFlags = [ "--with-docbook-xsl=${docbook_xsl}/xml/xsl/docbook" ];

      shellHook = ''
        PATH=$(pwd)/src/hydra-evaluator:$(pwd)/src/script:$(pwd)/src/hydra-eval-jobs:$(pwd)/src/hydra-queue-runner:$PATH
        ${lib.optionalString shell "PERL5LIB=$(pwd)/src/lib:$PERL5LIB"}
      '';

      preConfigure = "autoreconf -vfi";

      NIX_LDFLAGS = [ "-lpthread" ];

      enableParallelBuilding = true;

      preCheck = ''
        patchShebangs .
        export LOGNAME=''${LOGNAME:-foo}
      '';

      postInstall = ''
        mkdir -p $out/nix-support

        for i in $out/bin/*; do
            read -n 4 chars < $i
            if [[ $chars =~ ELF ]]; then continue; fi
            wrapProgram $i \
                --prefix PERL5LIB ':' $out/libexec/hydra/lib:$PERL5LIB \
                --prefix PATH ':' $out/bin:$hydraPath \
                --set HYDRA_RELEASE ${version} \
                --set HYDRA_HOME $out/libexec/hydra \
                --set NIX_RELEASE ${nix.name or "unknown"}
        done
      ''; # */

      dontStrip = true;

      meta.description = "Build of Hydra on ${system}";
      passthru.perlDeps = perlDeps;
    });

  manual = pkgs.runCommand "hydra-manual-${version}"
    { build = build.x86_64-linux;
    }
    ''
      mkdir -p $out/share
      cp -prvd $build/share/doc $out/share/

      mkdir $out/nix-support
      echo "doc manual $out/share/doc/hydra" >> $out/nix-support/hydra-build-products
    '';

  tests.install = genAttrs' (system:
    with import (nixpkgs + "/nixos/lib/testing.nix") { inherit system; };
    simpleTest {
      machine = hydraServer build.${system};
      testScript =
        ''
          $machine->waitForJob("hydra-init");
          $machine->waitForJob("hydra-server");
          $machine->waitForJob("hydra-evaluator");
          $machine->waitForJob("hydra-queue-runner");
          $machine->waitForOpenPort("3000");
          $machine->succeed("curl --fail http://localhost:3000/");
        '';
    });

  tests.api = genAttrs' (system:
    with import (nixpkgs + "/nixos/lib/testing.nix") { inherit system; };
    simpleTest {
      machine = hydraServer build.${system};
      testScript =
        let dbi = "dbi:Pg:dbname=hydra;user=root;"; in
        ''
          $machine->waitForJob("hydra-init");

          # Create an admin account and some other state.
          $machine->succeed
              ( "su - hydra -c \"hydra-create-user root --email-address 'alice\@example.org' --password foobar --role admin\""
              , "mkdir /run/jobset /tmp/nix"
              , "chmod 755 /run/jobset /tmp/nix"
              , "cp ${./tests/api-test.nix} /run/jobset/default.nix"
              , "chmod 644 /run/jobset/default.nix"
              , "chown -R hydra /run/jobset /tmp/nix"
              );

          $machine->succeed("systemctl stop hydra-evaluator hydra-queue-runner");
          $machine->waitForJob("hydra-server");
          $machine->waitForOpenPort("3000");

          # Run the API tests.
          $machine->mustSucceed("su - hydra -c 'perl -I ${build.${system}.perlDeps}/lib/perl5/site_perl ${./tests/api-test.pl}' >&2");
        '';
    });

  tests.notifications = genAttrs' (system:
    with import (nixpkgs + "/nixos/lib/testing.nix") { inherit system; };
    simpleTest {
      machine = { pkgs, ... }: {
        imports = [ (hydraServer build.${system}) ];
        services.hydra-dev.extraConfig = ''
          <influxdb>
            url = http://127.0.0.1:8086
            db = hydra
          </influxdb>
        '';
        services.influxdb.enable = true;
      };
      testScript = ''
        $machine->waitForJob("hydra-init");

        # Create an admin account and some other state.
        $machine->succeed
            ( "su - hydra -c \"hydra-create-user root --email-address 'alice\@example.org' --password foobar --role admin\""
            , "mkdir /run/jobset"
            , "chmod 755 /run/jobset"
            , "cp ${./tests/api-test.nix} /run/jobset/default.nix"
            , "chmod 644 /run/jobset/default.nix"
            , "chown -R hydra /run/jobset"
            );

        # Wait until InfluxDB can receive web requests
        $machine->waitForJob("influxdb");
        $machine->waitForOpenPort("8086");

        # Create an InfluxDB database where hydra will write to
        $machine->succeed(
          "curl -XPOST 'http://127.0.0.1:8086/query' \\
          --data-urlencode 'q=CREATE DATABASE hydra'");

        # Wait until hydra-server can receive HTTP requests
        $machine->waitForJob("hydra-server");
        $machine->waitForOpenPort("3000");

        # Setup the project and jobset
        $machine->mustSucceed(
          "su - hydra -c 'perl -I ${build.${system}.perlDeps}/lib/perl5/site_perl ${./tests/setup-notifications-jobset.pl}' >&2");

        # Wait until hydra has built the job and the InfluxDBNotification
        # plugin has uploaded its notification to InfluxDB
        $machine->waitUntilSucceeds(
          "curl -s -H 'Accept: application/csv' \\
          -G 'http://127.0.0.1:8086/query?db=hydra' \\
          --data-urlencode 'q=SELECT * FROM hydra_build_status' | grep success");
      '';
    });

  /*
  tests.s3backup = genAttrs' (system:
    with import (nixpkgs + "/nixos/lib/testing.nix") { inherit system; };
    let hydra = build.${system}
    simpleTest {
      machine =
        { config, pkgs, ... }:
        { services.postgresql.enable = true;
          services.postgresql.package = pkgs.postgresql95;
          environment.systemPackages = [ hydra pkgs.rubyLibs.fakes3 ];
          virtualisation.memorySize = 2047;
          boot.kernelPackages = pkgs.linuxPackages_3_10;
          virtualisation.writableStore = true;
          networking.extraHosts = ''
            127.0.0.1 hydra.s3.amazonaws.com
          '';
        };

      testScript =
        ''
          $machine->waitForJob("postgresql");

          # Initialise the database and the state.
          $machine->succeed
              ( "createdb -O root hydra"
              , "psql hydra -f ${hydra}/libexec/hydra/sql/hydra-postgresql.sql"
              , "mkdir /var/lib/hydra"
              , "mkdir /tmp/jobs"
              , "cp ${./tests/s3-backup-test.pl} /tmp/s3-backup-test.pl"
              , "cp ${./tests/api-test.nix} /tmp/jobs/default.nix"
              );

          # start fakes3
          $machine->succeed("fakes3 --root /tmp/s3 --port 80 &>/dev/null &");
          $machine->waitForOpenPort("80");

          $machine->succeed("cd /tmp && LOGNAME=root AWS_ACCESS_KEY_ID=foo AWS_SECRET_ACCESS_KEY=bar HYDRA_DBI='dbi:Pg:dbname=hydra;user=root;' HYDRA_CONFIG=${./tests/s3-backup-test.config} perl -I ${hydra}/libexec/hydra/lib -I ${hydra.perlDeps}/lib/perl5/site_perl ./s3-backup-test.pl >&2");
        '';
    });
  */
}
shell.nix (modified)
@@ -1 +1,3 @@
-(import ./release.nix { shell = true; }).build.x86_64-linux
+(import (fetchTarball https://github.com/edolstra/flake-compat/archive/master.tar.gz) {
+  src = builtins.fetchGit ./.;
+}).shellNix
src/hydra-eval-jobs/hydra-eval-jobs.cc (modified)
@@ -9,6 +9,8 @@
 #include "get-drvs.hh"
 #include "globals.hh"
 #include "common-eval-args.hh"
+#include "flake/flakeref.hh"
+#include "flake/flake.hh"
 #include "attr-path.hh"
 #include "derivations.hh"

@@ -28,6 +30,7 @@ static size_t maxMemorySize
 struct MyArgs : MixEvalArgs, MixCommonArgs
 {
     Path releaseExpr;
+    bool flake = false;
     bool dryRun = false;

     MyArgs() : MixCommonArgs("hydra-eval-jobs")

@@ -51,6 +54,11 @@ struct MyArgs : MixEvalArgs, MixCommonArgs
             .description("don't create store derivations")
             .set(&dryRun, true);

+        mkFlag()
+            .longName("flake")
+            .description("build a flake")
+            .set(&flake, true);
+
         expectArg("expr", &releaseExpr);
     }
 };
@@ -89,7 +97,37 @@ static void worker(
     AutoCloseFD & from)
 {
     Value vTop;

+    if (myArgs.flake) {
+        using namespace flake;
+
+        auto flakeRef = parseFlakeRef(myArgs.releaseExpr);
+
+        auto vFlake = state.allocValue();
+
+        auto lockedFlake = lockFlake(state, flakeRef,
+            LockFlags {
+                .updateLockFile = false,
+                .useRegistries = false,
+                .allowMutable = false,
+            });
+
+        callFlake(state, lockedFlake, *vFlake);
+
+        auto vOutputs = vFlake->attrs->get(state.symbols.create("outputs"))->value;
+        state.forceValue(*vOutputs);
+
+        auto aHydraJobs = vOutputs->attrs->get(state.symbols.create("hydraJobs"));
+        if (!aHydraJobs)
+            aHydraJobs = vOutputs->attrs->get(state.symbols.create("checks"));
+        if (!aHydraJobs)
+            throw Error("flake '%s' does not provide any Hydra jobs or checks", flakeRef);
+
+        vTop = *aHydraJobs->value;
+
+    } else {
+        state.evalFile(lookupFileArg(state, myArgs.releaseExpr), vTop);
+    }

     auto vRoot = state.allocValue();
     state.autoCallFunction(autoArgs, vTop, *vRoot);
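So the worker looks up the flake's hydraJobs output and falls back to checks. A minimal sketch of a flake that this code path can evaluate (the job name hello is illustrative):

    {
      edition = 201909;

      outputs = { self, nixpkgs }: {
        hydraJobs.hello =
          (import nixpkgs { system = "x86_64-linux"; }).hello;
      };
    }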
@@ -109,7 +147,7 @@ static void worker(
     nlohmann::json reply;

     try {
-        auto vTmp = findAlongAttrPath(state, attrPath, autoArgs, *vRoot);
+        auto vTmp = findAlongAttrPath(state, attrPath, autoArgs, *vRoot).first;

         auto v = state.allocValue();
         state.autoCallFunction(autoArgs, *vTmp, *v);
@@ -139,23 +177,23 @@ static void worker(

         /* If this is an aggregate, then get its constituents. */
         auto a = v->attrs->get(state.symbols.create("_hydraAggregate"));
-        if (a && state.forceBool(*(*a)->value, *(*a)->pos)) {
+        if (a && state.forceBool(*a->value, *a->pos)) {
             auto a = v->attrs->get(state.symbols.create("constituents"));
             if (!a)
                 throw EvalError("derivation must have a ‘constituents’ attribute");

             PathSet context;
-            state.coerceToString(*(*a)->pos, *(*a)->value, context, true, false);
+            state.coerceToString(*a->pos, *a->value, context, true, false);
             for (auto & i : context)
                 if (i.at(0) == '!') {
                     size_t index = i.find("!", 1);
                     job["constituents"].push_back(string(i, index + 1));
                 }

-            state.forceList(*(*a)->value, *(*a)->pos);
-            for (unsigned int n = 0; n < (*a)->value->listSize(); ++n) {
-                auto v = (*a)->value->listElems()[n];
+            state.forceList(*a->value, *a->pos);
+            for (unsigned int n = 0; n < a->value->listSize(); ++n) {
+                auto v = a->value->listElems()[n];
                 state.forceValue(*v);
                 if (v->type == tString)
                     job["namedConstituents"].push_back(state.forceStringNoCtx(*v));
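For context: _hydraAggregate is the attribute set by Nixpkgs' releaseTools.aggregate, and string-valued constituents land in namedConstituents to be resolved to job names later. A sketch of such an aggregate job (job names illustrative):

    # inside a hydraJobs attribute set; releaseTools.aggregate sets _hydraAggregate = true
    tested = pkgs.releaseTools.aggregate {
      name = "hydra-tested";
      constituents = [ "build.x86_64-linux" "manual" ];
    };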
@@ -245,6 +283,10 @@ int main(int argc, char * * argv)
        to the environment. */
     evalSettings.restrictEval = true;

+    /* When building a flake, use pure evaluation (no access to
+       'getEnv', 'currentSystem' etc.) */
+    evalSettings.pureEval = myArgs.flake;
+
     if (myArgs.dryRun) settings.readOnlyMode = true;

     if (myArgs.releaseExpr == "") throw UsageError("no expression specified");
src/hydra-evaluator/hydra-evaluator.cc (modified)
@@ -103,7 +103,7 @@ struct Evaluator
     }

     if (evalOne && seen.empty()) {
-        printError("the specified jobset does not exist");
+        printError("the specified jobset does not exist or is disabled");
         std::_Exit(1);
     }

@@ -458,14 +458,15 @@ int main(int argc, char * * argv)
         return true;
     });

-    if (unlock)
-        evaluator.unlock();
-    else {
-        if (!args.empty()) {
-            if (args.size() != 2) throw UsageError("Syntax: hydra-evaluator [<project> <jobset>]");
-            evaluator.evalOne = JobsetName(args[0], args[1]);
-        }
-        evaluator.run();
-    }
+    if (!args.empty()) {
+        if (args.size() != 2) throw UsageError("Syntax: hydra-evaluator [<project> <jobset>]");
+        evaluator.evalOne = JobsetName(args[0], args[1]);
+    }
+
+    if (unlock)
+        evaluator.unlock();
+    else
+        evaluator.run();
 });
src/lib/Hydra/Controller/Jobset.pm (modified)
@@ -223,7 +223,19 @@ sub updateJobset {
     error($c, "Cannot rename jobset to ‘$jobsetName’ since that identifier is already taken.")
         if $jobsetName ne $oldName && defined $c->stash->{project}->jobsets->find({ name => $jobsetName });

-    my ($nixExprPath, $nixExprInput) = nixExprPathFromParams $c;
+    my $type = int($c->stash->{params}->{"type"}) // 0;
+
+    my ($nixExprPath, $nixExprInput);
+    my $flake;
+
+    if ($type == 0) {
+        ($nixExprPath, $nixExprInput) = nixExprPathFromParams $c;
+    } elsif ($type == 1) {
+        $flake = trim($c->stash->{params}->{"flakeref"});
+        error($c, "Invalid flake URI ‘$flake’.") if $flake !~ /^[a-zA-Z]/;
+    } else {
+        error($c, "Invalid jobset type.");
+    }

     my $enabled = int($c->stash->{params}->{enabled});
     die if $enabled < 0 || $enabled > 3;

@@ -246,6 +258,8 @@ sub updateJobset {
         , checkinterval => $checkinterval
         , triggertime => ($enabled && $checkinterval > 0) ? $jobset->triggertime // time() : undef
         , schedulingshares => $shares
+        , type => $type
+        , flake => $flake
         });

     $jobset->project->jobsetrenames->search({ from_ => $jobsetName })->delete;

@@ -255,6 +269,7 @@ sub updateJobset {
     # Set the inputs of this jobset.
     $jobset->jobsetinputs->delete;

+    if ($type == 0) {
     foreach my $name (keys %{$c->stash->{params}->{inputs}}) {
         my $inputData = $c->stash->{params}->{inputs}->{$name};
         my $type = $inputData->{type};

@@ -273,6 +288,7 @@ sub updateJobset {
         $value = checkInputValue($c, $name, $type, $value);
         $input->jobsetinputalts->create({altnr => 0, value => $value});
     }
+    }
 }
src/lib/Hydra/Helper/AddBuilds.pm (modified)
@@ -30,6 +30,8 @@ sub updateDeclarativeJobset {
     my @allowed_keys = qw(
         enabled
         hidden
+        type
+        flake
         description
         nixexprinput
         nixexprpath
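With type and flake whitelisted, a declarative .jobsets job can now emit flake-based jobsets. A sketch of one entry of such a spec, written as the Nix attribute set that would be rendered to JSON (the jobset name, flake URI, and exact set of required keys are illustrative assumptions):

    {
      "patchelf-flake" = {
        enabled = 1;
        hidden = false;
        type = 1;                          # 1 = flake, 0 = legacy Nix expression
        flake = "github:NixOS/patchelf";
        description = "flake-based jobset";
        checkinterval = 300;
      };
    }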
src/root/build.tt (modified)
@@ -120,7 +120,7 @@ END;
       <b class="caret"></b>
     </a>
     <ul class="dropdown-menu">
-      [% IF build.nixexprinput %]
+      [% IF build.nixexprinput || eval.flake %]
         <li><a href="#reproduce" data-toggle="modal">Reproduce locally</a></li>
       [% END %]
       [% IF c.user_exists %]
@@ -530,18 +530,33 @@ END;

       <div class="modal-body">

-        <p>You can reproduce this build on your own machine by downloading
-        <a [% HTML.attributes(href => url) %]>a script</a> that checks out
-        all inputs of the build and then invokes Nix to perform the build.
-        This script requires that you have Nix on your system.</p>
+        [% IF eval.flake %]
+
+          <p>If you have <a href='https://nixos.org/nix/download.html'>Nix
+          installed</a>, you can reproduce this build on your own machine by
+          running the following command:</p>
+
+          <pre>
+          <span class="shell-prompt"># </span>nix build [% HTML.escape(eval.flake) %]#hydraJobs.[% HTML.escape(job.name) %]
+          </pre>
+
+        [% ELSE %]
+
+          <p>If you have <a href='https://nixos.org/nix/download.html'>Nix
+          installed</a>, you can reproduce this build on your own machine by
+          downloading <a [% HTML.attributes(href => url) %]>a script</a>
+          that checks out all inputs of the build and then invokes Nix to
+          perform the build.</p>
+
+          <p>To download and execute the script from the command line, run the
+          following command:</p>
+
+          <pre>
-          <span class="shell-prompt">$ </span>curl <a [% HTML.attributes(href => url) %]>[% HTML.escape(url) %]</a> | bash
+          <span class="shell-prompt"># </span>curl <a [% HTML.attributes(href => url) %]>[% HTML.escape(url) %]</a> | bash
+          </pre>
+
+        [% END %]

       </div>

       <div class="modal-footer">
|
|||
[% END %]
|
||||
|
||||
[% BLOCK renderJobsetInputs %]
|
||||
<table class="table table-striped table-condensed">
|
||||
<table class="table table-striped table-condensed show-on-legacy">
|
||||
<thead>
|
||||
<tr><th></th><th>Input name</th><th>Type</th><th style="width: 50%">Value</th><th>Notify committers</th></tr>
|
||||
</thead>
|
||||
|
@@ -97,6 +97,24 @@
         </div>

+        <div class="control-group">
+          <label class="control-label">Type</label>
+          <div class="controls">
+            <div class="btn-group" data-toggle="buttons-radio">
+              <input type="hidden" id="type" name="type" value="[% jobset.type %]" />
+              <button type="button" class="btn" value="1" id="type-flake">Flake</button>
+              <button type="button" class="btn" value="0" id="type-legacy">Legacy</button>
+            </div>
+          </div>
+        </div>
+
+        <div class="control-group show-on-flake">
+          <label class="control-label">Flake URI</label>
+          <div class="controls">
+            <input type="text" class="span3" name="flakeref" [% HTML.attributes(value => jobset.flake) %]/>
+          </div>
+        </div>
+
         <div class="control-group show-on-legacy">
           <label class="control-label">Nix expression</label>
           <div class="controls">
             <input type="text" class="span3" name="nixexprpath" [% HTML.attributes(value => jobset.nixexprpath) %]/>
@@ -168,6 +186,21 @@
     $(document).ready(function() {
       var id = 0;

+      function update() {
+        if ($("#type").val() == 0) {
+          $(".show-on-legacy").show();
+          $(".show-on-flake").hide();
+        } else {
+          $(".show-on-legacy").hide();
+          $(".show-on-flake").show();
+        }
+      }
+
+      $("#type-flake").click(function() { update(); });
+      $("#type-legacy").click(function() { update(); });
+
+      update();
+
       $(".add-input").click(function() {
         var newid = "input-" + id++;
         var x = $("#input-template").clone(true).attr("id", "").insertBefore($(this).parents("tr")).show();
src/root/edit-project.tt (modified)
@@ -8,7 +8,7 @@
   <div class="control-group">
     <div class="controls">
       <label class="checkbox">
-        <input type="checkbox" name="enabled" [% IF project.enabled; 'checked="checked"'; END %]/>Enabled
+        <input type="checkbox" name="enabled" [% IF create || project.enabled; 'checked="checked"'; END %]/>Enabled
       </label>
     </div>
     <div class="controls">
src/root/jobset-eval.tt (modified)
@@ -18,7 +18,8 @@
   </ul>
 </div>

-<p>This evaluation was performed on [% INCLUDE renderDateTime
+<p>This evaluation was performed [% IF eval.flake %]from the flake
+<tt>[%HTML.escape(eval.flake)%]</tt>[%END%] on [% INCLUDE renderDateTime
 timestamp=eval.timestamp %]. Fetching the dependencies took [%
 eval.checkouttime %]s and evaluation took [% eval.evaltime %]s.</p>
src/root/jobset.tt (modified)
@@ -135,6 +135,15 @@
       <th>Description:</th>
       <td>[% HTML.escape(jobset.description) %]</td>
     </tr>
+    [% IF jobset.type == 1 %]
+      <tr>
+        <th>Flake URI:</th>
+        <td>
+          <tt>[% HTML.escape(jobset.flake) %]</tt>
+        </td>
+      </tr>
+    [% END %]
+    [% IF jobset.type == 0 %]
     <tr>
       <th>Nix expression:</th>
       <td>

@@ -142,6 +151,7 @@
         <tt>[% HTML.escape(jobset.nixexprinput) %]</tt>
       </td>
     </tr>
+    [% END %]
     <tr>
       <th>Check interval:</th>
       <td>[% jobset.checkinterval || "<em>disabled</em>" %]</td>

@@ -166,7 +176,9 @@
     </tr>
   </table>

+  [% IF jobset.type == 0 %]
   [% INCLUDE renderJobsetInputs %]
+  [% END %]
 </div>

 [% INCLUDE makeLazyTab tabName="tabs-jobs" uri=c.uri_for('/jobset' project.name jobset.name "jobs-tab") %]
src/script/hydra-eval-jobset (modified)
@@ -328,16 +328,25 @@ sub inputsToArgs {


 sub evalJobs {
-    my ($inputInfo, $nixExprInputName, $nixExprPath) = @_;
+    my ($inputInfo, $nixExprInputName, $nixExprPath, $flakeRef) = @_;

-    my $nixExprInput = $inputInfo->{$nixExprInputName}->[0]
-        or die "cannot find the input containing the job expression\n";
+    my @cmd;

-    my @cmd = ("hydra-eval-jobs",
-               "<" . $nixExprInputName . "/" . $nixExprPath . ">",
-               "--gc-roots-dir", getGCRootsDir,
-               "-j", 1,
-               inputsToArgs($inputInfo));
+    if (defined $flakeRef) {
+        @cmd = ("hydra-eval-jobs",
+                "--flake", $flakeRef,
+                "--gc-roots-dir", getGCRootsDir,
+                "--max-jobs", 1);
+    } else {
+        my $nixExprInput = $inputInfo->{$nixExprInputName}->[0]
+            or die "cannot find the input containing the job expression\n";
+
+        @cmd = ("hydra-eval-jobs",
+                "<" . $nixExprInputName . "/" . $nixExprPath . ">",
+                "--gc-roots-dir", getGCRootsDir,
+                "--max-jobs", 1,
+                inputsToArgs($inputInfo));
+    }

     if (defined $ENV{'HYDRA_DEBUG'}) {
         sub escape {
@@ -356,7 +365,7 @@ sub evalJobs {

     print STDERR "$stderr";

-    return (decode_json($jobsJSON), $nixExprInput);
+    return decode_json($jobsJSON);
 }


@@ -374,7 +383,7 @@ sub getPrevJobsetEval {

 # Check whether to add the build described by $buildInfo.
 sub checkBuild {
-    my ($db, $jobset, $inputInfo, $nixExprInput, $buildInfo, $buildMap, $prevEval, $jobOutPathMap, $plugins) = @_;
+    my ($db, $jobset, $inputInfo, $buildInfo, $buildMap, $prevEval, $jobOutPathMap, $plugins) = @_;

     my @outputNames = sort keys %{$buildInfo->{outputs}};
     die unless scalar @outputNames;
@@ -577,6 +586,16 @@ sub checkJobsetWrapped {
     };
     my $fetchError = $@;

+    my $flakeRef = $jobset->flake;
+    if (defined $flakeRef) {
+        (my $res, my $json, my $stderr) = captureStdoutStderr(
+            600, "nix", "flake", "info", "--tarball-ttl", 0, "--json", "--", $flakeRef);
+        die "'nix flake info' returned " . ($res & 127 ? "signal $res" : "exit code " . ($res >> 8))
+            . ":\n" . ($stderr ? decode("utf-8", $stderr) : "(no output)\n")
+            if $res;
+        $flakeRef = decode_json($json)->{'url'};
+    }
+
     Net::Statsd::increment("hydra.evaluator.checkouts");
     my $checkoutStop = clock_gettime(CLOCK_MONOTONIC);
     Net::Statsd::timing("hydra.evaluator.checkout_time", int(($checkoutStop - $checkoutStart) * 1000));

@@ -597,7 +616,7 @@ sub checkJobsetWrapped {
     my @args = ($jobset->nixexprinput, $jobset->nixexprpath, inputsToArgs($inputInfo));
     my $argsHash = sha256_hex("@args");
     my $prevEval = getPrevJobsetEval($db, $jobset, 0);
-    if (defined $prevEval && $prevEval->hash eq $argsHash && !$dryRun && !$jobset->forceeval) {
+    if (defined $prevEval && $prevEval->hash eq $argsHash && !$dryRun && !$jobset->forceeval && $prevEval->flake eq $flakeRef) {
         print STDERR "  jobset is unchanged, skipping\n";
         Net::Statsd::increment("hydra.evaluator.unchanged_checkouts");
         txn_do($db, sub {

@@ -609,7 +628,7 @@ sub checkJobsetWrapped {

     # Evaluate the job expression.
     my $evalStart = clock_gettime(CLOCK_MONOTONIC);
-    my ($jobs, $nixExprInput) = evalJobs($inputInfo, $jobset->nixexprinput, $jobset->nixexprpath);
+    my $jobs = evalJobs($inputInfo, $jobset->nixexprinput, $jobset->nixexprpath, $flakeRef);
     my $evalStop = clock_gettime(CLOCK_MONOTONIC);

     if ($jobsetsJobset) {

@@ -654,7 +673,7 @@ sub checkJobsetWrapped {
         foreach my $job (permute(values %{$jobs})) {
             next if defined $job->{error};
             #print STDERR "considering job " . $project->name, ":", $jobset->name, ":", $job->{jobName} . "\n";
-            checkBuild($db, $jobset, $inputInfo, $nixExprInput, $job, \%buildMap, $prevEval, $jobOutPathMap, $plugins);
+            checkBuild($db, $jobset, $inputInfo, $job, \%buildMap, $prevEval, $jobOutPathMap, $plugins);
         }

         # Have any builds been added or removed since last time?

@@ -669,6 +688,7 @@ sub checkJobsetWrapped {
         , evaltime => abs(int($evalStop - $evalStart))
         , hasnewbuilds => $jobsetChanged ? 1 : 0
         , nrbuilds => $jobsetChanged ? scalar(keys %buildMap) : undef
+        , flake => $flakeRef
         });

     $db->storage->dbh->do("notify eval_added, ?", undef,