diff --git a/flake.nix b/flake.nix index 10b15c4..998772d 100644 --- a/flake.nix +++ b/flake.nix @@ -5,10 +5,19 @@ inputs.flake-utils.url = "github:numtide/flake-utils"; outputs = { self, nixpkgs, flake-utils }: - flake-utils.lib.eachDefaultSystem (system: { - packages.hydra-eval-jobs = nixpkgs.legacyPackages.${system}.callPackage ./hydra.nix { - srcDir = self; - }; - defaultPackage = self.packages.${system}.hydra-eval-jobs; - }); + flake-utils.lib.eachDefaultSystem (system: + let + pkgs = nixpkgs.legacyPackages.${system}; + in + rec { + packages.hydra-eval-jobs = pkgs.callPackage ./hydra.nix { + srcDir = self; + }; + defaultPackage = self.packages.${system}.hydra-eval-jobs; + devShell = defaultPackage.overrideAttrs (old: { + nativeBuildInputs = old.nativeBuildInputs ++ [ + pkgs.python3.pkgs.pytest + ]; + }); + }); } diff --git a/src/hydra-eval-jobs.cc b/src/hydra-eval-jobs.cc index 5a09790..bcfecb7 100644 --- a/src/hydra-eval-jobs.cc +++ b/src/hydra-eval-jobs.cc @@ -32,7 +32,6 @@ struct MyArgs : MixEvalArgs, MixCommonArgs Path releaseExpr; Path gcRootsDir; bool flake = false; - bool dryRun = false; size_t nrWorkers = 1; size_t maxMemorySize = 4096; pureEval evalMode = evalAuto; @@ -61,6 +60,7 @@ struct MyArgs : MixEvalArgs, MixCommonArgs evalMode = evalImpure; }}, }); + addFlag({ .longName = "gc-roots-dir", .description = "garbage collector roots directory", @@ -86,12 +86,6 @@ struct MyArgs : MixEvalArgs, MixCommonArgs }} }); - addFlag({ - .longName = "dry-run", - .description = "don't create store derivations", - .handler = {&dryRun, true} - }); - addFlag({ .longName = "flake", .description = "build a flake", @@ -129,6 +123,14 @@ static std::string queryMetaStrings(EvalState & state, DrvInfo & drv, const stri return concatStringsSep(", ", res); } +static nlohmann::json serializeStorePathSet(StorePathSet &paths, LocalFSStore &store) { + auto array = nlohmann::json::array(); + for (auto & p : paths) { + array.push_back(store.printStorePath(p)); + } + return array; 
+} + static void worker( EvalState & state, Bindings & autoArgs, AutoCloseFD & to, AutoCloseFD & from, @@ -244,17 +246,40 @@ static void worker( registers roots for jobs that we may have already done. */ auto localStore = state.store.dynamic_pointer_cast<LocalFSStore>(); + auto storePath = localStore->parseStorePath(drvPath); if (gcRootsDir != "" && localStore) { Path root = gcRootsDir + "/" + std::string(baseNameOf(drvPath)); if (!pathExists(root)) - localStore->addPermRoot(localStore->parseStorePath(drvPath), root); + localStore->addPermRoot(storePath, root); } + uint64_t downloadSize, narSize; + StorePathSet willBuild, willSubstitute, unknown; + std::vector<StorePathWithOutputs> paths; + StringSet outputNames; + + for (auto & output : outputs) { + outputNames.insert(output.first); + } + paths.push_back({storePath, outputNames}); + + localStore->queryMissing(paths, + willBuild, + willSubstitute, + unknown, + downloadSize, + narSize); + nlohmann::json out; - for (auto & j : outputs) - out[j.first] = j.second; + for (auto & p : outputs) { + out[p.first] = p.second; + } job["outputs"] = std::move(out); + job["builds"] = serializeStorePathSet(willBuild, *localStore); + job["substitutes"] = serializeStorePathSet(willSubstitute, *localStore); + job["unknown"] = serializeStorePathSet(unknown, *localStore); + reply["job"] = std::move(job); } @@ -322,8 +347,6 @@ int main(int argc, char * * argv) 'getEnv', 'currentSystem' etc. */ evalSettings.pureEval = myArgs.evalMode == evalAuto ? 
myArgs.flake : myArgs.evalMode == evalPure; - if (myArgs.dryRun) settings.readOnlyMode = true; - if (myArgs.releaseExpr == "") throw UsageError("no expression specified"); if (myArgs.gcRootsDir == "") printMsg(lvlError, "warning: `--gc-roots-dir' not specified"); @@ -477,43 +500,33 @@ int main(int argc, char * * argv) auto named = job.find("namedConstituents"); if (named == job.end()) continue; - if (myArgs.dryRun) { - for (std::string jobName2 : *named) { - auto job2 = state->jobs.find(jobName2); - if (job2 == state->jobs.end()) - throw Error("aggregate job '%s' references non-existent job '%s'", jobName, jobName2); - std::string drvPath2 = (*job2)["drvPath"]; - job["constituents"].push_back(drvPath2); - } - } else { - auto drvPath = store->parseStorePath((std::string) job["drvPath"]); - auto drv = store->readDerivation(drvPath); + auto drvPath = store->parseStorePath((std::string) job["drvPath"]); + auto drv = store->readDerivation(drvPath); - for (std::string jobName2 : *named) { - auto job2 = state->jobs.find(jobName2); - if (job2 == state->jobs.end()) - throw Error("aggregate job '%s' references non-existent job '%s'", jobName, jobName2); - auto drvPath2 = store->parseStorePath((std::string) (*job2)["drvPath"]); - auto drv2 = store->readDerivation(drvPath2); - job["constituents"].push_back(store->printStorePath(drvPath2)); - drv.inputDrvs[drvPath2] = {drv2.outputs.begin()->first}; - } - - std::string drvName(drvPath.name()); - assert(hasSuffix(drvName, drvExtension)); - drvName.resize(drvName.size() - drvExtension.size()); - auto h = std::get<Hash>(hashDerivationModulo(*store, drv, true)); - auto outPath = store->makeOutputPath("out", h, drvName); - drv.env["out"] = store->printStorePath(outPath); - drv.outputs.insert_or_assign("out", DerivationOutput { .output = DerivationOutputInputAddressed { .path = outPath } }); - auto newDrvPath = store->printStorePath(writeDerivation(*store, drv)); - - debug("rewrote aggregate derivation %s -> %s", 
store->printStorePath(drvPath), newDrvPath); - - job["drvPath"] = newDrvPath; - job["outputs"]["out"] = store->printStorePath(outPath); + for (std::string jobName2 : *named) { + auto job2 = state->jobs.find(jobName2); + if (job2 == state->jobs.end()) + throw Error("aggregate job '%s' references non-existent job '%s'", jobName, jobName2); + auto drvPath2 = store->parseStorePath((std::string) (*job2)["drvPath"]); + auto drv2 = store->readDerivation(drvPath2); + job["constituents"].push_back(store->printStorePath(drvPath2)); + drv.inputDrvs[drvPath2] = {drv2.outputs.begin()->first}; } + std::string drvName(drvPath.name()); + assert(hasSuffix(drvName, drvExtension)); + drvName.resize(drvName.size() - drvExtension.size()); + auto h = std::get<Hash>(hashDerivationModulo(*store, drv, true)); + auto outPath = store->makeOutputPath("out", h, drvName); + drv.env["out"] = store->printStorePath(outPath); + drv.outputs.insert_or_assign("out", DerivationOutput { .output = DerivationOutputInputAddressed { .path = outPath } }); + auto newDrvPath = store->printStorePath(writeDerivation(*store, drv)); + + debug("rewrote aggregate derivation %s -> %s", store->printStorePath(drvPath), newDrvPath); + + job["drvPath"] = newDrvPath; + job["outputs"]["out"] = store->printStorePath(outPath); + job.erase("namedConstituents"); } diff --git a/tests/assets/ci.nix b/tests/assets/ci.nix new file mode 100644 index 0000000..5e80362 --- /dev/null +++ b/tests/assets/ci.nix @@ -0,0 +1,5 @@ +with import <nixpkgs> {}; +{ + builtJob = pkgs.writeText "job1" "job1"; + substitutedJob = pkgs.hello; +} diff --git a/tests/assets/flake.lock b/tests/assets/flake.lock new file mode 100644 index 0000000..a1a80bf --- /dev/null +++ b/tests/assets/flake.lock @@ -0,0 +1,26 @@ +{ + "nodes": { + "nixpkgs": { + "locked": { + "lastModified": 1616345250, + "narHash": "sha256-WLbLFIJyKCklGyEMGwh9XDTzafafyO95s4+rJHOc/Ag=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "5e4a4e0c32f0ca0a5bd4ebbbf17aedd347de7f3e", + "type": "github" + }, 
+ "original": { + "owner": "NixOS", + "repo": "nixpkgs", + "type": "github" + } + }, + "root": { + "inputs": { + "nixpkgs": "nixpkgs" + } + } + }, + "root": "root", + "version": 7 +} diff --git a/tests/assets/flake.nix b/tests/assets/flake.nix new file mode 100644 index 0000000..bbd81a6 --- /dev/null +++ b/tests/assets/flake.nix @@ -0,0 +1,12 @@ +{ + inputs.nixpkgs.url = "github:NixOS/nixpkgs"; + + outputs = { self, nixpkgs }: let + pkgs = nixpkgs.legacyPackages.x86_64-linux; + in { + hydraJobs = { + builtJob = pkgs.writeText "job1" "job1"; + substitutedJob = pkgs.hello; + }; + }; +} diff --git a/tests/test_eval.py b/tests/test_eval.py new file mode 100644 index 0000000..02eced9 --- /dev/null +++ b/tests/test_eval.py @@ -0,0 +1,34 @@ +#!/usr/bin/env python3 + +import subprocess +import json +from tempfile import TemporaryDirectory +from pathlib import Path +from typing import List + +TEST_ROOT = Path(__file__).parent.resolve() +PROJECT_ROOT = TEST_ROOT.parent +BIN = PROJECT_ROOT.joinpath("build", "src", "hydra-eval-jobs") + + +def common_test(extra_args: List[str]) -> None: + with TemporaryDirectory() as tempdir: + cmd = [str(BIN), "--gc-roots-dir", tempdir] + extra_args + res = subprocess.run( + cmd, + cwd=TEST_ROOT.joinpath("assets"), + text=True, + check=True, + stdout=subprocess.PIPE, + ) + data = json.loads(res.stdout) + assert len(data["builtJob"]["builds"]) == 1 + assert len(data["substitutedJob"]["substitutes"]) >= 1 + + +def test_flake() -> None: + common_test(["--flake", ".#"]) + + +def test_expression() -> None: + common_test(["ci.nix"])