output missing/substitutable derivations in eval output

parent 617d4ee151
commit 71cbe4eab4

flake.nix | 21
@@ -5,10 +5,19 @@
   inputs.flake-utils.url = "github:numtide/flake-utils";
 
   outputs = { self, nixpkgs, flake-utils }:
-    flake-utils.lib.eachDefaultSystem (system: {
-      packages.hydra-eval-jobs = nixpkgs.legacyPackages.${system}.callPackage ./hydra.nix {
-        srcDir = self;
-      };
-      defaultPackage = self.packages.${system}.hydra-eval-jobs;
-    });
+    flake-utils.lib.eachDefaultSystem (system:
+      let
+        pkgs = nixpkgs.legacyPackages.${system};
+      in
+      rec {
+        packages.hydra-eval-jobs = pkgs.callPackage ./hydra.nix {
+          srcDir = self;
+        };
+        defaultPackage = self.packages.${system}.hydra-eval-jobs;
+        devShell = defaultPackage.overrideAttrs (old: {
+          nativeBuildInputs = old.nativeBuildInputs ++ [
+            pkgs.python3.pkgs.pytest
+          ];
+        });
+      });
 }
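The devShell added above is what makes the new test suite runnable in a predictable environment: pytest comes from the same nixpkgs pin as the package itself. Assuming the binary has already been built into build/src/ (which is what the tests below expect), a session would look roughly like `nix develop` followed by `pytest tests/`.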
@@ -32,7 +32,6 @@ struct MyArgs : MixEvalArgs, MixCommonArgs
     Path releaseExpr;
     Path gcRootsDir;
     bool flake = false;
-    bool dryRun = false;
     size_t nrWorkers = 1;
     size_t maxMemorySize = 4096;
     pureEval evalMode = evalAuto;
@@ -61,6 +60,7 @@ struct MyArgs : MixEvalArgs, MixCommonArgs
                 evalMode = evalImpure;
             }},
         });
+
        addFlag({
            .longName = "gc-roots-dir",
            .description = "garbage collector roots directory",
@@ -86,12 +86,6 @@ struct MyArgs : MixEvalArgs, MixCommonArgs
            }}
        });
 
-        addFlag({
-            .longName = "dry-run",
-            .description = "don't create store derivations",
-            .handler = {&dryRun, true}
-        });
-
        addFlag({
            .longName = "flake",
            .description = "build a flake",
@@ -129,6 +123,14 @@ static std::string queryMetaStrings(EvalState & state, DrvInfo & drv, const stri
     return concatStringsSep(", ", res);
 }
 
+static nlohmann::json serializeStorePathSet(StorePathSet &paths, LocalFSStore &store) {
+    auto array = nlohmann::json::array();
+    for (auto & p : paths) {
+        array.push_back(store.printStorePath(p));
+    }
+    return array;
+}
+
 static void worker(
     EvalState & state,
     Bindings & autoArgs,
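serializeStorePathSet is the helper behind the new output fields: it turns a StorePathSet into a JSON array of printable store paths and is used further down to emit the per-job builds, substitutes and unknown lists. It takes a LocalFSStore reference, presumably because that is the store handle the worker already holds for registering GC roots; printing the paths would work with any Store.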
@@ -244,17 +246,40 @@ static void worker(
                    registers roots for jobs that we may have already
                    done. */
                 auto localStore = state.store.dynamic_pointer_cast<LocalFSStore>();
+                auto storePath = localStore->parseStorePath(drvPath);
                 if (gcRootsDir != "" && localStore) {
                     Path root = gcRootsDir + "/" + std::string(baseNameOf(drvPath));
                     if (!pathExists(root))
-                        localStore->addPermRoot(localStore->parseStorePath(drvPath), root);
+                        localStore->addPermRoot(storePath, root);
                 }
 
+                uint64_t downloadSize, narSize;
+                StorePathSet willBuild, willSubstitute, unknown;
+                std::vector<nix::StorePathWithOutputs> paths;
+                StringSet outputNames;
+
+                for (auto & output : outputs) {
+                    outputNames.insert(output.first);
+                }
+                paths.push_back({storePath, outputNames});
+
+                localStore->queryMissing(paths,
+                                         willBuild,
+                                         willSubstitute,
+                                         unknown,
+                                         downloadSize,
+                                         narSize);
+
                 nlohmann::json out;
-                for (auto & j : outputs)
-                    out[j.first] = j.second;
+                for (auto & p : outputs) {
+                    out[p.first] = p.second;
+                }
                 job["outputs"] = std::move(out);
+
+                job["builds"] = serializeStorePathSet(willBuild, *localStore);
+                job["substitutes"] = serializeStorePathSet(willSubstitute, *localStore);
+                job["unknown"] = serializeStorePathSet(unknown, *localStore);
+
                 reply["job"] = std::move(job);
             }
 
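With queryMissing in place, each job entry now reports which of its derivations would have to be built and which outputs could come from a substituter. Purely illustrative (store paths shortened, other per-job fields such as drvPath omitted), the builtJob from the test assets below might serialize roughly as:

    "builtJob": {
      "outputs": { "out": "/nix/store/…-job1" },
      "builds": [ "/nix/store/…-job1.drv" ],
      "substitutes": [],
      "unknown": []
    }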
@@ -322,8 +347,6 @@ int main(int argc, char * * argv)
        'getEnv', 'currentSystem' etc. */
     evalSettings.pureEval = myArgs.evalMode == evalAuto ? myArgs.flake : myArgs.evalMode == evalPure;
 
-    if (myArgs.dryRun) settings.readOnlyMode = true;
-
     if (myArgs.releaseExpr == "") throw UsageError("no expression specified");
 
     if (myArgs.gcRootsDir == "") printMsg(lvlError, "warning: `--gc-roots-dir' not specified");
@@ -477,43 +500,33 @@ int main(int argc, char * * argv)
                 auto named = job.find("namedConstituents");
                 if (named == job.end()) continue;
 
-                if (myArgs.dryRun) {
-                    for (std::string jobName2 : *named) {
-                        auto job2 = state->jobs.find(jobName2);
-                        if (job2 == state->jobs.end())
-                            throw Error("aggregate job '%s' references non-existent job '%s'", jobName, jobName2);
-                        std::string drvPath2 = (*job2)["drvPath"];
-                        job["constituents"].push_back(drvPath2);
-                    }
-                } else {
-                    auto drvPath = store->parseStorePath((std::string) job["drvPath"]);
-                    auto drv = store->readDerivation(drvPath);
+                auto drvPath = store->parseStorePath((std::string) job["drvPath"]);
+                auto drv = store->readDerivation(drvPath);
 
                 for (std::string jobName2 : *named) {
                     auto job2 = state->jobs.find(jobName2);
                     if (job2 == state->jobs.end())
                         throw Error("aggregate job '%s' references non-existent job '%s'", jobName, jobName2);
                     auto drvPath2 = store->parseStorePath((std::string) (*job2)["drvPath"]);
                     auto drv2 = store->readDerivation(drvPath2);
                     job["constituents"].push_back(store->printStorePath(drvPath2));
                     drv.inputDrvs[drvPath2] = {drv2.outputs.begin()->first};
-                    }
-
-                    std::string drvName(drvPath.name());
-                    assert(hasSuffix(drvName, drvExtension));
-                    drvName.resize(drvName.size() - drvExtension.size());
-                    auto h = std::get<Hash>(hashDerivationModulo(*store, drv, true));
-                    auto outPath = store->makeOutputPath("out", h, drvName);
-                    drv.env["out"] = store->printStorePath(outPath);
-                    drv.outputs.insert_or_assign("out", DerivationOutput { .output = DerivationOutputInputAddressed { .path = outPath } });
-                    auto newDrvPath = store->printStorePath(writeDerivation(*store, drv));
-
-                    debug("rewrote aggregate derivation %s -> %s", store->printStorePath(drvPath), newDrvPath);
-
-                    job["drvPath"] = newDrvPath;
-                    job["outputs"]["out"] = store->printStorePath(outPath);
                 }
 
+                std::string drvName(drvPath.name());
+                assert(hasSuffix(drvName, drvExtension));
+                drvName.resize(drvName.size() - drvExtension.size());
+                auto h = std::get<Hash>(hashDerivationModulo(*store, drv, true));
+                auto outPath = store->makeOutputPath("out", h, drvName);
+                drv.env["out"] = store->printStorePath(outPath);
+                drv.outputs.insert_or_assign("out", DerivationOutput { .output = DerivationOutputInputAddressed { .path = outPath } });
+                auto newDrvPath = store->printStorePath(writeDerivation(*store, drv));
+
+                debug("rewrote aggregate derivation %s -> %s", store->printStorePath(drvPath), newDrvPath);
+
+                job["drvPath"] = newDrvPath;
+                job["outputs"]["out"] = store->printStorePath(outPath);
+
                 job.erase("namedConstituents");
             }
 
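One consequence of dropping --dry-run shows up here: the aggregate handling no longer has a read-only branch, so constituent derivations are always read from the store and the rewritten aggregate derivation is always written back via writeDerivation.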
tests/assets/ci.nix | 5 (new file)

@@ -0,0 +1,5 @@
+with import <nixpkgs> {};
+{
+  builtJob = pkgs.writeText "job1" "job1";
+  substitutedJob = pkgs.hello;
+}
tests/assets/flake.lock | 26 (new file)

@@ -0,0 +1,26 @@
+{
+  "nodes": {
+    "nixpkgs": {
+      "locked": {
+        "lastModified": 1616345250,
+        "narHash": "sha256-WLbLFIJyKCklGyEMGwh9XDTzafafyO95s4+rJHOc/Ag=",
+        "owner": "NixOS",
+        "repo": "nixpkgs",
+        "rev": "5e4a4e0c32f0ca0a5bd4ebbbf17aedd347de7f3e",
+        "type": "github"
+      },
+      "original": {
+        "owner": "NixOS",
+        "repo": "nixpkgs",
+        "type": "github"
+      }
+    },
+    "root": {
+      "inputs": {
+        "nixpkgs": "nixpkgs"
+      }
+    }
+  },
+  "root": "root",
+  "version": 7
+}
tests/assets/flake.nix | 12 (new file)

@@ -0,0 +1,12 @@
+{
+  inputs.nixpkgs.url = "github:NixOS/nixpkgs";
+
+  outputs = { self, nixpkgs }: let
+    pkgs = nixpkgs.legacyPackages.x86_64-linux;
+  in {
+    hydraJobs = {
+      builtJob = pkgs.writeText "job1" "job1";
+      substitutedJob = pkgs.hello;
+    };
+  };
+}
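This flake mirrors tests/assets/ci.nix: both expose the same two jobs (builtJob, a trivial writeText derivation that has to be built, and substitutedJob, pkgs.hello, which should be substitutable), so the flake entry point and the plain-expression entry point can be checked by the same assertions in test_eval.py.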
tests/test_eval.py | 34 (new file)

@@ -0,0 +1,34 @@
+#!/usr/bin/env python3
+
+import subprocess
+import json
+from tempfile import TemporaryDirectory
+from pathlib import Path
+from typing import List
+
+TEST_ROOT = Path(__file__).parent.resolve()
+PROJECT_ROOT = TEST_ROOT.parent
+BIN = PROJECT_ROOT.joinpath("build", "src", "hydra-eval-jobs")
+
+
+def common_test(extra_args: List[str]) -> None:
+    with TemporaryDirectory() as tempdir:
+        cmd = [str(BIN), "--gc-roots-dir", tempdir] + extra_args
+        res = subprocess.run(
+            cmd,
+            cwd=TEST_ROOT.joinpath("assets"),
+            text=True,
+            check=True,
+            stdout=subprocess.PIPE,
+        )
+        data = json.loads(res.stdout)
+        assert len(data["builtJob"]["builds"]) == 1
+        assert len(data["substitutedJob"]["substitutes"]) >= 1
+
+
+def test_flake() -> None:
+    common_test(["--flake", ".#"])
+
+
+def test_expression() -> None:
+    common_test(["ci.nix"])
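The assertions encode the commit's intent: the freshly defined writeText job must report exactly one derivation under builds, while pkgs.hello is expected to be available from a substituter, so its substitutes list must be non-empty. The tests assume the binary has been built to build/src/hydra-eval-jobs (the BIN path above) and are run with pytest, which the devShell from flake.nix now provides.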