forked from lix-project/nix-eval-jobs
Merge pull request #3 from Mic92/ci
commit e55885c021

.github/workflows/test-flakes.yml (vendored, 6 changed lines)

@@ -26,4 +26,8 @@ jobs:
     - name: List flake structure
       run: nix flake show
     - name: Build
-      run: nix build
+      run: nix build --out-link result
+    - name: Run tests
+      run: |
+        nix develop -c install -D ./result/bin/hydra-eval-jobs ./build/src/hydra-eval-jobs
+        nix develop -c pytest ./tests

.gitignore (vendored, 10 changed lines)

@@ -42,3 +42,13 @@ tmp/
 /build
 # nix-build
 /result
+
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json

flake.nix (21 changed lines)

@@ -5,10 +5,19 @@
   inputs.flake-utils.url = "github:numtide/flake-utils";
 
   outputs = { self, nixpkgs, flake-utils }:
-    flake-utils.lib.eachDefaultSystem (system: {
-      packages.hydra-eval-jobs = nixpkgs.legacyPackages.${system}.callPackage ./hydra.nix {
-        srcDir = self;
-      };
-      defaultPackage = self.packages.${system}.hydra-eval-jobs;
-    });
+    flake-utils.lib.eachDefaultSystem (system:
+      let
+        pkgs = nixpkgs.legacyPackages.${system};
+      in
+      rec {
+        packages.hydra-eval-jobs = pkgs.callPackage ./hydra.nix {
+          srcDir = self;
+        };
+        defaultPackage = self.packages.${system}.hydra-eval-jobs;
+        devShell = defaultPackage.overrideAttrs (old: {
+          nativeBuildInputs = old.nativeBuildInputs ++ [
+            pkgs.python3.pkgs.pytest
+          ];
+        });
+      });
 }

C++ source of hydra-eval-jobs (file name not shown)

@@ -25,15 +25,16 @@
 using namespace nix;
 
-static Path gcRootsDir;
+typedef enum { evalAuto, evalImpure, evalPure } pureEval;
 
 struct MyArgs : MixEvalArgs, MixCommonArgs
 {
     Path releaseExpr;
+    Path gcRootsDir;
     bool flake = false;
-    bool dryRun = false;
     size_t nrWorkers = 1;
     size_t maxMemorySize = 4096;
+    pureEval evalMode = evalAuto;
 
     MyArgs() : MixCommonArgs("hydra-eval-jobs")
     {
@@ -48,6 +49,15 @@ struct MyArgs : MixEvalArgs, MixCommonArgs
                     }
                     printf(" --%-20s %s\n", name.c_str(), flag->description.c_str());
                 }
                 ::exit(0);
             }},
         });
+
+        addFlag({
+            .longName = "impure",
+            .description = "set evaluation mode",
+            .handler = {[&]() {
+                evalMode = evalImpure;
+            }},
+        });
+
@@ -76,12 +86,6 @@ struct MyArgs : MixEvalArgs, MixCommonArgs
             }}
         });
 
-        addFlag({
-            .longName = "dry-run",
-            .description = "don't create store derivations",
-            .handler = {&dryRun, true}
-        });
-
         addFlag({
             .longName = "flake",
             .description = "build a flake",
@@ -119,11 +123,20 @@ static std::string queryMetaStrings(EvalState & state, DrvInfo & drv, const stri
     return concatStringsSep(", ", res);
 }
 
+static nlohmann::json serializeStorePathSet(StorePathSet &paths, LocalFSStore &store) {
+    auto array = nlohmann::json::array();
+    for (auto & p : paths) {
+        array.push_back(store.printStorePath(p));
+    }
+    return array;
+}
+
 static void worker(
     EvalState & state,
     Bindings & autoArgs,
     AutoCloseFD & to,
-    AutoCloseFD & from)
+    AutoCloseFD & from,
+    const Path &gcRootsDir)
 {
     Value vTop;
 
@@ -233,17 +246,40 @@ static void worker(
                registers roots for jobs that we may have already
                done. */
             auto localStore = state.store.dynamic_pointer_cast<LocalFSStore>();
+            auto storePath = localStore->parseStorePath(drvPath);
             if (gcRootsDir != "" && localStore) {
                 Path root = gcRootsDir + "/" + std::string(baseNameOf(drvPath));
                 if (!pathExists(root))
-                    localStore->addPermRoot(localStore->parseStorePath(drvPath), root);
+                    localStore->addPermRoot(storePath, root);
             }
 
+            uint64_t downloadSize, narSize;
+            StorePathSet willBuild, willSubstitute, unknown;
+            std::vector<nix::StorePathWithOutputs> paths;
+            StringSet outputNames;
+
+            for (auto & output : outputs) {
+                outputNames.insert(output.first);
+            }
+            paths.push_back({storePath, outputNames});
+
+            localStore->queryMissing(paths,
+                                     willBuild,
+                                     willSubstitute,
+                                     unknown,
+                                     downloadSize,
+                                     narSize);
+
             nlohmann::json out;
-            for (auto & j : outputs)
-                out[j.first] = j.second;
+            for (auto & p : outputs) {
+                out[p.first] = p.second;
+            }
             job["outputs"] = std::move(out);
+
+            job["builds"] = serializeStorePathSet(willBuild, *localStore);
+            job["substitutes"] = serializeStorePathSet(willSubstitute, *localStore);
+            job["unknown"] = serializeStorePathSet(unknown, *localStore);
 
             reply["job"] = std::move(job);
         }
@@ -309,13 +345,11 @@ int main(int argc, char * * argv)
 
     /* When building a flake, use pure evaluation (no access to
        'getEnv', 'currentSystem' etc. */
-    evalSettings.pureEval = myArgs.flake;
-
-    if (myArgs.dryRun) settings.readOnlyMode = true;
+    evalSettings.pureEval = myArgs.evalMode == evalAuto ? myArgs.flake : myArgs.evalMode == evalPure;
 
     if (myArgs.releaseExpr == "") throw UsageError("no expression specified");
 
-    if (gcRootsDir == "") printMsg(lvlError, "warning: `--gc-roots-dir' not specified");
+    if (myArgs.gcRootsDir == "") printMsg(lvlError, "warning: `--gc-roots-dir' not specified");
 
     struct State
     {
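
Not part of the diff, but to make the new evalMode handling above concrete, here is a small Python rendering of the ternary; "auto" stands for evalAuto, i.e. neither --impure nor a pure mode was forced on the command line:

# Illustrative only (not in the commit): how
#   evalSettings.pureEval = evalMode == evalAuto ? flake : evalMode == evalPure;
# resolves pure evaluation for the three values of the pureEval enum.
def pure_eval(eval_mode: str, flake: bool) -> bool:
    if eval_mode == "auto":      # evalAuto: nothing forced on the command line
        return flake             # flake evaluation stays pure by default
    return eval_mode == "pure"   # evalPure -> True, evalImpure -> False

assert pure_eval("auto", flake=True) is True
assert pure_eval("auto", flake=False) is False
assert pure_eval("impure", flake=True) is False
assert pure_eval("pure", flake=False) is True
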
@@ -352,7 +386,7 @@ int main(int argc, char * * argv)
             try {
                 EvalState state(myArgs.searchPath, openStore());
                 Bindings & autoArgs = *myArgs.getAutoArgs(state);
-                worker(state, autoArgs, *to, *from);
+                worker(state, autoArgs, *to, *from, myArgs.gcRootsDir);
             } catch (Error & e) {
                 nlohmann::json err;
                 auto msg = e.msg();
@@ -466,43 +500,33 @@
             auto named = job.find("namedConstituents");
             if (named == job.end()) continue;
 
-            if (myArgs.dryRun) {
-                for (std::string jobName2 : *named) {
-                    auto job2 = state->jobs.find(jobName2);
-                    if (job2 == state->jobs.end())
-                        throw Error("aggregate job '%s' references non-existent job '%s'", jobName, jobName2);
-                    std::string drvPath2 = (*job2)["drvPath"];
-                    job["constituents"].push_back(drvPath2);
-                }
-            } else {
-                auto drvPath = store->parseStorePath((std::string) job["drvPath"]);
-                auto drv = store->readDerivation(drvPath);
+            auto drvPath = store->parseStorePath((std::string) job["drvPath"]);
+            auto drv = store->readDerivation(drvPath);
 
-                for (std::string jobName2 : *named) {
-                    auto job2 = state->jobs.find(jobName2);
-                    if (job2 == state->jobs.end())
-                        throw Error("aggregate job '%s' references non-existent job '%s'", jobName, jobName2);
-                    auto drvPath2 = store->parseStorePath((std::string) (*job2)["drvPath"]);
-                    auto drv2 = store->readDerivation(drvPath2);
-                    job["constituents"].push_back(store->printStorePath(drvPath2));
-                    drv.inputDrvs[drvPath2] = {drv2.outputs.begin()->first};
-                }
-
-                std::string drvName(drvPath.name());
-                assert(hasSuffix(drvName, drvExtension));
-                drvName.resize(drvName.size() - drvExtension.size());
-                auto h = std::get<Hash>(hashDerivationModulo(*store, drv, true));
-                auto outPath = store->makeOutputPath("out", h, drvName);
-                drv.env["out"] = store->printStorePath(outPath);
-                drv.outputs.insert_or_assign("out", DerivationOutput { .output = DerivationOutputInputAddressed { .path = outPath } });
-                auto newDrvPath = store->printStorePath(writeDerivation(*store, drv));
-
-                debug("rewrote aggregate derivation %s -> %s", store->printStorePath(drvPath), newDrvPath);
-
-                job["drvPath"] = newDrvPath;
-                job["outputs"]["out"] = store->printStorePath(outPath);
-            }
+            for (std::string jobName2 : *named) {
+                auto job2 = state->jobs.find(jobName2);
+                if (job2 == state->jobs.end())
+                    throw Error("aggregate job '%s' references non-existent job '%s'", jobName, jobName2);
+                auto drvPath2 = store->parseStorePath((std::string) (*job2)["drvPath"]);
+                auto drv2 = store->readDerivation(drvPath2);
+                job["constituents"].push_back(store->printStorePath(drvPath2));
+                drv.inputDrvs[drvPath2] = {drv2.outputs.begin()->first};
+            }
+
+            std::string drvName(drvPath.name());
+            assert(hasSuffix(drvName, drvExtension));
+            drvName.resize(drvName.size() - drvExtension.size());
+            auto h = std::get<Hash>(hashDerivationModulo(*store, drv, true));
+            auto outPath = store->makeOutputPath("out", h, drvName);
+            drv.env["out"] = store->printStorePath(outPath);
+            drv.outputs.insert_or_assign("out", DerivationOutput { .output = DerivationOutputInputAddressed { .path = outPath } });
+            auto newDrvPath = store->printStorePath(writeDerivation(*store, drv));
+
+            debug("rewrote aggregate derivation %s -> %s", store->printStorePath(drvPath), newDrvPath);
+
+            job["drvPath"] = newDrvPath;
+            job["outputs"]["out"] = store->printStorePath(outPath);
 
             job.erase("namedConstituents");
         }
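
Taken together, the worker changes above make every job entry carry builds, substitutes and unknown arrays next to its outputs. The following consumer sketch is illustrative only, not part of the commit; it assumes the binary has been installed to build/src/hydra-eval-jobs and that the current directory holds a flake with a hydraJobs output, mirroring the CI steps and the tests below:

#!/usr/bin/env python3
# Illustrative only: run hydra-eval-jobs and report, per job, what would have
# to be built locally versus what can be substituted from a binary cache.
import json
import subprocess
from tempfile import TemporaryDirectory

with TemporaryDirectory() as gc_roots:
    proc = subprocess.run(
        ["./build/src/hydra-eval-jobs", "--gc-roots-dir", gc_roots, "--flake", ".#"],
        stdout=subprocess.PIPE, text=True, check=True,
    )
    for name, job in json.loads(proc.stdout).items():
        print(name)
        print("  builds     :", job["builds"])
        print("  substitutes:", job["substitutes"])
        print("  unknown    :", job["unknown"])
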

tests/assets/ci.nix (new file, 7 lines)

@@ -0,0 +1,7 @@
+let
+  pkgs = import (builtins.getFlake (toString ./.)).inputs.nixpkgs {};
+in
+{
+  builtJob = pkgs.writeText "job1" "job1";
+  substitutedJob = pkgs.hello;
+}

tests/assets/flake.lock (new file, 26 lines)

@@ -0,0 +1,26 @@
+{
+  "nodes": {
+    "nixpkgs": {
+      "locked": {
+        "lastModified": 1616345250,
+        "narHash": "sha256-WLbLFIJyKCklGyEMGwh9XDTzafafyO95s4+rJHOc/Ag=",
+        "owner": "NixOS",
+        "repo": "nixpkgs",
+        "rev": "5e4a4e0c32f0ca0a5bd4ebbbf17aedd347de7f3e",
+        "type": "github"
+      },
+      "original": {
+        "owner": "NixOS",
+        "repo": "nixpkgs",
+        "type": "github"
+      }
+    },
+    "root": {
+      "inputs": {
+        "nixpkgs": "nixpkgs"
+      }
+    }
+  },
+  "root": "root",
+  "version": 7
+}

tests/assets/flake.nix (new file, 12 lines)

@@ -0,0 +1,12 @@
+{
+  inputs.nixpkgs.url = "github:NixOS/nixpkgs";
+
+  outputs = { self, nixpkgs }: let
+    pkgs = nixpkgs.legacyPackages.x86_64-linux;
+  in {
+    hydraJobs = {
+      builtJob = pkgs.writeText "job1" "job1";
+      substitutedJob = pkgs.hello;
+    };
+  };
+}

tests/test_eval.py (new file, 34 lines)

@@ -0,0 +1,34 @@
+#!/usr/bin/env python3
+
+import subprocess
+import json
+from tempfile import TemporaryDirectory
+from pathlib import Path
+from typing import List
+
+TEST_ROOT = Path(__file__).parent.resolve()
+PROJECT_ROOT = TEST_ROOT.parent
+BIN = PROJECT_ROOT.joinpath("build", "src", "hydra-eval-jobs")
+
+
+def common_test(extra_args: List[str]) -> None:
+    with TemporaryDirectory() as tempdir:
+        cmd = [str(BIN), "--gc-roots-dir", tempdir] + extra_args
+        res = subprocess.run(
+            cmd,
+            cwd=TEST_ROOT.joinpath("assets"),
+            text=True,
+            check=True,
+            stdout=subprocess.PIPE,
+        )
+        data = json.loads(res.stdout)
+        assert len(data["builtJob"]["builds"]) == 1
+        assert len(data["substitutedJob"]["substitutes"]) >= 1
+
+
+def test_flake() -> None:
+    common_test(["--flake", ".#"])
+
+
+def test_expression() -> None:
+    common_test(["ci.nix"])
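
A hypothetical follow-up check, not part of this PR: since the worker now registers one permanent GC root per job derivation under --gc-roots-dir, the suite could also assert that the directory passed to the binary ends up populated. A sketch, reusing BIN and TEST_ROOT from tests/test_eval.py above:

# Hypothetical extension (not in the commit): addPermRoot() creates one
# GC-root symlink per job derivation below --gc-roots-dir, so the temporary
# directory should not be empty after a successful run.
import subprocess
from pathlib import Path
from tempfile import TemporaryDirectory


def check_gc_roots() -> None:
    with TemporaryDirectory() as tempdir:
        subprocess.run(
            [str(BIN), "--gc-roots-dir", tempdir, "--flake", ".#"],
            cwd=TEST_ROOT.joinpath("assets"),
            check=True,
            stdout=subprocess.PIPE,
        )
        roots = list(Path(tempdir).iterdir())
        # one symlink per job derivation, named <hash>-<name>.drv
        assert roots and all(r.name.endswith(".drv") for r in roots)
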