Merge pull request #112 from nix-community/build-uncached

Add option to check cache status
Jörg Thalheim 2022-09-15 11:07:19 +02:00 committed by GitHub
commit 40c4761a2d
3 changed files with 100 additions and 63 deletions


@@ -48,12 +48,14 @@ USAGE: nix-eval-jobs [options] expr
--arg Pass the value *expr* as the argument *name* to Nix functions.
--argstr Pass the string *string* as the argument *name* to Nix functions.
--check-cache-status Check if the derivations are present locally or in any configured substituters (i.e. binary cache). The information will be exposed in the `isCached` field of the JSON output.
--debug Set the logging verbosity level to 'debug'.
--eval-store The Nix store to use for evaluations.
--expr treat the argument as a Nix expression
--flake build a flake
--gc-roots-dir garbage collector roots directory
--help show usage information
--impure allow impure expressions
--include Add *path* to the list of locations used to look up `<...>` file names.
--log-format Set the format of log output; one of `raw`, `internal-json`, `bar` or `bar-with-logs`.
--max-memory-size maximum evaluation memory size
@@ -61,6 +63,7 @@ USAGE: nix-eval-jobs [options] expr
--option Set the Nix configuration setting *name* to *value* (overriding `nix.conf`).
--override-flake Override the flake registries, redirecting *original-ref* to *resolved-ref*.
--quiet Decrease the logging verbosity level.
--show-trace print out a stack trace in case of evaluation errors
--verbose Increase the logging verbosity level.
--workers number of evaluate workers
```
@@ -83,7 +86,6 @@ single large log file. In the
we collect example ci configuration for various CIs.
## Organisation of this repository
On the `main` branch we target nixUnstable. When a release of nix happens, we
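For context, here is a minimal sketch of how the new `--check-cache-status` option can be consumed; it is an illustration only, not part of this change. It assumes `nix-eval-jobs` is on `PATH` and that the flake exposes a `hydraJobs` output, as the test suite below does; the `attr`, `drvPath` and `isCached` fields are the ones in the JSON lines described above.

```python
#!/usr/bin/env python3
"""List the jobs that are not yet present locally or in a binary cache."""
import json
import subprocess

# --check-cache-status adds an `isCached` field to every JSON line.
cmd = ["nix-eval-jobs", "--flake", ".#hydraJobs", "--check-cache-status"]
out = subprocess.run(cmd, check=True, capture_output=True, text=True).stdout

# nix-eval-jobs prints one JSON object per evaluated attribute, one per line.
jobs = [json.loads(line) for line in out.splitlines() if line]

uncached = [job for job in jobs if not job.get("isCached", False)]
for job in uncached:
    print(f"needs building: {job['attr']} ({job['drvPath']})")
```

Jobs whose `isCached` field is true can then be skipped by a subsequent build step.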


@@ -44,6 +44,7 @@ struct MyArgs : MixEvalArgs, MixCommonArgs {
bool meta = false;
bool showTrace = false;
bool impure = false;
bool checkCacheStatus = false;
size_t nrWorkers = 1;
size_t maxMemorySize = 4096;
@@ -93,6 +94,15 @@ struct MyArgs : MixEvalArgs, MixCommonArgs {
.description = "include derivation meta field in output",
.handler = {&meta, true}});
addFlag(
{.longName = "check-cache-status",
.description =
"Check if the derivations are present locally or in "
"any configured substituters (i.e. binary cache). The "
"information "
"will be exposed in the `isCached` field of the JSON output.",
.handler = {&checkCacheStatus, true}});
addFlag({.longName = "show-trace", addFlag({.longName = "show-trace",
.description = .description =
"print out a stack trace in case of evaluation errors", "print out a stack trace in case of evaluation errors",
@ -171,11 +181,26 @@ Value *topLevelValue(EvalState &state, Bindings &autoArgs) {
: releaseExprTopLevelValue(state, autoArgs); : releaseExprTopLevelValue(state, autoArgs);
} }
bool queryIsCached(Store &store, std::map<std::string, std::string> &outputs) {
uint64_t downloadSize, narSize;
StorePathSet willBuild, willSubstitute, unknown;
std::vector<StorePathWithOutputs> paths;
for (auto const &[key, val] : outputs) {
paths.push_back(followLinksToStorePathWithOutputs(store, val));
}
store.queryMissing(toDerivedPaths(paths), willBuild, willSubstitute,
unknown, downloadSize, narSize);
return willBuild.empty() && unknown.empty();
}
/* The fields of a derivation that are printed in json form */
struct Drv {
std::string name;
std::string system;
std::string drvPath;
bool isCached;
std::map<std::string, std::string> outputs;
std::optional<nlohmann::json> meta;
@@ -209,6 +234,9 @@ struct Drv {
}
meta = meta_;
}
if (myArgs.checkCacheStatus) {
isCached = queryIsCached(*localStore, outputs);
}
name = drvInfo.queryName();
system = drvInfo.querySystem();
@@ -217,17 +245,20 @@ struct Drv {
};
static void to_json(nlohmann::json &json, const Drv &drv) {
json = nlohmann::json{{"name", drv.name},
{"system", drv.system},
{"drvPath", drv.drvPath},
{"outputs", drv.outputs}};
if (drv.meta.has_value()) {
json["meta"] = drv.meta.value();
}
if (myArgs.checkCacheStatus) {
json["isCached"] = drv.isCached;
}
}
std::string attrPathJoin(json input) {
return std::accumulate(input.begin(), input.end(), std::string(),
[](std::string ss, std::string s) {
@@ -266,8 +297,8 @@ static void worker(EvalState &state, Bindings &autoArgs, AutoCloseFD &to,
auto v = state.allocValue();
state.autoCallFunction(autoArgs, *vTmp, *v);
if (v->type() == nAttrs) {
if (auto drvInfo = getDerivation(state, *v, false)) {
auto drv = Drv(state, *drvInfo);
reply.update(drv);
@@ -279,29 +310,29 @@ static void worker(EvalState &state, Bindings &autoArgs, AutoCloseFD &to,
std::string(baseNameOf(drv.drvPath));
if (!pathExists(root)) {
auto localStore =
state.store
.dynamic_pointer_cast<LocalFSStore>();
auto storePath =
localStore->parseStorePath(drv.drvPath);
localStore->addPermRoot(storePath, root);
}
}
} else {
auto attrs = nlohmann::json::array();
bool recurse =
path.size() == 0; // Dont require `recurseForDerivations
// = true;` for top-level attrset
for (auto &i :
v->attrs->lexicographicOrder(state.symbols)) {
const std::string &name = state.symbols[i->name];
attrs.push_back(name);
if (name == "recurseForDerivations") {
auto attrv =
v->attrs->get(state.sRecurseForDerivations);
recurse =
state.forceBool(*attrv->value, attrv->pos);
}
}
if (recurse) if (recurse)
@@ -309,17 +340,12 @@ static void worker(EvalState &state, Bindings &autoArgs, AutoCloseFD &to,
else
reply["attrs"] = nlohmann::json::array();
}
} else {
// We ignore everything that cannot be build
reply["attrs"] = nlohmann::json::array();
}
} catch (EvalError &e) {
auto err = e.info();
std::ostringstream oss;
showErrorInfo(oss, err, loggerSettings.showTrace.get());
auto msg = oss.str();
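To summarise the helper added above: `queryIsCached` asks the store, via `queryMissing`, which of a derivation's outputs would still have to be built (`willBuild`) or have unknown status (`unknown`); outputs that would merely be substituted from a configured binary cache (`willSubstitute`) still count as cached. A small sketch of that rule follows; it is an illustration with made-up store paths, not code from this change.

```python
# Mirrors the decision in queryIsCached: a job is cached when nothing is
# left to build and nothing is of unknown status; paths that only need to
# be substituted from a binary cache are fine.
def is_cached(will_build: set, will_substitute: set, unknown: set) -> bool:
    return not will_build and not unknown

# An output that can be fetched from a binary cache counts as cached.
assert is_cached(set(), {"/nix/store/...-hello"}, set())
# An output that still needs a local build does not.
assert not is_cached({"/nix/store/...-hello.drv"}, set(), set())
```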


@@ -4,14 +4,14 @@ import subprocess
import json
from tempfile import TemporaryDirectory
from pathlib import Path
from typing import List, Dict, Any
TEST_ROOT = Path(__file__).parent.resolve()
PROJECT_ROOT = TEST_ROOT.parent
BIN = PROJECT_ROOT.joinpath("build", "src", "nix-eval-jobs")
def common_test(extra_args: List[str]) -> List[Dict[str, Any]]:
with TemporaryDirectory() as tempdir:
cmd = [str(BIN), "--gc-roots-dir", tempdir, "--meta"] + extra_args
res = subprocess.run(
@@ -23,7 +23,7 @@ def common_test(extra_args: List[str]) -> None:
)
results = [json.loads(r) for r in res.stdout.split("\n") if r]
assert len(results) == 4
built_job = results[0]
assert built_job["attr"] == "builtJob"
@@ -40,21 +40,30 @@ def common_test(extra_args: List[str]) -> None:
assert recurse_drv["attr"] == "recurse.drvB"
assert recurse_drv["name"] == "drvB"
substituted_job = results[3]
assert substituted_job["attr"] == "substitutedJob"
assert substituted_job["name"].startswith("hello-")
assert substituted_job["meta"]["broken"] is False
return results
def test_flake() -> None:
results = common_test(["--flake", ".#hydraJobs"])
for result in results:
assert "isCached" not in result
def test_query_cache_status() -> None:
results = common_test(["--flake", ".#hydraJobs", "--check-cache-status"])
# FIXME in the nix sandbox we cannot query binary caches, this would need some local one
for result in results:
assert "isCached" in result
def test_expression() -> None:
results = common_test(["ci.nix"])
for result in results:
assert "isCached" not in result
with open(TEST_ROOT.joinpath("assets/ci.nix"), "r") as ci_nix:
common_test(["-E", ci_nix.read()])