forked from lix-project/nix-eval-jobs
Merge pull request #112 from nix-community/build-uncached
Add option to check cache status
commit 40c4761a2d
@@ -48,12 +48,14 @@ USAGE: nix-eval-jobs [options] expr
--arg Pass the value *expr* as the argument *name* to Nix functions.
--argstr Pass the string *string* as the argument *name* to Nix functions.
--check-cache-status Check if the derivations are present locally or in any configured substituters (i.e. binary cache). The information will be exposed in the `isCached` field of the JSON output.
--debug Set the logging verbosity level to 'debug'.
--eval-store The Nix store to use for evaluations.
--expr treat the argument as a Nix expression
--flake build a flake
--gc-roots-dir garbage collector roots directory
--help show usage information
--impure set evaluation mode
--impure allow impure expressions
--include Add *path* to the list of locations used to look up `<...>` file names.
--log-format Set the format of log output; one of `raw`, `internal-json`, `bar` or `bar-with-logs`.
--max-memory-size maximum evaluation memory size
@@ -61,6 +63,7 @@ USAGE: nix-eval-jobs [options] expr
--option Set the Nix configuration setting *name* to *value* (overriding `nix.conf`).
--override-flake Override the flake registries, redirecting *original-ref* to *resolved-ref*.
--quiet Decrease the logging verbosity level.
--show-trace print out a stack trace in case of evaluation errors
--verbose Increase the logging verbosity level.
--workers number of evaluate workers
```
@@ -83,7 +86,6 @@ single large log file. In the
we collect example ci configuration for various CIs.


## Organisation of this repository

On the `main` branch we target nixUnstable. When a release of nix happens, we
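The `--check-cache-status` option documented above is aimed at the build-uncached workflow: evaluate the jobset once, then realise only the jobs whose outputs are neither in the local store nor in a substituter. A minimal sketch of such a consumer follows; the flake reference `.#hydraJobs` and the use of `nix-store --realise` for the rebuild step are illustrative assumptions, not part of this change.

```python
import json
import subprocess

# Evaluate the jobset once, annotating every job with its cache status.
out = subprocess.run(
    ["nix-eval-jobs", "--check-cache-status", "--flake", ".#hydraJobs"],
    check=True, stdout=subprocess.PIPE, text=True,
).stdout

# One JSON object per line; keep only the jobs that still need to be built.
uncached = [
    job
    for job in (json.loads(line) for line in out.splitlines() if line)
    if not job.get("isCached", False)
]

for job in uncached:
    # Illustrative rebuild step: realise the derivation by its .drv path.
    subprocess.run(["nix-store", "--realise", job["drvPath"]], check=True)
```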
@@ -44,6 +44,7 @@ struct MyArgs : MixEvalArgs, MixCommonArgs {
    bool meta = false;
    bool showTrace = false;
    bool impure = false;
    bool checkCacheStatus = false;
    size_t nrWorkers = 1;
    size_t maxMemorySize = 4096;
@@ -93,6 +94,15 @@ struct MyArgs : MixEvalArgs, MixCommonArgs {
             .description = "include derivation meta field in output",
             .handler = {&meta, true}});

        addFlag(
            {.longName = "check-cache-status",
             .description =
                 "Check if the derivations are present locally or in "
                 "any configured substituters (i.e. binary cache). The "
                 "information "
                 "will be exposed in the `isCached` field of the JSON output.",
             .handler = {&checkCacheStatus, true}});

        addFlag({.longName = "show-trace",
                 .description =
                     "print out a stack trace in case of evaluation errors",
@@ -171,11 +181,26 @@ Value *topLevelValue(EvalState &state, Bindings &autoArgs) {
                         : releaseExprTopLevelValue(state, autoArgs);
}

bool queryIsCached(Store &store, std::map<std::string, std::string> &outputs) {
    uint64_t downloadSize, narSize;
    StorePathSet willBuild, willSubstitute, unknown;

    std::vector<StorePathWithOutputs> paths;
    for (auto const &[key, val] : outputs) {
        paths.push_back(followLinksToStorePathWithOutputs(store, val));
    }

    store.queryMissing(toDerivedPaths(paths), willBuild, willSubstitute,
                       unknown, downloadSize, narSize);
    return willBuild.empty() && unknown.empty();
}

/* The fields of a derivation that are printed in json form */
struct Drv {
    std::string name;
    std::string system;
    std::string drvPath;
    bool isCached;
    std::map<std::string, std::string> outputs;
    std::optional<nlohmann::json> meta;
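`queryIsCached` asks the store which of the job's output paths would still have to be built, which could be substituted, and which are of unknown status; a job counts as cached only when nothing needs building and nothing is unknown, so outputs that are merely substitutable from a binary cache are treated as cached. A small Python sketch of that predicate (the sets stand in for the `willBuild`, `willSubstitute` and `unknown` sets that `Store::queryMissing` fills in; the store paths are made up):

```python
from typing import Set


def is_cached(will_build: Set[str], will_substitute: Set[str],
              unknown: Set[str]) -> bool:
    # Mirrors queryIsCached: cached means nothing left to build locally and
    # nothing of unknown status; substitutable paths still count as cached,
    # which is why will_substitute is deliberately ignored.
    return not will_build and not unknown


# A path that would be fetched from a binary cache -> cached.
assert is_cached(set(), {"/nix/store/aaaaaaaa-hello-2.12"}, set())
# A path that would have to be built locally -> not cached.
assert not is_cached({"/nix/store/bbbbbbbb-mypkg-1.0"}, set(), set())
```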
@@ -209,6 +234,9 @@ struct Drv {
            }
            meta = meta_;
        }
        if (myArgs.checkCacheStatus) {
            isCached = queryIsCached(*localStore, outputs);
        }

        name = drvInfo.queryName();
        system = drvInfo.querySystem();
@@ -217,17 +245,20 @@ struct Drv {
};

static void to_json(nlohmann::json &json, const Drv &drv) {
    json = nlohmann::json{
        {"name", drv.name},
    json = nlohmann::json{{"name", drv.name},
                          {"system", drv.system},
                          {"drvPath", drv.drvPath},
        {"outputs", drv.outputs},
    };
                          {"outputs", drv.outputs}};

    if (drv.meta.has_value())
    if (drv.meta.has_value()) {
        json["meta"] = drv.meta.value();
    }

    if (myArgs.checkCacheStatus) {
        json["isCached"] = drv.isCached;
    }
}

std::string attrPathJoin(json input) {
    return std::accumulate(input.begin(), input.end(), std::string(),
                           [](std::string ss, std::string s) {
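For reference, the serialized `Drv` gains an `isCached` key only when `--check-cache-status` is passed; without the flag the output schema is unchanged. An illustrative payload with made-up values (`meta` appears only with `--meta` and is omitted here):

```python
import json

# Made-up example of one output line's payload; only the key set matters.
# "isCached" is present only when --check-cache-status was given.
drv = json.loads("""
{
  "name": "hello-2.12",
  "system": "x86_64-linux",
  "drvPath": "/nix/store/cccccccc-hello-2.12.drv",
  "outputs": { "out": "/nix/store/dddddddd-hello-2.12" },
  "isCached": true
}
""")
assert drv["isCached"] is True
```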
@@ -266,8 +297,8 @@ static void worker(EvalState &state, Bindings &autoArgs, AutoCloseFD &to,
            auto v = state.allocValue();
            state.autoCallFunction(autoArgs, *vTmp, *v);

            if (v->type() == nAttrs) {
                if (auto drvInfo = getDerivation(state, *v, false)) {

                    auto drv = Drv(state, *drvInfo);
                    reply.update(drv);
@@ -279,29 +310,29 @@ static void worker(EvalState &state, Bindings &autoArgs, AutoCloseFD &to,
                            std::string(baseNameOf(drv.drvPath));
                        if (!pathExists(root)) {
                            auto localStore =
                                state.store.dynamic_pointer_cast<LocalFSStore>();
                                state.store
                                    .dynamic_pointer_cast<LocalFSStore>();
                            auto storePath =
                                localStore->parseStorePath(drv.drvPath);
                            localStore->addPermRoot(storePath, root);
                        }
                    }

                }

            else if (v->type() == nAttrs) {
                } else {
                    auto attrs = nlohmann::json::array();
                    bool recurse =
                        path.size() == 0; // Dont require `recurseForDerivations =
                                          // true;` for top-level attrset
                        path.size() == 0; // Dont require `recurseForDerivations
                                          // = true;` for top-level attrset

                    for (auto &i : v->attrs->lexicographicOrder(state.symbols)) {
                    for (auto &i :
                         v->attrs->lexicographicOrder(state.symbols)) {
                        const std::string &name = state.symbols[i->name];
                        attrs.push_back(name);

                        if (name == "recurseForDerivations") {
                            auto attrv =
                                v->attrs->get(state.sRecurseForDerivations);
                            recurse = state.forceBool(*attrv->value, attrv->pos);
                            recurse =
                                state.forceBool(*attrv->value, attrv->pos);
                        }
                    }
                    if (recurse)
@@ -309,17 +340,12 @@ static void worker(EvalState &state, Bindings &autoArgs, AutoCloseFD &to,
                    else
                        reply["attrs"] = nlohmann::json::array();
                }

            else if (v->type() == nNull)
                ;

            else
                throw TypeError("attribute '%s' is %s, which is not supported",
                                path, showType(*v));

            } else {
                // We ignore everything that cannot be build
                reply["attrs"] = nlohmann::json::array();
            }
        } catch (EvalError &e) {
            auto err = e.info();

            std::ostringstream oss;
            showErrorInfo(oss, err, loggerSettings.showTrace.get());
            auto msg = oss.str();
@@ -4,14 +4,14 @@ import subprocess
import json
from tempfile import TemporaryDirectory
from pathlib import Path
from typing import List
from typing import List, Dict, Any

TEST_ROOT = Path(__file__).parent.resolve()
PROJECT_ROOT = TEST_ROOT.parent
BIN = PROJECT_ROOT.joinpath("build", "src", "nix-eval-jobs")


def common_test(extra_args: List[str]) -> None:
def common_test(extra_args: List[str]) -> List[Dict[str, Any]]:
    with TemporaryDirectory() as tempdir:
        cmd = [str(BIN), "--gc-roots-dir", tempdir, "--meta"] + extra_args
        res = subprocess.run(
@@ -23,7 +23,7 @@ def common_test(extra_args: List[str]) -> None:
        )

        results = [json.loads(r) for r in res.stdout.split("\n") if r]
        assert len(results) == 5
        assert len(results) == 4

        built_job = results[0]
        assert built_job["attr"] == "builtJob"
@@ -40,21 +40,30 @@ def common_test(extra_args: List[str]) -> None:
        assert recurse_drv["attr"] == "recurse.drvB"
        assert recurse_drv["name"] == "drvB"

        recurse_recurse_bool = results[3]
        assert "error" in recurse_recurse_bool

        substituted_job = results[4]
        substituted_job = results[3]
        assert substituted_job["attr"] == "substitutedJob"
        assert substituted_job["name"].startswith("hello-")
        assert substituted_job["meta"]["broken"] is False
        return results


def test_flake() -> None:
    common_test(["--flake", ".#hydraJobs"])
    results = common_test(["--flake", ".#hydraJobs"])
    for result in results:
        assert "isCached" not in result


def test_query_cache_status() -> None:
    results = common_test(["--flake", ".#hydraJobs", "--check-cache-status"])
    # FIXME in the nix sandbox we cannot query binary caches, this would need some local one
    for result in results:
        assert "isCached" in result


def test_expression() -> None:
    common_test(["ci.nix"])
    results = common_test(["ci.nix"])
    for result in results:
        assert "isCached" not in result

    with open(TEST_ROOT.joinpath("assets/ci.nix"), "r") as ci_nix:
        common_test(["-E", ci_nix.read()])
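The FIXME in `test_query_cache_status` explains why the test only asserts that the `isCached` key is present rather than checking its value: the sandboxed test environment cannot reach a real binary cache. One possible direction, sketched under the assumption that the sandbox allows a local `file://` substituter, reusing the file's `common_test` helper and the `--option` flag documented above:

```python
from tempfile import TemporaryDirectory


def test_query_cache_status_with_local_cache() -> None:
    # Hypothetical test: serve a throw-away file:// binary cache and point
    # nix-eval-jobs at it via --option, so isCached could be asserted True
    # for a path copied into the cache beforehand (e.g. with
    # `nix copy --to file://... <store-path>`); signature checking would
    # likely also have to be relaxed, e.g. via `--option require-sigs false`.
    with TemporaryDirectory() as cache_dir:
        results = common_test(
            ["--flake", ".#hydraJobs", "--check-cache-status",
             "--option", "substituters", f"file://{cache_dir}"]
        )
        for result in results:
            assert "isCached" in result
```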