forked from lix-project/hydra

hydra-eval-jobs -> nix eval-hydra-jobs

parent 23c9ca3e94
commit 345512a6d0
.gitignore (vendored):
@@ -18,7 +18,6 @@ Makefile.in
 /src/sql/hydra-postgresql.sql
 /src/sql/hydra-sqlite.sql
 /src/sql/tmp.sqlite
-/src/hydra-eval-jobs/hydra-eval-jobs
 /src/root/static/bootstrap
 /src/root/static/js/flot
 /doc/manual/images
configure.ac:
@@ -64,7 +64,6 @@ AC_CONFIG_FILES([
   doc/manual/Makefile
   src/Makefile
   src/hydra-evaluator/Makefile
-  src/hydra-eval-jobs/Makefile
   src/hydra-queue-runner/Makefile
   src/sql/Makefile
   src/ttf/Makefile
@@ -121,7 +121,7 @@
     configureFlags = [ "--with-docbook-xsl=${docbook_xsl}/xml/xsl/docbook" ];
 
     shellHook = ''
-      PATH=$(pwd)/src/hydra-evaluator:$(pwd)/src/script:$(pwd)/src/hydra-eval-jobs:$(pwd)/src/hydra-queue-runner:$PATH
+      PATH=$(pwd)/src/hydra-evaluator:$(pwd)/src/script:$(pwd)/src/hydra-queue-runner:$PATH
       PERL5LIB=$(pwd)/src/lib:$PERL5LIB
     '';
 
src/Makefile.am:
@@ -1,3 +1,3 @@
-SUBDIRS = hydra-evaluator hydra-eval-jobs hydra-queue-runner sql script lib root ttf
+SUBDIRS = hydra-evaluator hydra-queue-runner sql script lib root ttf
 BOOTCLEAN_SUBDIRS = $(SUBDIRS)
 DIST_SUBDIRS = $(SUBDIRS)
src/hydra-eval-jobs/Makefile.am (file deleted; former contents below):
@@ -1,5 +0,0 @@
bin_PROGRAMS = hydra-eval-jobs

hydra_eval_jobs_SOURCES = hydra-eval-jobs.cc
hydra_eval_jobs_LDADD = $(NIX_LIBS) -lnixrust
hydra_eval_jobs_CXXFLAGS = $(NIX_CFLAGS) -I ../libhydra
src/hydra-eval-jobs/hydra-eval-jobs.cc (file deleted; former contents below):
@@ -1,281 +0,0 @@
#include <map>
#include <iostream>

#define GC_LINUX_THREADS 1
#include <gc/gc_allocator.h>

#include "shared.hh"
#include "store-api.hh"
#include "eval.hh"
#include "eval-inline.hh"
#include "util.hh"
#include "json.hh"
#include "get-drvs.hh"
#include "globals.hh"
#include "common-eval-args.hh"
#include "flake/flakeref.hh"
#include "flake/flake.hh"

#include "hydra-config.hh"

#include <sys/types.h>
#include <sys/wait.h>

using namespace nix;


static Path gcRootsDir;


static void findJobs(EvalState & state, JSONObject & top,
    Bindings & autoArgs, Value & v, const string & attrPath);


static string queryMetaStrings(EvalState & state, DrvInfo & drv, const string & name, const string & subAttribute)
{
    Strings res;
    std::function<void(Value & v)> rec;

    rec = [&](Value & v) {
        state.forceValue(v);
        if (v.type == tString)
            res.push_back(v.string.s);
        else if (v.isList())
            for (unsigned int n = 0; n < v.listSize(); ++n)
                rec(*v.listElems()[n]);
        else if (v.type == tAttrs) {
            auto a = v.attrs->find(state.symbols.create(subAttribute));
            if (a != v.attrs->end())
                res.push_back(state.forceString(*a->value));
        }
    };

    Value * v = drv.queryMeta(name);
    if (v) rec(*v);

    return concatStringsSep(", ", res);
}


static void findJobsWrapped(EvalState & state, JSONObject & top,
    Bindings & autoArgs, Value & vIn, const string & attrPath)
{
    debug(format("at path `%1%'") % attrPath);

    checkInterrupt();

    Value v;
    state.autoCallFunction(autoArgs, vIn, v);

    if (v.type == tAttrs) {

        auto drv = getDerivation(state, v, false);

        if (drv) {
            Path drvPath;

            DrvInfo::Outputs outputs = drv->queryOutputs();

            if (drv->querySystem() == "unknown")
                throw EvalError("derivation must have a ‘system’ attribute");

            {
                auto res = top.object(attrPath);
                res.attr("nixName", drv->queryName());
                res.attr("system", drv->querySystem());
                res.attr("drvPath", drvPath = drv->queryDrvPath());
                res.attr("description", drv->queryMetaString("description"));
                res.attr("license", queryMetaStrings(state, *drv, "license", "shortName"));
                res.attr("homepage", drv->queryMetaString("homepage"));
                res.attr("maintainers", queryMetaStrings(state, *drv, "maintainers", "email"));
                res.attr("schedulingPriority", drv->queryMetaInt("schedulingPriority", 100));
                res.attr("timeout", drv->queryMetaInt("timeout", 36000));
                res.attr("maxSilent", drv->queryMetaInt("maxSilent", 7200));
                res.attr("isChannel", drv->queryMetaBool("isHydraChannel", false));

                /* If this is an aggregate, then get its constituents. */
                Bindings::iterator a = v.attrs->find(state.symbols.create("_hydraAggregate"));
                if (a != v.attrs->end() && state.forceBool(*a->value, *a->pos)) {
                    Bindings::iterator a = v.attrs->find(state.symbols.create("constituents"));
                    if (a == v.attrs->end())
                        throw EvalError("derivation must have a ‘constituents’ attribute");
                    PathSet context;
                    state.coerceToString(*a->pos, *a->value, context, true, false);
                    PathSet drvs;
                    for (auto & i : context)
                        if (i.at(0) == '!') {
                            size_t index = i.find("!", 1);
                            drvs.insert(string(i, index + 1));
                        }
                    res.attr("constituents", concatStringsSep(" ", drvs));
                }

                /* Register the derivation as a GC root. !!! This
                   registers roots for jobs that we may have already
                   done. */
                auto localStore = state.store.dynamic_pointer_cast<LocalFSStore>();
                if (gcRootsDir != "" && localStore) {
                    Path root = gcRootsDir + "/" + std::string(baseNameOf(drvPath));
                    if (!pathExists(root))
                        localStore->addPermRoot(localStore->parseStorePath(drvPath), root, false);
                }

                auto res2 = res.object("outputs");
                for (auto & j : outputs)
                    res2.attr(j.first, j.second);

            }
        }

        else {
            if (!state.isDerivation(v)) {
                for (auto & i : v.attrs->lexicographicOrder()) {
                    std::string name(i->name);

                    /* Skip jobs with dots in the name. */
                    if (name.find('.') != std::string::npos) {
                        printError("skipping job with illegal name '%s'", name);
                        continue;
                    }

                    findJobs(state, top, autoArgs, *i->value,
                        (attrPath.empty() ? "" : attrPath + ".") + name);
                }
            }
        }
    }

    else if (v.type == tNull) {
        // allow null values, meaning 'do nothing'
    }

    else
        throw TypeError(format("unsupported value: %1%") % v);
}


static void findJobs(EvalState & state, JSONObject & top,
    Bindings & autoArgs, Value & v, const string & attrPath)
{
    try {
        findJobsWrapped(state, top, autoArgs, v, attrPath);
    } catch (EvalError & e) {
        auto res = top.object(attrPath);
        res.attr("error", filterANSIEscapes(e.msg(), true));
    }
}


int main(int argc, char * * argv)
{
    /* Prevent undeclared dependencies in the evaluation via
       $NIX_PATH. */
    unsetenv("NIX_PATH");

    return handleExceptions(argv[0], [&]() {

        auto config = std::make_unique<::Config>();

        auto initialHeapSize = config->getStrOption("evaluator_initial_heap_size", "");
        if (initialHeapSize != "")
            setenv("GC_INITIAL_HEAP_SIZE", initialHeapSize.c_str(), 1);

        initNix();
        initGC();

        struct MyArgs : MixEvalArgs, MixCommonArgs
        {
            Path releaseExpr;
            bool flake = false;

            MyArgs() : MixCommonArgs("hydra-eval-jobs")
            {
                mkFlag()
                    .longName("help")
                    .description("show usage information")
                    .handler([&]() {
                        printHelp(programName, std::cout);
                        throw Exit();
                    });

                mkFlag()
                    .longName("gc-roots-dir")
                    .description("garbage collector roots directory")
                    .labels({"path"})
                    .dest(&gcRootsDir);

                mkFlag()
                    .longName("dry-run")
                    .description("don't create store derivations")
                    .set(&settings.readOnlyMode, true);

                mkFlag()
                    .longName("flake")
                    .description("build a flake")
                    .set(&flake, true);

                expectArg("expr", &releaseExpr);
            }
        };

        MyArgs myArgs;
        myArgs.parseCmdline(argvToStrings(argc, argv));

        JSONObject json(std::cout, true);
        std::cout.flush();

        /* FIXME: The build hook in conjunction with import-from-derivation is causing "unexpected EOF" during eval */
        settings.builders = "";

        /* Prevent access to paths outside of the Nix search path and
           to the environment. */
        evalSettings.restrictEval = true;

        /* When building a flake, use pure evaluation (no access to
           'getEnv', 'currentSystem' etc. */
        evalSettings.pureEval = myArgs.flake;

        if (myArgs.releaseExpr == "") throw UsageError("no expression specified");

        if (gcRootsDir == "") printMsg(lvlError, "warning: `--gc-roots-dir' not specified");

        EvalState state(myArgs.searchPath, openStore());

        Bindings & autoArgs = *myArgs.getAutoArgs(state);

        Value v;

        if (myArgs.flake) {
            using namespace flake;

            auto flakeRef = parseFlakeRef(myArgs.releaseExpr);

            auto vFlake = state.allocValue();

            auto lockedFlake = lockFlake(state, flakeRef,
                LockFlags {
                    .updateLockFile = false,
                    .useRegistries = false,
                    .allowMutable = false,
                });

            callFlake(state, lockedFlake, *vFlake);

            auto vOutputs = (*vFlake->attrs->get(state.symbols.create("outputs")))->value;
            state.forceValue(*vOutputs);

            auto aHydraJobs = vOutputs->attrs->get(state.symbols.create("hydraJobs"));
            if (!aHydraJobs)
                aHydraJobs = vOutputs->attrs->get(state.symbols.create("checks"));
            if (!aHydraJobs)
                throw Error("flake '%s' does not provide any Hydra jobs or checks", flakeRef);

            v = *(*aHydraJobs)->value;

        } else {
            state.evalFile(lookupFileArg(state, myArgs.releaseExpr), v);
        }

        findJobs(state, json, autoArgs, v, "");
    });
}
src/script/hydra-eval-jobset:
@@ -330,22 +330,27 @@ sub inputsToArgs {
 sub evalJobs {
     my ($inputInfo, $nixExprInputName, $nixExprPath, $flakeRef) = @_;
 
-    my @cmd;
+    my @cmd = (
+        "nix", "eval-hydra-jobs",
+        "--json",
+        "--gc-roots-dir", getGCRootsDir,
+        "--max-jobs", 1,
+        "--workers", $config->{evaluator_workers} // 1,
+        "--max-memory-size", $config->{evaluator_max_memory_size} // 4096,
+        "--builders", "",
+    );
 
     if (defined $flakeRef) {
-        @cmd = ("hydra-eval-jobs",
-                "--flake", $flakeRef,
-                "--gc-roots-dir", getGCRootsDir,
-                "--max-jobs", 1);
+        push @cmd,
+            ($flakeRef,
+             "--no-update-lock-file",
+             "--no-registries");
     } else {
         my $nixExprInput = $inputInfo->{$nixExprInputName}->[0]
             or die "cannot find the input containing the job expression\n";
 
-        @cmd = ("hydra-eval-jobs",
-                "<" . $nixExprInputName . "/" . $nixExprPath . ">",
-                "--gc-roots-dir", getGCRootsDir,
-                "--max-jobs", 1,
-                inputsToArgs($inputInfo));
+        push @cmd,
+            ("--file", "<" . $nixExprInputName . "/" . $nixExprPath . ">",
+             inputsToArgs($inputInfo));
     }
 
     if (defined $ENV{'HYDRA_DEBUG'}) {
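For context, a minimal sketch of the argv the rewritten evalJobs assembles for a flake jobset. This is illustrative only, assuming a config where evaluator_workers and evaluator_max_memory_size are unset (so the `//` defaults apply); the GC-roots path and flake ref below are made-up placeholders, not values from the commit:

    # Hypothetical result of the @cmd construction above for a flake jobset,
    # with config defaults (workers = 1, max memory = 4096):
    my @example = (
        "nix", "eval-hydra-jobs", "--json",
        "--gc-roots-dir", "/nix/var/nix/gcroots/hydra",   # placeholder path
        "--max-jobs", 1,
        "--workers", 1,
        "--max-memory-size", 4096,
        "--builders", "",
        "git+https://example.org/my-flake",               # placeholder flake ref
        "--no-update-lock-file", "--no-registries",
    );

The non-flake branch instead appends "--file" with the `<input/path>` spec plus inputsToArgs($inputInfo), so both branches now share the common "nix eval-hydra-jobs" prefix rather than building separate hydra-eval-jobs command lines.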
tests/Makefile.am:
@@ -14,7 +14,7 @@ TESTS_ENVIRONMENT = \
 	NIX_BUILD_HOOK= \
 	PGHOST=/tmp \
 	PERL5LIB="$(srcdir):$(abs_top_srcdir)/src/lib:$$PERL5LIB" \
-	PATH=$(abs_top_srcdir)/src/hydra-evaluator:$(abs_top_srcdir)/src/script:$(abs_top_srcdir)/src/hydra-eval-jobs:$(abs_top_srcdir)/src/hydra-queue-runner:$$PATH \
+	PATH=$(abs_top_srcdir)/src/hydra-evaluator:$(abs_top_srcdir)/src/script:$(abs_top_srcdir)/src/hydra-queue-runner:$$PATH \
 	perl -w
 
 EXTRA_DIST = \