forked from lix-project/hydra

commit 526e8bd744
Merge pull request #1291 from NixOS/update-nix-nixpkgs

@@ -10,8 +10,6 @@ AC_PROG_LN_S
 AC_PROG_LIBTOOL
 AC_PROG_CXX
 
-CXXFLAGS+=" -std=c++17"
-
 AC_PATH_PROG([XSLTPROC], [xsltproc])
 
 AC_ARG_WITH([docbook-xsl],

flake.lock — 46 lines changed

@@ -1,5 +1,21 @@
 {
   "nodes": {
+    "flake-compat": {
+      "flake": false,
+      "locked": {
+        "lastModified": 1673956053,
+        "narHash": "sha256-4gtG9iQuiKITOjNQQeQIpoIB6b16fm+504Ch3sNKLd8=",
+        "owner": "edolstra",
+        "repo": "flake-compat",
+        "rev": "35bb57c0c8d8b62bbfd284272c928ceb64ddbde9",
+        "type": "github"
+      },
+      "original": {
+        "owner": "edolstra",
+        "repo": "flake-compat",
+        "type": "github"
+      }
+    },
     "lowdown-src": {
       "flake": false,
       "locked": {
@@ -18,37 +34,40 @@
     },
     "nix": {
       "inputs": {
+        "flake-compat": "flake-compat",
         "lowdown-src": "lowdown-src",
-        "nixpkgs": "nixpkgs",
+        "nixpkgs": [
+          "nixpkgs"
+        ],
         "nixpkgs-regression": "nixpkgs-regression"
       },
       "locked": {
-        "lastModified": 1677045134,
-        "narHash": "sha256-jUc2ccTR8f6MGY2pUKgujm+lxSPNGm/ZAP+toX+nMNc=",
-        "owner": "nixos",
+        "lastModified": 1686048923,
+        "narHash": "sha256-/XCWa2osNFIpPC5MkxlX6qTZf/DaTLwS3LWN0SRFiuU=",
+        "owner": "NixOS",
         "repo": "nix",
-        "rev": "4acc684ef7b3117c6d6ac12837398a0008a53d85",
+        "rev": "84050709ea18f3285a85d729f40c8f8eddf5008e",
         "type": "github"
       },
       "original": {
-        "owner": "nixos",
-        "ref": "2.13.3",
+        "owner": "NixOS",
+        "ref": "2.16.1",
        "repo": "nix",
         "type": "github"
       }
     },
     "nixpkgs": {
       "locked": {
-        "lastModified": 1670461440,
-        "narHash": "sha256-jy1LB8HOMKGJEGXgzFRLDU1CBGL0/LlkolgnqIsF0D8=",
+        "lastModified": 1687379288,
+        "narHash": "sha256-cSuwfiqYfeVyqzCRkU9AvLTysmEuSal8nh6CYr+xWog=",
         "owner": "NixOS",
         "repo": "nixpkgs",
-        "rev": "04a75b2eecc0acf6239acf9dd04485ff8d14f425",
+        "rev": "ef0bc3976340dab9a4e087a0bcff661a8b2e87f3",
         "type": "github"
       },
       "original": {
         "owner": "NixOS",
-        "ref": "nixos-22.11-small",
+        "ref": "nixos-23.05",
         "repo": "nixpkgs",
         "type": "github"
       }
@@ -72,10 +91,7 @@
     "root": {
       "inputs": {
         "nix": "nix",
-        "nixpkgs": [
-          "nix",
-          "nixpkgs"
-        ]
+        "nixpkgs": "nixpkgs"
      }
    }
  },

@@ -1,8 +1,9 @@
 {
   description = "A Nix-based continuous build system";
 
-  inputs.nixpkgs.follows = "nix/nixpkgs";
-  inputs.nix.url = "github:nixos/nix/2.13.3";
+  inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-23.05";
+  inputs.nix.url = "github:NixOS/nix/2.16.1";
+  inputs.nix.inputs.nixpkgs.follows = "nixpkgs";
 
   outputs = { self, nixpkgs, nix }:
     let

@@ -25,7 +25,8 @@
 
 #include <nlohmann/json.hpp>
 
-void check_pid_status_nonblocking(pid_t check_pid) {
+void check_pid_status_nonblocking(pid_t check_pid)
+{
     // Only check 'initialized' and known PID's
     if (check_pid <= 0) { return; }
 
@@ -100,7 +101,7 @@ static std::string queryMetaStrings(EvalState & state, DrvInfo & drv, const std:
         else if (v.type() == nAttrs) {
             auto a = v.attrs->find(state.symbols.create(subAttribute));
             if (a != v.attrs->end())
-                res.push_back(std::string(state.forceString(*a->value)));
+                res.push_back(std::string(state.forceString(*a->value, a->pos, "while evaluating meta attributes")));
         }
     };
 
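
The forceString call above now also takes the attribute's position and a human-readable error-context string, which the evaluator can fold into its error trace when the value has the wrong type. A minimal sketch of that call shape, using toy stand-ins (EvalLike and Pos below) rather than Nix's real EvalState API:

    #include <iostream>
    #include <stdexcept>
    #include <string>

    struct Pos { int line = 0; };

    struct EvalLike {
        // New call shape: value, source position, and an error-context string
        // that is folded into the exception message on failure.
        std::string forceString(const std::string & v, Pos pos, const std::string & errorCtx)
        {
            if (v.empty())
                throw std::runtime_error(errorCtx + ": expected a string at line " + std::to_string(pos.line));
            return v;
        }
    };

    int main()
    {
        EvalLike state;
        std::cout << state.forceString("1.0", Pos{100}, "while evaluating meta attributes") << '\n';
        return 0;
    }
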
@@ -197,26 +198,30 @@ static void worker(
 
                 /* If this is an aggregate, then get its constituents. */
                 auto a = v->attrs->get(state.symbols.create("_hydraAggregate"));
-                if (a && state.forceBool(*a->value, a->pos)) {
+                if (a && state.forceBool(*a->value, a->pos, "while evaluating the `_hydraAggregate` attribute")) {
                     auto a = v->attrs->get(state.symbols.create("constituents"));
                     if (!a)
                         throw EvalError("derivation must have a ‘constituents’ attribute");
 
-                    PathSet context;
-                    state.coerceToString(a->pos, *a->value, context, true, false);
-                    for (auto & i : context)
-                        if (i.at(0) == '!') {
-                            size_t index = i.find("!", 1);
-                            job["constituents"].push_back(std::string(i, index + 1));
-                        }
-
-                    state.forceList(*a->value, a->pos);
+                    NixStringContext context;
+                    state.coerceToString(a->pos, *a->value, context, "while evaluating the `constituents` attribute", true, false);
+                    for (auto & c : context)
+                        std::visit(overloaded {
+                            [&](const NixStringContextElem::Built & b) {
+                                job["constituents"].push_back(state.store->printStorePath(b.drvPath));
+                            },
+                            [&](const NixStringContextElem::Opaque & o) {
+                            },
+                            [&](const NixStringContextElem::DrvDeep & d) {
+                            },
+                        }, c.raw());
+
+                    state.forceList(*a->value, a->pos, "while evaluating the `constituents` attribute");
                     for (unsigned int n = 0; n < a->value->listSize(); ++n) {
                         auto v = a->value->listElems()[n];
                         state.forceValue(*v, noPos);
                         if (v->type() == nString)
-                            job["namedConstituents"].push_back(state.forceStringNoCtx(*v));
+                            job["namedConstituents"].push_back(v->str());
                     }
                 }
 
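
The hunk above stops parsing `!`-prefixed strings out of the context set and instead visits typed string-context elements, keeping only the Built case as a constituent. The overloaded helper it relies on is the usual C++17 idiom for assembling a visitor from lambdas; here is a self-contained sketch with a stand-in variant (Built, Opaque, and ContextElem below are illustrative, not Nix's NixStringContextElem):

    #include <iostream>
    #include <string>
    #include <variant>
    #include <vector>

    // The usual "overloaded" helper: inherit operator() from every lambda passed in.
    template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
    template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;

    struct Built  { std::string drvPath; };   // stand-in for a "built" context element
    struct Opaque { std::string path; };      // stand-in for an "opaque" context element

    using ContextElem = std::variant<Built, Opaque>;

    int main()
    {
        std::vector<ContextElem> context = {
            Built{"/nix/store/aaaa-job.drv"},
            Opaque{"/nix/store/bbbb-src"},
        };
        for (auto & c : context)
            std::visit(overloaded {
                [](const Built & b) { std::cout << "constituent: " << b.drvPath << '\n'; },
                [](const Opaque &)  { /* ignored, as in the hunk above */ },
            }, c);
        return 0;
    }
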

@@ -116,12 +116,12 @@ static void copyClosureTo(std::timed_mutex & sendMutex, Store & destStore,
        the remote host to substitute missing paths. */
     // FIXME: substitute output pollutes our build log
     to << cmdQueryValidPaths << 1 << useSubstitutes;
-    worker_proto::write(destStore, to, closure);
+    workerProtoWrite(destStore, to, closure);
     to.flush();
 
     /* Get back the set of paths that are already valid on the remote
        host. */
-    auto present = worker_proto::read(destStore, from, Phantom<StorePathSet> {});
+    auto present = WorkerProto<StorePathSet>::read(destStore, from);
 
     if (present.size() == closure.size()) return;
 
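
This hunk, and the two below it, swap the free worker_proto::read/write helpers, which pick their type through a Phantom<T> tag argument, for workerProtoWrite and a templated WorkerProto<T>::read. A rough sketch of the two call shapes, with toy stand-ins (Phantom, readTagged, and Proto are illustrative, not Nix's actual declarations):

    #include <set>
    #include <string>

    template<typename T> struct Phantom {};

    // Old shape: a free function template whose T is selected by a tag argument.
    template<typename T>
    T readTagged(Phantom<T>) { return T{}; }

    // New shape: a per-type template with a static read(); no tag object needed.
    template<typename T>
    struct Proto { static T read() { return T{}; } };

    int main()
    {
        auto a = readTagged(Phantom<std::set<std::string>>{});  // old call shape
        auto b = Proto<std::set<std::string>>::read();          // new call shape
        return (a == b) ? 0 : 1;
    }
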
@@ -367,7 +367,7 @@ void State::buildRemote(ref<Store> destStore,
             }
         }
         if (GET_PROTOCOL_MINOR(remoteVersion) >= 6) {
-            worker_proto::read(*localStore, from, Phantom<DrvOutputs> {});
+            WorkerProto<DrvOutputs>::read(*localStore, from);
         }
         switch ((BuildResult::Status) res) {
             case BuildResult::Built:
@@ -444,17 +444,17 @@ void State::buildRemote(ref<Store> destStore,
         std::map<StorePath, ValidPathInfo> infos;
         size_t totalNarSize = 0;
         to << cmdQueryPathInfos;
-        worker_proto::write(*localStore, to, outputs);
+        workerProtoWrite(*localStore, to, outputs);
         to.flush();
         while (true) {
             auto storePathS = readString(from);
             if (storePathS == "") break;
             auto deriver = readString(from); // deriver
-            auto references = worker_proto::read(*localStore, from, Phantom<StorePathSet> {});
+            auto references = WorkerProto<StorePathSet>::read(*localStore, from);
             readLongLong(from); // download size
             auto narSize = readLongLong(from);
             auto narHash = Hash::parseAny(readString(from), htSHA256);
-            auto ca = parseContentAddressOpt(readString(from));
+            auto ca = ContentAddress::parseOpt(readString(from));
             readStrings<StringSet>(from); // sigs
             ValidPathInfo info(localStore->parseStorePath(storePathS), narHash);
             assert(outputs.count(info.path));

@@ -323,7 +323,7 @@ State::StepResult State::doBuildStep(nix::ref<Store> destStore,
         pqxx::work txn(*conn);
 
         for (auto & b : direct) {
-            printMsg(lvlInfo, format("marking build %1% as succeeded") % b->id);
+            printInfo("marking build %1% as succeeded", b->id);
             markSucceededBuild(txn, b, res, buildId != b->id || result.isCached,
                 result.startTime, result.stopTime);
         }
@@ -451,7 +451,7 @@ void State::failStep(
         /* Mark all builds that depend on this derivation as failed. */
         for (auto & build : indirect) {
             if (build->finishedInDB) continue;
-            printMsg(lvlError, format("marking build %1% as failed") % build->id);
+            printError("marking build %1% as failed", build->id);
             txn.exec_params0
                 ("update Builds set finished = 1, buildStatus = $2, startTime = $3, stopTime = $4, isCachedBuild = $5, notificationPendingSince = $4 where id = $1 and finished = 0",
                 build->id,
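
The two hunks above, and many of the hunks that follow, drop the boost-style format("...%1%...") % arg chains in favour of passing the format string and its arguments straight to printError, printInfo, debug, or printMsg. A toy sketch of the difference (logOld and logNew below are illustrative stand-ins, not Nix's logging functions, and use printf-style placeholders for brevity):

    #include <cstdio>
    #include <string>

    // Old shape (approximated): the message is assembled first, then handed to the logger.
    static void logOld(const std::string & msg)
    {
        std::fprintf(stderr, "%s\n", msg.c_str());
    }

    // New shape: the logging call itself takes the format string plus the arguments.
    template<typename... Args>
    static void logNew(const char * fmt, Args... args)
    {
        std::fprintf(stderr, fmt, args...);
        std::fprintf(stderr, "\n");
    }

    int main()
    {
        int buildId = 42;
        logOld("marking build " + std::to_string(buildId) + " as succeeded");
        logNew("marking build %d as succeeded", buildId);
        return 0;
    }
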

@@ -52,7 +52,7 @@ void State::dispatcher()
             {
                 auto dispatcherWakeup_(dispatcherWakeup.lock());
                 if (!*dispatcherWakeup_) {
-                    printMsg(lvlDebug, format("dispatcher sleeping for %1%s") %
+                    debug("dispatcher sleeping for %1%s",
                         std::chrono::duration_cast<std::chrono::seconds>(sleepUntil - std::chrono::system_clock::now()).count());
                     dispatcherWakeup_.wait_until(dispatcherWakeupCV, sleepUntil);
                 }
@@ -60,7 +60,7 @@ void State::dispatcher()
             }
 
         } catch (std::exception & e) {
-            printMsg(lvlError, format("dispatcher: %1%") % e.what());
+            printError("dispatcher: %s", e.what());
             sleep(1);
         }
 
@@ -80,8 +80,8 @@ system_time State::doDispatch()
                 jobset.second->pruneSteps();
                 auto s2 = jobset.second->shareUsed();
                 if (s1 != s2)
-                    printMsg(lvlDebug, format("pruned scheduling window of ‘%1%:%2%’ from %3% to %4%")
-                        % jobset.first.first % jobset.first.second % s1 % s2);
+                    debug("pruned scheduling window of ‘%1%:%2%’ from %3% to %4%",
+                        jobset.first.first, jobset.first.second, s1, s2);
             }
         }
 

@@ -161,9 +161,9 @@ void State::parseMachines(const std::string & contents)
            same name. */
         auto i = oldMachines.find(machine->sshName);
         if (i == oldMachines.end())
-            printMsg(lvlChatty, format("adding new machine ‘%1%’") % machine->sshName);
+            printMsg(lvlChatty, "adding new machine ‘%1%’", machine->sshName);
         else
-            printMsg(lvlChatty, format("updating machine ‘%1%’") % machine->sshName);
+            printMsg(lvlChatty, "updating machine ‘%1%’", machine->sshName);
         machine->state = i == oldMachines.end()
             ? std::make_shared<Machine::State>()
             : i->second->state;
@@ -173,7 +173,7 @@ void State::parseMachines(const std::string & contents)
         for (auto & m : oldMachines)
             if (newMachines.find(m.first) == newMachines.end()) {
                 if (m.second->enabled)
-                    printMsg(lvlInfo, format("removing machine ‘%1%’") % m.first);
+                    printInfo("removing machine ‘%1%’", m.first);
                 /* Add a disabled Machine object to make sure stats are
                    maintained. */
                 auto machine = std::make_shared<Machine>(*(m.second));
@@ -928,7 +928,6 @@ int main(int argc, char * * argv)
         });
 
         settings.verboseBuild = true;
-        settings.lockCPU = false;
 
         State state{metricsAddrOpt};
         if (status)

@@ -13,7 +13,7 @@ void State::queueMonitor()
     try {
         queueMonitorLoop();
     } catch (std::exception & e) {
-        printMsg(lvlError, format("queue monitor: %1%") % e.what());
+        printError("queue monitor: %s", e.what());
         sleep(10); // probably a DB problem, so don't retry right away
     }
 }
@@ -142,13 +142,13 @@ bool State::getQueuedBuilds(Connection & conn,
 
     createBuild = [&](Build::ptr build) {
         prom.queue_build_loads.Increment();
-        printMsg(lvlTalkative, format("loading build %1% (%2%)") % build->id % build->fullJobName());
+        printMsg(lvlTalkative, "loading build %1% (%2%)", build->id, build->fullJobName());
         nrAdded++;
         newBuildsByID.erase(build->id);
 
         if (!localStore->isValidPath(build->drvPath)) {
             /* Derivation has been GC'ed prematurely. */
-            printMsg(lvlError, format("aborting GC'ed build %1%") % build->id);
+            printError("aborting GC'ed build %1%", build->id);
             if (!build->finishedInDB) {
                 auto mc = startDbUpdate();
                 pqxx::work txn(conn);
@@ -302,7 +302,7 @@ bool State::getQueuedBuilds(Connection & conn,
 
     /* Add the new runnable build steps to ‘runnable’ and wake up
        the builder threads. */
-    printMsg(lvlChatty, format("got %1% new runnable steps from %2% new builds") % newRunnable.size() % nrAdded);
+    printMsg(lvlChatty, "got %1% new runnable steps from %2% new builds", newRunnable.size(), nrAdded);
     for (auto & r : newRunnable)
         makeRunnable(r);
 
@@ -358,13 +358,13 @@ void State::processQueueChange(Connection & conn)
     for (auto i = builds_->begin(); i != builds_->end(); ) {
         auto b = currentIds.find(i->first);
         if (b == currentIds.end()) {
-            printMsg(lvlInfo, format("discarding cancelled build %1%") % i->first);
+            printInfo("discarding cancelled build %1%", i->first);
             i = builds_->erase(i);
             // FIXME: ideally we would interrupt active build steps here.
             continue;
         }
         if (i->second->globalPriority < b->second) {
-            printMsg(lvlInfo, format("priority of build %1% increased") % i->first);
+            printInfo("priority of build %1% increased", i->first);
             i->second->globalPriority = b->second;
             i->second->propagatePriorities();
         }
@@ -654,7 +654,7 @@ BuildOutput State::getBuildOutputCached(Connection & conn, nix::ref<nix::Store>
         if (r.empty()) continue;
         BuildID id = r[0][0].as<BuildID>();
 
-        printMsg(lvlInfo, format("reusing build %d") % id);
+        printInfo("reusing build %d", id);
 
         BuildOutput res;
         res.failed = r[0][1].as<int>() == bsFailedWithOutput;

@@ -57,8 +57,8 @@ subtest "Validate a run log was created" => sub {
     ok($runlog->did_succeed(), "The process did succeed.");
     is($runlog->job_matcher, "*:*:*", "An unspecified job matcher is defaulted to *:*:*");
     is($runlog->command, 'cp "$HYDRA_JSON" "$HYDRA_DATA/joboutput.json"', "The executed command is saved.");
-    is($runlog->start_time, within(time() - 1, 2), "The start time is recent.");
-    is($runlog->end_time, within(time() - 1, 2), "The end time is also recent.");
+    is($runlog->start_time, within(time() - 1, 5), "The start time is recent.");
+    is($runlog->end_time, within(time() - 1, 5), "The end time is also recent.");
     is($runlog->exit_code, 0, "This command should have succeeded.");
 
     subtest "Validate the run log file exists" => sub {
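
This hunk and the remaining test hunks below only widen the tolerance passed to within() from ±2 to ±5 seconds, presumably so the wall-clock assertions flake less often on slow test machines. A toy C++ rendering of the check being loosened (within below is a stand-in for the Perl test helper, not part of Hydra's test suite):

    #include <cstdlib>
    #include <ctime>
    #include <iostream>

    // Stand-in for the test helper: true when |actual - expected| <= tolerance seconds.
    static bool within(std::time_t actual, std::time_t expected, long tolerance)
    {
        return std::labs(static_cast<long>(actual - expected)) <= tolerance;
    }

    int main()
    {
        std::time_t now = std::time(nullptr);
        std::time_t recorded = now - 4;                    // e.g. a slow test runner
        std::cout << std::boolalpha
                  << within(recorded, now - 1, 2) << ' '   // old tolerance: fails
                  << within(recorded, now - 1, 5) << '\n'; // new tolerance: passes
        return 0;
    }
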

@@ -43,8 +43,8 @@ subtest "Validate a run log was created" => sub {
     ok($runlog->did_fail_with_exec_error(), "The process failed to start due to an exec error.");
     is($runlog->job_matcher, "*:*:*", "An unspecified job matcher is defaulted to *:*:*");
     is($runlog->command, 'invalid-command-this-does-not-exist', "The executed command is saved.");
-    is($runlog->start_time, within(time() - 1, 2), "The start time is recent.");
-    is($runlog->end_time, within(time() - 1, 2), "The end time is also recent.");
+    is($runlog->start_time, within(time() - 1, 5), "The start time is recent.");
+    is($runlog->end_time, within(time() - 1, 5), "The end time is also recent.");
     is($runlog->exit_code, undef, "This command should not have executed.");
     is($runlog->error_number, 2, "This command failed to exec.");
 };

@@ -55,7 +55,7 @@ subtest "Starting a process" => sub {
     ok($runlog->is_running(), "The process is running.");
     ok(!$runlog->did_fail_with_signal(), "The process was not killed by a signal.");
     ok(!$runlog->did_fail_with_exec_error(), "The process did not fail to start due to an exec error.");
-    is($runlog->start_time, within(time() - 1, 2), "The start time is recent.");
+    is($runlog->start_time, within(time() - 1, 5), "The start time is recent.");
     is($runlog->end_time, undef, "The end time is undefined.");
     is($runlog->exit_code, undef, "The exit code is undefined.");
     is($runlog->signal, undef, "The signal is undefined.");
@@ -70,8 +70,8 @@ subtest "The process completed (success)" => sub {
     ok(!$runlog->is_running(), "The process is not running.");
     ok(!$runlog->did_fail_with_signal(), "The process was not killed by a signal.");
     ok(!$runlog->did_fail_with_exec_error(), "The process did not fail to start due to an exec error.");
-    is($runlog->start_time, within(time() - 1, 2), "The start time is recent.");
-    is($runlog->end_time, within(time() - 1, 2), "The end time is recent.");
+    is($runlog->start_time, within(time() - 1, 5), "The start time is recent.");
+    is($runlog->end_time, within(time() - 1, 5), "The end time is recent.");
     is($runlog->error_number, undef, "The error number is undefined");
     is($runlog->exit_code, 0, "The exit code is 0.");
     is($runlog->signal, undef, "The signal is undefined.");
@@ -86,8 +86,8 @@ subtest "The process completed (errored)" => sub {
     ok(!$runlog->is_running(), "The process is not running.");
     ok(!$runlog->did_fail_with_signal(), "The process was not killed by a signal.");
     ok(!$runlog->did_fail_with_exec_error(), "The process did not fail to start due to an exec error.");
-    is($runlog->start_time, within(time() - 1, 2), "The start time is recent.");
-    is($runlog->end_time, within(time() - 1, 2), "The end time is recent.");
+    is($runlog->start_time, within(time() - 1, 5), "The start time is recent.");
+    is($runlog->end_time, within(time() - 1, 5), "The end time is recent.");
     is($runlog->error_number, undef, "The error number is undefined");
     is($runlog->exit_code, 85, "The exit code is 85.");
     is($runlog->signal, undef, "The signal is undefined.");
@@ -102,8 +102,8 @@ subtest "The process completed (status 15, child error 0)" => sub {
     ok(!$runlog->is_running(), "The process is not running.");
     ok($runlog->did_fail_with_signal(), "The process was killed by a signal.");
     ok(!$runlog->did_fail_with_exec_error(), "The process did not fail to start due to an exec error.");
-    is($runlog->start_time, within(time() - 1, 2), "The start time is recent.");
-    is($runlog->end_time, within(time() - 1, 2), "The end time is recent.");
+    is($runlog->start_time, within(time() - 1, 5), "The start time is recent.");
+    is($runlog->end_time, within(time() - 1, 5), "The end time is recent.");
     is($runlog->error_number, undef, "The error number is undefined");
     is($runlog->exit_code, undef, "The exit code is undefined.");
     is($runlog->signal, 15, "Signal 15 was sent.");
@@ -118,8 +118,8 @@ subtest "The process completed (signaled)" => sub {
     ok(!$runlog->is_running(), "The process is not running.");
     ok($runlog->did_fail_with_signal(), "The process was killed by a signal.");
     ok(!$runlog->did_fail_with_exec_error(), "The process did not fail to start due to an exec error.");
-    is($runlog->start_time, within(time() - 1, 2), "The start time is recent.");
-    is($runlog->end_time, within(time() - 1, 2), "The end time is recent.");
+    is($runlog->start_time, within(time() - 1, 5), "The start time is recent.");
+    is($runlog->end_time, within(time() - 1, 5), "The end time is recent.");
     is($runlog->error_number, undef, "The error number is undefined");
     is($runlog->exit_code, undef, "The exit code is undefined.");
     is($runlog->signal, 9, "The signal is 9.");
@@ -134,8 +134,8 @@ subtest "The process failed to start" => sub {
     ok(!$runlog->is_running(), "The process is running.");
     ok(!$runlog->did_fail_with_signal(), "The process was not killed by a signal.");
     ok($runlog->did_fail_with_exec_error(), "The process failed to start due to an exec error.");
-    is($runlog->start_time, within(time() - 1, 2), "The start time is recent.");
-    is($runlog->end_time, within(time() - 1, 2), "The end time is recent.");
+    is($runlog->start_time, within(time() - 1, 5), "The start time is recent.");
+    is($runlog->end_time, within(time() - 1, 5), "The end time is recent.");
     is($runlog->error_number, 2, "The error number is saved");
     is($runlog->exit_code, undef, "The exit code is undefined.");
     is($runlog->signal, undef, "The signal is undefined.");

@@ -25,11 +25,11 @@ subtest "requeue" => sub {
 
     $task->requeue();
     is($task->attempts, 2, "We should have stored a second retry");
-    is($task->retry_at, within(time() + 4, 2), "Delayed two exponential backoff step");
+    is($task->retry_at, within(time() + 4, 5), "Delayed two exponential backoff step");
 
     $task->requeue();
     is($task->attempts, 3, "We should have stored a third retry");
-    is($task->retry_at, within(time() + 8, 2), "Delayed a third exponential backoff step");
+    is($task->retry_at, within(time() + 8, 5), "Delayed a third exponential backoff step");
 };
 
 done_testing;

@@ -101,7 +101,7 @@ subtest "save_task" => sub {
     is($retry->pluginname, "FooPluginName", "Plugin name should match");
     is($retry->payload, "1", "Payload should match");
     is($retry->attempts, 1, "We've had one attempt");
-    is($retry->retry_at, within(time() + 1, 2), "The retry at should be approximately one second away");
+    is($retry->retry_at, within(time() + 1, 5), "The retry at should be approximately one second away");
 };
 
 done_testing;