Merge pull request #65 from nix-community/treefmt-clang-format
Add treefmt with clang-format, nixpkgs-fmt, prettier (toml) & black (python)
commit 796f752cb8
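The formatting setup added here is driven from the dev shell. A minimal sketch of the local workflow (commands are not part of this diff; assumes a flakes-enabled Nix and the repository root as working directory):

    # enter the dev shell from shell.nix (treefmt, clang-format, nixpkgs-fmt,
    # prettier with prettier-plugin-toml, and black)
    nix develop

    # rewrite the whole tree according to treefmt.toml
    treefmt

    # what the flake check runs: fail if any file would be reformatted
    treefmt --fail-on-change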
@@ -1,24 +0,0 @@
# top-most EditorConfig file
root = true

# Unix-style newlines with a newline ending every file
[*]
charset = utf-8
end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true

[*.{cc,hh,hpp,pl,pm,sh,t}]
indent_style = space
intend_size = 4

[Makefile]
indent_style = tab

[*.nix]
indent_style = space
indent_size = 2

# Match diffs, avoid to trim trailing whitespace
[*.{diff,patch}]
trim_trailing_whitespace = false
3 .prettierrc.js Normal file
@@ -0,0 +1,3 @@
module.exports = {
"plugins": ["prettier-plugin-toml"],
};
44 flake.nix
@@ -8,6 +8,7 @@
flake-utils.lib.eachDefaultSystem (system:
let
pkgs = nixpkgs.legacyPackages.${system};
inherit (pkgs) stdenv;
drvArgs = { srcDir = self; };
in
rec {
@@ -16,9 +17,7 @@
checks =
let
mkVariant = nix: (packages.nix-eval-jobs.override {
# TODO: fix to stable after next nix release
nix = pkgs.nix;
#inherit nix;
inherit nix;
}).overrideAttrs (_: {
name = "nix-eval-jobs-${nix.version}";
inherit (nix) version;
@@ -26,32 +25,31 @@
in
{

editorconfig = pkgs.runCommand "editorconfig-check"
{
nativeBuildInputs = [
pkgs.editorconfig-checker
];
} ''
editorconfig-checker ${self}
touch $out
'';
treefmt =
let
devShell = devShells.default;
in
stdenv.mkDerivation {
name = "treefmt-check";
src = self;
nativeBuildInputs = devShell.nativeBuildInputs;
dontConfigure = true;

nixpkgs-fmt = pkgs.runCommand "fmt-check"
{
nativeBuildInputs = [
pkgs.nixpkgs-fmt
];
} ''
nixpkgs-fmt --check .
touch $out
'';
inherit (devShell) NODE_PATH;

buildPhase = ''
env HOME=$(mktemp -d) treefmt --fail-on-change
'';

installPhase = "touch $out";
};

build = mkVariant pkgs.nix;
build-unstable = mkVariant pkgs.nixUnstable;
};

defaultPackage = self.packages.${system}.nix-eval-jobs;
devShell = pkgs.callPackage ./shell.nix drvArgs;
packages.default = self.packages.${system}.nix-eval-jobs;
devShells.default = pkgs.callPackage ./shell.nix drvArgs;

}
);
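The treefmt check above reuses the dev shell's nativeBuildInputs and NODE_PATH, so CI and local formatting see the same tool versions. A sketch of how the new check can be exercised (the system name x86_64-linux is only an example):

    # run all flake checks, including the treefmt-check derivation
    nix flake check

    # or build just this one check
    nix build .#checks.x86_64-linux.treefmt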
@@ -26,16 +26,20 @@ in

nativeBuildInputs = old.nativeBuildInputs ++ [

pkgs.editorconfig-checker

pkgs.treefmt
pkgs.llvmPackages.clang # clang-format
pkgs.nixpkgs-fmt
pkgs.nodePackages.prettier

(pkgs.python3.withPackages (ps: [
ps.pytest
ps.black
]))

];

NODE_PATH = "${pkgs.nodePackages.prettier-plugin-toml}/lib/node_modules";

shellHook = lib.optionalString stdenv.isLinux ''
export NIX_DEBUG_INFO_DIRS="${pkgs.curl.debug}/lib/debug:${nix.debug}/lib/debug''${NIX_DEBUG_INFO_DIRS:+:$NIX_DEBUG_INFO_DIRS}"
'';
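prettier does not format TOML on its own; the NODE_PATH export above is what lets it resolve prettier-plugin-toml (declared in .prettierrc.js). A sketch of the expected invocation inside the dev shell, using a file from this PR as the example:

    # NODE_PATH (set by shell.nix) makes prettier-plugin-toml resolvable
    prettier --write treefmt.toml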
@@ -33,10 +33,12 @@ using namespace nlohmann;
typedef enum { evalAuto, evalImpure, evalPure } pureEval;

// Safe to ignore - the args will be static.
#ifdef __GNUC__
#pragma GCC diagnostic ignored "-Wnon-virtual-dtor"
#elif __clang__
#pragma clang diagnostic ignored "-Wnon-virtual-dtor"
struct MyArgs : MixEvalArgs, MixCommonArgs
{
#endif
struct MyArgs : MixEvalArgs, MixCommonArgs {
Path releaseExpr;
Path gcRootsDir;
bool flake = false;
@@ -46,18 +48,18 @@ struct MyArgs : MixEvalArgs, MixCommonArgs
size_t maxMemorySize = 4096;
pureEval evalMode = evalAuto;

MyArgs() : MixCommonArgs("nix-eval-jobs")
{
MyArgs() : MixCommonArgs("nix-eval-jobs") {
addFlag({
.longName = "help",
.description = "show usage information",
.handler = {[&]() {
printf("USAGE: nix-eval-jobs [options] expr\n\n");
for (const auto & [name, flag] : longFlags) {
for (const auto &[name, flag] : longFlags) {
if (hiddenCategories.count(flag->category)) {
continue;
}
printf("    --%-20s %s\n", name.c_str(), flag->description.c_str());
printf("    --%-20s %s\n", name.c_str(),
flag->description.c_str());
}
::exit(0);
}},
@@ -66,63 +68,51 @@ struct MyArgs : MixEvalArgs, MixCommonArgs
addFlag({
.longName = "impure",
.description = "set evaluation mode",
.handler = {[&]() {
evalMode = evalImpure;
}},
.handler = {[&]() { evalMode = evalImpure; }},
});

addFlag({
.longName = "gc-roots-dir",
.description = "garbage collector roots directory",
.labels = {"path"},
.handler = {&gcRootsDir}
});
addFlag({.longName = "gc-roots-dir",
.description = "garbage collector roots directory",
.labels = {"path"},
.handler = {&gcRootsDir}});

addFlag({
.longName = "workers",
.description = "number of evaluate workers",
.labels = {"workers"},
.handler = {[=](std::string s) {
nrWorkers = std::stoi(s);
}}
});
addFlag(
{.longName = "workers",
.description = "number of evaluate workers",
.labels = {"workers"},
.handler = {[=](std::string s) { nrWorkers = std::stoi(s); }}});

addFlag({
.longName = "max-memory-size",
.description = "maximum evaluation memory size",
.labels = {"size"},
.handler = {[=](std::string s) {
maxMemorySize = std::stoi(s);
}}
});
addFlag({.longName = "max-memory-size",
.description = "maximum evaluation memory size",
.labels = {"size"},
.handler = {
[=](std::string s) { maxMemorySize = std::stoi(s); }}});

addFlag({
.longName = "flake",
.description = "build a flake",
.handler = {&flake, true}
});
addFlag({.longName = "flake",
.description = "build a flake",
.handler = {&flake, true}});

addFlag({
.longName = "meta",
.description = "include derivation meta field in output",
.handler = {&meta, true}
});
addFlag({.longName = "meta",
.description = "include derivation meta field in output",
.handler = {&meta, true}});

addFlag({
.longName = "show-trace",
.description = "print out a stack trace in case of evaluation errors",
.handler = {&showTrace, true}
});
addFlag({.longName = "show-trace",
.description =
"print out a stack trace in case of evaluation errors",
.handler = {&showTrace, true}});

expectArg("expr", &releaseExpr);
}
};
#pragma GCC diagnostic warning "-Wnon-virtual-dtor"
#pragma clang diagnostic warning "-Wnon-virtual-dtor"
#ifdef __GNUC__
#pragma GCC diagnostic ignored "-Wnon-virtual-dtor"
#elif __clang__
#pragma clang diagnostic ignored "-Wnon-virtual-dtor"
#endif

static MyArgs myArgs;

static Value* releaseExprTopLevelValue(EvalState & state, Bindings & autoArgs) {
static Value *releaseExprTopLevelValue(EvalState &state, Bindings &autoArgs) {
Value vTop;

state.evalFile(lookupFileArg(state, myArgs.releaseExpr), vTop);
@@ -134,19 +124,20 @@ static Value* releaseExprTopLevelValue(EvalState & state, Bindings & autoArgs) {
return vRoot;
}

static Value* flakeTopLevelValue(EvalState & state, Bindings & autoArgs) {
static Value *flakeTopLevelValue(EvalState &state, Bindings &autoArgs) {
using namespace flake;

auto [flakeRef, fragment] = parseFlakeRefWithFragment(myArgs.releaseExpr, absPath("."));
auto [flakeRef, fragment] =
parseFlakeRefWithFragment(myArgs.releaseExpr, absPath("."));

auto vFlake = state.allocValue();

auto lockedFlake = lockFlake(state, flakeRef,
LockFlags {
.updateLockFile = false,
.useRegistries = false,
.allowMutable = false,
});
LockFlags{
.updateLockFile = false,
.useRegistries = false,
.allowMutable = false,
});

callFlake(state, lockedFlake, *vFlake);

@@ -155,7 +146,7 @@ static Value* flakeTopLevelValue(EvalState & state, Bindings & autoArgs) {
auto vTop = *vOutputs;

if (fragment.length() > 0) {
Bindings & bindings(*state.allocBindings(0));
Bindings &bindings(*state.allocBindings(0));
auto [nTop, pos] = findAlongAttrPath(state, fragment, bindings, vTop);
if (!nTop)
throw Error("error: attribute '%s' missing", nTop);
@@ -168,10 +159,9 @@ static Value* flakeTopLevelValue(EvalState & state, Bindings & autoArgs) {
return vRoot;
}

Value * topLevelValue(EvalState & state, Bindings & autoArgs) {
return myArgs.flake
? flakeTopLevelValue(state, autoArgs)
: releaseExprTopLevelValue(state, autoArgs);
Value *topLevelValue(EvalState &state, Bindings &autoArgs) {
return myArgs.flake ? flakeTopLevelValue(state, autoArgs)
: releaseExprTopLevelValue(state, autoArgs);
}

/* The fields of a derivation that are printed in json form */
@@ -182,7 +172,7 @@ struct Drv {
std::map<std::string, std::string> outputs;
std::optional<nlohmann::json> meta;

Drv (EvalState & state, DrvInfo & drvInfo) {
Drv(EvalState &state, DrvInfo &drvInfo) {
if (drvInfo.querySystem() == "unknown")
throw EvalError("derivation must have a 'system' attribute");

@@ -191,12 +181,11 @@ struct Drv {
for (auto out : drvInfo.queryOutputs(true)) {
if (out.second)
outputs[out.first] = localStore->printStorePath(*out.second);

}

if (myArgs.meta) {
nlohmann::json meta_;
for (auto & name : drvInfo.queryMetaNames()) {
for (auto &name : drvInfo.queryMetaNames()) {
PathSet context;
std::stringstream ss;

@@ -220,17 +209,16 @@ struct Drv {
}
};

static void to_json(nlohmann::json & json, const Drv & drv) {
static void to_json(nlohmann::json &json, const Drv &drv) {
json = nlohmann::json{
{ "name", drv.name },
{ "system", drv.system },
{ "drvPath", drv.drvPath },
{ "outputs", drv.outputs },
{"name", drv.name},
{"system", drv.system},
{"drvPath", drv.drvPath},
{"outputs", drv.outputs},
};

if (drv.meta.has_value())
json["meta"] = drv.meta.value();

}

std::string attrPathJoin(json input) {
@@ -244,12 +232,8 @@ std::string attrPathJoin(json input) {
});
}

static void worker(
EvalState & state,
Bindings & autoArgs,
AutoCloseFD & to,
AutoCloseFD & from)
{
static void worker(EvalState &state, Bindings &autoArgs, AutoCloseFD &to,
AutoCloseFD &from) {
auto vRoot = topLevelValue(state, autoArgs);

while (true) {
@@ -257,17 +241,20 @@ static void worker(
writeLine(to.get(), "next");

auto s = readLine(from.get());
if (s == "exit") break;
if (!hasPrefix(s, "do ")) abort();
if (s == "exit")
break;
if (!hasPrefix(s, "do "))
abort();
auto path = json::parse(s.substr(3));
auto attrPathS = attrPathJoin(path);

debug("worker process %d at '%s'", getpid(), path);

/* Evaluate it and send info back to the collector. */
json reply = json{ {"attr", attrPathS }, {"attrPath", path} };
json reply = json{{"attr", attrPathS}, {"attrPath", path}};
try {
auto vTmp = findAlongAttrPath(state, attrPathS, autoArgs, *vRoot).first;
auto vTmp =
findAlongAttrPath(state, attrPathS, autoArgs, *vRoot).first;

auto v = state.allocValue();
state.autoCallFunction(autoArgs, *vTmp, *v);
@@ -281,42 +268,49 @@ static void worker(
registers roots for jobs that we may have already
done. */
if (myArgs.gcRootsDir != "") {
Path root = myArgs.gcRootsDir + "/" + std::string(baseNameOf(drv.drvPath));
Path root = myArgs.gcRootsDir + "/" +
std::string(baseNameOf(drv.drvPath));
if (!pathExists(root)) {
auto localStore = state.store.dynamic_pointer_cast<LocalFSStore>();
auto storePath = localStore->parseStorePath(drv.drvPath);
auto localStore =
state.store.dynamic_pointer_cast<LocalFSStore>();
auto storePath =
localStore->parseStorePath(drv.drvPath);
localStore->addPermRoot(storePath, root);
}
}

}

else if (v->type() == nAttrs)
{
else if (v->type() == nAttrs) {
auto attrs = nlohmann::json::array();
bool recurse = path.size() == 0; // Dont require `recurseForDerivations = true;` for top-level attrset
bool recurse =
path.size() == 0; // Dont require `recurseForDerivations =
// true;` for top-level attrset

for (auto & i : v->attrs->lexicographicOrder()) {
for (auto &i : v->attrs->lexicographicOrder()) {
std::string name(i->name);
attrs.push_back(name);

if (name == "recurseForDerivations") {
auto attrv = v->attrs->get(state.sRecurseForDerivations);
recurse = state.forceBool(*attrv->value, *attrv->pos);
auto attrv =
v->attrs->get(state.sRecurseForDerivations);
recurse = state.forceBool(*attrv->value, *attrv->pos);
}
}
if (recurse)
reply["attrs"] = std::move(attrs);
reply["attrs"] = std::move(attrs);
else
reply["attrs"] = nlohmann::json::array();
reply["attrs"] = nlohmann::json::array();
}

else if (v->type() == nNull)
;

else throw TypeError("attribute '%s' is %s, which is not supported", path, showType(*v));
else
throw TypeError("attribute '%s' is %s, which is not supported",
path, showType(*v));

} catch (EvalError & e) {
} catch (EvalError &e) {
auto err = e.info();

std::ostringstream oss;
@@ -337,13 +331,15 @@ static void worker(
start a new process. */
struct rusage r;
getrusage(RUSAGE_SELF, &r);
if ((size_t) r.ru_maxrss > myArgs.maxMemorySize * 1024) break;
if ((size_t)r.ru_maxrss > myArgs.maxMemorySize * 1024)
break;
}

writeLine(to.get(), "restart");
}

typedef std::function<void(EvalState & state, Bindings & autoArgs, AutoCloseFD & to, AutoCloseFD & from)>
typedef std::function<void(EvalState &state, Bindings &autoArgs,
AutoCloseFD &to, AutoCloseFD &from)>
Processor;

/* Auto-cleanup of fork's process and fds. */
@@ -351,22 +347,21 @@ struct Proc {
AutoCloseFD to, from;
Pid pid;

Proc(const Processor & proc) {
Proc(const Processor &proc) {
Pipe toPipe, fromPipe;
toPipe.create();
fromPipe.create();
auto p = startProcess(
[&,
to{std::make_shared<AutoCloseFD>(std::move(fromPipe.writeSide))},
from{std::make_shared<AutoCloseFD>(std::move(toPipe.readSide))}
]()
{
from{
std::make_shared<AutoCloseFD>(std::move(toPipe.readSide))}]() {
debug("created worker process %d", getpid());
try {
EvalState state(myArgs.searchPath, openStore());
Bindings & autoArgs = *myArgs.getAutoArgs(state);
Bindings &autoArgs = *myArgs.getAutoArgs(state);
proc(state, autoArgs, *to, *from);
} catch (Error & e) {
} catch (Error &e) {
nlohmann::json err;
auto msg = e.msg();
err["error"] = filterANSIEscapes(msg, true);
@@ -377,33 +372,32 @@ struct Proc {
writeLine(to->get(), "restart");
}
},
ProcessOptions { .allowVfork = false });
ProcessOptions{.allowVfork = false});

to = std::move(toPipe.writeSide);
from = std::move(fromPipe.readSide);
pid = p;
}

~Proc() { }
~Proc() {}
};

struct State
{
std::set<json> todo = json::array({ json::array() });
std::set<json> active;
std::exception_ptr exc;
struct State {
std::set<json> todo = json::array({json::array()});
std::set<json> active;
std::exception_ptr exc;
};

std::function<void()> collector(Sync<State> & state_, std::condition_variable & wakeup) {
std::function<void()> collector(Sync<State> &state_,
std::condition_variable &wakeup) {
return [&]() {
try {
std::optional<std::unique_ptr<Proc>> proc_;

while (true) {

auto proc = proc_.has_value()
? std::move(proc_.value())
: std::make_unique<Proc>(worker);
auto proc = proc_.has_value() ? std::move(proc_.value())
: std::make_unique<Proc>(worker);

/* Check whether the existing worker process is still there. */
auto s = readLine(proc->from.get());
@@ -412,7 +406,7 @@ std::function<void()> collector(Sync<State> & state_, std::condition_variable &
continue;
} else if (s != "next") {
auto json = json::parse(s);
throw Error("worker error: %s", (std::string) json["error"]);
throw Error("worker error: %s", (std::string)json["error"]);
}

/* Wait for a job name to become available. */
@@ -421,7 +415,8 @@ std::function<void()> collector(Sync<State> & state_, std::condition_variable &
while (true) {
checkInterrupt();
auto state(state_.lock());
if ((state->todo.empty() && state->active.empty()) || state->exc) {
if ((state->todo.empty() && state->active.empty()) ||
state->exc) {
writeLine(proc->to.get(), "exit");
return;
}
@@ -444,10 +439,10 @@ std::function<void()> collector(Sync<State> & state_, std::condition_variable &
/* Handle the response. */
std::vector<json> newAttrs;
if (response.find("attrs") != response.end()) {
for (auto & i : response["attrs"]) {
json newAttr = json(response["attrPath"]);
newAttr.emplace_back(i);
newAttrs.push_back(newAttr);
for (auto &i : response["attrs"]) {
json newAttr = json(response["attrPath"]);
newAttr.emplace_back(i);
newAttrs.push_back(newAttr);
}
} else {
auto state(state_.lock());
@@ -474,8 +469,7 @@ std::function<void()> collector(Sync<State> & state_, std::condition_variable &
};
}

int main(int argc, char * * argv)
{
int main(int argc, char **argv) {
/* Prevent undeclared dependencies in the evaluation via
$NIX_PATH. */
unsetenv("NIX_PATH");
@@ -489,7 +483,8 @@ int main(int argc, char * * argv)

myArgs.parseCmdline(argvToStrings(argc, argv));

/* FIXME: The build hook in conjunction with import-from-derivation is causing "unexpected EOF" during eval */
/* FIXME: The build hook in conjunction with import-from-derivation is
* causing "unexpected EOF" during eval */
settings.builders = "";

/* Prevent access to paths outside of the Nix search path and
@@ -498,11 +493,15 @@ int main(int argc, char * * argv)

/* When building a flake, use pure evaluation (no access to
'getEnv', 'currentSystem' etc. */
evalSettings.pureEval = myArgs.evalMode == evalAuto ? myArgs.flake : myArgs.evalMode == evalPure;
evalSettings.pureEval = myArgs.evalMode == evalAuto
? myArgs.flake
: myArgs.evalMode == evalPure;

if (myArgs.releaseExpr == "") throw UsageError("no expression specified");
if (myArgs.releaseExpr == "")
throw UsageError("no expression specified");

if (myArgs.gcRootsDir == "") printMsg(lvlError, "warning: `--gc-roots-dir' not specified");
if (myArgs.gcRootsDir == "")
printMsg(lvlError, "warning: `--gc-roots-dir' not specified");

if (myArgs.showTrace) {
loggerSettings.showTrace.assign(true);
@@ -516,13 +515,12 @@ int main(int argc, char * * argv)
for (size_t i = 0; i < myArgs.nrWorkers; i++)
threads.emplace_back(std::thread(collector(state_, wakeup)));

for (auto & thread : threads)
for (auto &thread : threads)
thread.join();

auto state(state_.lock());

if (state->exc)
std::rethrow_exception(state->exc);

});
}
@@ -1,6 +1,5 @@
{
pkgs ? import (builtins.getFlake (toString ./.)).inputs.nixpkgs { }
, system ? pkgs.system
{ pkgs ? import (builtins.getFlake (toString ./.)).inputs.nixpkgs { }
, system ? pkgs.system
}:

{
@@ -30,11 +30,11 @@ def common_test(extra_args: List[str]) -> None:
assert built_job["name"] == "job1"
assert built_job["outputs"]["out"].startswith("/nix/store")
assert built_job["drvPath"].endswith(".drv")
assert built_job["meta"]['broken'] is False
assert built_job["meta"]["broken"] is False

dotted_job = results[1]
assert dotted_job["attr"] == "\"dotted.attr\""
assert dotted_job["attrPath"] == [ "dotted.attr" ]
assert dotted_job["attr"] == '"dotted.attr"'
assert dotted_job["attrPath"] == ["dotted.attr"]

recurse_drv = results[2]
assert recurse_drv["attr"] == "recurse.drvB"
@@ -46,7 +46,7 @@ def common_test(extra_args: List[str]) -> None:
substituted_job = results[4]
assert substituted_job["attr"] == "substitutedJob"
assert substituted_job["name"].startswith("hello-")
assert substituted_job["meta"]['broken'] is False
assert substituted_job["meta"]["broken"] is False


def test_flake() -> None:
21 treefmt.toml Normal file
@@ -0,0 +1,21 @@
[formatter."c++"]
command = "clang-format"
options = [
"-i",
"-style",
"{BasedOnStyle: llvm, IndentWidth: 4, SortIncludes: false}"
]
includes = ["*.c", "*.cpp", "*.cc", "*.h", "*.hpp"]

[formatter.nix]
command = "nixpkgs-fmt"
includes = ["*.nix"]

[formatter.toml]
command = "prettier"
options = ["--write"]
includes = ["*.toml"]

[formatter.python]
command = "black"
includes = ["*.py"]
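For reference, the C++ entry corresponds roughly to the invocation below; treefmt supplies the list of matched files itself, so the path shown is only illustrative:

    clang-format -i \
        -style "{BasedOnStyle: llvm, IndentWidth: 4, SortIncludes: false}" \
        src/nix-eval-jobs.cc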