Merge remote-tracking branch 'upstream/master' into derivation-header-include-order

John Ericson 2020-07-05 21:49:01 +00:00
commit a38ab99d57
80 changed files with 1124 additions and 546 deletions

.github/dependabot.yml vendored Normal file

@@ -0,0 +1,6 @@
version: 2
updates:
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "weekly"


@@ -10,5 +10,5 @@ jobs:
    runs-on: ${{ matrix.os }}
    steps:
    - uses: actions/checkout@v2
-   - uses: cachix/install-nix-action@v8
+   - uses: cachix/install-nix-action@v10
    - run: nix-build release.nix --arg nix '{ outPath = ./.; revCount = 123; shortRev = "abcdefgh"; }' --arg systems '[ builtins.currentSystem ]' -A installerScript -A perlBindings


@@ -97,7 +97,7 @@ $ rm -rf /nix
installation on your system:
</para>
-<screen>sh &lt;(curl https://nixos.org/nix/install) --daemon</screen>
+<screen>sh &lt;(curl -L https://nixos.org/nix/install) --daemon</screen>
<para>
The multi-user installation of Nix will create build users between

@@ -178,7 +178,7 @@ sudo rm /Library/LaunchDaemons/org.nixos.nix-daemon.plist
is a bit of a misnomer). To use this approach, just install Nix with:
</para>
-<screen>$ sh &lt;(curl https://nixos.org/nix/install) --darwin-use-unencrypted-nix-store-volume</screen>
+<screen>$ sh &lt;(curl -L https://nixos.org/nix/install) --darwin-use-unencrypted-nix-store-volume</screen>
<para>
If you don't like the sound of this, you'll want to weigh the

@@ -429,7 +429,7 @@ LABEL=Nix\040Store /nix apfs rw,nobrowse
NixOS.org installation script:
<screen>
-sh &lt;(curl https://nixos.org/nix/install)
+sh &lt;(curl -L https://nixos.org/nix/install)
</screen>
</para>

mk/run_test.sh Executable file

@@ -0,0 +1,28 @@
#!/bin/sh
set -u
red=""
green=""
yellow=""
normal=""
post_run_msg="ran test $1..."
if [ -t 1 ]; then
red=""
green=""
yellow=""
normal=""
fi
(cd $(dirname $1) && env ${TESTS_ENVIRONMENT} init.sh 2>/dev/null > /dev/null)
log="$(cd $(dirname $1) && env ${TESTS_ENVIRONMENT} $(basename $1) 2>&1)"
status=$?
if [ $status -eq 0 ]; then
echo "$post_run_msg [${green}PASS$normal]"
elif [ $status -eq 99 ]; then
echo "$post_run_msg [${yellow}SKIP$normal]"
else
echo "$post_run_msg [${red}FAIL$normal]"
echo "$log" | sed 's/^/ /'
exit "$status"
fi


@@ -1,45 +1,15 @@
# Run program $1 as part of make installcheck.
+test-deps =
define run-install-test
-installcheck: $1
+installcheck: $1.test
-_installcheck-list += $1
+.PHONY: $1.test
+$1.test: $1 $(test-deps)
+@env TEST_NAME=$(notdir $(basename $1)) TESTS_ENVIRONMENT="$(tests-environment)" mk/run_test.sh $1
endef
-# Color code from https://unix.stackexchange.com/a/10065
-installcheck:
-@total=0; failed=0; \
-red=""; \
-green=""; \
-yellow=""; \
-normal=""; \
-if [ -t 1 ]; then \
-red=""; \
-green=""; \
-yellow=""; \
-normal=""; \
-fi; \
-for i in $(_installcheck-list); do \
-total=$$((total + 1)); \
-printf "running test $$i..."; \
-log="$$(cd $$(dirname $$i) && $(tests-environment) $$(basename $$i) 2>&1)"; \
-status=$$?; \
-if [ $$status -eq 0 ]; then \
-echo " [$${green}PASS$$normal]"; \
-elif [ $$status -eq 99 ]; then \
-echo " [$${yellow}SKIP$$normal]"; \
-else \
-echo " [$${red}FAIL$$normal]"; \
-echo "$$log" | sed 's/^/ /'; \
-failed=$$((failed + 1)); \
-fi; \
-done; \
-if [ "$$failed" != 0 ]; then \
-echo "$${red}$$failed out of $$total tests failed $$normal"; \
-exit 1; \
-else \
-echo "$${green}All tests succeeded$$normal"; \
-fi
.PHONY: check installcheck


@@ -526,7 +526,7 @@ This script is going to call sudo a lot. Normally, it would show you
exactly what commands it is running and why. However, the script is
run in a headless fashion, like this:
-$ curl https://nixos.org/nix/install | sh
+$ curl -L https://nixos.org/nix/install | sh
or maybe in a CI pipeline. Because of that, we're going to skip the
verbose output in the interest of brevity.

@@ -534,7 +534,7 @@ verbose output in the interest of brevity.
If you would like to
see the output, try like this:
-$ curl -o install-nix https://nixos.org/nix/install
+$ curl -L -o install-nix https://nixos.org/nix/install
$ sh ./install-nix
EOF


@@ -113,7 +113,7 @@ if [ "$(uname -s)" = "Darwin" ]; then
(
echo ""
echo "Installing on macOS >=10.15 requires relocating the store to an apfs volume."
-echo "Use sh <(curl https://nixos.org/nix/install) --darwin-use-unencrypted-nix-store-volume or run the preparation steps manually."
+echo "Use sh <(curl -L https://nixos.org/nix/install) --darwin-use-unencrypted-nix-store-volume or run the preparation steps manually."
echo "See https://nixos.org/nix/manual/#sect-macos-installation"
echo ""
) >&2


@@ -130,7 +130,7 @@ Pos findDerivationFilename(EvalState & state, Value & v, std::string what)
Symbol file = state.symbols.create(filename);
-return { file, lineno, 0 };
+return { foFile, file, lineno, 0 };
}


@@ -78,7 +78,7 @@ public:
if (!a)
throw Error({
.hint = hintfmt("attribute '%s' missing", name),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
});
return *a;


@@ -11,7 +11,7 @@ LocalNoInlineNoReturn(void throwEvalError(const Pos & pos, const char * s))
{
throw EvalError({
.hint = hintfmt(s),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
});
}

@@ -25,7 +25,7 @@ LocalNoInlineNoReturn(void throwTypeError(const Pos & pos, const char * s, const
{
throw TypeError({
.hint = hintfmt(s, showType(v)),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
});
}
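The hunks above, and most of the error-handling hunks that follow, make the same mechanical change: the error position moves from a nested NixCode wrapper into a flat errPos field of the ErrorInfo initializer. A minimal sketch of how such an error might be built and printed with the showErrorInfo helper that appears later in this diff; the surrounding function, the pos value and the headers are illustrative assumptions, not code from this commit:

    // Sketch only: ErrorInfo-style construction after this merge (hypothetical caller).
    #include <sstream>
    #include <iostream>

    void reportMissingAttr(const Pos & pos, const std::string & name)
    {
        EvalError err({
            .hint = hintfmt("attribute '%s' missing", name),
            .errPos = pos   // flat field; previously .nixCode = NixCode { .errPos = pos }
        });
        std::ostringstream oss;
        showErrorInfo(oss, err.info(), false /* showTrace */);
        std::cerr << oss.str();
    }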


@@ -529,7 +529,7 @@ LocalNoInlineNoReturn(void throwEvalError(const Pos & pos, const char * s, const
{
throw EvalError({
.hint = hintfmt(s, s2),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
});
}

@@ -542,7 +542,7 @@ LocalNoInlineNoReturn(void throwEvalError(const Pos & pos, const char * s, const
{
throw EvalError({
.hint = hintfmt(s, s2, s3),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
});
}

@@ -551,7 +551,7 @@ LocalNoInlineNoReturn(void throwEvalError(const Pos & p1, const char * s, const
// p1 is where the error occurred; p2 is a position mentioned in the message.
throw EvalError({
.hint = hintfmt(s, sym, p2),
-.nixCode = NixCode { .errPos = p1 }
+.errPos = p1
});
}

@@ -559,7 +559,7 @@ LocalNoInlineNoReturn(void throwTypeError(const Pos & pos, const char * s))
{
throw TypeError({
.hint = hintfmt(s),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
});
}

@@ -572,7 +572,7 @@ LocalNoInlineNoReturn(void throwTypeError(const Pos & pos, const char * s, const
{
throw TypeError({
.hint = hintfmt(s, fun.showNamePos(), s2),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
});
}

@@ -580,7 +580,7 @@ LocalNoInlineNoReturn(void throwAssertionError(const Pos & pos, const char * s,
{
throw AssertionError({
.hint = hintfmt(s, s1),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
});
}

@@ -588,23 +588,18 @@ LocalNoInlineNoReturn(void throwUndefinedVarError(const Pos & pos, const char *
{
throw UndefinedVarError({
.hint = hintfmt(s, s1),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
});
}

-LocalNoInline(void addErrorPrefix(Error & e, const char * s, const string & s2))
+LocalNoInline(void addErrorTrace(Error & e, const char * s, const string & s2))
{
-e.addPrefix(format(s) % s2);
+e.addTrace(std::nullopt, s, s2);
}

-LocalNoInline(void addErrorPrefix(Error & e, const char * s, const ExprLambda & fun, const Pos & pos))
+LocalNoInline(void addErrorTrace(Error & e, const Pos & pos, const char * s, const string & s2))
{
-e.addPrefix(format(s) % fun.showNamePos() % pos);
+e.addTrace(pos, s, s2);
}
-
-LocalNoInline(void addErrorPrefix(Error & e, const char * s, const string & s2, const Pos & pos))
-{
-e.addPrefix(format(s) % s2 % pos);
-}

@@ -818,7 +813,7 @@ void EvalState::evalFile(const Path & path_, Value & v)
try {
eval(e, v);
} catch (Error & e) {
-addErrorPrefix(e, "while evaluating the file '%1%':\n", path2);
+addErrorTrace(e, "while evaluating the file '%1%':", path2);
throw;
}

@@ -1068,8 +1063,8 @@ void ExprSelect::eval(EvalState & state, Env & env, Value & v)
} catch (Error & e) {
if (pos2 && pos2->file != state.sDerivationNix)
-addErrorPrefix(e, "while evaluating the attribute '%1%' at %2%:\n",
-showAttrPath(state, env, attrPath), *pos2);
+addErrorTrace(e, *pos2, "while evaluating the attribute '%1%'",
+showAttrPath(state, env, attrPath));
throw;
}

@@ -1237,11 +1232,15 @@ void EvalState::callFunction(Value & fun, Value & arg, Value & v, const Pos & po
/* Evaluate the body. This is conditional on showTrace, because
catching exceptions makes this function not tail-recursive. */
-if (settings.showTrace)
+if (loggerSettings.showTrace.get())
try {
lambda.body->eval(*this, env2, v);
} catch (Error & e) {
-addErrorPrefix(e, "while evaluating %1%, called from %2%:\n", lambda, pos);
+addErrorTrace(e, lambda.pos, "while evaluating %s",
+(lambda.name.set()
+? "'" + (string) lambda.name + "'"
+: "anonymous lambdaction"));
+addErrorTrace(e, pos, "from call site%s", "");
throw;
}
else

@@ -1516,7 +1515,7 @@ void EvalState::forceValueDeep(Value & v)
try {
recurse(*i.value);
} catch (Error & e) {
-addErrorPrefix(e, "while evaluating the attribute '%1%' at %2%:\n", i.name, *i.pos);
+addErrorTrace(e, *i.pos, "while evaluating the attribute '%1%'", i.name);
throw;
}
}

@@ -1936,7 +1935,7 @@ string ExternalValueBase::coerceToString(const Pos & pos, PathSet & context, boo
{
throw TypeError({
.hint = hintfmt("cannot coerce %1% to a string", showType()),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
});
}
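Where the old code used addErrorPrefix to prepend text to the message, the new code records structured trace frames with e.addTrace(pos, fmt, args...), or std::nullopt when there is no useful position. A hedged sketch of the resulting call-site pattern; expr, value, attrName and the surrounding function are illustrative names, not part of the commit:

    // Hypothetical call site using the tracing style introduced above.
    void evalAttr(EvalState & state, Expr * expr, Value & value,
                  const Pos & pos, const std::string & attrName)
    {
        try {
            state.eval(expr, value);
        } catch (Error & e) {
            // Attach a trace frame instead of prepending to the message text;
            // the frames are only rendered when --show-trace is enabled.
            e.addTrace(pos, "while evaluating the attribute '%1%'", attrName);
            throw;
        }
    }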


@@ -250,7 +250,7 @@ private:
friend struct ExprAttrs;
friend struct ExprLet;
-Expr * parse(const char * text, const Path & path,
+Expr * parse(const char * text, FileOrigin origin, const Path & path,
const Path & basePath, StaticEnv & staticEnv);
public:


@@ -197,7 +197,22 @@ std::ostream & operator << (std::ostream & str, const Pos & pos)
if (!pos)
str << "undefined position";
else
-str << (format(ANSI_BOLD "%1%" ANSI_NORMAL ":%2%:%3%") % (string) pos.file % pos.line % pos.column).str();
+{
+auto f = format(ANSI_BOLD "%1%" ANSI_NORMAL ":%2%:%3%");
+switch (pos.origin) {
+case foFile:
+f % (string) pos.file;
+break;
+case foStdin:
+case foString:
+f % "(string)";
+break;
+default:
+throw Error("unhandled Pos origin!");
+}
+str << (f % pos.line % pos.column).str();
+}
return str;
}

@@ -270,7 +285,7 @@ void ExprVar::bindVars(const StaticEnv & env)
if (withLevel == -1)
throw UndefinedVarError({
.hint = hintfmt("undefined variable '%1%'", name),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
});
fromWith = true;
this->level = withLevel;


@@ -24,11 +24,12 @@ MakeError(RestrictedPathError, Error);
struct Pos
{
+FileOrigin origin;
Symbol file;
unsigned int line, column;
-Pos() : line(0), column(0) { };
-Pos(const Symbol & file, unsigned int line, unsigned int column)
-: file(file), line(line), column(column) { };
+Pos() : origin(foString), line(0), column(0) { };
+Pos(FileOrigin origin, const Symbol & file, unsigned int line, unsigned int column)
+: origin(origin), file(file), line(line), column(column) { };
operator bool() const
{
return line != 0;

@@ -238,7 +239,7 @@ struct ExprLambda : Expr
if (!arg.empty() && formals && formals->argNames.find(arg) != formals->argNames.end())
throw ParseError({
.hint = hintfmt("duplicate formal function argument '%1%'", arg),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
});
};
void setName(Symbol & name);
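With the extra FileOrigin member, a Pos now records whether it came from a file, from standard input, or from an in-memory string, and the default constructor falls back to foString. A small hypothetical usage sketch; the SymbolTable instance and the path are made up, and operator<< is the one shown in the hunk above:

    // Illustrative only: constructing positions with the new FileOrigin field.
    SymbolTable symbols;                                      // assumed default-constructible here
    Pos inFile(foFile, symbols.create("config.nix"), 12, 3);
    Pos inString(foString, symbols.create("(string)"), 1, 1);

    std::cout << inFile   << "\n";  // prints "config.nix:12:3"
    std::cout << inString << "\n";  // prints "(string):1:1" regardless of the stored symbol
    std::cout << Pos()    << "\n";  // line == 0, so this prints "undefined position"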


@@ -30,7 +30,8 @@ namespace nix {
SymbolTable & symbols;
Expr * result;
Path basePath;
-Symbol path;
+Symbol file;
+FileOrigin origin;
ErrorInfo error;
Symbol sLetBody;
ParseData(EvalState & state)

@@ -67,16 +68,15 @@ static void dupAttr(const AttrPath & attrPath, const Pos & pos, const Pos & prev
throw ParseError({
.hint = hintfmt("attribute '%1%' already defined at %2%",
showAttrPath(attrPath), prevPos),
-.nixCode = NixCode { .errPos = pos },
+.errPos = pos
});
}

static void dupAttr(Symbol attr, const Pos & pos, const Pos & prevPos)
{
throw ParseError({
.hint = hintfmt("attribute '%1%' already defined at %2%", attr, prevPos),
-.nixCode = NixCode { .errPos = pos },
+.errPos = pos
});
}

@@ -148,7 +148,7 @@ static void addFormal(const Pos & pos, Formals * formals, const Formal & formal)
throw ParseError({
.hint = hintfmt("duplicate formal function argument '%1%'",
formal.name),
-.nixCode = NixCode { .errPos = pos },
+.errPos = pos
});
formals->formals.push_front(formal);
}

@@ -246,7 +246,7 @@ static Expr * stripIndentation(const Pos & pos, SymbolTable & symbols, vector<Ex
static inline Pos makeCurPos(const YYLTYPE & loc, ParseData * data)
{
-return Pos(data->path, loc.first_line, loc.first_column);
+return Pos(data->origin, data->file, loc.first_line, loc.first_column);
}
#define CUR_POS makeCurPos(*yylocp, data)

@@ -259,7 +259,7 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParseData * data, const char * err
{
data->error = {
.hint = hintfmt(error),
-.nixCode = NixCode { .errPos = makeCurPos(*loc, data) }
+.errPos = makeCurPos(*loc, data)
};
}

@@ -339,7 +339,7 @@ expr_function
{ if (!$2->dynamicAttrs.empty())
throw ParseError({
.hint = hintfmt("dynamic attributes not allowed in let"),
-.nixCode = NixCode { .errPos = CUR_POS },
+.errPos = CUR_POS
});
$$ = new ExprLet($2, $4);
}

@@ -419,7 +419,7 @@ expr_simple
if (noURLLiterals)
throw ParseError({
.hint = hintfmt("URL literals are disabled"),
-.nixCode = NixCode { .errPos = CUR_POS }
+.errPos = CUR_POS
});
$$ = new ExprString(data->symbols.create($1));
}

@@ -492,7 +492,7 @@ attrs
} else
throw ParseError({
.hint = hintfmt("dynamic attributes not allowed in inherit"),
-.nixCode = NixCode { .errPos = makeCurPos(@2, data) },
+.errPos = makeCurPos(@2, data)
});
}
| { $$ = new AttrPath; }

@@ -569,13 +569,24 @@ formal
namespace nix {
-Expr * EvalState::parse(const char * text,
+Expr * EvalState::parse(const char * text, FileOrigin origin,
const Path & path, const Path & basePath, StaticEnv & staticEnv)
{
yyscan_t scanner;
ParseData data(*this);
+data.origin = origin;
+switch (origin) {
+case foFile:
+data.file = data.symbols.create(path);
+break;
+case foStdin:
+case foString:
+data.file = data.symbols.create(text);
+break;
+default:
+assert(false);
+}
data.basePath = basePath;
-data.path = data.symbols.create(path);
yylex_init(&scanner);
yy_scan_string(text, scanner);

@@ -625,13 +636,13 @@ Expr * EvalState::parseExprFromFile(const Path & path)
Expr * EvalState::parseExprFromFile(const Path & path, StaticEnv & staticEnv)
{
-return parse(readFile(path).c_str(), path, dirOf(path), staticEnv);
+return parse(readFile(path).c_str(), foFile, path, dirOf(path), staticEnv);
}

Expr * EvalState::parseExprFromString(std::string_view s, const Path & basePath, StaticEnv & staticEnv)
{
-return parse(s.data(), "(string)", basePath, staticEnv);
+return parse(s.data(), foString, "", basePath, staticEnv);
}

@@ -644,7 +655,7 @@ Expr * EvalState::parseExprFromString(std::string_view s, const Path & basePath)
Expr * EvalState::parseStdin()
{
//Activity act(*logger, lvlTalkative, format("parsing standard input"));
-return parseExprFromString(drainFD(0), absPath("."));
+return parse(drainFD(0).data(), foStdin, "", absPath("."), staticBaseEnv);
}

@@ -693,7 +704,7 @@ Path EvalState::findFile(SearchPath & searchPath, const string & path, const Pos
? "cannot look up '<%s>' in pure evaluation mode (use '--impure' to override)"
: "file '%s' was not found in the Nix search path (add it using $NIX_PATH or -I)",
path),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
});
}


@@ -96,7 +96,7 @@ static void prim_scopedImport(EvalState & state, const Pos & pos, Value * * args
} catch (InvalidPathError & e) {
throw EvalError({
.hint = hintfmt("cannot import '%1%', since path '%2%' is not valid", path, e.path),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
});
}

@@ -177,7 +177,7 @@ void prim_importNative(EvalState & state, const Pos & pos, Value * * args, Value
.hint = hintfmt(
"cannot import '%1%', since path '%2%' is not valid",
path, e.path),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
});
}

@@ -215,7 +215,7 @@ void prim_exec(EvalState & state, const Pos & pos, Value * * args, Value & v)
if (count == 0) {
throw EvalError({
.hint = hintfmt("at least one argument to 'exec' required"),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
});
}
PathSet context;

@@ -230,7 +230,7 @@ void prim_exec(EvalState & state, const Pos & pos, Value * * args, Value & v)
throw EvalError({
.hint = hintfmt("cannot execute '%1%', since path '%2%' is not valid",
program, e.path),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
});
}

@@ -239,13 +239,13 @@ void prim_exec(EvalState & state, const Pos & pos, Value * * args, Value & v)
try {
parsed = state.parseExprFromString(output, pos.file);
} catch (Error & e) {
-e.addPrefix(fmt("While parsing the output from '%1%', at %2%\n", program, pos));
+e.addTrace(pos, "While parsing the output from '%1%'", program);
throw;
}
try {
state.eval(parsed, v);
} catch (Error & e) {
-e.addPrefix(fmt("While evaluating the output from '%1%', at %2%\n", program, pos));
+e.addTrace(pos, "While evaluating the output from '%1%'", program);
throw;
}
}

@@ -385,7 +385,7 @@ static void prim_genericClosure(EvalState & state, const Pos & pos, Value * * ar
if (startSet == args[0]->attrs->end())
throw EvalError({
.hint = hintfmt("attribute 'startSet' required"),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
});
state.forceList(*startSet->value, pos);

@@ -399,7 +399,7 @@ static void prim_genericClosure(EvalState & state, const Pos & pos, Value * * ar
if (op == args[0]->attrs->end())
throw EvalError({
.hint = hintfmt("attribute 'operator' required"),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
});
state.forceValue(*op->value, pos);

@@ -421,7 +421,7 @@ static void prim_genericClosure(EvalState & state, const Pos & pos, Value * * ar
if (key == e->attrs->end())
throw EvalError({
.hint = hintfmt("attribute 'key' required"),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
});
state.forceValue(*key->value, pos);

@@ -471,7 +471,7 @@ static void prim_addErrorContext(EvalState & state, const Pos & pos, Value * * a
v = *args[1];
} catch (Error & e) {
PathSet context;
-e.addPrefix(format("%1%\n") % state.coerceToString(pos, *args[0], context));
+e.addTrace(std::nullopt, state.coerceToString(pos, *args[0], context));
throw;
}
}

@@ -556,14 +556,14 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
if (attr == args[0]->attrs->end())
throw EvalError({
.hint = hintfmt("required attribute 'name' missing"),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
});
string drvName;
Pos & posDrvName(*attr->pos);
try {
drvName = state.forceStringNoCtx(*attr->value, pos);
} catch (Error & e) {
-e.addPrefix(fmt("while evaluating the derivation attribute 'name' at %1%:\n", posDrvName));
+e.addTrace(posDrvName, "while evaluating the derivation attribute 'name'");
throw;
}

@@ -603,7 +603,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
else
throw EvalError({
.hint = hintfmt("invalid value '%s' for 'outputHashMode' attribute", s),
-.nixCode = NixCode { .errPos = posDrvName }
+.errPos = posDrvName
});
};

@@ -613,7 +613,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
if (outputs.find(j) != outputs.end())
throw EvalError({
.hint = hintfmt("duplicate derivation output '%1%'", j),
-.nixCode = NixCode { .errPos = posDrvName }
+.errPos = posDrvName
});
/* !!! Check whether j is a valid attribute
name. */

@@ -623,14 +623,14 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
if (j == "drv")
throw EvalError({
.hint = hintfmt("invalid derivation output name 'drv'" ),
-.nixCode = NixCode { .errPos = posDrvName }
+.errPos = posDrvName
});
outputs.insert(j);
}
if (outputs.empty())
throw EvalError({
.hint = hintfmt("derivation cannot have an empty set of outputs"),
-.nixCode = NixCode { .errPos = posDrvName }
+.errPos = posDrvName
});
};

@@ -696,8 +696,9 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
}
} catch (Error & e) {
-e.addPrefix(format("while evaluating the attribute '%1%' of the derivation '%2%' at %3%:\n")
-% key % drvName % posDrvName);
+e.addTrace(posDrvName,
+"while evaluating the attribute '%1%' of the derivation '%2%'",
+key, drvName);
throw;
}
}

@@ -745,20 +746,20 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
if (drv.builder == "")
throw EvalError({
.hint = hintfmt("required attribute 'builder' missing"),
-.nixCode = NixCode { .errPos = posDrvName }
+.errPos = posDrvName
});
if (drv.platform == "")
throw EvalError({
.hint = hintfmt("required attribute 'system' missing"),
-.nixCode = NixCode { .errPos = posDrvName }
+.errPos = posDrvName
});
/* Check whether the derivation name is valid. */
if (isDerivation(drvName))
throw EvalError({
.hint = hintfmt("derivation names are not allowed to end in '%s'", drvExtension),
-.nixCode = NixCode { .errPos = posDrvName }
+.errPos = posDrvName
});
if (outputHash) {

@@ -766,7 +767,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
if (outputs.size() != 1 || *(outputs.begin()) != "out")
throw Error({
.hint = hintfmt("multiple outputs are not supported in fixed-output derivations"),
-.nixCode = NixCode { .errPos = posDrvName }
+.errPos = posDrvName
});
std::optional<HashType> ht = parseHashTypeOpt(outputHashAlgo);

@@ -776,7 +777,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
if (!jsonObject) drv.env["out"] = state.store->printStorePath(outPath);
drv.outputs.insert_or_assign("out", DerivationOutput {
.path = std::move(outPath),
-.hash = DerivationOutputHash {
+.hash = FixedOutputHash {
.method = ingestionMethod,
.hash = std::move(h),
},

@@ -795,7 +796,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
drv.outputs.insert_or_assign(i,
DerivationOutput {
.path = StorePath::dummy,
-.hash = std::optional<DerivationOutputHash> {},
+.hash = std::optional<FixedOutputHash> {},
});
}

@@ -807,7 +808,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
drv.outputs.insert_or_assign(i,
DerivationOutput {
.path = std::move(outPath),
-.hash = std::optional<DerivationOutputHash>(),
+.hash = std::optional<FixedOutputHash>(),
});
}
}

@@ -880,7 +881,7 @@ static void prim_storePath(EvalState & state, const Pos & pos, Value * * args, V
if (!state.store->isInStore(path))
throw EvalError({
.hint = hintfmt("path '%1%' is not in the Nix store", path),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
});
Path path2 = state.store->toStorePath(path);
if (!settings.readOnlyMode)

@@ -901,7 +902,7 @@ static void prim_pathExists(EvalState & state, const Pos & pos, Value * * args,
.hint = hintfmt(
"cannot check the existence of '%1%', since path '%2%' is not valid",
path, e.path),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
});
}

@@ -947,7 +948,7 @@ static void prim_readFile(EvalState & state, const Pos & pos, Value * * args, Va
} catch (InvalidPathError & e) {
throw EvalError({
.hint = hintfmt("cannot read '%1%', since path '%2%' is not valid", path, e.path),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
});
}
string s = readFile(state.checkSourcePath(state.toRealPath(path, context)));

@@ -978,7 +979,7 @@ static void prim_findFile(EvalState & state, const Pos & pos, Value * * args, Va
if (i == v2.attrs->end())
throw EvalError({
.hint = hintfmt("attribute 'path' missing"),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
});
PathSet context;

@@ -989,7 +990,7 @@ static void prim_findFile(EvalState & state, const Pos & pos, Value * * args, Va
} catch (InvalidPathError & e) {
throw EvalError({
.hint = hintfmt("cannot find '%1%', since path '%2%' is not valid", path, e.path),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
});
}

@@ -1009,7 +1010,7 @@ static void prim_hashFile(EvalState & state, const Pos & pos, Value * * args, Va
if (!ht)
throw Error({
.hint = hintfmt("unknown hash type '%1%'", type),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
});
PathSet context; // discarded

@@ -1028,7 +1029,7 @@ static void prim_readDir(EvalState & state, const Pos & pos, Value * * args, Val
} catch (InvalidPathError & e) {
throw EvalError({
.hint = hintfmt("cannot read '%1%', since path '%2%' is not valid", path, e.path),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
});
}

@@ -1104,7 +1105,7 @@ static void prim_toFile(EvalState & state, const Pos & pos, Value * * args, Valu
"in 'toFile': the file named '%1%' must not contain a reference "
"to a derivation but contains (%2%)",
name, path),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
});
refs.insert(state.store->parseStorePath(path));
}

@@ -1175,7 +1176,7 @@ static void prim_filterSource(EvalState & state, const Pos & pos, Value * * args
if (!context.empty())
throw EvalError({
.hint = hintfmt("string '%1%' cannot refer to other paths", path),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
});
state.forceValue(*args[0], pos);

@@ -1184,7 +1185,7 @@ static void prim_filterSource(EvalState & state, const Pos & pos, Value * * args
.hint = hintfmt(
"first argument in call to 'filterSource' is not a function but %1%",
showType(*args[0])),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
});
addPath(state, pos, std::string(baseNameOf(path)), path, args[0], FileIngestionMethod::Recursive, Hash(), v);

@@ -1207,7 +1208,7 @@ static void prim_path(EvalState & state, const Pos & pos, Value * * args, Value
if (!context.empty())
throw EvalError({
.hint = hintfmt("string '%1%' cannot refer to other paths", path),
-.nixCode = NixCode { .errPos = *attr.pos }
+.errPos = *attr.pos
});
} else if (attr.name == state.sName)
name = state.forceStringNoCtx(*attr.value, *attr.pos);

@@ -1221,13 +1222,13 @@ static void prim_path(EvalState & state, const Pos & pos, Value * * args, Value
else
throw EvalError({
.hint = hintfmt("unsupported argument '%1%' to 'addPath'", attr.name),
-.nixCode = NixCode { .errPos = *attr.pos }
+.errPos = *attr.pos
});
}
if (path.empty())
throw EvalError({
.hint = hintfmt("'path' required"),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
});
if (name.empty())
name = baseNameOf(path);

@@ -1288,7 +1289,7 @@ void prim_getAttr(EvalState & state, const Pos & pos, Value * * args, Value & v)
if (i == args[1]->attrs->end())
throw EvalError({
.hint = hintfmt("attribute '%1%' missing", attr),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
});
// !!! add to stack trace?
if (state.countCalls && i->pos) state.attrSelects[*i->pos]++;

@@ -1371,7 +1372,7 @@ static void prim_listToAttrs(EvalState & state, const Pos & pos, Value * * args,
if (j == v2.attrs->end())
throw TypeError({
.hint = hintfmt("'name' attribute missing in a call to 'listToAttrs'"),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
});
string name = state.forceStringNoCtx(*j->value, pos);

@@ -1381,7 +1382,7 @@ static void prim_listToAttrs(EvalState & state, const Pos & pos, Value * * args,
if (j2 == v2.attrs->end())
throw TypeError({
.hint = hintfmt("'value' attribute missing in a call to 'listToAttrs'"),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
});
v.attrs->push_back(Attr(sym, j2->value, j2->pos));
}

@@ -1457,7 +1458,7 @@ static void prim_functionArgs(EvalState & state, const Pos & pos, Value * * args
if (args[0]->type != tLambda)
throw TypeError({
.hint = hintfmt("'functionArgs' requires a function"),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
});
if (!args[0]->lambda.fun->matchAttrs) {

@@ -1513,7 +1514,7 @@ static void elemAt(EvalState & state, const Pos & pos, Value & list, int n, Valu
if (n < 0 || (unsigned int) n >= list.listSize())
throw Error({
.hint = hintfmt("list index %1% is out of bounds", n),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
});
state.forceValue(*list.listElems()[n], pos);
v = *list.listElems()[n];

@@ -1543,7 +1544,7 @@ static void prim_tail(EvalState & state, const Pos & pos, Value * * args, Value
if (args[0]->listSize() == 0)
throw Error({
.hint = hintfmt("'tail' called on an empty list"),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
});
state.mkList(v, args[0]->listSize() - 1);

@@ -1688,7 +1689,7 @@ static void prim_genList(EvalState & state, const Pos & pos, Value * * args, Val
if (len < 0)
throw EvalError({
.hint = hintfmt("cannot create list of size %1%", len),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
});
state.mkList(v, len);

@@ -1850,7 +1851,7 @@ static void prim_div(EvalState & state, const Pos & pos, Value * * args, Value &
if (f2 == 0)
throw EvalError({
.hint = hintfmt("division by zero"),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
});
if (args[0]->type == tFloat || args[1]->type == tFloat) {

@@ -1862,7 +1863,7 @@ static void prim_div(EvalState & state, const Pos & pos, Value * * args, Value &
if (i1 == std::numeric_limits<NixInt>::min() && i2 == -1)
throw EvalError({
.hint = hintfmt("overflow in integer division"),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
});
mkInt(v, i1 / i2);

@@ -1923,7 +1924,7 @@ static void prim_substring(EvalState & state, const Pos & pos, Value * * args, V
if (start < 0)
throw EvalError({
.hint = hintfmt("negative start position in 'substring'"),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
});
mkString(v, (unsigned int) start >= s.size() ? "" : string(s, start, len), context);

@@ -1946,7 +1947,7 @@ static void prim_hashString(EvalState & state, const Pos & pos, Value * * args,
if (!ht)
throw Error({
.hint = hintfmt("unknown hash type '%1%'", type),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
});
PathSet context; // discarded

@@ -1992,12 +1993,12 @@ void prim_match(EvalState & state, const Pos & pos, Value * * args, Value & v)
// limit is _GLIBCXX_REGEX_STATE_LIMIT for libstdc++
throw EvalError({
.hint = hintfmt("memory limit exceeded by regular expression '%s'", re),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
});
} else {
throw EvalError({
.hint = hintfmt("invalid regular expression '%s'", re),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
});
}
}

@@ -2065,12 +2066,12 @@ static void prim_split(EvalState & state, const Pos & pos, Value * * args, Value
// limit is _GLIBCXX_REGEX_STATE_LIMIT for libstdc++
throw EvalError({
.hint = hintfmt("memory limit exceeded by regular expression '%s'", re),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
});
} else {
throw EvalError({
.hint = hintfmt("invalid regular expression '%s'", re),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
});
}
}

@@ -2104,7 +2105,7 @@ static void prim_replaceStrings(EvalState & state, const Pos & pos, Value * * ar
if (args[0]->listSize() != args[1]->listSize())
throw EvalError({
.hint = hintfmt("'from' and 'to' arguments to 'replaceStrings' have different lengths"),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
});
vector<string> from;


@@ -148,7 +148,7 @@ static void prim_appendContext(EvalState & state, const Pos & pos, Value * * arg
if (!state.store->isStorePath(i.name))
throw EvalError({
.hint = hintfmt("Context key '%s' is not a store path", i.name),
-.nixCode = NixCode { .errPos = *i.pos }
+.errPos = *i.pos
});
if (!settings.readOnlyMode)
state.store->ensurePath(state.store->parseStorePath(i.name));

@@ -165,7 +165,7 @@ static void prim_appendContext(EvalState & state, const Pos & pos, Value * * arg
if (!isDerivation(i.name)) {
throw EvalError({
.hint = hintfmt("Tried to add all-outputs context of %s, which is not a derivation, to a string", i.name),
-.nixCode = NixCode { .errPos = *i.pos }
+.errPos = *i.pos
});
}
context.insert("=" + string(i.name));

@@ -178,7 +178,7 @@ static void prim_appendContext(EvalState & state, const Pos & pos, Value * * arg
if (iter->value->listSize() && !isDerivation(i.name)) {
throw EvalError({
.hint = hintfmt("Tried to add derivation output context of %s, which is not a derivation, to a string", i.name),
-.nixCode = NixCode { .errPos = *i.pos }
+.errPos = *i.pos
});
}
for (unsigned int n = 0; n < iter->value->listSize(); ++n) {


@@ -37,14 +37,14 @@ static void prim_fetchGit(EvalState & state, const Pos & pos, Value * * args, Va
else
throw EvalError({
.hint = hintfmt("unsupported argument '%s' to 'fetchGit'", attr.name),
-.nixCode = NixCode { .errPos = *attr.pos }
+.errPos = *attr.pos
});
}
if (url.empty())
throw EvalError({
.hint = hintfmt("'url' argument required"),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
});
} else


@@ -40,14 +40,14 @@ static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * ar
else
throw EvalError({
.hint = hintfmt("unsupported argument '%s' to 'fetchMercurial'", attr.name),
-.nixCode = NixCode { .errPos = *attr.pos }
+.errPos = *attr.pos
});
}
if (url.empty())
throw EvalError({
.hint = hintfmt("'url' argument required"),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
});
} else


@@ -68,7 +68,7 @@ static void prim_fetchTree(EvalState & state, const Pos & pos, Value * * args, V
if (!attrs.count("type"))
throw Error({
.hint = hintfmt("attribute 'type' is missing in call to 'fetchTree'"),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
});
input = fetchers::inputFromAttrs(attrs);

@@ -112,14 +112,14 @@ static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
else
throw EvalError({
.hint = hintfmt("unsupported argument '%s' to '%s'", attr.name, who),
-.nixCode = NixCode { .errPos = *attr.pos }
+.errPos = *attr.pos
});
}
if (!url)
throw EvalError({
.hint = hintfmt("'url' argument required"),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
});
} else
url = state.forceStringNoCtx(*args[0], pos);


@@ -83,7 +83,7 @@ static void prim_fromTOML(EvalState & state, const Pos & pos, Value * * args, Va
} catch (std::runtime_error & e) {
throw EvalError({
.hint = hintfmt("while parsing a TOML string: %s", e.what()),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
});
}
}


@@ -6,6 +6,8 @@ namespace nix::fetchers {
struct Cache
{
+virtual ~Cache() { }
+
virtual void add(
ref<Store> store,
const Attrs & inAttrs,


@@ -70,7 +70,10 @@ DownloadFileResult downloadFile(
ValidPathInfo info(store->makeFixedOutputPath(FileIngestionMethod::Flat, hash, name));
info.narHash = hashString(htSHA256, *sink.s);
info.narSize = sink.s->size();
-info.ca = makeFixedOutputCA(FileIngestionMethod::Flat, hash);
+info.ca = FixedOutputHash {
+.method = FileIngestionMethod::Flat,
+.hash = hash,
+};
auto source = StringSource { *sink.s };
store->addToStore(info, source, NoRepair, NoCheckSigs);
storePath = std::move(info.path);


@@ -26,7 +26,7 @@ Logger * makeDefaultLogger() {
case LogFormat::rawWithLogs:
return makeSimpleLogger(true);
case LogFormat::internalJson:
-return makeJSONLogger(*makeSimpleLogger());
+return makeJSONLogger(*makeSimpleLogger(true));
case LogFormat::bar:
return makeProgressBar();
case LogFormat::barWithLogs:


@@ -131,7 +131,7 @@ public:
auto state(state_.lock());
std::stringstream oss;
-oss << ei;
+showErrorInfo(oss, ei, loggerSettings.showTrace.get());
log(*state, ei.level, oss.str());
}


@@ -323,10 +323,8 @@ int handleExceptions(const string & programName, std::function<void()> fun)
printError("Try '%1% --help' for more information.", programName);
return 1;
} catch (BaseError & e) {
-if (settings.showTrace && e.prefix() != "")
-printError(e.prefix());
logError(e.info());
-if (e.prefix() != "" && !settings.showTrace)
+if (e.hasTrace() && !loggerSettings.showTrace.get())
printError("(use '--show-trace' to show detailed location information)");
return e.status;
} catch (std::bad_alloc & e) {


@@ -1950,8 +1950,11 @@ void linkOrCopy(const Path & from, const Path & to)
/* Hard-linking fails if we exceed the maximum link count on a
file (e.g. 32000 of ext3), which is quite possible after a
'nix-store --optimise'. FIXME: actually, why don't we just
-bind-mount in this case? */
-if (errno != EMLINK)
+bind-mount in this case?
+
+It can also fail with EPERM in BeegFS v7 and earlier versions
+which don't allow hard-links to other directories */
+if (errno != EMLINK && errno != EPERM)
throw SysError("linking '%s' to '%s'", to, from);
copyPath(from, to);
}
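The pattern in this hunk, retrying with a plain copy when the filesystem refuses a hard link, can be sketched independently of Nix. This standalone example uses std::filesystem and is an illustration under stated assumptions, not the build.cc implementation (which calls the POSIX APIs directly):

    // Hedged sketch: hard-link with copy fallback on EMLINK (link-count limit, e.g. ext3's 32000)
    // or EPERM (BeegFS <= v7 refuses hard links across directories).
    #include <cerrno>
    #include <filesystem>

    namespace fs = std::filesystem;

    void linkOrCopySketch(const fs::path & from, const fs::path & to)
    {
        std::error_code ec;
        fs::create_hard_link(from, to, ec);
        if (!ec) return;
        // On POSIX systems ec.value() carries the errno from link(2).
        if (ec.value() != EMLINK && ec.value() != EPERM)
            throw fs::filesystem_error("linking failed", from, to, ec);
        fs::copy(from, to);   // degrade gracefully to a copy
    }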
@@ -2750,8 +2753,8 @@ struct RestrictedStore : public LocalFSStore
void queryReferrers(const StorePath & path, StorePathSet & referrers) override
{ }
-StorePathSet queryDerivationOutputs(const StorePath & path) override
-{ throw Error("queryDerivationOutputs"); }
+OutputPathMap queryDerivationOutputMap(const StorePath & path) override
+{ throw Error("queryDerivationOutputMap"); }
std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override
{ throw Error("queryPathFromHashPart"); }

@@ -3714,7 +3717,7 @@ void DerivationGoal::registerOutputs()
/* Check that fixed-output derivations produced the right
outputs (i.e., the content hash should match the specified
hash). */
-std::string ca;
+std::optional<ContentAddress> ca;
if (fixedOutput) {

@@ -3764,7 +3767,10 @@ void DerivationGoal::registerOutputs()
else
assert(worker.store.parseStorePath(path) == dest);
-ca = makeFixedOutputCA(i.second.hash->method, h2);
+ca = FixedOutputHash {
+.method = i.second.hash->method,
+.hash = h2,
+};
}
/* Get rid of all weird permissions. This also checks that

@@ -3837,7 +3843,10 @@ void DerivationGoal::registerOutputs()
info.ca = ca;
worker.store.signPathInfo(info);
-if (!info.references.empty()) info.ca.clear();
+if (!info.references.empty()) {
+// FIXME don't we have an experimental feature for fixed output with references?
+info.ca = {};
+}
infos.emplace(i.first, std::move(info));
}


@@ -58,13 +58,16 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData)
}
};

+/* We always have one output, and if it's a fixed-output derivation (as
+checked below) it must be the only output */
+auto & output = drv.outputs.begin()->second;
+
/* Try the hashed mirrors first. */
-if (getAttr("outputHashMode") == "flat")
+if (output.hash && output.hash->method == FileIngestionMethod::Flat)
for (auto hashedMirror : settings.hashedMirrors.get())
try {
if (!hasSuffix(hashedMirror, "/")) hashedMirror += '/';
-auto ht = parseHashTypeOpt(getAttr("outputHashAlgo"));
-auto h = Hash(getAttr("outputHash"), ht);
+auto & h = output.hash->hash;
fetch(hashedMirror + printHashType(*h.type) + "/" + h.to_string(Base16, false));
return;
} catch (Error & e) {


@@ -0,0 +1,85 @@
#include "content-address.hh"
namespace nix {
std::string FixedOutputHash::printMethodAlgo() const {
return makeFileIngestionPrefix(method) + printHashType(*hash.type);
}
std::string makeFileIngestionPrefix(const FileIngestionMethod m) {
switch (m) {
case FileIngestionMethod::Flat:
return "";
case FileIngestionMethod::Recursive:
return "r:";
default:
throw Error("impossible, caught both cases");
}
}
std::string makeFixedOutputCA(FileIngestionMethod method, const Hash & hash)
{
return "fixed:"
+ makeFileIngestionPrefix(method)
+ hash.to_string(Base32, true);
}
// FIXME Put this somewhere?
template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;
std::string renderContentAddress(ContentAddress ca) {
return std::visit(overloaded {
[](TextHash th) {
return "text:" + th.hash.to_string(Base32, true);
},
[](FixedOutputHash fsh) {
return makeFixedOutputCA(fsh.method, fsh.hash);
}
}, ca);
}
ContentAddress parseContentAddress(std::string_view rawCa) {
auto prefixSeparator = rawCa.find(':');
if (prefixSeparator != string::npos) {
auto prefix = string(rawCa, 0, prefixSeparator);
if (prefix == "text") {
auto hashTypeAndHash = rawCa.substr(prefixSeparator+1, string::npos);
Hash hash = Hash(string(hashTypeAndHash));
if (*hash.type != htSHA256) {
throw Error("parseContentAddress: the text hash should have type SHA256");
}
return TextHash { hash };
} else if (prefix == "fixed") {
// This has to be an inverse of makeFixedOutputCA
auto methodAndHash = rawCa.substr(prefixSeparator+1, string::npos);
if (methodAndHash.substr(0,2) == "r:") {
std::string_view hashRaw = methodAndHash.substr(2,string::npos);
return FixedOutputHash {
.method = FileIngestionMethod::Recursive,
.hash = Hash(string(hashRaw)),
};
} else {
std::string_view hashRaw = methodAndHash;
return FixedOutputHash {
.method = FileIngestionMethod::Flat,
.hash = Hash(string(hashRaw)),
};
}
} else {
throw Error("parseContentAddress: format not recognized; has to be text or fixed");
}
} else {
throw Error("Not a content address because it lacks an appropriate prefix");
}
};
std::optional<ContentAddress> parseContentAddressOpt(std::string_view rawCaOpt) {
return rawCaOpt == "" ? std::optional<ContentAddress> {} : parseContentAddress(rawCaOpt);
};
std::string renderContentAddress(std::optional<ContentAddress> ca) {
return ca ? renderContentAddress(*ca) : "";
}
}


@@ -0,0 +1,56 @@
#pragma once
#include <variant>
#include "hash.hh"
namespace nix {
enum struct FileIngestionMethod : uint8_t {
Flat = false,
Recursive = true
};
struct TextHash {
Hash hash;
};
/// Pair of a hash, and how the file system was ingested
struct FixedOutputHash {
FileIngestionMethod method;
Hash hash;
std::string printMethodAlgo() const;
};
/*
We've accumulated several types of content-addressed paths over the years;
fixed-output derivations support multiple hash algorithms and serialisation
methods (flat file vs NAR). Thus, ca has one of the following forms:
* text:sha256:<sha256 hash of file contents>: For paths
computed by makeTextPath() / addTextToStore().
* fixed:<r?>:<ht>:<h>: For paths computed by
makeFixedOutputPath() / addToStore().
*/
typedef std::variant<
TextHash, // for paths computed by makeTextPath() / addTextToStore
FixedOutputHash // for path computed by makeFixedOutputPath
> ContentAddress;
/* Compute the prefix to the hash algorithm which indicates how the files were
ingested. */
std::string makeFileIngestionPrefix(const FileIngestionMethod m);
/* Compute the content-addressability assertion (ValidPathInfo::ca)
for paths created by makeFixedOutputPath() / addToStore(). */
std::string makeFixedOutputCA(FileIngestionMethod method, const Hash & hash);
std::string renderContentAddress(ContentAddress ca);
std::string renderContentAddress(std::optional<ContentAddress> ca);
ContentAddress parseContentAddress(std::string_view rawCa);
std::optional<ContentAddress> parseContentAddressOpt(std::string_view rawCaOpt);
}
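Note: callers elsewhere in this patch inspect the variant either with std::visit (content-address.cc, store-api.cc) or with std::holds_alternative (local-store.cc). A small sketch of both styles, assuming this header; the helper names are illustrative only:

    #include "content-address.hh"

    using namespace nix;

    // Is this the text:sha256:... flavour (used for .drv files)?
    static bool isTextCA(const ContentAddress & ca)
    {
        return std::holds_alternative<TextHash>(ca);
    }

    // "r:" for recursive (NAR) ingestion, "" for flat ingestion or for text hashes.
    static std::string ingestionPrefix(const ContentAddress & ca)
    {
        if (auto fsh = std::get_if<FixedOutputHash>(&ca))
            return makeFileIngestionPrefix(fsh->method);
        return "";
    }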

View file

@ -78,10 +78,10 @@ struct TunnelLogger : public Logger
if (ei.level > verbosity) return; if (ei.level > verbosity) return;
std::stringstream oss; std::stringstream oss;
oss << ei; showErrorInfo(oss, ei, false);
StringSink buf; StringSink buf;
buf << STDERR_NEXT << oss.str() << "\n"; // (fs.s + "\n"); buf << STDERR_NEXT << oss.str() << "\n";
enqueueMsg(*buf.s); enqueueMsg(*buf.s);
} }
@ -347,6 +347,15 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
break; break;
} }
case wopQueryDerivationOutputMap: {
auto path = store->parseStorePath(readString(from));
logger->startWork();
OutputPathMap outputs = store->queryDerivationOutputMap(path);
logger->stopWork();
writeOutputPathMap(*store, to, outputs);
break;
}
case wopQueryDeriver: { case wopQueryDeriver: {
auto path = store->parseStorePath(readString(from)); auto path = store->parseStorePath(readString(from));
logger->startWork(); logger->startWork();
@ -652,7 +661,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
if (GET_PROTOCOL_MINOR(clientVersion) >= 16) { if (GET_PROTOCOL_MINOR(clientVersion) >= 16) {
to << info->ultimate to << info->ultimate
<< info->sigs << info->sigs
<< info->ca; << renderContentAddress(info->ca);
} }
} else { } else {
assert(GET_PROTOCOL_MINOR(clientVersion) >= 17); assert(GET_PROTOCOL_MINOR(clientVersion) >= 17);
@ -710,7 +719,8 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
info.references = readStorePaths<StorePathSet>(*store, from); info.references = readStorePaths<StorePathSet>(*store, from);
from >> info.registrationTime >> info.narSize >> info.ultimate; from >> info.registrationTime >> info.narSize >> info.ultimate;
info.sigs = readStrings<StringSet>(from); info.sigs = readStrings<StringSet>(from);
from >> info.ca >> repair >> dontCheckSigs; info.ca = parseContentAddressOpt(readString(from));
from >> repair >> dontCheckSigs;
if (!trusted && dontCheckSigs) if (!trusted && dontCheckSigs)
dontCheckSigs = false; dontCheckSigs = false;
if (!trusted) if (!trusted)
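Note: the wire format for the ca field is unchanged by the move from a plain string to std::optional<ContentAddress>: an absent content address is still transmitted as the empty string, because renderContentAddress of an empty optional yields "" and parseContentAddressOpt("") yields nullopt (see content-address.cc above). A sketch:

    #include <cassert>
    #include <optional>

    #include "content-address.hh"

    int main()
    {
        std::optional<nix::ContentAddress> none;
        assert(nix::renderContentAddress(none) == "");          // serialised exactly as before
        assert(!nix::parseContentAddressOpt("").has_value());   // and read back as "no content address"
        return 0;
    }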

View file

@ -7,11 +7,6 @@
namespace nix { namespace nix {
std::string DerivationOutputHash::printMethodAlgo() const {
return makeFileIngestionPrefix(method) + printHashType(*hash.type);
}
const StorePath & BasicDerivation::findOutput(const string & id) const const StorePath & BasicDerivation::findOutput(const string & id) const
{ {
auto i = outputs.find(id); auto i = outputs.find(id);
@ -112,7 +107,7 @@ static DerivationOutput parseDerivationOutput(const Store & store, istringstream
expect(str, ","); const auto hash = parseString(str); expect(str, ","); const auto hash = parseString(str);
expect(str, ")"); expect(str, ")");
std::optional<DerivationOutputHash> fsh; std::optional<FixedOutputHash> fsh;
if (hashAlgo != "") { if (hashAlgo != "") {
auto method = FileIngestionMethod::Flat; auto method = FileIngestionMethod::Flat;
if (string(hashAlgo, 0, 2) == "r:") { if (string(hashAlgo, 0, 2) == "r:") {
@ -120,7 +115,7 @@ static DerivationOutput parseDerivationOutput(const Store & store, istringstream
hashAlgo = string(hashAlgo, 2); hashAlgo = string(hashAlgo, 2);
} }
const HashType hashType = parseHashType(hashAlgo); const HashType hashType = parseHashType(hashAlgo);
fsh = DerivationOutputHash { fsh = FixedOutputHash {
.method = std::move(method), .method = std::move(method),
.hash = Hash(hash, hashType), .hash = Hash(hash, hashType),
}; };
@ -380,17 +375,17 @@ static DerivationOutput readDerivationOutput(Source & in, const Store & store)
{ {
auto path = store.parseStorePath(readString(in)); auto path = store.parseStorePath(readString(in));
auto hashAlgo = readString(in); auto hashAlgo = readString(in);
const auto hash = readString(in); auto hash = readString(in);
std::optional<DerivationOutputHash> fsh; std::optional<FixedOutputHash> fsh;
if (hashAlgo != "") { if (hashAlgo != "") {
auto method = FileIngestionMethod::Flat; auto method = FileIngestionMethod::Flat;
if (string(hashAlgo, 0, 2) == "r:") { if (string(hashAlgo, 0, 2) == "r:") {
method = FileIngestionMethod::Recursive; method = FileIngestionMethod::Recursive;
hashAlgo = string(hashAlgo, 2); hashAlgo = string(hashAlgo, 2);
} }
const HashType hashType = parseHashType(hashAlgo); auto hashType = parseHashType(hashAlgo);
fsh = DerivationOutputHash { fsh = FixedOutputHash {
.method = std::move(method), .method = std::move(method),
.hash = Hash(hash, hashType), .hash = Hash(hash, hashType),
}; };
@ -439,11 +434,16 @@ Source & readDerivation(Source & in, const Store & store, BasicDerivation & drv)
void writeDerivation(Sink & out, const Store & store, const BasicDerivation & drv) void writeDerivation(Sink & out, const Store & store, const BasicDerivation & drv)
{ {
out << drv.outputs.size(); out << drv.outputs.size();
for (auto & i : drv.outputs) for (auto & i : drv.outputs) {
out << i.first out << i.first
<< store.printStorePath(i.second.path) << store.printStorePath(i.second.path);
<< i.second.hash->printMethodAlgo() if (i.second.hash) {
out << i.second.hash->printMethodAlgo()
<< i.second.hash->hash.to_string(Base16, false); << i.second.hash->hash.to_string(Base16, false);
} else {
out << "" << "";
}
}
writeStorePaths(store, out, drv.inputSrcs); writeStorePaths(store, out, drv.inputSrcs);
out << drv.platform << drv.builder << drv.args; out << drv.platform << drv.builder << drv.args;
out << drv.env.size(); out << drv.env.size();
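Note: the hashAlgo field written and parsed above is the ingestion prefix plus the hash type name: "r:sha256" for a recursive (NAR) SHA-256 fixed output, plain "sha256" for a flat one, and two empty strings for an ordinary output, which readDerivationOutput maps back to std::nullopt. A small sketch of the field value, assuming content-address.hh (the digest is a placeholder):

    #include <cassert>

    #include "content-address.hh"

    int main()
    {
        using namespace nix;
        FixedOutputHash fo {
            .method = FileIngestionMethod::Recursive,
            .hash = Hash("sha256:1b4sb93wp679q4zx9k1ignby1yna3z7c4c2ri3wphylbc2dwsys0"), // placeholder
        };
        assert(fo.printMethodAlgo() == "r:sha256");   // would be "sha256" for FileIngestionMethod::Flat
        return 0;
    }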

View file

@ -3,6 +3,7 @@
#include "path.hh" #include "path.hh"
#include "types.hh" #include "types.hh"
#include "hash.hh" #include "hash.hh"
#include "content-address.hh"
#include <map> #include <map>
@ -12,18 +13,10 @@ namespace nix {
/* Abstract syntax of derivations. */ /* Abstract syntax of derivations. */
/// Pair of a hash, and how the file system was ingested
struct DerivationOutputHash {
FileIngestionMethod method;
Hash hash;
std::string printMethodAlgo() const;
};
struct DerivationOutput struct DerivationOutput
{ {
StorePath path; StorePath path;
std::optional<DerivationOutputHash> hash; /* hash used for expected hash computation */ std::optional<FixedOutputHash> hash; /* hash used for expected hash computation */
void parseHashInfo(FileIngestionMethod & recursive, Hash & hash) const;
}; };
typedef std::map<string, DerivationOutput> DerivationOutputs; typedef std::map<string, DerivationOutput> DerivationOutputs;
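Note: with hash now a std::optional<FixedOutputHash>, a fixed-output derivation output carries its expected hash while an ordinary (input-addressed) output simply leaves it empty. A sketch of both, assuming this header; the store-path base names and the digest are placeholders:

    #include <optional>

    #include "derivations.hh"

    using namespace nix;

    // Fixed-output: the build result must match this hash.
    DerivationOutput fixedOut {
        .path = StorePath("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-hello"),
        .hash = FixedOutputHash {
            .method = FileIngestionMethod::Flat,
            .hash = Hash("sha256:1b4sb93wp679q4zx9k1ignby1yna3z7c4c2ri3wphylbc2dwsys0"),
        },
    };

    // Ordinary output: no expected hash (serialised as "" "" by writeDerivation).
    DerivationOutput ordinaryOut {
        .path = StorePath("bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb-hello-dev"),
        .hash = std::nullopt,
    };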

View file

@ -35,7 +35,7 @@ Settings::Settings()
, nixLibexecDir(canonPath(getEnv("NIX_LIBEXEC_DIR").value_or(NIX_LIBEXEC_DIR))) , nixLibexecDir(canonPath(getEnv("NIX_LIBEXEC_DIR").value_or(NIX_LIBEXEC_DIR)))
, nixBinDir(canonPath(getEnv("NIX_BIN_DIR").value_or(NIX_BIN_DIR))) , nixBinDir(canonPath(getEnv("NIX_BIN_DIR").value_or(NIX_BIN_DIR)))
, nixManDir(canonPath(NIX_MAN_DIR)) , nixManDir(canonPath(NIX_MAN_DIR))
, nixDaemonSocketFile(canonPath(nixStateDir + DEFAULT_SOCKET_PATH)) , nixDaemonSocketFile(canonPath(getEnv("NIX_DAEMON_SOCKET_PATH").value_or(nixStateDir + DEFAULT_SOCKET_PATH)))
{ {
buildUsersGroup = getuid() == 0 ? "nixbld" : ""; buildUsersGroup = getuid() == 0 ? "nixbld" : "";
lockCPU = getEnv("NIX_AFFINITY_HACK") == "1"; lockCPU = getEnv("NIX_AFFINITY_HACK") == "1";

View file

@ -196,10 +196,6 @@ public:
/* Whether to lock the Nix client and worker to the same CPU. */ /* Whether to lock the Nix client and worker to the same CPU. */
bool lockCPU; bool lockCPU;
/* Whether to show a stack trace if Nix evaluation fails. */
Setting<bool> showTrace{this, false, "show-trace",
"Whether to show a stack trace on evaluation errors."};
Setting<SandboxMode> sandboxMode{this, Setting<SandboxMode> sandboxMode{this,
#if __linux__ #if __linux__
smEnabled smEnabled

View file

@ -114,7 +114,7 @@ struct LegacySSHStore : public Store
if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 4) { if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 4) {
auto s = readString(conn->from); auto s = readString(conn->from);
info->narHash = s.empty() ? Hash() : Hash(s); info->narHash = s.empty() ? Hash() : Hash(s);
conn->from >> info->ca; info->ca = parseContentAddressOpt(readString(conn->from));
info->sigs = readStrings<StringSet>(conn->from); info->sigs = readStrings<StringSet>(conn->from);
} }
@ -146,7 +146,7 @@ struct LegacySSHStore : public Store
<< info.narSize << info.narSize
<< info.ultimate << info.ultimate
<< info.sigs << info.sigs
<< info.ca; << renderContentAddress(info.ca);
try { try {
copyNAR(source, conn->to); copyNAR(source, conn->to);
} catch (...) { } catch (...) {

View file

@ -580,7 +580,7 @@ void LocalStore::checkDerivationOutputs(const StorePath & drvPath, const Derivat
uint64_t LocalStore::addValidPath(State & state, uint64_t LocalStore::addValidPath(State & state,
const ValidPathInfo & info, bool checkOutputs) const ValidPathInfo & info, bool checkOutputs)
{ {
if (info.ca != "" && !info.isContentAddressed(*this)) if (info.ca.has_value() && !info.isContentAddressed(*this))
throw Error("cannot add path '%s' to the Nix store because it claims to be content-addressed but isn't", throw Error("cannot add path '%s' to the Nix store because it claims to be content-addressed but isn't",
printStorePath(info.path)); printStorePath(info.path));
@ -592,7 +592,7 @@ uint64_t LocalStore::addValidPath(State & state,
(info.narSize, info.narSize != 0) (info.narSize, info.narSize != 0)
(info.ultimate ? 1 : 0, info.ultimate) (info.ultimate ? 1 : 0, info.ultimate)
(concatStringsSep(" ", info.sigs), !info.sigs.empty()) (concatStringsSep(" ", info.sigs), !info.sigs.empty())
(info.ca, !info.ca.empty()) (renderContentAddress(info.ca), (bool) info.ca)
.exec(); .exec();
uint64_t id = sqlite3_last_insert_rowid(state.db); uint64_t id = sqlite3_last_insert_rowid(state.db);
@ -666,7 +666,7 @@ void LocalStore::queryPathInfoUncached(const StorePath & path,
if (s) info->sigs = tokenizeString<StringSet>(s, " "); if (s) info->sigs = tokenizeString<StringSet>(s, " ");
s = (const char *) sqlite3_column_text(state->stmtQueryPathInfo, 7); s = (const char *) sqlite3_column_text(state->stmtQueryPathInfo, 7);
if (s) info->ca = s; if (s) info->ca = parseContentAddressOpt(s);
/* Get the references. */ /* Get the references. */
auto useQueryReferences(state->stmtQueryReferences.use()(info->id)); auto useQueryReferences(state->stmtQueryReferences.use()(info->id));
@ -689,7 +689,7 @@ void LocalStore::updatePathInfo(State & state, const ValidPathInfo & info)
(info.narHash.to_string(Base16, true)) (info.narHash.to_string(Base16, true))
(info.ultimate ? 1 : 0, info.ultimate) (info.ultimate ? 1 : 0, info.ultimate)
(concatStringsSep(" ", info.sigs), !info.sigs.empty()) (concatStringsSep(" ", info.sigs), !info.sigs.empty())
(info.ca, !info.ca.empty()) (renderContentAddress(info.ca), (bool) info.ca)
(printStorePath(info.path)) (printStorePath(info.path))
.exec(); .exec();
} }
@ -774,17 +774,20 @@ StorePathSet LocalStore::queryValidDerivers(const StorePath & path)
} }
StorePathSet LocalStore::queryDerivationOutputs(const StorePath & path) OutputPathMap LocalStore::queryDerivationOutputMap(const StorePath & path)
{ {
return retrySQLite<StorePathSet>([&]() { return retrySQLite<OutputPathMap>([&]() {
auto state(_state.lock()); auto state(_state.lock());
auto useQueryDerivationOutputs(state->stmtQueryDerivationOutputs.use() auto useQueryDerivationOutputs(state->stmtQueryDerivationOutputs.use()
(queryValidPathId(*state, path))); (queryValidPathId(*state, path)));
StorePathSet outputs; OutputPathMap outputs;
while (useQueryDerivationOutputs.next()) while (useQueryDerivationOutputs.next())
outputs.insert(parseStorePath(useQueryDerivationOutputs.getStr(1))); outputs.emplace(
useQueryDerivationOutputs.getStr(0),
parseStorePath(useQueryDerivationOutputs.getStr(1))
);
return outputs; return outputs;
}); });
@ -985,15 +988,15 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
deletePath(realPath); deletePath(realPath);
if (info.ca != "" && // text hashing has long been allowed to have non-self-references because it is used for drv files.
!((hasPrefix(info.ca, "text:") && !info.references.count(info.path)) bool refersToSelf = info.references.count(info.path) > 0;
|| info.references.empty())) if (info.ca.has_value() && !info.references.empty() && !(std::holds_alternative<TextHash>(*info.ca) && !refersToSelf))
settings.requireExperimentalFeature("ca-references"); settings.requireExperimentalFeature("ca-references");
/* While restoring the path from the NAR, compute the hash /* While restoring the path from the NAR, compute the hash
of the NAR. */ of the NAR. */
std::unique_ptr<AbstractHashSink> hashSink; std::unique_ptr<AbstractHashSink> hashSink;
if (info.ca == "" || !info.references.count(info.path)) if (!info.ca.has_value() || !info.references.count(info.path))
hashSink = std::make_unique<HashSink>(htSHA256); hashSink = std::make_unique<HashSink>(htSHA256);
else else
hashSink = std::make_unique<HashModuloSink>(htSHA256, std::string(info.path.hashPart())); hashSink = std::make_unique<HashModuloSink>(htSHA256, std::string(info.path.hashPart()));
@ -1079,7 +1082,7 @@ StorePath LocalStore::addToStoreFromDump(const string & dump, const string & nam
ValidPathInfo info(dstPath); ValidPathInfo info(dstPath);
info.narHash = hash.first; info.narHash = hash.first;
info.narSize = hash.second; info.narSize = hash.second;
info.ca = makeFixedOutputCA(method, h); info.ca = FixedOutputHash { .method = method, .hash = h };
registerValidPath(info); registerValidPath(info);
} }
@ -1143,7 +1146,7 @@ StorePath LocalStore::addTextToStore(const string & name, const string & s,
info.narHash = narHash; info.narHash = narHash;
info.narSize = sink.s->size(); info.narSize = sink.s->size();
info.references = references; info.references = references;
info.ca = "text:" + hash.to_string(Base32, true); info.ca = TextHash { .hash = hash };
registerValidPath(info); registerValidPath(info);
} }
@ -1254,7 +1257,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
printMsg(lvlTalkative, "checking contents of '%s'", printStorePath(i)); printMsg(lvlTalkative, "checking contents of '%s'", printStorePath(i));
std::unique_ptr<AbstractHashSink> hashSink; std::unique_ptr<AbstractHashSink> hashSink;
if (info->ca == "" || !info->references.count(info->path)) if (!info->ca || !info->references.count(info->path))
hashSink = std::make_unique<HashSink>(*info->narHash.type); hashSink = std::make_unique<HashSink>(*info->narHash.type);
else else
hashSink = std::make_unique<HashModuloSink>(*info->narHash.type, std::string(info->path.hashPart())); hashSink = std::make_unique<HashModuloSink>(*info->narHash.type, std::string(info->path.hashPart()));
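Note: the rewritten gate in addToStore reads as a predicate: the ca-references experimental feature is required whenever a content-addressed path has references, except for text-hashed paths (.drv files) that do not refer to themselves. A sketch with the same semantics, assuming store-api.hh; the helper name is illustrative:

    #include "store-api.hh"

    using namespace nix;

    static bool needsCaReferencesFeature(const ValidPathInfo & info)
    {
        if (!info.ca || info.references.empty())
            return false;                                  // not content-addressed, or no references
        bool refersToSelf = info.references.count(info.path) > 0;
        if (std::holds_alternative<TextHash>(*info.ca) && !refersToSelf)
            return false;                                  // text-hashed .drv files have always been allowed
        return true;                                       // any other CA path with references
    }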

View file

@ -130,7 +130,7 @@ public:
StorePathSet queryValidDerivers(const StorePath & path) override; StorePathSet queryValidDerivers(const StorePath & path) override;
StorePathSet queryDerivationOutputs(const StorePath & path) override; OutputPathMap queryDerivationOutputMap(const StorePath & path) override;
std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override; std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override;

View file

@ -203,7 +203,7 @@ public:
narInfo->deriver = StorePath(queryNAR.getStr(9)); narInfo->deriver = StorePath(queryNAR.getStr(9));
for (auto & sig : tokenizeString<Strings>(queryNAR.getStr(10), " ")) for (auto & sig : tokenizeString<Strings>(queryNAR.getStr(10), " "))
narInfo->sigs.insert(sig); narInfo->sigs.insert(sig);
narInfo->ca = queryNAR.getStr(11); narInfo->ca = parseContentAddressOpt(queryNAR.getStr(11));
return {oValid, narInfo}; return {oValid, narInfo};
}); });
@ -237,7 +237,7 @@ public:
(concatStringsSep(" ", info->shortRefs())) (concatStringsSep(" ", info->shortRefs()))
(info->deriver ? std::string(info->deriver->to_string()) : "", (bool) info->deriver) (info->deriver ? std::string(info->deriver->to_string()) : "", (bool) info->deriver)
(concatStringsSep(" ", info->sigs)) (concatStringsSep(" ", info->sigs))
(info->ca) (renderContentAddress(info->ca))
(time(0)).exec(); (time(0)).exec();
} else { } else {

View file

@ -67,8 +67,9 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &
else if (name == "Sig") else if (name == "Sig")
sigs.insert(value); sigs.insert(value);
else if (name == "CA") { else if (name == "CA") {
if (!ca.empty()) corrupt(); if (ca) corrupt();
ca = value; // FIXME: allow blank ca or require skipping field?
ca = parseContentAddressOpt(value);
} }
pos = eol + 1; pos = eol + 1;
@ -104,8 +105,8 @@ std::string NarInfo::to_string(const Store & store) const
for (auto sig : sigs) for (auto sig : sigs)
res += "Sig: " + sig + "\n"; res += "Sig: " + sig + "\n";
if (!ca.empty()) if (ca)
res += "CA: " + ca + "\n"; res += "CA: " + renderContentAddress(*ca) + "\n";
return res; return res;
} }

View file

@ -1,5 +1,6 @@
#pragma once #pragma once
#include "content-address.hh"
#include "types.hh" #include "types.hh"
namespace nix { namespace nix {
@ -61,15 +62,11 @@ public:
typedef std::set<StorePath> StorePathSet; typedef std::set<StorePath> StorePathSet;
typedef std::vector<StorePath> StorePaths; typedef std::vector<StorePath> StorePaths;
typedef std::map<string, StorePath> OutputPathMap;
/* Extension of derivations in the Nix store. */ /* Extension of derivations in the Nix store. */
const std::string drvExtension = ".drv"; const std::string drvExtension = ".drv";
enum struct FileIngestionMethod : uint8_t {
Flat = false,
Recursive = true
};
struct StorePathWithOutputs struct StorePathWithOutputs
{ {
StorePath path; StorePath path;

View file

@ -8,6 +8,7 @@
#include "derivations.hh" #include "derivations.hh"
#include "pool.hh" #include "pool.hh"
#include "finally.hh" #include "finally.hh"
#include "logging.hh"
#include <sys/types.h> #include <sys/types.h>
#include <sys/stat.h> #include <sys/stat.h>
@ -38,6 +39,29 @@ void writeStorePaths(const Store & store, Sink & out, const StorePathSet & paths
out << store.printStorePath(i); out << store.printStorePath(i);
} }
std::map<string, StorePath> readOutputPathMap(const Store & store, Source & from)
{
std::map<string, StorePath> pathMap;
auto rawInput = readStrings<Strings>(from);
if (rawInput.size() % 2)
throw Error("got an odd number of elements from the daemon when trying to read a output path map");
auto curInput = rawInput.begin();
while (curInput != rawInput.end()) {
auto thisKey = *curInput++;
auto thisValue = *curInput++;
pathMap.emplace(thisKey, store.parseStorePath(thisValue));
}
return pathMap;
}
void writeOutputPathMap(const Store & store, Sink & out, const std::map<string, StorePath> & pathMap)
{
out << 2*pathMap.size();
for (auto & i : pathMap) {
out << i.first;
out << store.printStorePath(i.second);
}
}
/* TODO: Separate these store impls into different files, give them better names */ /* TODO: Separate these store impls into different files, give them better names */
RemoteStore::RemoteStore(const Params & params) RemoteStore::RemoteStore(const Params & params)
@ -197,7 +221,7 @@ void RemoteStore::setOptions(Connection & conn)
overrides.erase(settings.maxSilentTime.name); overrides.erase(settings.maxSilentTime.name);
overrides.erase(settings.buildCores.name); overrides.erase(settings.buildCores.name);
overrides.erase(settings.useSubstitutes.name); overrides.erase(settings.useSubstitutes.name);
overrides.erase(settings.showTrace.name); overrides.erase(loggerSettings.showTrace.name);
conn.to << overrides.size(); conn.to << overrides.size();
for (auto & i : overrides) for (auto & i : overrides)
conn.to << i.first << i.second.value; conn.to << i.first << i.second.value;
@ -381,7 +405,7 @@ void RemoteStore::queryPathInfoUncached(const StorePath & path,
if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 16) { if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 16) {
conn->from >> info->ultimate; conn->from >> info->ultimate;
info->sigs = readStrings<StringSet>(conn->from); info->sigs = readStrings<StringSet>(conn->from);
conn->from >> info->ca; info->ca = parseContentAddressOpt(readString(conn->from));
} }
} }
callback(std::move(info)); callback(std::move(info));
@ -412,12 +436,24 @@ StorePathSet RemoteStore::queryValidDerivers(const StorePath & path)
StorePathSet RemoteStore::queryDerivationOutputs(const StorePath & path) StorePathSet RemoteStore::queryDerivationOutputs(const StorePath & path)
{ {
auto conn(getConnection()); auto conn(getConnection());
if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 0x16) {
return Store::queryDerivationOutputs(path);
}
conn->to << wopQueryDerivationOutputs << printStorePath(path); conn->to << wopQueryDerivationOutputs << printStorePath(path);
conn.processStderr(); conn.processStderr();
return readStorePaths<StorePathSet>(*this, conn->from); return readStorePaths<StorePathSet>(*this, conn->from);
} }
OutputPathMap RemoteStore::queryDerivationOutputMap(const StorePath & path)
{
auto conn(getConnection());
conn->to << wopQueryDerivationOutputMap << printStorePath(path);
conn.processStderr();
return readOutputPathMap(*this, conn->from);
}
std::optional<StorePath> RemoteStore::queryPathFromHashPart(const std::string & hashPart) std::optional<StorePath> RemoteStore::queryPathFromHashPart(const std::string & hashPart)
{ {
auto conn(getConnection()); auto conn(getConnection());
@ -465,7 +501,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source,
<< info.narHash.to_string(Base16, false); << info.narHash.to_string(Base16, false);
writeStorePaths(*this, conn->to, info.references); writeStorePaths(*this, conn->to, info.references);
conn->to << info.registrationTime << info.narSize conn->to << info.registrationTime << info.narSize
<< info.ultimate << info.sigs << info.ca << info.ultimate << info.sigs << renderContentAddress(info.ca)
<< repair << !checkSigs; << repair << !checkSigs;
bool tunnel = GET_PROTOCOL_MINOR(conn->daemonVersion) >= 21; bool tunnel = GET_PROTOCOL_MINOR(conn->daemonVersion) >= 21;
if (!tunnel) copyNAR(source, conn->to); if (!tunnel) copyNAR(source, conn->to);
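Note: writeOutputPathMap frames the map as a flat string list: a count of 2 * size followed by alternating output names and store paths, which is why readOutputPathMap can reuse readStrings and then pair up consecutive elements, rejecting odd-length lists. A standalone sketch of that pairing step, with the store-path parsing left out:

    #include <map>
    #include <stdexcept>
    #include <string>
    #include <vector>

    // Pair up ["dev", "/nix/store/...-hello-dev", "out", "/nix/store/...-hello"]
    // the same way readOutputPathMap does; values stay plain strings here.
    std::map<std::string, std::string> pairUp(const std::vector<std::string> & raw)
    {
        if (raw.size() % 2)
            throw std::runtime_error("got an odd number of elements");
        std::map<std::string, std::string> result;
        for (auto it = raw.begin(); it != raw.end(); ) {
            auto name = *it++;
            auto path = *it++;
            result.emplace(name, path);
        }
        return result;
    }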

View file

@ -51,6 +51,7 @@ public:
StorePathSet queryDerivationOutputs(const StorePath & path) override; StorePathSet queryDerivationOutputs(const StorePath & path) override;
OutputPathMap queryDerivationOutputMap(const StorePath & path) override;
std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override; std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override;
StorePathSet querySubstitutablePaths(const StorePathSet & paths) override; StorePathSet querySubstitutablePaths(const StorePathSet & paths) override;

View file

@ -242,6 +242,16 @@ bool Store::PathInfoCacheValue::isKnownNow()
return std::chrono::steady_clock::now() < time_point + ttl; return std::chrono::steady_clock::now() < time_point + ttl;
} }
StorePathSet Store::queryDerivationOutputs(const StorePath & path)
{
auto outputMap = this->queryDerivationOutputMap(path);
StorePathSet outputPaths;
for (auto & i: outputMap) {
outputPaths.emplace(std::move(i.second));
}
return outputPaths;
}
bool Store::isValidPath(const StorePath & storePath) bool Store::isValidPath(const StorePath & storePath)
{ {
std::string hashPart(storePath.hashPart()); std::string hashPart(storePath.hashPart());
@ -471,8 +481,8 @@ void Store::pathInfoToJSON(JSONPlaceholder & jsonOut, const StorePathSet & store
jsonRefs.elem(printStorePath(ref)); jsonRefs.elem(printStorePath(ref));
} }
if (info->ca != "") if (info->ca)
jsonPath.attr("ca", info->ca); jsonPath.attr("ca", renderContentAddress(info->ca));
std::pair<uint64_t, uint64_t> closureSizes; std::pair<uint64_t, uint64_t> closureSizes;
@ -757,41 +767,35 @@ void ValidPathInfo::sign(const Store & store, const SecretKey & secretKey)
sigs.insert(secretKey.signDetached(fingerprint(store))); sigs.insert(secretKey.signDetached(fingerprint(store)));
} }
// FIXME Put this somewhere?
template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;
bool ValidPathInfo::isContentAddressed(const Store & store) const bool ValidPathInfo::isContentAddressed(const Store & store) const
{ {
auto warn = [&]() { if (! ca) return false;
logWarning(
ErrorInfo{
.name = "Path not content-addressed",
.hint = hintfmt("path '%s' claims to be content-addressed but isn't", store.printStorePath(path))
});
};
if (hasPrefix(ca, "text:")) { auto caPath = std::visit(overloaded {
Hash hash(ca.substr(5)); [&](TextHash th) {
if (store.makeTextPath(path.name(), hash, references) == path) return store.makeTextPath(path.name(), th.hash, references);
return true; },
else [&](FixedOutputHash fsh) {
warn();
}
else if (hasPrefix(ca, "fixed:")) {
FileIngestionMethod recursive { ca.compare(6, 2, "r:") == 0 };
Hash hash(ca.substr(recursive == FileIngestionMethod::Recursive ? 8 : 6));
auto refs = references; auto refs = references;
bool hasSelfReference = false; bool hasSelfReference = false;
if (refs.count(path)) { if (refs.count(path)) {
hasSelfReference = true; hasSelfReference = true;
refs.erase(path); refs.erase(path);
} }
if (store.makeFixedOutputPath(recursive, hash, path.name(), refs, hasSelfReference) == path) return store.makeFixedOutputPath(fsh.method, fsh.hash, path.name(), refs, hasSelfReference);
return true;
else
warn();
} }
}, *ca);
return false; bool res = caPath == path;
if (!res)
printError("warning: path '%s' claims to be content-addressed but isn't", store.printStorePath(path));
return res;
} }
@ -822,25 +826,6 @@ Strings ValidPathInfo::shortRefs() const
} }
std::string makeFileIngestionPrefix(const FileIngestionMethod m) {
switch (m) {
case FileIngestionMethod::Flat:
return "";
case FileIngestionMethod::Recursive:
return "r:";
default:
throw Error("impossible, caught both cases");
}
}
std::string makeFixedOutputCA(FileIngestionMethod method, const Hash & hash)
{
return "fixed:"
+ makeFileIngestionPrefix(method)
+ hash.to_string(Base32, true);
}
Derivation Store::derivationFromPath(const StorePath & drvPath) Derivation Store::derivationFromPath(const StorePath & drvPath)
{ {
ensurePath(drvPath); ensurePath(drvPath);
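Note: since queryDerivationOutputs is now a default implementation on top of queryDerivationOutputMap, a store backend only needs to provide the map variant; the set variant is just the value side of the map with the output names dropped. A usage sketch, assuming store-api.hh:

    #include <cassert>

    #include "store-api.hh"

    using namespace nix;

    void checkOutputsAgree(Store & store, const StorePath & drvPath)
    {
        OutputPathMap byName = store.queryDerivationOutputMap(drvPath);
        StorePathSet justPaths = store.queryDerivationOutputs(drvPath);
        for (auto & [name, path] : byName)
            assert(justPaths.count(path));   // every named output path is also in the set
    }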

View file

@ -2,6 +2,7 @@
#include "path.hh" #include "path.hh"
#include "hash.hh" #include "hash.hh"
#include "content-address.hh"
#include "serialise.hh" #include "serialise.hh"
#include "crypto.hh" #include "crypto.hh"
#include "lru-cache.hh" #include "lru-cache.hh"
@ -18,6 +19,7 @@
#include <memory> #include <memory>
#include <string> #include <string>
#include <chrono> #include <chrono>
#include <variant>
namespace nix { namespace nix {
@ -109,7 +111,6 @@ struct SubstitutablePathInfo
typedef std::map<StorePath, SubstitutablePathInfo> SubstitutablePathInfos; typedef std::map<StorePath, SubstitutablePathInfo> SubstitutablePathInfos;
struct ValidPathInfo struct ValidPathInfo
{ {
StorePath path; StorePath path;
@ -138,21 +139,11 @@ struct ValidPathInfo
that a particular output path was produced by a derivation; the that a particular output path was produced by a derivation; the
path then implies the contents.) path then implies the contents.)
Ideally, the content-addressability assertion would just be a Ideally, the content-addressability assertion would just be a Boolean,
Boolean, and the store path would be computed from and the store path would be computed from the name component, narHash
the name component, narHash and references. However, and references. However, we support many types of content addresses.
1) we've accumulated several types of content-addressed paths
over the years; and 2) fixed-output derivations support
multiple hash algorithms and serialisation methods (flat file
vs NAR). Thus, ca has one of the following forms:
* text:sha256:<sha256 hash of file contents>: For paths
computed by makeTextPath() / addTextToStore().
* fixed:<r?>:<ht>:<h>: For paths computed by
makeFixedOutputPath() / addToStore().
*/ */
std::string ca; std::optional<ContentAddress> ca;
bool operator == (const ValidPathInfo & i) const bool operator == (const ValidPathInfo & i) const
{ {
@ -427,8 +418,11 @@ public:
virtual StorePathSet queryValidDerivers(const StorePath & path) { return {}; }; virtual StorePathSet queryValidDerivers(const StorePath & path) { return {}; };
/* Query the outputs of the derivation denoted by `path'. */ /* Query the outputs of the derivation denoted by `path'. */
virtual StorePathSet queryDerivationOutputs(const StorePath & path) virtual StorePathSet queryDerivationOutputs(const StorePath & path);
{ unsupported("queryDerivationOutputs"); }
/* Query the mapping outputName=>outputPath for the given derivation */
virtual OutputPathMap queryDerivationOutputMap(const StorePath & path)
{ unsupported("queryDerivationOutputMap"); }
/* Query the full store path given the hash part of a valid store /* Query the full store path given the hash part of a valid store
path, or empty if the path doesn't exist. */ path, or empty if the path doesn't exist. */
@ -837,15 +831,6 @@ std::optional<ValidPathInfo> decodeValidPathInfo(
std::istream & str, std::istream & str,
bool hashGiven = false); bool hashGiven = false);
/* Compute the prefix to the hash algorithm which indicates how the files were
ingested. */
std::string makeFileIngestionPrefix(const FileIngestionMethod m);
/* Compute the content-addressability assertion (ValidPathInfo::ca)
for paths created by makeFixedOutputPath() / addToStore(). */
std::string makeFixedOutputCA(FileIngestionMethod method, const Hash & hash);
/* Split URI into protocol+hierarchy part and its parameter set. */ /* Split URI into protocol+hierarchy part and its parameter set. */
std::pair<std::string, Store::Params> splitUriAndParams(const std::string & uri); std::pair<std::string, Store::Params> splitUriAndParams(const std::string & uri);

View file

@ -6,7 +6,7 @@ namespace nix {
#define WORKER_MAGIC_1 0x6e697863 #define WORKER_MAGIC_1 0x6e697863
#define WORKER_MAGIC_2 0x6478696f #define WORKER_MAGIC_2 0x6478696f
#define PROTOCOL_VERSION 0x115 #define PROTOCOL_VERSION 0x116
#define GET_PROTOCOL_MAJOR(x) ((x) & 0xff00) #define GET_PROTOCOL_MAJOR(x) ((x) & 0xff00)
#define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff) #define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff)
@ -30,7 +30,7 @@ typedef enum {
wopSetOptions = 19, wopSetOptions = 19,
wopCollectGarbage = 20, wopCollectGarbage = 20,
wopQuerySubstitutablePathInfo = 21, wopQuerySubstitutablePathInfo = 21,
wopQueryDerivationOutputs = 22, wopQueryDerivationOutputs = 22, // obsolete
wopQueryAllValidPaths = 23, wopQueryAllValidPaths = 23,
wopQueryFailedPaths = 24, wopQueryFailedPaths = 24,
wopClearFailedPaths = 25, wopClearFailedPaths = 25,
@ -49,6 +49,7 @@ typedef enum {
wopNarFromPath = 38, wopNarFromPath = 38,
wopAddToStoreNar = 39, wopAddToStoreNar = 39,
wopQueryMissing = 40, wopQueryMissing = 40,
wopQueryDerivationOutputMap = 41,
} WorkerOp; } WorkerOp;
@ -69,5 +70,6 @@ template<class T> T readStorePaths(const Store & store, Source & from);
void writeStorePaths(const Store & store, Sink & out, const StorePathSet & paths); void writeStorePaths(const Store & store, Sink & out, const StorePathSet & paths);
void writeOutputPathMap(const Store & store, Sink & out, const OutputPathMap & paths);
} }
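As a worked example of the macros above: the new PROTOCOL_VERSION 0x116 splits into major 0x100 and minor 0x16 (22 decimal); it is this minor bump that RemoteStore::queryDerivationOutputs tests for before relying on the new wopQueryDerivationOutputMap operation.

    // Compile-time check of the split, using the definitions above.
    static_assert(GET_PROTOCOL_MAJOR(0x116) == 0x100, "major = high byte");
    static_assert(GET_PROTOCOL_MINOR(0x116) == 0x16,  "minor = low byte, 22 decimal");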

View file

@ -262,7 +262,7 @@ static void parse(ParseSink & sink, Source & source, const Path & path)
names[name] = 0; names[name] = 0;
} }
} else if (s == "node") { } else if (s == "node") {
if (s.empty()) throw badArchive("entry name missing"); if (name.empty()) throw badArchive("entry name missing");
parse(sink, source, path + "/" + name); parse(sink, source, path + "/" + name);
} else } else
throw badArchive("unknown field " + s); throw badArchive("unknown field " + s);

View file

@ -7,14 +7,11 @@
namespace nix { namespace nix {
const std::string nativeSystem = SYSTEM; const std::string nativeSystem = SYSTEM;
// addPrefix is used for show-trace. Strings added with addPrefix BaseError & BaseError::addTrace(std::optional<ErrPos> e, hintformat hint)
// will print ahead of the error itself.
BaseError & BaseError::addPrefix(const FormatOrString & fs)
{ {
prefix_ = fs.s + prefix_; err.traces.push_front(Trace { .pos = e, .hint = hint});
return *this; return *this;
} }
@ -28,7 +25,7 @@ const string& BaseError::calcWhat() const
err.name = sname(); err.name = sname();
std::ostringstream oss; std::ostringstream oss;
oss << err; showErrorInfo(oss, err, false);
what_ = oss.str(); what_ = oss.str();
return *what_; return *what_;
@ -56,28 +53,114 @@ string showErrPos(const ErrPos &errPos)
} }
} }
// if nixCode contains lines of code, print them to the ostream, indicating the error column. std::optional<LinesOfCode> getCodeLines(const ErrPos &errPos)
void printCodeLines(std::ostream &out, const string &prefix, const NixCode &nixCode) {
if (errPos.line <= 0)
return std::nullopt;
if (errPos.origin == foFile) {
LinesOfCode loc;
try {
AutoCloseFD fd = open(errPos.file.c_str(), O_RDONLY | O_CLOEXEC);
if (!fd) {
logError(SysError("opening file '%1%'", errPos.file).info());
return std::nullopt;
}
else
{
// count the newlines.
int count = 0;
string line;
int pl = errPos.line - 1;
do
{
line = readLine(fd.get());
++count;
if (count < pl)
{
;
}
else if (count == pl) {
loc.prevLineOfCode = line;
} else if (count == pl + 1) {
loc.errLineOfCode = line;
} else if (count == pl + 2) {
loc.nextLineOfCode = line;
break;
}
} while (true);
return loc;
}
}
catch (EndOfFile &eof) {
if (loc.errLineOfCode.has_value())
return loc;
else
return std::nullopt;
}
catch (std::exception &e) {
printError("error reading nix file: %s\n%s", errPos.file, e.what());
return std::nullopt;
}
} else {
std::istringstream iss(errPos.file);
// count the newlines.
int count = 0;
string line;
int pl = errPos.line - 1;
LinesOfCode loc;
do
{
std::getline(iss, line);
++count;
if (count < pl)
{
;
}
else if (count == pl) {
loc.prevLineOfCode = line;
} else if (count == pl + 1) {
loc.errLineOfCode = line;
} else if (count == pl + 2) {
loc.nextLineOfCode = line;
break;
}
if (!iss.good())
break;
} while (true);
return loc;
}
}
// print lines of code to the ostream, indicating the error column.
void printCodeLines(std::ostream &out,
const string &prefix,
const ErrPos &errPos,
const LinesOfCode &loc)
{ {
// previous line of code. // previous line of code.
if (nixCode.prevLineOfCode.has_value()) { if (loc.prevLineOfCode.has_value()) {
out << std::endl out << std::endl
<< fmt("%1% %|2$5d|| %3%", << fmt("%1% %|2$5d|| %3%",
prefix, prefix,
(nixCode.errPos.line - 1), (errPos.line - 1),
*nixCode.prevLineOfCode); *loc.prevLineOfCode);
} }
if (nixCode.errLineOfCode.has_value()) { if (loc.errLineOfCode.has_value()) {
// line of code containing the error. // line of code containing the error.
out << std::endl out << std::endl
<< fmt("%1% %|2$5d|| %3%", << fmt("%1% %|2$5d|| %3%",
prefix, prefix,
(nixCode.errPos.line), (errPos.line),
*nixCode.errLineOfCode); *loc.errLineOfCode);
// error arrows for the column range. // error arrows for the column range.
if (nixCode.errPos.column > 0) { if (errPos.column > 0) {
int start = nixCode.errPos.column; int start = errPos.column;
std::string spaces; std::string spaces;
for (int i = 0; i < start; ++i) { for (int i = 0; i < start; ++i) {
spaces.append(" "); spaces.append(" ");
@ -94,16 +177,42 @@ void printCodeLines(std::ostream &out, const string &prefix, const NixCode &nixC
} }
// next line of code. // next line of code.
if (nixCode.nextLineOfCode.has_value()) { if (loc.nextLineOfCode.has_value()) {
out << std::endl out << std::endl
<< fmt("%1% %|2$5d|| %3%", << fmt("%1% %|2$5d|| %3%",
prefix, prefix,
(nixCode.errPos.line + 1), (errPos.line + 1),
*nixCode.nextLineOfCode); *loc.nextLineOfCode);
} }
} }
std::ostream& operator<<(std::ostream &out, const ErrorInfo &einfo) void printAtPos(const string &prefix, const ErrPos &pos, std::ostream &out)
{
if (pos)
{
switch (pos.origin) {
case foFile: {
out << prefix << ANSI_BLUE << "at: " << ANSI_YELLOW << showErrPos(pos) <<
ANSI_BLUE << " in file: " << ANSI_NORMAL << pos.file;
break;
}
case foString: {
out << prefix << ANSI_BLUE << "at: " << ANSI_YELLOW << showErrPos(pos) <<
ANSI_BLUE << " from string" << ANSI_NORMAL;
break;
}
case foStdin: {
out << prefix << ANSI_BLUE << "at: " << ANSI_YELLOW << showErrPos(pos) <<
ANSI_BLUE << " from stdin" << ANSI_NORMAL;
break;
}
default:
throw Error("invalid FileOrigin in errPos");
}
}
}
std::ostream& showErrorInfo(std::ostream &out, const ErrorInfo &einfo, bool showTrace)
{ {
auto errwidth = std::max<size_t>(getWindowSize().second, 20); auto errwidth = std::max<size_t>(getWindowSize().second, 20);
string prefix = ""; string prefix = "";
@ -158,8 +267,12 @@ std::ostream& operator<<(std::ostream &out, const ErrorInfo &einfo)
} }
} }
auto ndl = prefix.length() + levelString.length() + 3 + einfo.name.length() + einfo.programName.value_or("").length(); auto ndl = prefix.length()
auto dashwidth = ndl > (errwidth - 3) ? 3 : errwidth - ndl; + filterANSIEscapes(levelString, true).length()
+ 7
+ einfo.name.length()
+ einfo.programName.value_or("").length();
auto dashwidth = std::max<int>(errwidth - ndl, 3);
std::string dashes(dashwidth, '-'); std::string dashes(dashwidth, '-');
@ -179,16 +292,9 @@ std::ostream& operator<<(std::ostream &out, const ErrorInfo &einfo)
einfo.programName.value_or("")); einfo.programName.value_or(""));
bool nl = false; // intersperse newline between sections. bool nl = false; // intersperse newline between sections.
if (einfo.nixCode.has_value()) { if (einfo.errPos.has_value() && (*einfo.errPos)) {
if (einfo.nixCode->errPos.file != "") { out << prefix << std::endl;
// filename, line, column. printAtPos(prefix, *einfo.errPos, out);
out << std::endl << fmt("%1%in file: " ANSI_BLUE "%2% %3%" ANSI_NORMAL,
prefix,
einfo.nixCode->errPos.file,
showErrPos(einfo.nixCode->errPos));
} else {
out << std::endl << fmt("%1%from command line argument", prefix);
}
nl = true; nl = true;
} }
@ -200,13 +306,17 @@ std::ostream& operator<<(std::ostream &out, const ErrorInfo &einfo)
nl = true; nl = true;
} }
if (einfo.errPos.has_value() && (*einfo.errPos)) {
auto loc = getCodeLines(*einfo.errPos);
// lines of code. // lines of code.
if (einfo.nixCode.has_value() && einfo.nixCode->errLineOfCode.has_value()) { if (loc.has_value()) {
if (nl) if (nl)
out << std::endl << prefix; out << std::endl << prefix;
printCodeLines(out, prefix, *einfo.nixCode); printCodeLines(out, prefix, *einfo.errPos, *loc);
nl = true; nl = true;
} }
}
// hint // hint
if (einfo.hint.has_value()) { if (einfo.hint.has_value()) {
@ -216,6 +326,59 @@ std::ostream& operator<<(std::ostream &out, const ErrorInfo &einfo)
nl = true; nl = true;
} }
// traces
if (showTrace && !einfo.traces.empty())
{
const string tracetitle(" show-trace ");
int fill = errwidth - tracetitle.length();
int lw = 0;
int rw = 0;
const int min_dashes = 3;
if (fill > min_dashes * 2) {
if (fill % 2 != 0) {
lw = fill / 2;
rw = lw + 1;
}
else
{
lw = rw = fill / 2;
}
}
else
lw = rw = min_dashes;
if (nl)
out << std::endl << prefix;
out << ANSI_BLUE << std::string(lw, '-') << tracetitle << std::string(rw, '-') << ANSI_NORMAL;
for (auto iter = einfo.traces.rbegin(); iter != einfo.traces.rend(); ++iter)
{
try {
out << std::endl << prefix;
out << ANSI_BLUE << "trace: " << ANSI_NORMAL << iter->hint.str();
nl = true;
if (*iter->pos) {
auto pos = iter->pos.value();
out << std::endl << prefix;
printAtPos(prefix, pos, out);
auto loc = getCodeLines(pos);
if (loc.has_value())
{
out << std::endl << prefix;
printCodeLines(out, prefix, pos, *loc);
out << std::endl << prefix;
}
}
} catch(const std::bad_optional_access& e) {
out << iter->hint.str() << std::endl;
}
}
}
return out; return out;
} }
} }
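Note: for callers, the new flow is that traces accumulate on the exception via addTrace and are only rendered when showErrorInfo is called with showTrace = true, which is now driven by the show-trace setting that moved into loggerSettings. A sketch, assuming error.hh and logging.hh from this patch; the messages are made up:

    #include <iostream>

    #include "error.hh"
    #include "logging.hh"

    using namespace nix;

    void example()
    {
        try {
            throw Error("cannot frobnicate '%1%'", "foo");   // hypothetical failure
        } catch (Error & e) {
            e.addTrace(std::nullopt, "while processing item %1% of %2%", 1, 3);
            showErrorInfo(std::cerr, e.info(), loggerSettings.showTrace.get());
        }
    }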

View file

@ -50,11 +50,25 @@ typedef enum {
lvlVomit lvlVomit
} Verbosity; } Verbosity;
typedef enum {
foFile,
foStdin,
foString
} FileOrigin;
// the lines of code surrounding an error.
struct LinesOfCode {
std::optional<string> prevLineOfCode;
std::optional<string> errLineOfCode;
std::optional<string> nextLineOfCode;
};
// ErrPos indicates the location of an error in a nix file. // ErrPos indicates the location of an error in a nix file.
struct ErrPos { struct ErrPos {
int line = 0; int line = 0;
int column = 0; int column = 0;
string file; string file;
FileOrigin origin;
operator bool() const operator bool() const
{ {
@ -65,9 +79,14 @@ struct ErrPos {
template <class P> template <class P>
ErrPos& operator=(const P &pos) ErrPos& operator=(const P &pos)
{ {
origin = pos.origin;
line = pos.line; line = pos.line;
column = pos.column; column = pos.column;
// is file symbol null?
if (pos.file.set())
file = pos.file; file = pos.file;
else
file = "";
return *this; return *this;
} }
@ -78,11 +97,9 @@ struct ErrPos {
} }
}; };
struct NixCode { struct Trace {
ErrPos errPos; std::optional<ErrPos> pos;
std::optional<string> prevLineOfCode; hintformat hint;
std::optional<string> errLineOfCode;
std::optional<string> nextLineOfCode;
}; };
struct ErrorInfo { struct ErrorInfo {
@ -90,19 +107,19 @@ struct ErrorInfo {
string name; string name;
string description; string description;
std::optional<hintformat> hint; std::optional<hintformat> hint;
std::optional<NixCode> nixCode; std::optional<ErrPos> errPos;
std::list<Trace> traces;
static std::optional<string> programName; static std::optional<string> programName;
}; };
std::ostream& operator<<(std::ostream &out, const ErrorInfo &einfo); std::ostream& showErrorInfo(std::ostream &out, const ErrorInfo &einfo, bool showTrace);
/* BaseError should generally not be caught, as it has Interrupted as /* BaseError should generally not be caught, as it has Interrupted as
a subclass. Catch Error instead. */ a subclass. Catch Error instead. */
class BaseError : public std::exception class BaseError : public std::exception
{ {
protected: protected:
string prefix_; // used for location traces etc.
mutable ErrorInfo err; mutable ErrorInfo err;
mutable std::optional<string> what_; mutable std::optional<string> what_;
@ -150,10 +167,17 @@ public:
#endif #endif
const string & msg() const { return calcWhat(); } const string & msg() const { return calcWhat(); }
const string & prefix() const { return prefix_; }
BaseError & addPrefix(const FormatOrString & fs);
const ErrorInfo & info() { calcWhat(); return err; } const ErrorInfo & info() { calcWhat(); return err; }
template<typename... Args>
BaseError & addTrace(std::optional<ErrPos> e, const string &fs, const Args & ... args)
{
return addTrace(e, hintfmt(fs, args...));
}
BaseError & addTrace(std::optional<ErrPos> e, hintformat hint);
bool hasTrace() const { return !err.traces.empty(); }
}; };
#define MakeError(newClass, superClass) \ #define MakeError(newClass, superClass) \

View file

@ -1,6 +1,7 @@
#pragma once #pragma once
#include <boost/format.hpp> #include <boost/format.hpp>
#include <boost/algorithm/string/replace.hpp>
#include <string> #include <string>
#include "ansicolor.hh" #include "ansicolor.hh"
@ -103,7 +104,9 @@ class hintformat
public: public:
hintformat(const string &format) :fmt(format) hintformat(const string &format) :fmt(format)
{ {
fmt.exceptions(boost::io::all_error_bits ^ boost::io::too_many_args_bit); fmt.exceptions(boost::io::all_error_bits ^
boost::io::too_many_args_bit ^
boost::io::too_few_args_bit);
} }
hintformat(const hintformat &hf) hintformat(const hintformat &hf)
@ -117,6 +120,13 @@ public:
return *this; return *this;
} }
template<class T>
hintformat& operator%(const normaltxt<T> &value)
{
fmt % value.value;
return *this;
}
std::string str() const std::string str() const
{ {
return fmt.str(); return fmt.str();
@ -136,4 +146,9 @@ inline hintformat hintfmt(const std::string & fs, const Args & ... args)
return f; return f;
} }
inline hintformat hintfmt(std::string plain_string)
{
// we won't be receiving any args in this case, so just print the original string
return hintfmt("%s", normaltxt(plain_string));
}
} }
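Note: the new single-argument overload treats a plain message as literal text: it is routed through "%s" wrapped in normaltxt, so stray '%' characters no longer trip boost::format and the text is not colourised. A sketch, assuming this header (the hintfmt unit tests at the end of this patch exercise the same behaviour):

    #include <cassert>

    #include "fmt.hh"

    int main()
    {
        using namespace nix;
        // Literal text passes through untouched, '%' and all, with no ANSI colour codes.
        assert(hintfmt("100% done, 0 errors").str() == "100% done, 0 errors");
        return 0;
    }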

View file

@ -19,7 +19,7 @@ namespace nix {
void Hash::init() void Hash::init()
{ {
if (!type) abort(); assert(type);
switch (*type) { switch (*type) {
case htMD5: hashSize = md5HashSize; break; case htMD5: hashSize = md5HashSize; break;
case htSHA1: hashSize = sha1HashSize; break; case htSHA1: hashSize = sha1HashSize; break;
@ -101,15 +101,15 @@ static string printHash32(const Hash & hash)
string printHash16or32(const Hash & hash) string printHash16or32(const Hash & hash)
{ {
assert(hash.type);
return hash.to_string(hash.type == htMD5 ? Base16 : Base32, false); return hash.to_string(hash.type == htMD5 ? Base16 : Base32, false);
} }
HashType assertInitHashType(const Hash & h) { HashType assertInitHashType(const Hash & h)
if (h.type) {
assert(h.type);
return *h.type; return *h.type;
else
abort();
} }
std::string Hash::to_string(Base base, bool includeType) const std::string Hash::to_string(Base base, bool includeType) const
@ -223,7 +223,7 @@ Hash newHashAllowEmpty(std::string hashStr, std::optional<HashType> ht)
if (!ht) if (!ht)
throw BadHash("empty hash requires explicit hash type"); throw BadHash("empty hash requires explicit hash type");
Hash h(*ht); Hash h(*ht);
warn("found empty hash, assuming '%s'", h.to_string(Base::SRI, true)); warn("found empty hash, assuming '%s'", h.to_string(SRI, true));
return h; return h;
} else } else
return Hash(hashStr, ht); return Hash(hashStr, ht);
@ -363,14 +363,15 @@ HashType parseHashType(const string & s)
string printHashType(HashType ht) string printHashType(HashType ht)
{ {
switch (ht) { switch (ht) {
case htMD5: return "md5"; break; case htMD5: return "md5";
case htSHA1: return "sha1"; break; case htSHA1: return "sha1";
case htSHA256: return "sha256"; break; case htSHA256: return "sha256";
case htSHA512: return "sha512"; break; case htSHA512: return "sha512";
} default:
// illegal hash type enum value internally, as opposed to external input // illegal hash type enum value internally, as opposed to external input
// which should be validated with nice error message. // which should be validated with nice error message.
abort(); assert(false);
}
} }
} }

View file

@ -10,7 +10,7 @@ namespace nix {
MakeError(BadHash, Error); MakeError(BadHash, Error);
enum HashType : char { htMD5, htSHA1, htSHA256, htSHA512 }; enum HashType : char { htMD5 = 42, htSHA1, htSHA256, htSHA512 };
const int md5HashSize = 16; const int md5HashSize = 16;

View file

@ -1,5 +1,6 @@
#include "logging.hh" #include "logging.hh"
#include "util.hh" #include "util.hh"
#include "config.hh"
#include <atomic> #include <atomic>
#include <nlohmann/json.hpp> #include <nlohmann/json.hpp>
@ -7,6 +8,10 @@
namespace nix { namespace nix {
LoggerSettings loggerSettings;
static GlobalConfig::Register r1(&loggerSettings);
static thread_local ActivityId curActivity = 0; static thread_local ActivityId curActivity = 0;
ActivityId getCurActivity() ActivityId getCurActivity()
@ -72,11 +77,12 @@ public:
void logEI(const ErrorInfo & ei) override void logEI(const ErrorInfo & ei) override
{ {
std::stringstream oss; std::stringstream oss;
oss << ei; showErrorInfo(oss, ei, loggerSettings.showTrace.get());
log(ei.level, oss.str()); log(ei.level, oss.str());
} }
void startActivity(ActivityId act, Verbosity lvl, ActivityType type, void startActivity(ActivityId act, Verbosity lvl, ActivityType type,
const std::string & s, const Fields & fields, ActivityId parent) const std::string & s, const Fields & fields, ActivityId parent)
override override
@ -173,7 +179,7 @@ struct JSONLogger : Logger {
void logEI(const ErrorInfo & ei) override void logEI(const ErrorInfo & ei) override
{ {
std::ostringstream oss; std::ostringstream oss;
oss << ei; showErrorInfo(oss, ei, loggerSettings.showTrace.get());
nlohmann::json json; nlohmann::json json;
json["action"] = "msg"; json["action"] = "msg";

View file

@ -2,6 +2,7 @@
#include "types.hh" #include "types.hh"
#include "error.hh" #include "error.hh"
#include "config.hh"
namespace nix { namespace nix {
@ -34,6 +35,16 @@ typedef enum {
typedef uint64_t ActivityId; typedef uint64_t ActivityId;
struct LoggerSettings : Config
{
Setting<bool> showTrace{this,
false,
"show-trace",
"Whether to show a stack trace on evaluation errors."};
};
extern LoggerSettings loggerSettings;
class Logger class Logger
{ {
friend struct Activity; friend struct Activity;

View file

@ -1,6 +1,7 @@
#include "logging.hh" #include "logging.hh"
#include "nixexpr.hh" #include "nixexpr.hh"
#include "util.hh" #include "util.hh"
#include <fstream>
#include <gtest/gtest.h> #include <gtest/gtest.h>
@ -10,6 +11,13 @@ namespace nix {
* logEI * logEI
* --------------------------------------------------------------------------*/ * --------------------------------------------------------------------------*/
const char *test_file =
"previous line of code\n"
"this is the problem line of code\n"
"next line of code\n";
const char *one_liner =
"this is the other problem line of code";
TEST(logEI, catpuresBasicProperties) { TEST(logEI, catpuresBasicProperties) {
MakeError(TestError, Error); MakeError(TestError, Error);
@ -42,7 +50,7 @@ namespace nix {
logger->logEI(ei); logger->logEI(ei);
auto str = testing::internal::GetCapturedStderr(); auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- TestError --- error-unit-test\x1B[0m\n\x1B[33;1m\x1B[0minitial error\x1B[0m; subsequent error message.\n"); ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- TestError --- error-unit-test\x1B[0m\ninitial error; subsequent error message.\n");
} }
} }
@ -60,8 +68,7 @@ namespace nix {
logError(e.info()); logError(e.info());
auto str = testing::internal::GetCapturedStderr(); auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- SysError --- error-unit-test\x1B[0m\n\x1B[33;1m\x1B[0mstatting file\x1B[0m: \x1B[33;1mBad file descriptor\x1B[0m\n"); ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- SysError --- error-unit-test\x1B[0m\nstatting file: \x1B[33;1mBad file descriptor\x1B[0m\n");
} }
} }
@ -137,7 +144,6 @@ namespace nix {
* logError * logError
* --------------------------------------------------------------------------*/ * --------------------------------------------------------------------------*/
TEST(logError, logErrorWithoutHintOrCode) { TEST(logError, logErrorWithoutHintOrCode) {
testing::internal::CaptureStderr(); testing::internal::CaptureStderr();
@ -152,7 +158,7 @@ namespace nix {
TEST(logError, logErrorWithPreviousAndNextLinesOfCode) { TEST(logError, logErrorWithPreviousAndNextLinesOfCode) {
SymbolTable testTable; SymbolTable testTable;
auto problem_file = testTable.create("myfile.nix"); auto problem_file = testTable.create(test_file);
testing::internal::CaptureStderr(); testing::internal::CaptureStderr();
@ -162,21 +168,16 @@ namespace nix {
.hint = hintfmt("this hint has %1% templated %2%!!", .hint = hintfmt("this hint has %1% templated %2%!!",
"yellow", "yellow",
"values"), "values"),
.nixCode = NixCode { .errPos = Pos(foString, problem_file, 02, 13),
.errPos = Pos(problem_file, 40, 13), });
.prevLineOfCode = "previous line of code",
.errLineOfCode = "this is the problem line of code",
.nextLineOfCode = "next line of code",
}});
auto str = testing::internal::GetCapturedStderr(); auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- error name --- error-unit-test\x1B[0m\nin file: \x1B[34;1mmyfile.nix (40:13)\x1B[0m\n\nerror with code lines\n\n 39| previous line of code\n 40| this is the problem line of code\n | \x1B[31;1m^\x1B[0m\n 41| next line of code\n\nthis hint has \x1B[33;1myellow\x1B[0m templated \x1B[33;1mvalues\x1B[0m!!\n"); ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- error name --- error-unit-test\x1B[0m\n\x1B[34;1mat: \x1B[33;1m(2:13)\x1B[34;1m from string\x1B[0m\n\nerror with code lines\n\n 1| previous line of code\n 2| this is the problem line of code\n | \x1B[31;1m^\x1B[0m\n 3| next line of code\n\nthis hint has \x1B[33;1myellow\x1B[0m templated \x1B[33;1mvalues\x1B[0m!!\n");
} }
TEST(logError, logErrorWithoutLinesOfCode) { TEST(logError, logErrorWithInvalidFile) {
SymbolTable testTable; SymbolTable testTable;
auto problem_file = testTable.create("myfile.nix"); auto problem_file = testTable.create("invalid filename");
testing::internal::CaptureStderr(); testing::internal::CaptureStderr();
logError({ logError({
@ -185,28 +186,23 @@ namespace nix {
.hint = hintfmt("this hint has %1% templated %2%!!", .hint = hintfmt("this hint has %1% templated %2%!!",
"yellow", "yellow",
"values"), "values"),
.nixCode = NixCode { .errPos = Pos(foFile, problem_file, 02, 13)
.errPos = Pos(problem_file, 40, 13) });
}});
auto str = testing::internal::GetCapturedStderr(); auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- error name --- error-unit-test\x1B[0m\nin file: \x1B[34;1mmyfile.nix (40:13)\x1B[0m\n\nerror without any code lines.\n\nthis hint has \x1B[33;1myellow\x1B[0m templated \x1B[33;1mvalues\x1B[0m!!\n"); ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- SysError --- error-unit-test\x1B[0m\nopening file '\x1B[33;1minvalid filename\x1B[0m': \x1B[33;1mNo such file or directory\x1B[0m\n\x1B[31;1merror:\x1B[0m\x1B[34;1m --- error name --- error-unit-test\x1B[0m\n\x1B[34;1mat: \x1B[33;1m(2:13)\x1B[34;1m in file: \x1B[0minvalid filename\n\nerror without any code lines.\n\nthis hint has \x1B[33;1myellow\x1B[0m templated \x1B[33;1mvalues\x1B[0m!!\n");
} }
TEST(logError, logErrorWithOnlyHintAndName) { TEST(logError, logErrorWithOnlyHintAndName) {
SymbolTable testTable;
auto problem_file = testTable.create("myfile.nix");
testing::internal::CaptureStderr(); testing::internal::CaptureStderr();
logError({ logError({
.name = "error name", .name = "error name",
.hint = hintfmt("hint %1%", "only"), .hint = hintfmt("hint %1%", "only"),
.nixCode = NixCode { });
.errPos = Pos(problem_file, 40, 13)
}});
auto str = testing::internal::GetCapturedStderr(); auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- error name --- error-unit-test\x1B[0m\nin file: \x1B[34;1mmyfile.nix (40:13)\x1B[0m\n\nhint \x1B[33;1monly\x1B[0m\n"); ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- error name --- error-unit-test\x1B[0m\nhint \x1B[33;1monly\x1B[0m\n");
} }
@ -219,18 +215,18 @@ namespace nix {
logWarning({ logWarning({
.name = "name", .name = "name",
.description = "error description", .description = "warning description",
.hint = hintfmt("there was a %1%", "warning"), .hint = hintfmt("there was a %1%", "warning"),
}); });
auto str = testing::internal::GetCapturedStderr(); auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[33;1mwarning:\x1B[0m\x1B[34;1m --- name --- error-unit-test\x1B[0m\nerror description\n\nthere was a \x1B[33;1mwarning\x1B[0m\n"); ASSERT_STREQ(str.c_str(), "\x1B[33;1mwarning:\x1B[0m\x1B[34;1m --- name --- error-unit-test\x1B[0m\nwarning description\n\nthere was a \x1B[33;1mwarning\x1B[0m\n");
} }
TEST(logWarning, logWarningWithFileLineNumAndCode) { TEST(logWarning, logWarningWithFileLineNumAndCode) {
SymbolTable testTable; SymbolTable testTable;
auto problem_file = testTable.create("myfile.nix"); auto problem_file = testTable.create(test_file);
testing::internal::CaptureStderr(); testing::internal::CaptureStderr();
@ -240,16 +236,123 @@ namespace nix {
.hint = hintfmt("this hint has %1% templated %2%!!", .hint = hintfmt("this hint has %1% templated %2%!!",
"yellow", "yellow",
"values"), "values"),
.nixCode = NixCode { .errPos = Pos(foStdin, problem_file, 2, 13),
.errPos = Pos(problem_file, 40, 13), });
.prevLineOfCode = std::nullopt,
.errLineOfCode = "this is the problem line of code",
.nextLineOfCode = std::nullopt
}});
auto str = testing::internal::GetCapturedStderr(); auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[33;1mwarning:\x1B[0m\x1B[34;1m --- warning name --- error-unit-test\x1B[0m\nin file: \x1B[34;1mmyfile.nix (40:13)\x1B[0m\n\nwarning description\n\n 40| this is the problem line of code\n | \x1B[31;1m^\x1B[0m\n\nthis hint has \x1B[33;1myellow\x1B[0m templated \x1B[33;1mvalues\x1B[0m!!\n"); ASSERT_STREQ(str.c_str(), "\x1B[33;1mwarning:\x1B[0m\x1B[34;1m --- warning name --- error-unit-test\x1B[0m\n\x1B[34;1mat: \x1B[33;1m(2:13)\x1B[34;1m from stdin\x1B[0m\n\nwarning description\n\n 1| previous line of code\n 2| this is the problem line of code\n | \x1B[31;1m^\x1B[0m\n 3| next line of code\n\nthis hint has \x1B[33;1myellow\x1B[0m templated \x1B[33;1mvalues\x1B[0m!!\n");
}
/* ----------------------------------------------------------------------------
* traces
* --------------------------------------------------------------------------*/
TEST(addTrace, showTracesWithShowTrace) {
SymbolTable testTable;
auto problem_file = testTable.create(test_file);
auto oneliner_file = testTable.create(one_liner);
auto e = AssertionError(ErrorInfo {
.name = "wat",
.description = "a well-known problem occurred",
.hint = hintfmt("it has been %1% days since our last error", "zero"),
.errPos = Pos(foString, problem_file, 2, 13),
});
e.addTrace(Pos(foStdin, oneliner_file, 1, 19), "while trying to compute %1%", 42);
e.addTrace(std::nullopt, "while doing something without a %1%", "pos");
testing::internal::CaptureStderr();
loggerSettings.showTrace.assign(true);
logError(e.info());
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- AssertionError --- error-unit-test\x1B[0m\n\x1B[34;1mat: \x1B[33;1m(2:13)\x1B[34;1m from string\x1B[0m\n\na well-known problem occurred\n\n 1| previous line of code\n 2| this is the problem line of code\n | \x1B[31;1m^\x1B[0m\n 3| next line of code\n\nit has been \x1B[33;1mzero\x1B[0m days since our last error\n\x1B[34;1m---- show-trace ----\x1B[0m\n\x1B[34;1mtrace: \x1B[0mwhile trying to compute \x1B[33;1m42\x1B[0m\n\x1B[34;1mat: \x1B[33;1m(1:19)\x1B[34;1m from stdin\x1B[0m\n\n 1| this is the other problem line of code\n | \x1B[31;1m^\x1B[0m\n\n\x1B[34;1mtrace: \x1B[0mwhile doing something without a \x1B[33;1mpos\x1B[0m\n");
}
TEST(addTrace, hideTracesWithoutShowTrace) {
SymbolTable testTable;
auto problem_file = testTable.create(test_file);
auto oneliner_file = testTable.create(one_liner);
auto e = AssertionError(ErrorInfo {
.name = "wat",
.description = "a well-known problem occurred",
.hint = hintfmt("it has been %1% days since our last error", "zero"),
.errPos = Pos(foString, problem_file, 2, 13),
});
e.addTrace(Pos(foStdin, oneliner_file, 1, 19), "while trying to compute %1%", 42);
e.addTrace(std::nullopt, "while doing something without a %1%", "pos");
testing::internal::CaptureStderr();
loggerSettings.showTrace.assign(false);
logError(e.info());
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- AssertionError --- error-unit-test\x1B[0m\n\x1B[34;1mat: \x1B[33;1m(2:13)\x1B[34;1m from string\x1B[0m\n\na well-known problem occurred\n\n 1| previous line of code\n 2| this is the problem line of code\n | \x1B[31;1m^\x1B[0m\n 3| next line of code\n\nit has been \x1B[33;1mzero\x1B[0m days since our last error\n");
}
/* ----------------------------------------------------------------------------
* hintfmt
* --------------------------------------------------------------------------*/
TEST(hintfmt, percentStringWithoutArgs) {
const char *teststr = "this is 100%s correct!";
ASSERT_STREQ(
hintfmt(teststr).str().c_str(),
teststr);
}
TEST(hintfmt, fmtToHintfmt) {
ASSERT_STREQ(
hintfmt(fmt("the color of this this text is %1%", "not yellow")).str().c_str(),
"the color of this this text is not yellow");
}
TEST(hintfmt, tooFewArguments) {
ASSERT_STREQ(
hintfmt("only one arg %1% %2%", "fulfilled").str().c_str(),
"only one arg " ANSI_YELLOW "fulfilled" ANSI_NORMAL " ");
}
TEST(hintfmt, tooManyArguments) {
ASSERT_STREQ(
hintfmt("what about this %1% %2%", "%3%", "one", "two").str().c_str(),
"what about this " ANSI_YELLOW "%3%" ANSI_NORMAL " " ANSI_YELLOW "one" ANSI_NORMAL);
}
/* ----------------------------------------------------------------------------
* ErrPos
* --------------------------------------------------------------------------*/
TEST(errpos, invalidPos) {
// contains an invalid symbol, which we should not dereference!
Pos invalid;
// constructing without access violation.
ErrPos ep(invalid);
// assignment without access violation.
ep = invalid;
} }
} }
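
The tests above pin down the new trace-carrying error API: an ErrorInfo may carry an errPos, callers attach further context with addTrace, and the collected traces are only rendered when loggerSettings.showTrace is enabled. The following is a minimal sketch of that calling pattern, assembled from the call sites in this diff; the header names and the Error("...") constructor are assumptions, not something this commit shows.

// Sketch only: the header locations and the Error(string) constructor are assumed.
#include "error.hh"     // assumed home of Error, ErrorInfo, hintfmt, addTrace
#include "logging.hh"   // assumed home of logError, loggerSettings
#include <string>

using namespace nix;

static void failingStep()
{
    throw Error("the inner step failed");       // assumed formatting constructor
}

static void outerStep(const std::string & attrPath)
{
    try {
        failingStep();
    } catch (Error & e) {
        // The pattern this commit switches to: add context as a trace
        // instead of the old e.addPrefix(fmt(...)), then rethrow.
        e.addTrace(std::nullopt, "while evaluating the attribute '%s'", attrPath);
        throw;
    }
}

int main()
{
    loggerSettings.showTrace.assign(true);      // traces are printed only when enabled
    try {
        outerStep("foo.bar");
    } catch (Error & e) {
        logError(e.info());                     // renders the error plus its traces
    }
    return 0;
}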

View file

@ -593,7 +593,7 @@ static void upgradeDerivations(Globals & globals,
} else newElems.push_back(i); } else newElems.push_back(i);
} catch (Error & e) { } catch (Error & e) {
e.addPrefix(fmt("while trying to find an upgrade for '%s':\n", i.queryName())); e.addTrace(std::nullopt, "while trying to find an upgrade for '%s'", i.queryName());
throw; throw;
} }
} }
@ -1185,7 +1185,7 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs)
} catch (AssertionError & e) { } catch (AssertionError & e) {
printMsg(lvlTalkative, "skipping derivation named '%1%' which gives an assertion failure", i.queryName()); printMsg(lvlTalkative, "skipping derivation named '%1%' which gives an assertion failure", i.queryName());
} catch (Error & e) { } catch (Error & e) {
e.addPrefix(fmt("while querying the derivation named '%1%':\n", i.queryName())); e.addTrace(std::nullopt, "while querying the derivation named '%1%'", i.queryName());
throw; throw;
} }
} }

View file

@ -864,7 +864,7 @@ static void opServe(Strings opFlags, Strings opArgs)
out << info->narSize // downloadSize out << info->narSize // downloadSize
<< info->narSize; << info->narSize;
if (GET_PROTOCOL_MINOR(clientVersion) >= 4) if (GET_PROTOCOL_MINOR(clientVersion) >= 4)
out << (info->narHash ? info->narHash.to_string(Base32, true) : "") << info->ca << info->sigs; out << (info->narHash ? info->narHash.to_string(Base32, true) : "") << renderContentAddress(info->ca) << info->sigs;
} catch (InvalidPath &) { } catch (InvalidPath &) {
} }
} }
@ -952,7 +952,7 @@ static void opServe(Strings opFlags, Strings opArgs)
info.references = readStorePaths<StorePathSet>(*store, in); info.references = readStorePaths<StorePathSet>(*store, in);
in >> info.registrationTime >> info.narSize >> info.ultimate; in >> info.registrationTime >> info.narSize >> info.ultimate;
info.sigs = readStrings<StringSet>(in); info.sigs = readStrings<StringSet>(in);
in >> info.ca; info.ca = parseContentAddressOpt(readString(in));
if (info.narSize == 0) if (info.narSize == 0)
throw Error("narInfo is too old and missing the narSize field"); throw Error("narInfo is too old and missing the narSize field");

View file

@ -48,7 +48,10 @@ struct CmdAddToStore : MixDryRun, StoreCommand
ValidPathInfo info(store->makeFixedOutputPath(FileIngestionMethod::Recursive, narHash, *namePart)); ValidPathInfo info(store->makeFixedOutputPath(FileIngestionMethod::Recursive, narHash, *namePart));
info.narHash = narHash; info.narHash = narHash;
info.narSize = sink.s->size(); info.narSize = sink.s->size();
info.ca = makeFixedOutputCA(FileIngestionMethod::Recursive, info.narHash); info.ca = std::optional { FixedOutputHash {
.method = FileIngestionMethod::Recursive,
.hash = info.narHash,
} };
if (!dryRun) { if (!dryRun) {
auto source = StringSource { *sink.s }; auto source = StringSource { *sink.s };

View file

@ -50,7 +50,7 @@ BuildEnvironment readEnvironment(const Path & path)
R"re((?:\$?'(?:[^'\\]|\\[abeEfnrtv\\'"?])*'))re"; R"re((?:\$?'(?:[^'\\]|\\[abeEfnrtv\\'"?])*'))re";
static std::string indexedArrayRegex = static std::string indexedArrayRegex =
R"re((?:\(( *\[[0-9]+]="(?:[^"\\]|\\.)*")**\)))re"; R"re((?:\(( *\[[0-9]+\]="(?:[^"\\]|\\.)*")*\)))re";
static std::regex varRegex( static std::regex varRegex(
"^(" + varNameRegex + ")=(" + simpleStringRegex + "|" + quotedStringRegex + "|" + indexedArrayRegex + ")\n"); "^(" + varNameRegex + ")=(" + simpleStringRegex + "|" + quotedStringRegex + "|" + indexedArrayRegex + ")\n");
@ -135,13 +135,7 @@ StorePath getDerivationEnvironment(ref<Store> store, const StorePath & drvPath)
drv.inputSrcs.insert(std::move(getEnvShPath)); drv.inputSrcs.insert(std::move(getEnvShPath));
Hash h = hashDerivationModulo(*store, drv, true); Hash h = hashDerivationModulo(*store, drv, true);
auto shellOutPath = store->makeOutputPath("out", h, drvName); auto shellOutPath = store->makeOutputPath("out", h, drvName);
drv.outputs.insert_or_assign("out", DerivationOutput { drv.outputs.insert_or_assign("out", DerivationOutput { .path = shellOutPath });
.path = shellOutPath,
.hash = DerivationOutputHash {
.method = FileIngestionMethod::Flat,
.hash = Hash { },
},
});
drv.env["out"] = store->printStorePath(shellOutPath); drv.env["out"] = store->printStorePath(shellOutPath);
auto shellDrvPath2 = writeDerivation(store, drv, drvName); auto shellDrvPath2 = writeDerivation(store, drv, drvName);

View file

@ -1,5 +1,6 @@
#include "command.hh" #include "command.hh"
#include "hash.hh" #include "hash.hh"
#include "content-address.hh"
#include "legacy.hh" #include "legacy.hh"
#include "shared.hh" #include "shared.hh"
#include "references.hh" #include "references.hh"

View file

@ -82,7 +82,10 @@ struct CmdMakeContentAddressable : StorePathsCommand, MixJSON
if (hasSelfReference) info.references.insert(info.path); if (hasSelfReference) info.references.insert(info.path);
info.narHash = narHash; info.narHash = narHash;
info.narSize = sink.s->size(); info.narSize = sink.s->size();
info.ca = makeFixedOutputCA(FileIngestionMethod::Recursive, info.narHash); info.ca = FixedOutputHash {
.method = FileIngestionMethod::Recursive,
.hash = info.narHash,
};
if (!json) if (!json)
printInfo("rewrote '%s' to '%s'", pathS, store->printStorePath(info.path)); printInfo("rewrote '%s' to '%s'", pathS, store->printStorePath(info.path));

View file

@ -115,7 +115,7 @@ struct CmdPathInfo : StorePathsCommand, MixJSON
std::cout << '\t'; std::cout << '\t';
Strings ss; Strings ss;
if (info->ultimate) ss.push_back("ultimate"); if (info->ultimate) ss.push_back("ultimate");
if (info->ca != "") ss.push_back("ca:" + info->ca); if (info->ca) ss.push_back("ca:" + renderContentAddress(*info->ca));
for (auto & sig : info->sigs) ss.push_back(sig); for (auto & sig : info->sigs) ss.push_back(sig);
std::cout << concatStringsSep(" ", ss); std::cout << concatStringsSep(" ", ss);
} }
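
Several hunks above follow from the same underlying change: ValidPathInfo::ca is no longer a raw string but an optional, structured content address, rendered with renderContentAddress for the wire and the CLI and recovered with parseContentAddressOpt. Below is a hedged sketch of what those call sites imply; the exact shape of ContentAddress, the hashString helper, and the handling of an empty value are inferred from this diff, not confirmed by it.

// Sketch only: signatures are inferred from their call sites in this commit.
#include "content-address.hh"   // assumed header, as added to the hash command above
#include "hash.hh"
#include <optional>
#include <string>

using namespace nix;

int main()
{
    Hash narHash = hashString(htSHA256, "example contents");   // assumed helper

    // New style: an optional, structured content address instead of a string field.
    std::optional<ContentAddress> ca = FixedOutputHash {
        .method = FileIngestionMethod::Recursive,
        .hash = narHash,
    };

    // Textual form, as sent by `nix-store --serve` and shown by `nix path-info` above.
    std::string text = renderContentAddress(ca);

    // Reverse direction, as used when reading path info off the wire.
    std::optional<ContentAddress> parsed = parseContentAddressOpt(text);

    return (parsed && renderContentAddress(parsed) == text) ? 0 : 1;
}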

View file

@ -211,12 +211,12 @@ void NixRepl::mainLoop(const std::vector<std::string> & files)
// input without clearing the input so far. // input without clearing the input so far.
continue; continue;
} else { } else {
printMsg(lvlError, error + "%1%%2%", (settings.showTrace ? e.prefix() : ""), e.msg()); printMsg(lvlError, e.msg());
} }
} catch (Error & e) { } catch (Error & e) {
printMsg(lvlError, error + "%1%%2%", (settings.showTrace ? e.prefix() : ""), e.msg()); printMsg(lvlError, e.msg());
} catch (Interrupted & e) { } catch (Interrupted & e) {
printMsg(lvlError, error + "%1%%2%", (settings.showTrace ? e.prefix() : ""), e.msg()); printMsg(lvlError, e.msg());
} }
// We handled the current input fully, so we should clear it // We handled the current input fully, so we should clear it

View file

@ -216,7 +216,7 @@ struct CmdSearch : SourceExprCommand, MixJSON
} catch (AssertionError & e) { } catch (AssertionError & e) {
} catch (Error & e) { } catch (Error & e) {
if (!toplevel) { if (!toplevel) {
e.addPrefix(fmt("While evaluating the attribute '%s':\n", attrPath)); e.addTrace(std::nullopt, "While evaluating the attribute '%s'", attrPath);
throw; throw;
} }
} }

View file

@ -87,7 +87,7 @@ struct CmdVerify : StorePathsCommand
if (!noContents) { if (!noContents) {
std::unique_ptr<AbstractHashSink> hashSink; std::unique_ptr<AbstractHashSink> hashSink;
if (info->ca == "") if (!info->ca)
hashSink = std::make_unique<HashSink>(*info->narHash.type); hashSink = std::make_unique<HashSink>(*info->narHash.type);
else else
hashSink = std::make_unique<HashModuloSink>(*info->narHash.type, std::string(info->path.hashPart())); hashSink = std::make_unique<HashModuloSink>(*info->narHash.type, std::string(info->path.hashPart()));

View file

@ -1,23 +1,39 @@
{ busybox }:
with import ./config.nix; with import ./config.nix;
let let
mkDerivation = args:
derivation ({
inherit system;
builder = busybox;
args = ["sh" "-e" args.builder or (builtins.toFile "builder-${args.name}.sh" "if [ -e .attrs.sh ]; then source .attrs.sh; fi; eval \"$buildCommand\"")];
} // removeAttrs args ["builder" "meta"])
// { meta = args.meta or {}; };
input1 = mkDerivation { input1 = mkDerivation {
name = "build-hook-input-1"; shell = busybox;
buildCommand = "mkdir $out; echo FOO > $out/foo"; name = "build-remote-input-1";
buildCommand = "echo FOO > $out";
requiredSystemFeatures = ["foo"]; requiredSystemFeatures = ["foo"];
}; };
input2 = mkDerivation { input2 = mkDerivation {
name = "build-hook-input-2"; shell = busybox;
buildCommand = "mkdir $out; echo BAR > $out/bar"; name = "build-remote-input-2";
buildCommand = "echo BAR > $out";
}; };
in in
mkDerivation { mkDerivation {
name = "build-hook"; shell = busybox;
builder = ./dependencies.builder0.sh; name = "build-remote";
input1 = " " + input1 + "/."; buildCommand =
input2 = " ${input2}/."; ''
read x < ${input1}
read y < ${input2}
echo $x$y > $out
'';
} }

View file

@ -3,22 +3,29 @@ source common.sh
clearStore clearStore
if ! canUseSandbox; then exit; fi if ! canUseSandbox; then exit; fi
if [[ ! $SHELL =~ /nix/store ]]; then exit; fi if ! [[ $busybox =~ busybox ]]; then exit; fi
chmod -R u+w $TEST_ROOT/store0 || true chmod -R u+w $TEST_ROOT/machine0 || true
chmod -R u+w $TEST_ROOT/store1 || true chmod -R u+w $TEST_ROOT/machine1 || true
rm -rf $TEST_ROOT/store0 $TEST_ROOT/store1 chmod -R u+w $TEST_ROOT/machine2 || true
rm -rf $TEST_ROOT/machine0 $TEST_ROOT/machine1 $TEST_ROOT/machine2
rm -f $TEST_ROOT/result
nix build -f build-hook.nix -o $TEST_ROOT/result --max-jobs 0 \ unset NIX_STORE_DIR
--sandbox-paths /nix/store --sandbox-build-dir /build-tmp \ unset NIX_STATE_DIR
--builders "$TEST_ROOT/store0; $TEST_ROOT/store1 - - 1 1 foo" \
# Note: ssh://localhost bypasses ssh, directly invoking nix-store as a
# child process. This allows us to test LegacySSHStore::buildDerivation().
nix build -L -v -f build-hook.nix -o $TEST_ROOT/result --max-jobs 0 \
--arg busybox $busybox \
--store $TEST_ROOT/machine0 \
--builders "ssh://localhost?remote-store=$TEST_ROOT/machine1; $TEST_ROOT/machine2 - - 1 1 foo" \
--system-features foo --system-features foo
outPath=$TEST_ROOT/result outPath=$(readlink -f $TEST_ROOT/result)
cat $outPath/foobar | grep FOOBAR cat $TEST_ROOT/machine0/$outPath | grep FOOBAR
# Ensure that input1 was built on store1 due to the required feature. # Ensure that input1 was built on machine2 due to the required feature.
p=$(readlink -f $outPath/input-2) (! nix path-info --store $TEST_ROOT/machine1 --all | grep builder-build-remote-input-1.sh)
(! nix path-info --store $TEST_ROOT/store0 --all | grep builder-build-hook-input-1.sh) nix path-info --store $TEST_ROOT/machine2 --all | grep builder-build-remote-input-1.sh
nix path-info --store $TEST_ROOT/store1 --all | grep builder-build-hook-input-1.sh

View file

@ -1,6 +1,6 @@
set -e set -e
export TEST_ROOT=$(realpath ${TMPDIR:-/tmp}/nix-test) export TEST_ROOT=$(realpath ${TMPDIR:-/tmp}/nix-test)/${TEST_NAME:-default}
export NIX_STORE_DIR export NIX_STORE_DIR
if ! NIX_STORE_DIR=$(readlink -f $TEST_ROOT/store 2> /dev/null); then if ! NIX_STORE_DIR=$(readlink -f $TEST_ROOT/store 2> /dev/null); then
# Maybe the build directory is symlinked. # Maybe the build directory is symlinked.
@ -11,6 +11,7 @@ export NIX_LOCALSTATE_DIR=$TEST_ROOT/var
export NIX_LOG_DIR=$TEST_ROOT/var/log/nix export NIX_LOG_DIR=$TEST_ROOT/var/log/nix
export NIX_STATE_DIR=$TEST_ROOT/var/nix export NIX_STATE_DIR=$TEST_ROOT/var/nix
export NIX_CONF_DIR=$TEST_ROOT/etc export NIX_CONF_DIR=$TEST_ROOT/etc
export NIX_DAEMON_SOCKET_PATH=$TEST_ROOT/daemon-socket
unset NIX_USER_CONF_FILES unset NIX_USER_CONF_FILES
export _NIX_TEST_SHARED=$TEST_ROOT/shared export _NIX_TEST_SHARED=$TEST_ROOT/shared
if [[ -n $NIX_STORE ]]; then if [[ -n $NIX_STORE ]]; then
@ -35,6 +36,7 @@ export xmllint="@xmllint@"
export SHELL="@bash@" export SHELL="@bash@"
export PAGER=cat export PAGER=cat
export HAVE_SODIUM="@HAVE_SODIUM@" export HAVE_SODIUM="@HAVE_SODIUM@"
export busybox="@sandbox_shell@"
export version=@PACKAGE_VERSION@ export version=@PACKAGE_VERSION@
export system=@system@ export system=@system@
@ -75,7 +77,7 @@ startDaemon() {
rm -f $NIX_STATE_DIR/daemon-socket/socket rm -f $NIX_STATE_DIR/daemon-socket/socket
nix-daemon & nix-daemon &
for ((i = 0; i < 30; i++)); do for ((i = 0; i < 30; i++)); do
if [ -e $NIX_STATE_DIR/daemon-socket/socket ]; then break; fi if [ -e $NIX_DAEMON_SOCKET_PATH ]; then break; fi
sleep 1 sleep 1
done done
pidDaemon=$! pidDaemon=$!

View file

@ -13,24 +13,32 @@ fake_free=$TEST_ROOT/fake-free
export _NIX_TEST_FREE_SPACE_FILE=$fake_free export _NIX_TEST_FREE_SPACE_FILE=$fake_free
echo 1100 > $fake_free echo 1100 > $fake_free
fifoLock=$TEST_ROOT/fifoLock
mkfifo "$fifoLock"
expr=$(cat <<EOF expr=$(cat <<EOF
with import ./config.nix; mkDerivation { with import ./config.nix; mkDerivation {
name = "gc-A"; name = "gc-A";
buildCommand = '' buildCommand = ''
set -x set -x
[[ \$(ls \$NIX_STORE/*-garbage? | wc -l) = 3 ]] [[ \$(ls \$NIX_STORE/*-garbage? | wc -l) = 3 ]]
mkdir \$out mkdir \$out
echo foo > \$out/bar echo foo > \$out/bar
echo 1...
sleep 2 # Pretend that we run out of space
echo 200 > ${fake_free}.tmp1 echo 100 > ${fake_free}.tmp1
mv ${fake_free}.tmp1 $fake_free mv ${fake_free}.tmp1 $fake_free
echo 2...
sleep 2 # Wait for the GC to run
echo 3... for i in {1..20}; do
sleep 2 echo ''\${i}...
echo 4... if [[ \$(ls \$NIX_STORE/*-garbage? | wc -l) = 1 ]]; then
[[ \$(ls \$NIX_STORE/*-garbage? | wc -l) = 1 ]] exit 0
fi
sleep 1
done
exit 1
''; '';
} }
EOF EOF
@ -43,15 +51,9 @@ with import ./config.nix; mkDerivation {
set -x set -x
mkdir \$out mkdir \$out
echo foo > \$out/bar echo foo > \$out/bar
echo 1...
sleep 2 # Wait for the first build to finish
echo 200 > ${fake_free}.tmp2 cat "$fifoLock"
mv ${fake_free}.tmp2 $fake_free
echo 2...
sleep 2
echo 3...
sleep 2
echo 4...
''; '';
} }
EOF EOF
@ -59,12 +61,19 @@ EOF
nix build -v -o $TEST_ROOT/result-A -L "($expr)" \ nix build -v -o $TEST_ROOT/result-A -L "($expr)" \
--min-free 1000 --max-free 2000 --min-free-check-interval 1 & --min-free 1000 --max-free 2000 --min-free-check-interval 1 &
pid=$! pid1=$!
nix build -v -o $TEST_ROOT/result-B -L "($expr2)" \ nix build -v -o $TEST_ROOT/result-B -L "($expr2)" \
--min-free 1000 --max-free 2000 --min-free-check-interval 1 --min-free 1000 --max-free 2000 --min-free-check-interval 1 &
pid2=$!
wait "$pid" # Once the first build is done, unblock the second one.
# If the first build fails, we need to postpone the failure to still allow
# the second one to finish
wait "$pid1" || FIRSTBUILDSTATUS=$?
echo "unlock" > $fifoLock
( exit ${FIRSTBUILDSTATUS:-0} )
wait "$pid2"
[[ foo = $(cat $TEST_ROOT/result-A/bar) ]] [[ foo = $(cat $TEST_ROOT/result-A/bar) ]]
[[ foo = $(cat $TEST_ROOT/result-B/bar) ]] [[ foo = $(cat $TEST_ROOT/result-B/bar) ]]
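
Both gc tests above replace fixed sleeps with named fifos: the waiting side simply blocks on a read until the other side writes, so the tests no longer depend on timing. The same blocking behaviour is illustrated below in C++ against plain POSIX calls rather than shell; the path and message are made up for the illustration.

// Illustration only: shows why a fifo works as a lock -- the read blocks
// until the peer writes, so no sleep/poll loop is needed.
#include <fcntl.h>
#include <sys/stat.h>
#include <sys/wait.h>
#include <unistd.h>
#include <cstdio>

int main()
{
    const char * fifo = "/tmp/example.fifo";    // hypothetical path
    mkfifo(fifo, 0600);

    if (fork() == 0) {                          // "builder" side: wait for the unlock
        char buf[16];
        int fd = open(fifo, O_RDONLY);          // blocks until a writer opens the fifo
        read(fd, buf, sizeof buf);              // blocks until something is written
        close(fd);
        printf("unblocked\n");
        _exit(0);
    }

    // "driver" side: do the real work, then release the waiter.
    int fd = open(fifo, O_WRONLY);
    write(fd, "unlock\n", 7);
    close(fd);
    wait(nullptr);
    unlink(fifo);
    return 0;
}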

View file

@ -1,7 +1,10 @@
echo "Build started" > "$lockFifo"
mkdir $out mkdir $out
echo $(cat $input1/foo)$(cat $input2/bar) > $out/foobar echo $(cat $input1/foo)$(cat $input2/bar) > $out/foobar
sleep 10 # Wait for someone to write on the fifo
cat "$lockFifo"
# $out should not have been GC'ed while we were sleeping, but just in # $out should not have been GC'ed while we were sleeping, but just in
# case... # case...

View file

@ -1,5 +1,7 @@
with import ./config.nix; with import ./config.nix;
{ lockFifo ? null }:
rec { rec {
input1 = mkDerivation { input1 = mkDerivation {
@ -16,6 +18,7 @@ rec {
name = "gc-concurrent"; name = "gc-concurrent";
builder = ./gc-concurrent.builder.sh; builder = ./gc-concurrent.builder.sh;
inherit input1 input2; inherit input1 input2;
inherit lockFifo;
}; };
test2 = mkDerivation { test2 = mkDerivation {

View file

@ -2,7 +2,10 @@ source common.sh
clearStore clearStore
drvPath1=$(nix-instantiate gc-concurrent.nix -A test1) lockFifo1=$TEST_ROOT/test1.fifo
mkfifo "$lockFifo1"
drvPath1=$(nix-instantiate gc-concurrent.nix -A test1 --argstr lockFifo "$lockFifo1")
outPath1=$(nix-store -q $drvPath1) outPath1=$(nix-store -q $drvPath1)
drvPath2=$(nix-instantiate gc-concurrent.nix -A test2) drvPath2=$(nix-instantiate gc-concurrent.nix -A test2)
@ -22,19 +25,16 @@ ln -s $outPath3 "$NIX_STATE_DIR"/gcroots/foo2
nix-store -rvv "$drvPath1" & nix-store -rvv "$drvPath1" &
pid1=$! pid1=$!
# Start build #2 in the background after 10 seconds. # Wait for the build of $drvPath1 to start
(sleep 10 && nix-store -rvv "$drvPath2") & cat $lockFifo1
pid2=$!
# Run the garbage collector while the build is running. # Run the garbage collector while the build is running.
sleep 6
nix-collect-garbage nix-collect-garbage
# Wait for build #1/#2 to finish. # Unlock the build of $drvPath1
echo "" > $lockFifo1
echo waiting for pid $pid1 to finish... echo waiting for pid $pid1 to finish...
wait $pid1 wait $pid1
echo waiting for pid $pid2 to finish...
wait $pid2
# Check that the root of build #1 and its dependencies haven't been # Check that the root of build #1 and its dependencies haven't been
# deleted. They should not be deleted by the GC because they were
@ -42,8 +42,9 @@ wait $pid2
cat $outPath1/foobar cat $outPath1/foobar
cat $outPath1/input-2/bar cat $outPath1/input-2/bar
# Check that build #2 has succeeded. It should succeed because the # Check that the build of $drvPath2 succeeds.
# derivation is a GC root. # It should succeed because the derivation is a GC root.
nix-store -rvv "$drvPath2"
cat $outPath2/foobar cat $outPath2/foobar
rm -f "$NIX_STATE_DIR"/gcroots/foo* rm -f "$NIX_STATE_DIR"/gcroots/foo*

View file

@ -3,5 +3,3 @@ echo $(cat $input1/foo)$(cat $input2/bar)xyzzy > $out/foobar
# Check that the GC hasn't deleted the lock on our output. # Check that the GC hasn't deleted the lock on our output.
test -e "$out.lock" test -e "$out.lock"
sleep 6

View file

@ -18,6 +18,7 @@ build-users-group =
keep-derivations = false keep-derivations = false
sandbox = false sandbox = false
experimental-features = nix-command flakes experimental-features = nix-command flakes
gc-reserved-space = 0
include nix.conf.extra include nix.conf.extra
EOF EOF

View file

@ -40,4 +40,4 @@ tests-environment = NIX_REMOTE= $(bash) -e
clean-files += $(d)/common.sh clean-files += $(d)/common.sh
installcheck: $(d)/common.sh $(d)/config.nix $(d)/plugins/libplugintest.$(SO_EXT) test-deps += tests/common.sh tests/config.nix tests/plugins/libplugintest.$(SO_EXT)

View file

@ -16,6 +16,11 @@ nix-env --foo 2>&1 | grep "no operation"
nix-env -q --foo 2>&1 | grep "unknown flag" nix-env -q --foo 2>&1 | grep "unknown flag"
# Eval Errors. # Eval Errors.
eval_res=$(nix-instantiate --eval -E 'let a = {} // a; in a.foo' 2>&1 || true) eval_arg_res=$(nix-instantiate --eval -E 'let a = {} // a; in a.foo' 2>&1 || true)
echo $eval_res | grep "(string) (1:15)" echo $eval_arg_res | grep "at: (1:15) from string"
echo $eval_res | grep "infinite recursion encountered" echo $eval_arg_res | grep "infinite recursion encountered"
eval_stdin_res=$(echo 'let a = {} // a; in a.foo' | nix-instantiate --eval -E - 2>&1 || true)
echo $eval_stdin_res | grep "at: (1:15) from stdin"
echo $eval_stdin_res | grep "infinite recursion encountered"

View file

@ -55,3 +55,10 @@ chmod a+rx $TEST_ROOT/shell.shebang.rb
output=$($TEST_ROOT/shell.shebang.rb abc ruby) output=$($TEST_ROOT/shell.shebang.rb abc ruby)
[ "$output" = '-e load("'"$TEST_ROOT"'/shell.shebang.rb") -- abc ruby' ] [ "$output" = '-e load("'"$TEST_ROOT"'/shell.shebang.rb") -- abc ruby' ]
# Test 'nix develop'.
nix develop -f shell.nix shellDrv -c bash -c '[[ -n $stdenv ]]'
# Test 'nix print-dev-env'.
source <(nix print-dev-env -f shell.nix shellDrv)
[[ -n $stdenv ]]

View file

@ -2,6 +2,8 @@ source common.sh
clearStore clearStore
rm -f $TEST_ROOT/result
export REMOTE_STORE=$TEST_ROOT/remote_store export REMOTE_STORE=$TEST_ROOT/remote_store
# Build the dependencies and push them to the remote store # Build the dependencies and push them to the remote store

View file

@ -5,6 +5,8 @@ if [[ $(uname) != Linux ]]; then exit; fi
clearStore clearStore
rm -f $TEST_ROOT/result
export unreachable=$(nix add-to-store ./recursive.sh) export unreachable=$(nix add-to-store ./recursive.sh)
nix --experimental-features 'nix-command recursive-nix' build -o $TEST_ROOT/result -L '( nix --experimental-features 'nix-command recursive-nix' build -o $TEST_ROOT/result -L '(

View file

@ -2,6 +2,8 @@ source common.sh
clearStore clearStore
rm -f $TEST_ROOT/result
nix-build structured-attrs.nix -A all -o $TEST_ROOT/result nix-build structured-attrs.nix -A all -o $TEST_ROOT/result
[[ $(cat $TEST_ROOT/result/foo) = bar ]] [[ $(cat $TEST_ROOT/result/foo) = bar ]]