Merge remote-tracking branch 'upstream/master' into add-body-to-network-errors

commit 465daa9396
Author: John Ericson
Date: 2020-07-03 17:08:39 +00:00
90 changed files with 1375 additions and 623 deletions

.github/dependabot.yml (new file, 6 lines)

@@ -0,0 +1,6 @@
+version: 2
+updates:
+  - package-ecosystem: "github-actions"
+    directory: "/"
+    schedule:
+      interval: "weekly"


@@ -10,5 +10,5 @@ jobs:
     runs-on: ${{ matrix.os }}
     steps:
     - uses: actions/checkout@v2
-    - uses: cachix/install-nix-action@v8
+    - uses: cachix/install-nix-action@v10
     - run: nix-build release.nix --arg nix '{ outPath = ./.; revCount = 123; shortRev = "abcdefgh"; }' --arg systems '[ builtins.currentSystem ]' -A installerScript -A perlBindings


@@ -97,7 +97,7 @@ $ rm -rf /nix
 installation on your system:
 </para>
-<screen>sh &lt;(curl https://nixos.org/nix/install) --daemon</screen>
+<screen>sh &lt;(curl -L https://nixos.org/nix/install) --daemon</screen>
 <para>
 The multi-user installation of Nix will create build users between
@@ -178,7 +178,7 @@ sudo rm /Library/LaunchDaemons/org.nixos.nix-daemon.plist
 is a bit of a misnomer). To use this approach, just install Nix with:
 </para>
-<screen>$ sh &lt;(curl https://nixos.org/nix/install) --darwin-use-unencrypted-nix-store-volume</screen>
+<screen>$ sh &lt;(curl -L https://nixos.org/nix/install) --darwin-use-unencrypted-nix-store-volume</screen>
 <para>
 If you don't like the sound of this, you'll want to weigh the
@@ -429,7 +429,7 @@ LABEL=Nix\040Store /nix apfs rw,nobrowse
 NixOS.org installation script:
 <screen>
-sh &lt;(curl https://nixos.org/nix/install)
+sh &lt;(curl -L https://nixos.org/nix/install)
 </screen>
 </para>

mk/run_test.sh (new executable file, 28 lines)

@@ -0,0 +1,28 @@
+#!/bin/sh
+
+set -u
+
+red=""
+green=""
+yellow=""
+normal=""
+
+post_run_msg="ran test $1..."
+if [ -t 1 ]; then
+    red=""
+    green=""
+    yellow=""
+    normal=""
+fi
+(cd $(dirname $1) && env ${TESTS_ENVIRONMENT} init.sh 2>/dev/null > /dev/null)
+log="$(cd $(dirname $1) && env ${TESTS_ENVIRONMENT} $(basename $1) 2>&1)"
+status=$?
+if [ $status -eq 0 ]; then
+    echo "$post_run_msg [${green}PASS$normal]"
+elif [ $status -eq 99 ]; then
+    echo "$post_run_msg [${yellow}SKIP$normal]"
+else
+    echo "$post_run_msg [${red}FAIL$normal]"
+    echo "$log" | sed 's/^/ /'
+    exit "$status"
+fi


@@ -1,45 +1,15 @@
 # Run program $1 as part of make installcheck.
+test-deps =
 define run-install-test
-installcheck: $1
+installcheck: $1.test
-_installcheck-list += $1
+.PHONY: $1.test
+$1.test: $1 $(test-deps)
+	@env TEST_NAME=$(notdir $(basename $1)) TESTS_ENVIRONMENT="$(tests-environment)" mk/run_test.sh $1
 endef
-# Color code from https://unix.stackexchange.com/a/10065
-installcheck:
-	@total=0; failed=0; \
-	red=""; \
-	green=""; \
-	yellow=""; \
-	normal=""; \
-	if [ -t 1 ]; then \
-	red=""; \
-	green=""; \
-	yellow=""; \
-	normal=""; \
-	fi; \
-	for i in $(_installcheck-list); do \
-	total=$$((total + 1)); \
-	printf "running test $$i..."; \
-	log="$$(cd $$(dirname $$i) && $(tests-environment) $$(basename $$i) 2>&1)"; \
-	status=$$?; \
-	if [ $$status -eq 0 ]; then \
-	echo " [$${green}PASS$$normal]"; \
-	elif [ $$status -eq 99 ]; then \
-	echo " [$${yellow}SKIP$$normal]"; \
-	else \
-	echo " [$${red}FAIL$$normal]"; \
-	echo "$$log" | sed 's/^/ /'; \
-	failed=$$((failed + 1)); \
-	fi; \
-	done; \
-	if [ "$$failed" != 0 ]; then \
-	echo "$${red}$$failed out of $$total tests failed $$normal"; \
-	exit 1; \
-	else \
-	echo "$${green}All tests succeeded$$normal"; \
-	fi
 .PHONY: check installcheck


@@ -526,7 +526,7 @@ This script is going to call sudo a lot. Normally, it would show you
 exactly what commands it is running and why. However, the script is
 run in a headless fashion, like this:
-$ curl https://nixos.org/nix/install | sh
+$ curl -L https://nixos.org/nix/install | sh
 or maybe in a CI pipeline. Because of that, we're going to skip the
 verbose output in the interest of brevity.
@@ -534,7 +534,7 @@ verbose output in the interest of brevity.
 If you would like to
 see the output, try like this:
-$ curl -o install-nix https://nixos.org/nix/install
+$ curl -L -o install-nix https://nixos.org/nix/install
 $ sh ./install-nix
 EOF


@ -113,7 +113,7 @@ if [ "$(uname -s)" = "Darwin" ]; then
( (
echo "" echo ""
echo "Installing on macOS >=10.15 requires relocating the store to an apfs volume." echo "Installing on macOS >=10.15 requires relocating the store to an apfs volume."
echo "Use sh <(curl https://nixos.org/nix/install) --darwin-use-unencrypted-nix-store-volume or run the preparation steps manually." echo "Use sh <(curl -L https://nixos.org/nix/install) --darwin-use-unencrypted-nix-store-volume or run the preparation steps manually."
echo "See https://nixos.org/nix/manual/#sect-macos-installation" echo "See https://nixos.org/nix/manual/#sect-macos-installation"
echo "" echo ""
) >&2 ) >&2


@@ -130,7 +130,7 @@ Pos findDerivationFilename(EvalState & state, Value & v, std::string what)
 Symbol file = state.symbols.create(filename);
-return { file, lineno, 0 };
+return { foFile, file, lineno, 0 };
 }


@@ -78,7 +78,7 @@ public:
 if (!a)
 throw Error({
 .hint = hintfmt("attribute '%s' missing", name),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
 });
 return *a;


@@ -11,7 +11,7 @@ LocalNoInlineNoReturn(void throwEvalError(const Pos & pos, const char * s))
 {
 throw EvalError({
 .hint = hintfmt(s),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
 });
 }
@@ -25,7 +25,7 @@ LocalNoInlineNoReturn(void throwTypeError(const Pos & pos, const char * s, const
 {
 throw TypeError({
 .hint = hintfmt(s, showType(v)),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
 });
 }
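For readers following the .nixCode to .errPos change repeated throughout this commit, here is a minimal C++ sketch of the ErrorInfo shape the new designated initializers appear to assume; the field list is inferred from the call sites above, not copied from error.hh:

struct ErrorInfo
{
    hintformat hint;              // message built with hintfmt(...)
    std::optional<ErrPos> errPos; // previously wrapped as NixCode { .errPos = ... }
};

// old:  throw EvalError({ .hint = hintfmt(s), .nixCode = NixCode { .errPos = pos } });
// new:  throw EvalError({ .hint = hintfmt(s), .errPos = pos });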


@@ -529,7 +529,7 @@ LocalNoInlineNoReturn(void throwEvalError(const Pos & pos, const char * s, const
 {
 throw EvalError({
 .hint = hintfmt(s, s2),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
 });
 }
@@ -542,7 +542,7 @@ LocalNoInlineNoReturn(void throwEvalError(const Pos & pos, const char * s, const
 {
 throw EvalError({
 .hint = hintfmt(s, s2, s3),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
 });
 }
@@ -551,7 +551,7 @@ LocalNoInlineNoReturn(void throwEvalError(const Pos & p1, const char * s, const
 // p1 is where the error occurred; p2 is a position mentioned in the message.
 throw EvalError({
 .hint = hintfmt(s, sym, p2),
-.nixCode = NixCode { .errPos = p1 }
+.errPos = p1
 });
 }
@@ -559,7 +559,7 @@ LocalNoInlineNoReturn(void throwTypeError(const Pos & pos, const char * s))
 {
 throw TypeError({
 .hint = hintfmt(s),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
 });
 }
@@ -572,7 +572,7 @@ LocalNoInlineNoReturn(void throwTypeError(const Pos & pos, const char * s, const
 {
 throw TypeError({
 .hint = hintfmt(s, fun.showNamePos(), s2),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
 });
 }
@@ -580,7 +580,7 @@ LocalNoInlineNoReturn(void throwAssertionError(const Pos & pos, const char * s,
 {
 throw AssertionError({
 .hint = hintfmt(s, s1),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
 });
 }
@@ -588,23 +588,18 @@ LocalNoInlineNoReturn(void throwUndefinedVarError(const Pos & pos, const char *
 {
 throw UndefinedVarError({
 .hint = hintfmt(s, s1),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
 });
 }
-LocalNoInline(void addErrorPrefix(Error & e, const char * s, const string & s2))
+LocalNoInline(void addErrorTrace(Error & e, const char * s, const string & s2))
 {
-e.addPrefix(format(s) % s2);
+e.addTrace(std::nullopt, s, s2);
 }
-LocalNoInline(void addErrorPrefix(Error & e, const char * s, const ExprLambda & fun, const Pos & pos))
+LocalNoInline(void addErrorTrace(Error & e, const Pos & pos, const char * s, const string & s2))
 {
-e.addPrefix(format(s) % fun.showNamePos() % pos);
+e.addTrace(pos, s, s2);
-}
-LocalNoInline(void addErrorPrefix(Error & e, const char * s, const string & s2, const Pos & pos))
-{
-e.addPrefix(format(s) % s2 % pos);
 }
@@ -818,7 +813,7 @@ void EvalState::evalFile(const Path & path_, Value & v)
 try {
 eval(e, v);
 } catch (Error & e) {
-addErrorPrefix(e, "while evaluating the file '%1%':\n", path2);
+addErrorTrace(e, "while evaluating the file '%1%':", path2);
 throw;
 }
@@ -1068,8 +1063,8 @@ void ExprSelect::eval(EvalState & state, Env & env, Value & v)
 } catch (Error & e) {
 if (pos2 && pos2->file != state.sDerivationNix)
-addErrorPrefix(e, "while evaluating the attribute '%1%' at %2%:\n",
-showAttrPath(state, env, attrPath), *pos2);
+addErrorTrace(e, *pos2, "while evaluating the attribute '%1%'",
+showAttrPath(state, env, attrPath));
 throw;
 }
@@ -1237,11 +1232,15 @@ void EvalState::callFunction(Value & fun, Value & arg, Value & v, const Pos & po
 /* Evaluate the body. This is conditional on showTrace, because
 catching exceptions makes this function not tail-recursive. */
-if (settings.showTrace)
+if (loggerSettings.showTrace.get())
 try {
 lambda.body->eval(*this, env2, v);
 } catch (Error & e) {
-addErrorPrefix(e, "while evaluating %1%, called from %2%:\n", lambda, pos);
+addErrorTrace(e, lambda.pos, "while evaluating %s",
+(lambda.name.set()
+? "'" + (string) lambda.name + "'"
+: "anonymous lambdaction"));
+addErrorTrace(e, pos, "from call site%s", "");
 throw;
 }
 else
@@ -1516,7 +1515,7 @@ void EvalState::forceValueDeep(Value & v)
 try {
 recurse(*i.value);
 } catch (Error & e) {
-addErrorPrefix(e, "while evaluating the attribute '%1%' at %2%:\n", i.name, *i.pos);
+addErrorTrace(e, *i.pos, "while evaluating the attribute '%1%'", i.name);
 throw;
 }
 }
@@ -1936,7 +1935,7 @@ string ExternalValueBase::coerceToString(const Pos & pos, PathSet & context, boo
 {
 throw TypeError({
 .hint = hintfmt("cannot coerce %1% to a string", showType()),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
 });
 }
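The addErrorTrace helpers above call into a trace-accumulating API on the error object. A hedged sketch of what those calls imply (signatures inferred from this diff, not quoted from error.hh):

class BaseError : public std::exception
{
public:
    // Appends one trace frame (optional source position plus message) instead of
    // prepending prefix text as the old addPrefix(format(...) % ...) did.
    void addTrace(std::optional<ErrPos> pos, hintformat hint);

    template<typename... Args>
    void addTrace(std::optional<ErrPos> pos, const std::string & fs, const Args & ... args)
    {
        addTrace(pos, hintfmt(fs, args...));
    }
};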


@@ -250,7 +250,7 @@ private:
 friend struct ExprAttrs;
 friend struct ExprLet;
-Expr * parse(const char * text, const Path & path,
+Expr * parse(const char * text, FileOrigin origin, const Path & path,
 const Path & basePath, StaticEnv & staticEnv);
 public:


@@ -1,7 +1,7 @@
 #include "get-drvs.hh"
 #include "util.hh"
 #include "eval-inline.hh"
-#include "derivations.hh"
+#include "store-api.hh"
 #include <cstring>
 #include <regex>


@@ -197,7 +197,22 @@ std::ostream & operator << (std::ostream & str, const Pos & pos)
 if (!pos)
 str << "undefined position";
 else
-str << (format(ANSI_BOLD "%1%" ANSI_NORMAL ":%2%:%3%") % (string) pos.file % pos.line % pos.column).str();
+{
+auto f = format(ANSI_BOLD "%1%" ANSI_NORMAL ":%2%:%3%");
+switch (pos.origin) {
+case foFile:
+f % (string) pos.file;
+break;
+case foStdin:
+case foString:
+f % "(string)";
+break;
+default:
+throw Error("unhandled Pos origin!");
+}
+str << (f % pos.line % pos.column).str();
+}
 return str;
 }
@@ -270,7 +285,7 @@ void ExprVar::bindVars(const StaticEnv & env)
 if (withLevel == -1)
 throw UndefinedVarError({
 .hint = hintfmt("undefined variable '%1%'", name),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
 });
 fromWith = true;
 this->level = withLevel;


@@ -24,11 +24,12 @@ MakeError(RestrictedPathError, Error);
 struct Pos
 {
+FileOrigin origin;
 Symbol file;
 unsigned int line, column;
-Pos() : line(0), column(0) { };
-Pos(const Symbol & file, unsigned int line, unsigned int column)
-: file(file), line(line), column(column) { };
+Pos() : origin(foString), line(0), column(0) { };
+Pos(FileOrigin origin, const Symbol & file, unsigned int line, unsigned int column)
+: origin(origin), file(file), line(line), column(column) { };
 operator bool() const
 {
 return line != 0;
@@ -238,7 +239,7 @@ struct ExprLambda : Expr
 if (!arg.empty() && formals && formals->argNames.find(arg) != formals->argNames.end())
 throw ParseError({
 .hint = hintfmt("duplicate formal function argument '%1%'", arg),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
 });
 };
 void setName(Symbol & name);
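The new Pos::origin member and the foFile/foStdin/foString values used throughout this commit imply an enum roughly like the following; the diff never shows its declaration, so treat this as an assumption:

enum FileOrigin { foFile, foStdin, foString };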


@@ -30,7 +30,8 @@ namespace nix {
 SymbolTable & symbols;
 Expr * result;
 Path basePath;
-Symbol path;
+Symbol file;
+FileOrigin origin;
 ErrorInfo error;
 Symbol sLetBody;
 ParseData(EvalState & state)
@@ -67,16 +68,15 @@ static void dupAttr(const AttrPath & attrPath, const Pos & pos, const Pos & prev
 throw ParseError({
 .hint = hintfmt("attribute '%1%' already defined at %2%",
 showAttrPath(attrPath), prevPos),
-.nixCode = NixCode { .errPos = pos },
+.errPos = pos
 });
 }
 static void dupAttr(Symbol attr, const Pos & pos, const Pos & prevPos)
 {
 throw ParseError({
 .hint = hintfmt("attribute '%1%' already defined at %2%", attr, prevPos),
-.nixCode = NixCode { .errPos = pos },
+.errPos = pos
 });
 }
@@ -148,7 +148,7 @@ static void addFormal(const Pos & pos, Formals * formals, const Formal & formal)
 throw ParseError({
 .hint = hintfmt("duplicate formal function argument '%1%'",
 formal.name),
-.nixCode = NixCode { .errPos = pos },
+.errPos = pos
 });
 formals->formals.push_front(formal);
 }
@@ -246,7 +246,7 @@ static Expr * stripIndentation(const Pos & pos, SymbolTable & symbols, vector<Ex
 static inline Pos makeCurPos(const YYLTYPE & loc, ParseData * data)
 {
-return Pos(data->path, loc.first_line, loc.first_column);
+return Pos(data->origin, data->file, loc.first_line, loc.first_column);
 }
 #define CUR_POS makeCurPos(*yylocp, data)
@@ -259,7 +259,7 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParseData * data, const char * err
 {
 data->error = {
 .hint = hintfmt(error),
-.nixCode = NixCode { .errPos = makeCurPos(*loc, data) }
+.errPos = makeCurPos(*loc, data)
 };
 }
@@ -339,7 +339,7 @@ expr_function
 { if (!$2->dynamicAttrs.empty())
 throw ParseError({
 .hint = hintfmt("dynamic attributes not allowed in let"),
-.nixCode = NixCode { .errPos = CUR_POS },
+.errPos = CUR_POS
 });
 $$ = new ExprLet($2, $4);
 }
@@ -419,7 +419,7 @@ expr_simple
 if (noURLLiterals)
 throw ParseError({
 .hint = hintfmt("URL literals are disabled"),
-.nixCode = NixCode { .errPos = CUR_POS }
+.errPos = CUR_POS
 });
 $$ = new ExprString(data->symbols.create($1));
 }
@@ -492,7 +492,7 @@ attrs
 } else
 throw ParseError({
 .hint = hintfmt("dynamic attributes not allowed in inherit"),
-.nixCode = NixCode { .errPos = makeCurPos(@2, data) },
+.errPos = makeCurPos(@2, data)
 });
 }
 | { $$ = new AttrPath; }
@@ -569,13 +569,24 @@ formal
 namespace nix {
-Expr * EvalState::parse(const char * text,
+Expr * EvalState::parse(const char * text, FileOrigin origin,
 const Path & path, const Path & basePath, StaticEnv & staticEnv)
 {
 yyscan_t scanner;
 ParseData data(*this);
+data.origin = origin;
+switch (origin) {
+case foFile:
+data.file = data.symbols.create(path);
+break;
+case foStdin:
+case foString:
+data.file = data.symbols.create(text);
+break;
+default:
+assert(false);
+}
 data.basePath = basePath;
-data.path = data.symbols.create(path);
 yylex_init(&scanner);
 yy_scan_string(text, scanner);
@@ -625,13 +636,13 @@ Expr * EvalState::parseExprFromFile(const Path & path)
 Expr * EvalState::parseExprFromFile(const Path & path, StaticEnv & staticEnv)
 {
-return parse(readFile(path).c_str(), path, dirOf(path), staticEnv);
+return parse(readFile(path).c_str(), foFile, path, dirOf(path), staticEnv);
 }
 Expr * EvalState::parseExprFromString(std::string_view s, const Path & basePath, StaticEnv & staticEnv)
 {
-return parse(s.data(), "(string)", basePath, staticEnv);
+return parse(s.data(), foString, "", basePath, staticEnv);
 }
@@ -644,7 +655,7 @@ Expr * EvalState::parseExprFromString(std::string_view s, const Path & basePath)
 Expr * EvalState::parseStdin()
 {
 //Activity act(*logger, lvlTalkative, format("parsing standard input"));
-return parseExprFromString(drainFD(0), absPath("."));
+return parse(drainFD(0).data(), foStdin, "", absPath("."), staticBaseEnv);
 }
@@ -693,7 +704,7 @@ Path EvalState::findFile(SearchPath & searchPath, const string & path, const Pos
 ? "cannot look up '<%s>' in pure evaluation mode (use '--impure' to override)"
 : "file '%s' was not found in the Nix search path (add it using $NIX_PATH or -I)",
 path),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
 });
 }


@@ -96,7 +96,7 @@ static void prim_scopedImport(EvalState & state, const Pos & pos, Value * * args
 } catch (InvalidPathError & e) {
 throw EvalError({
 .hint = hintfmt("cannot import '%1%', since path '%2%' is not valid", path, e.path),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
 });
 }
@@ -177,7 +177,7 @@ void prim_importNative(EvalState & state, const Pos & pos, Value * * args, Value
 .hint = hintfmt(
 "cannot import '%1%', since path '%2%' is not valid",
 path, e.path),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
 });
 }
@@ -215,7 +215,7 @@ void prim_exec(EvalState & state, const Pos & pos, Value * * args, Value & v)
 if (count == 0) {
 throw EvalError({
 .hint = hintfmt("at least one argument to 'exec' required"),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
 });
 }
 PathSet context;
@@ -230,7 +230,7 @@ void prim_exec(EvalState & state, const Pos & pos, Value * * args, Value & v)
 throw EvalError({
 .hint = hintfmt("cannot execute '%1%', since path '%2%' is not valid",
 program, e.path),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
 });
 }
@@ -239,13 +239,13 @@ void prim_exec(EvalState & state, const Pos & pos, Value * * args, Value & v)
 try {
 parsed = state.parseExprFromString(output, pos.file);
 } catch (Error & e) {
-e.addPrefix(fmt("While parsing the output from '%1%', at %2%\n", program, pos));
+e.addTrace(pos, "While parsing the output from '%1%'", program);
 throw;
 }
 try {
 state.eval(parsed, v);
 } catch (Error & e) {
-e.addPrefix(fmt("While evaluating the output from '%1%', at %2%\n", program, pos));
+e.addTrace(pos, "While evaluating the output from '%1%'", program);
 throw;
 }
 }
@@ -385,7 +385,7 @@ static void prim_genericClosure(EvalState & state, const Pos & pos, Value * * ar
 if (startSet == args[0]->attrs->end())
 throw EvalError({
 .hint = hintfmt("attribute 'startSet' required"),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
 });
 state.forceList(*startSet->value, pos);
@@ -399,7 +399,7 @@ static void prim_genericClosure(EvalState & state, const Pos & pos, Value * * ar
 if (op == args[0]->attrs->end())
 throw EvalError({
 .hint = hintfmt("attribute 'operator' required"),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
 });
 state.forceValue(*op->value, pos);
@@ -421,7 +421,7 @@ static void prim_genericClosure(EvalState & state, const Pos & pos, Value * * ar
 if (key == e->attrs->end())
 throw EvalError({
 .hint = hintfmt("attribute 'key' required"),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
 });
 state.forceValue(*key->value, pos);
@@ -471,7 +471,7 @@ static void prim_addErrorContext(EvalState & state, const Pos & pos, Value * * a
 v = *args[1];
 } catch (Error & e) {
 PathSet context;
-e.addPrefix(format("%1%\n") % state.coerceToString(pos, *args[0], context));
+e.addTrace(std::nullopt, state.coerceToString(pos, *args[0], context));
 throw;
 }
 }
@@ -556,14 +556,14 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
 if (attr == args[0]->attrs->end())
 throw EvalError({
 .hint = hintfmt("required attribute 'name' missing"),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
 });
 string drvName;
 Pos & posDrvName(*attr->pos);
 try {
 drvName = state.forceStringNoCtx(*attr->value, pos);
 } catch (Error & e) {
-e.addPrefix(fmt("while evaluating the derivation attribute 'name' at %1%:\n", posDrvName));
+e.addTrace(posDrvName, "while evaluating the derivation attribute 'name'");
 throw;
 }
@@ -603,7 +603,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
 else
 throw EvalError({
 .hint = hintfmt("invalid value '%s' for 'outputHashMode' attribute", s),
-.nixCode = NixCode { .errPos = posDrvName }
+.errPos = posDrvName
 });
 };
@@ -613,7 +613,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
 if (outputs.find(j) != outputs.end())
 throw EvalError({
 .hint = hintfmt("duplicate derivation output '%1%'", j),
-.nixCode = NixCode { .errPos = posDrvName }
+.errPos = posDrvName
 });
 /* !!! Check whether j is a valid attribute
 name. */
@@ -623,14 +623,14 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
 if (j == "drv")
 throw EvalError({
 .hint = hintfmt("invalid derivation output name 'drv'" ),
-.nixCode = NixCode { .errPos = posDrvName }
+.errPos = posDrvName
 });
 outputs.insert(j);
 }
 if (outputs.empty())
 throw EvalError({
 .hint = hintfmt("derivation cannot have an empty set of outputs"),
-.nixCode = NixCode { .errPos = posDrvName }
+.errPos = posDrvName
 });
 };
@@ -696,8 +696,9 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
 }
 } catch (Error & e) {
-e.addPrefix(format("while evaluating the attribute '%1%' of the derivation '%2%' at %3%:\n")
-% key % drvName % posDrvName);
+e.addTrace(posDrvName,
+"while evaluating the attribute '%1%' of the derivation '%2%'",
+key, drvName);
 throw;
 }
 }
@@ -745,20 +746,20 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
 if (drv.builder == "")
 throw EvalError({
 .hint = hintfmt("required attribute 'builder' missing"),
-.nixCode = NixCode { .errPos = posDrvName }
+.errPos = posDrvName
 });
 if (drv.platform == "")
 throw EvalError({
 .hint = hintfmt("required attribute 'system' missing"),
-.nixCode = NixCode { .errPos = posDrvName }
+.errPos = posDrvName
 });
 /* Check whether the derivation name is valid. */
 if (isDerivation(drvName))
 throw EvalError({
 .hint = hintfmt("derivation names are not allowed to end in '%s'", drvExtension),
-.nixCode = NixCode { .errPos = posDrvName }
+.errPos = posDrvName
 });
 if (outputHash) {
@@ -766,20 +767,20 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
 if (outputs.size() != 1 || *(outputs.begin()) != "out")
 throw Error({
 .hint = hintfmt("multiple outputs are not supported in fixed-output derivations"),
-.nixCode = NixCode { .errPos = posDrvName }
+.errPos = posDrvName
 });
-HashType ht = outputHashAlgo.empty() ? htUnknown : parseHashType(outputHashAlgo);
+std::optional<HashType> ht = parseHashTypeOpt(outputHashAlgo);
 Hash h = newHashAllowEmpty(*outputHash, ht);
 auto outPath = state.store->makeFixedOutputPath(ingestionMethod, h, drvName);
 if (!jsonObject) drv.env["out"] = state.store->printStorePath(outPath);
 drv.outputs.insert_or_assign("out", DerivationOutput {
-std::move(outPath),
-(ingestionMethod == FileIngestionMethod::Recursive ? "r:" : "")
-+ printHashType(h.type),
-h.to_string(Base16, false),
+.path = std::move(outPath),
+.hash = FixedOutputHash {
+.method = ingestionMethod,
+.hash = std::move(h),
+},
 });
 }
@@ -793,7 +794,10 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
 for (auto & i : outputs) {
 if (!jsonObject) drv.env[i] = "";
 drv.outputs.insert_or_assign(i,
-DerivationOutput { StorePath::dummy, "", "" });
+DerivationOutput {
+.path = StorePath::dummy,
+.hash = std::optional<FixedOutputHash> {},
+});
 }
 Hash h = hashDerivationModulo(*state.store, Derivation(drv), true);
@@ -802,7 +806,10 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
 auto outPath = state.store->makeOutputPath(i, h, drvName);
 if (!jsonObject) drv.env[i] = state.store->printStorePath(outPath);
 drv.outputs.insert_or_assign(i,
-DerivationOutput { std::move(outPath), "", "" });
+DerivationOutput {
+.path = std::move(outPath),
+.hash = std::optional<FixedOutputHash>(),
+});
 }
 }
@@ -874,7 +881,7 @@ static void prim_storePath(EvalState & state, const Pos & pos, Value * * args, V
 if (!state.store->isInStore(path))
 throw EvalError({
 .hint = hintfmt("path '%1%' is not in the Nix store", path),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
 });
 Path path2 = state.store->toStorePath(path);
 if (!settings.readOnlyMode)
@@ -895,7 +902,7 @@ static void prim_pathExists(EvalState & state, const Pos & pos, Value * * args,
 .hint = hintfmt(
 "cannot check the existence of '%1%', since path '%2%' is not valid",
 path, e.path),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
 });
 }
@@ -941,7 +948,7 @@ static void prim_readFile(EvalState & state, const Pos & pos, Value * * args, Va
 } catch (InvalidPathError & e) {
 throw EvalError({
 .hint = hintfmt("cannot read '%1%', since path '%2%' is not valid", path, e.path),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
 });
 }
 string s = readFile(state.checkSourcePath(state.toRealPath(path, context)));
@@ -972,7 +979,7 @@ static void prim_findFile(EvalState & state, const Pos & pos, Value * * args, Va
 if (i == v2.attrs->end())
 throw EvalError({
 .hint = hintfmt("attribute 'path' missing"),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
 });
 PathSet context;
@@ -983,7 +990,7 @@ static void prim_findFile(EvalState & state, const Pos & pos, Value * * args, Va
 } catch (InvalidPathError & e) {
 throw EvalError({
 .hint = hintfmt("cannot find '%1%', since path '%2%' is not valid", path, e.path),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
 });
 }
@@ -999,17 +1006,17 @@
 static void prim_hashFile(EvalState & state, const Pos & pos, Value * * args, Value & v)
 {
 string type = state.forceStringNoCtx(*args[0], pos);
-HashType ht = parseHashType(type);
-if (ht == htUnknown)
+std::optional<HashType> ht = parseHashType(type);
+if (!ht)
 throw Error({
 .hint = hintfmt("unknown hash type '%1%'", type),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
 });
 PathSet context; // discarded
 Path p = state.coerceToPath(pos, *args[1], context);
-mkString(v, hashFile(ht, state.checkSourcePath(p)).to_string(Base16, false), context);
+mkString(v, hashFile(*ht, state.checkSourcePath(p)).to_string(Base16, false), context);
 }
 /* Read a directory (without . or ..) */
@@ -1022,7 +1029,7 @@ static void prim_readDir(EvalState & state, const Pos & pos, Value * * args, Val
 } catch (InvalidPathError & e) {
 throw EvalError({
 .hint = hintfmt("cannot read '%1%', since path '%2%' is not valid", path, e.path),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
 });
 }
@@ -1098,7 +1105,7 @@ static void prim_toFile(EvalState & state, const Pos & pos, Value * * args, Valu
 "in 'toFile': the file named '%1%' must not contain a reference "
 "to a derivation but contains (%2%)",
 name, path),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
 });
 refs.insert(state.store->parseStorePath(path));
 }
@@ -1169,7 +1176,7 @@ static void prim_filterSource(EvalState & state, const Pos & pos, Value * * args
 if (!context.empty())
 throw EvalError({
 .hint = hintfmt("string '%1%' cannot refer to other paths", path),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
 });
 state.forceValue(*args[0], pos);
@@ -1178,7 +1185,7 @@ static void prim_filterSource(EvalState & state, const Pos & pos, Value * * args
 .hint = hintfmt(
 "first argument in call to 'filterSource' is not a function but %1%",
 showType(*args[0])),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
 });
 addPath(state, pos, std::string(baseNameOf(path)), path, args[0], FileIngestionMethod::Recursive, Hash(), v);
@@ -1201,7 +1208,7 @@ static void prim_path(EvalState & state, const Pos & pos, Value * * args, Value
 if (!context.empty())
 throw EvalError({
 .hint = hintfmt("string '%1%' cannot refer to other paths", path),
-.nixCode = NixCode { .errPos = *attr.pos }
+.errPos = *attr.pos
 });
 } else if (attr.name == state.sName)
 name = state.forceStringNoCtx(*attr.value, *attr.pos);
@@ -1215,13 +1222,13 @@ static void prim_path(EvalState & state, const Pos & pos, Value * * args, Value
 else
 throw EvalError({
 .hint = hintfmt("unsupported argument '%1%' to 'addPath'", attr.name),
-.nixCode = NixCode { .errPos = *attr.pos }
+.errPos = *attr.pos
 });
 }
 if (path.empty())
 throw EvalError({
 .hint = hintfmt("'path' required"),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
 });
 if (name.empty())
 name = baseNameOf(path);
@@ -1282,7 +1289,7 @@ void prim_getAttr(EvalState & state, const Pos & pos, Value * * args, Value & v)
 if (i == args[1]->attrs->end())
 throw EvalError({
 .hint = hintfmt("attribute '%1%' missing", attr),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
 });
 // !!! add to stack trace?
 if (state.countCalls && i->pos) state.attrSelects[*i->pos]++;
@@ -1365,7 +1372,7 @@ static void prim_listToAttrs(EvalState & state, const Pos & pos, Value * * args,
 if (j == v2.attrs->end())
 throw TypeError({
 .hint = hintfmt("'name' attribute missing in a call to 'listToAttrs'"),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
 });
 string name = state.forceStringNoCtx(*j->value, pos);
@@ -1375,7 +1382,7 @@ static void prim_listToAttrs(EvalState & state, const Pos & pos, Value * * args,
 if (j2 == v2.attrs->end())
 throw TypeError({
 .hint = hintfmt("'value' attribute missing in a call to 'listToAttrs'"),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
 });
 v.attrs->push_back(Attr(sym, j2->value, j2->pos));
 }
@@ -1451,7 +1458,7 @@ static void prim_functionArgs(EvalState & state, const Pos & pos, Value * * args
 if (args[0]->type != tLambda)
 throw TypeError({
 .hint = hintfmt("'functionArgs' requires a function"),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
 });
 if (!args[0]->lambda.fun->matchAttrs) {
@@ -1507,7 +1514,7 @@ static void elemAt(EvalState & state, const Pos & pos, Value & list, int n, Valu
 if (n < 0 || (unsigned int) n >= list.listSize())
 throw Error({
 .hint = hintfmt("list index %1% is out of bounds", n),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
 });
 state.forceValue(*list.listElems()[n], pos);
 v = *list.listElems()[n];
@@ -1537,7 +1544,7 @@ static void prim_tail(EvalState & state, const Pos & pos, Value * * args, Value
 if (args[0]->listSize() == 0)
 throw Error({
 .hint = hintfmt("'tail' called on an empty list"),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
 });
 state.mkList(v, args[0]->listSize() - 1);
@@ -1682,7 +1689,7 @@ static void prim_genList(EvalState & state, const Pos & pos, Value * * args, Val
 if (len < 0)
 throw EvalError({
 .hint = hintfmt("cannot create list of size %1%", len),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
 });
 state.mkList(v, len);
@@ -1844,7 +1851,7 @@ static void prim_div(EvalState & state, const Pos & pos, Value * * args, Value &
 if (f2 == 0)
 throw EvalError({
 .hint = hintfmt("division by zero"),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
 });
 if (args[0]->type == tFloat || args[1]->type == tFloat) {
@@ -1856,7 +1863,7 @@ static void prim_div(EvalState & state, const Pos & pos, Value * * args, Value &
 if (i1 == std::numeric_limits<NixInt>::min() && i2 == -1)
 throw EvalError({
 .hint = hintfmt("overflow in integer division"),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
 });
 mkInt(v, i1 / i2);
@@ -1917,7 +1924,7 @@ static void prim_substring(EvalState & state, const Pos & pos, Value * * args, V
 if (start < 0)
 throw EvalError({
 .hint = hintfmt("negative start position in 'substring'"),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
 });
 mkString(v, (unsigned int) start >= s.size() ? "" : string(s, start, len), context);
@@ -1936,17 +1943,17 @@ static void prim_stringLength(EvalState & state, const Pos & pos, Value * * args
 static void prim_hashString(EvalState & state, const Pos & pos, Value * * args, Value & v)
 {
 string type = state.forceStringNoCtx(*args[0], pos);
-HashType ht = parseHashType(type);
-if (ht == htUnknown)
+std::optional<HashType> ht = parseHashType(type);
+if (!ht)
 throw Error({
 .hint = hintfmt("unknown hash type '%1%'", type),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
 });
 PathSet context; // discarded
 string s = state.forceString(*args[1], context, pos);
-mkString(v, hashString(ht, s).to_string(Base16, false), context);
+mkString(v, hashString(*ht, s).to_string(Base16, false), context);
 }
@@ -1986,12 +1993,12 @@ void prim_match(EvalState & state, const Pos & pos, Value * * args, Value & v)
 // limit is _GLIBCXX_REGEX_STATE_LIMIT for libstdc++
 throw EvalError({
 .hint = hintfmt("memory limit exceeded by regular expression '%s'", re),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
 });
 } else {
 throw EvalError({
 .hint = hintfmt("invalid regular expression '%s'", re),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
 });
 }
 }
@@ -2059,12 +2066,12 @@ static void prim_split(EvalState & state, const Pos & pos, Value * * args, Value
 // limit is _GLIBCXX_REGEX_STATE_LIMIT for libstdc++
 throw EvalError({
 .hint = hintfmt("memory limit exceeded by regular expression '%s'", re),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
 });
 } else {
 throw EvalError({
 .hint = hintfmt("invalid regular expression '%s'", re),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
 });
 }
 }
@@ -2098,7 +2105,7 @@ static void prim_replaceStrings(EvalState & state, const Pos & pos, Value * * ar
 if (args[0]->listSize() != args[1]->listSize())
 throw EvalError({
 .hint = hintfmt("'from' and 'to' arguments to 'replaceStrings' have different lengths"),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
 });
 vector<string> from;


@@ -1,6 +1,6 @@
 #include "primops.hh"
 #include "eval-inline.hh"
-#include "derivations.hh"
+#include "store-api.hh"
 namespace nix {
@@ -148,7 +148,7 @@ static void prim_appendContext(EvalState & state, const Pos & pos, Value * * arg
 if (!state.store->isStorePath(i.name))
 throw EvalError({
 .hint = hintfmt("Context key '%s' is not a store path", i.name),
-.nixCode = NixCode { .errPos = *i.pos }
+.errPos = *i.pos
 });
 if (!settings.readOnlyMode)
 state.store->ensurePath(state.store->parseStorePath(i.name));
@@ -165,7 +165,7 @@ static void prim_appendContext(EvalState & state, const Pos & pos, Value * * arg
 if (!isDerivation(i.name)) {
 throw EvalError({
 .hint = hintfmt("Tried to add all-outputs context of %s, which is not a derivation, to a string", i.name),
-.nixCode = NixCode { .errPos = *i.pos }
+.errPos = *i.pos
 });
 }
 context.insert("=" + string(i.name));
@@ -178,7 +178,7 @@ static void prim_appendContext(EvalState & state, const Pos & pos, Value * * arg
 if (iter->value->listSize() && !isDerivation(i.name)) {
 throw EvalError({
 .hint = hintfmt("Tried to add derivation output context of %s, which is not a derivation, to a string", i.name),
-.nixCode = NixCode { .errPos = *i.pos }
+.errPos = *i.pos
 });
 }
 for (unsigned int n = 0; n < iter->value->listSize(); ++n) {


@@ -37,14 +37,14 @@ static void prim_fetchGit(EvalState & state, const Pos & pos, Value * * args, Va
 else
 throw EvalError({
 .hint = hintfmt("unsupported argument '%s' to 'fetchGit'", attr.name),
-.nixCode = NixCode { .errPos = *attr.pos }
+.errPos = *attr.pos
 });
 }
 if (url.empty())
 throw EvalError({
 .hint = hintfmt("'url' argument required"),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
 });
 } else


@@ -40,14 +40,14 @@ static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * ar
 else
 throw EvalError({
 .hint = hintfmt("unsupported argument '%s' to 'fetchMercurial'", attr.name),
-.nixCode = NixCode { .errPos = *attr.pos }
+.errPos = *attr.pos
 });
 }
 if (url.empty())
 throw EvalError({
 .hint = hintfmt("'url' argument required"),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
 });
 } else


@@ -68,7 +68,7 @@ static void prim_fetchTree(EvalState & state, const Pos & pos, Value * * args, V
 if (!attrs.count("type"))
 throw Error({
 .hint = hintfmt("attribute 'type' is missing in call to 'fetchTree'"),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
 });
 input = fetchers::inputFromAttrs(attrs);
@@ -112,14 +112,14 @@ static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
 else
 throw EvalError({
 .hint = hintfmt("unsupported argument '%s' to '%s'", attr.name, who),
-.nixCode = NixCode { .errPos = *attr.pos }
+.errPos = *attr.pos
 });
 }
 if (!url)
 throw EvalError({
 .hint = hintfmt("'url' argument required"),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
 });
 } else
 url = state.forceStringNoCtx(*args[0], pos);


@@ -83,7 +83,7 @@ static void prim_fromTOML(EvalState & state, const Pos & pos, Value * * args, Va
 } catch (std::runtime_error & e) {
 throw EvalError({
 .hint = hintfmt("while parsing a TOML string: %s", e.what()),
-.nixCode = NixCode { .errPos = pos }
+.errPos = pos
 });
 }
 }


@@ -6,6 +6,8 @@ namespace nix::fetchers {
 struct Cache
 {
+virtual ~Cache() { }
 virtual void add(
 ref<Store> store,
 const Attrs & inAttrs,


@@ -36,7 +36,7 @@ std::unique_ptr<Input> inputFromAttrs(const Attrs & attrs)
 if (res) {
 if (auto narHash = maybeGetStrAttr(attrs, "narHash"))
 // FIXME: require SRI hash.
-res->narHash = newHashAllowEmpty(*narHash, htUnknown);
+res->narHash = newHashAllowEmpty(*narHash, {});
 return res;
 }
 }


@@ -70,7 +70,10 @@ DownloadFileResult downloadFile(
 ValidPathInfo info(store->makeFixedOutputPath(FileIngestionMethod::Flat, hash, name));
 info.narHash = hashString(htSHA256, *sink.s);
 info.narSize = sink.s->size();
-info.ca = makeFixedOutputCA(FileIngestionMethod::Flat, hash);
+info.ca = FixedOutputHash {
+.method = FileIngestionMethod::Flat,
+.hash = hash,
+};
 auto source = StringSource { *sink.s };
 store->addToStore(info, source, NoRepair, NoCheckSigs);
 storePath = std::move(info.path);
@@ -264,7 +267,7 @@ struct TarballInputScheme : InputScheme
 auto input = std::make_unique<TarballInput>(parseURL(getStrAttr(attrs, "url")));
 if (auto hash = maybeGetStrAttr(attrs, "hash"))
-input->hash = newHashAllowEmpty(*hash, htUnknown);
+input->hash = newHashAllowEmpty(*hash, {});
 return input;
 }


@@ -26,7 +26,7 @@ Logger * makeDefaultLogger() {
 case LogFormat::rawWithLogs:
 return makeSimpleLogger(true);
 case LogFormat::internalJson:
-return makeJSONLogger(*makeSimpleLogger());
+return makeJSONLogger(*makeSimpleLogger(true));
 case LogFormat::bar:
 return makeProgressBar();
 case LogFormat::barWithLogs:


@@ -131,7 +131,7 @@ public:
 auto state(state_.lock());
 std::stringstream oss;
-oss << ei;
+showErrorInfo(oss, ei, loggerSettings.showTrace.get());
 log(*state, ei.level, oss.str());
 }


@@ -323,10 +323,8 @@ int handleExceptions(const string & programName, std::function<void()> fun)
 printError("Try '%1% --help' for more information.", programName);
 return 1;
 } catch (BaseError & e) {
-if (settings.showTrace && e.prefix() != "")
-printError(e.prefix());
 logError(e.info());
-if (e.prefix() != "" && !settings.showTrace)
+if (e.hasTrace() && !loggerSettings.showTrace.get())
 printError("(use '--show-trace' to show detailed location information)");
 return e.status;
 } catch (std::bad_alloc & e) {


@@ -1950,8 +1950,11 @@ void linkOrCopy(const Path & from, const Path & to)
 /* Hard-linking fails if we exceed the maximum link count on a
 file (e.g. 32000 of ext3), which is quite possible after a
 'nix-store --optimise'. FIXME: actually, why don't we just
-bind-mount in this case? */
-if (errno != EMLINK)
+bind-mount in this case?
+
+It can also fail with EPERM in BeegFS v7 and earlier versions
+which don't allow hard-links to other directories */
+if (errno != EMLINK && errno != EPERM)
 throw SysError("linking '%s' to '%s'", to, from);
 copyPath(from, to);
 }
@@ -2750,8 +2753,8 @@ struct RestrictedStore : public LocalFSStore
 void queryReferrers(const StorePath & path, StorePathSet & referrers) override
 { }
-StorePathSet queryDerivationOutputs(const StorePath & path) override
-{ throw Error("queryDerivationOutputs"); }
+OutputPathMap queryDerivationOutputMap(const StorePath & path) override
+{ throw Error("queryDerivationOutputMap"); }
 std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override
 { throw Error("queryPathFromHashPart"); }
@@ -3714,14 +3717,11 @@ void DerivationGoal::registerOutputs()
 /* Check that fixed-output derivations produced the right
 outputs (i.e., the content hash should match the specified
 hash). */
-std::string ca;
+std::optional<ContentAddress> ca;
 if (fixedOutput) {
-FileIngestionMethod outputHashMode; Hash h;
-i.second.parseHashInfo(outputHashMode, h);
-if (outputHashMode == FileIngestionMethod::Flat) {
+if (i.second.hash->method == FileIngestionMethod::Flat) {
 /* The output path should be a regular file without execute permission. */
 if (!S_ISREG(st.st_mode) || (st.st_mode & S_IXUSR) != 0)
 throw BuildError(
@@ -3732,20 +3732,22 @@ void DerivationGoal::registerOutputs()
 /* Check the hash. In hash mode, move the path produced by
 the derivation to its content-addressed location. */
-Hash h2 = outputHashMode == FileIngestionMethod::Recursive
-? hashPath(h.type, actualPath).first
-: hashFile(h.type, actualPath);
+Hash h2 = i.second.hash->method == FileIngestionMethod::Recursive
+? hashPath(*i.second.hash->hash.type, actualPath).first
+: hashFile(*i.second.hash->hash.type, actualPath);
-auto dest = worker.store.makeFixedOutputPath(outputHashMode, h2, i.second.path.name());
+auto dest = worker.store.makeFixedOutputPath(i.second.hash->method, h2, i.second.path.name());
-if (h != h2) {
+if (i.second.hash->hash != h2) {
 /* Throw an error after registering the path as
 valid. */
 worker.hashMismatch = true;
 delayedException = std::make_exception_ptr(
 BuildError("hash mismatch in fixed-output derivation '%s':\n  wanted: %s\n  got:    %s",
-worker.store.printStorePath(dest), h.to_string(SRI, true), h2.to_string(SRI, true)));
+worker.store.printStorePath(dest),
+i.second.hash->hash.to_string(SRI, true),
+h2.to_string(SRI, true)));
 Path actualDest = worker.store.Store::toRealPath(dest);
@@ -3765,7 +3767,10 @@ void DerivationGoal::registerOutputs()
 else
 assert(worker.store.parseStorePath(path) == dest);
-ca = makeFixedOutputCA(outputHashMode, h2);
+ca = FixedOutputHash {
+.method = i.second.hash->method,
+.hash = h2,
+};
 }
 /* Get rid of all weird permissions. This also checks that
@@ -3838,7 +3843,10 @@ void DerivationGoal::registerOutputs()
 info.ca = ca;
 worker.store.signPathInfo(info);
-if (!info.references.empty()) info.ca.clear();
+if (!info.references.empty()) {
+// FIXME don't we have an experimental feature for fixed output with references?
+info.ca = {};
+}
 infos.emplace(i.first, std::move(info));
 }
@@ -4998,7 +5006,7 @@ bool Worker::pathContentsGood(const StorePath & path)
 if (!pathExists(store.printStorePath(path)))
 res = false;
 else {
-HashResult current = hashPath(info->narHash.type, store.printStorePath(path));
+HashResult current = hashPath(*info->narHash.type, store.printStorePath(path));
 Hash nullHash(htSHA256);
 res = info->narHash == nullHash || info->narHash == current.first;
 }

View file

@ -58,14 +58,17 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData)
} }
}; };
/* We always have at least one output; if this is a fixed-output derivation (as
checked below), it must be the only output. */
auto & output = drv.outputs.begin()->second;
/* Try the hashed mirrors first. */ /* Try the hashed mirrors first. */
if (getAttr("outputHashMode") == "flat") if (output.hash && output.hash->method == FileIngestionMethod::Flat)
for (auto hashedMirror : settings.hashedMirrors.get()) for (auto hashedMirror : settings.hashedMirrors.get())
try { try {
if (!hasSuffix(hashedMirror, "/")) hashedMirror += '/'; if (!hasSuffix(hashedMirror, "/")) hashedMirror += '/';
auto ht = parseHashType(getAttr("outputHashAlgo")); auto & h = output.hash->hash;
auto h = Hash(getAttr("outputHash"), ht); fetch(hashedMirror + printHashType(*h.type) + "/" + h.to_string(Base16, false));
fetch(hashedMirror + printHashType(h.type) + "/" + h.to_string(Base16, false));
return; return;
} catch (Error & e) { } catch (Error & e) {
debug(e.what()); debug(e.what());
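Not part of the commit: a minimal sketch of how the hashed-mirror URL above is assembled for a flat fixed-output hash. The helper name and the example mirror are illustrative; the hunk itself builds the string from printHashType(*h.type) and h.to_string(Base16, false).

    #include <string>

    // Sketch: for a mirror such as "https://tarballs.nixos.org/", a hash type name
    // like "sha256" and the base-16 digest, the candidate URL is "<mirror><type>/<digest>".
    std::string hashedMirrorUrl(std::string mirror,
                                const std::string & hashTypeName,
                                const std::string & digestBase16)
    {
        if (mirror.empty() || mirror.back() != '/') mirror += '/';
        return mirror + hashTypeName + "/" + digestBase16;
    }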

View file

@ -0,0 +1,85 @@
#include "content-address.hh"
namespace nix {
std::string FixedOutputHash::printMethodAlgo() const {
return makeFileIngestionPrefix(method) + printHashType(*hash.type);
}
std::string makeFileIngestionPrefix(const FileIngestionMethod m) {
switch (m) {
case FileIngestionMethod::Flat:
return "";
case FileIngestionMethod::Recursive:
return "r:";
default:
throw Error("impossible, caught both cases");
}
}
std::string makeFixedOutputCA(FileIngestionMethod method, const Hash & hash)
{
return "fixed:"
+ makeFileIngestionPrefix(method)
+ hash.to_string(Base32, true);
}
// FIXME Put this somewhere?
template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;
std::string renderContentAddress(ContentAddress ca) {
return std::visit(overloaded {
[](TextHash th) {
return "text:" + th.hash.to_string(Base32, true);
},
[](FixedOutputHash fsh) {
return makeFixedOutputCA(fsh.method, fsh.hash);
}
}, ca);
}
ContentAddress parseContentAddress(std::string_view rawCa) {
auto prefixSeparator = rawCa.find(':');
if (prefixSeparator != string::npos) {
auto prefix = string(rawCa, 0, prefixSeparator);
if (prefix == "text") {
auto hashTypeAndHash = rawCa.substr(prefixSeparator+1, string::npos);
Hash hash = Hash(string(hashTypeAndHash));
if (*hash.type != htSHA256) {
throw Error("parseContentAddress: the text hash should have type SHA256");
}
return TextHash { hash };
} else if (prefix == "fixed") {
// This has to be an inverse of makeFixedOutputCA
auto methodAndHash = rawCa.substr(prefixSeparator+1, string::npos);
if (methodAndHash.substr(0,2) == "r:") {
std::string_view hashRaw = methodAndHash.substr(2,string::npos);
return FixedOutputHash {
.method = FileIngestionMethod::Recursive,
.hash = Hash(string(hashRaw)),
};
} else {
std::string_view hashRaw = methodAndHash;
return FixedOutputHash {
.method = FileIngestionMethod::Flat,
.hash = Hash(string(hashRaw)),
};
}
} else {
throw Error("parseContentAddress: format not recognized; has to be text or fixed");
}
} else {
throw Error("Not a content address because it lacks an appropriate prefix");
}
};
std::optional<ContentAddress> parseContentAddressOpt(std::string_view rawCaOpt) {
return rawCaOpt == "" ? std::optional<ContentAddress> {} : parseContentAddress(rawCaOpt);
};
std::string renderContentAddress(std::optional<ContentAddress> ca) {
return ca ? renderContentAddress(*ca) : "";
}
}
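Not part of the commit: a small usage sketch of the new helpers, assuming content-address.hh is included and a SHA-256 Hash value is at hand; the function name is illustrative and only code introduced in this file is exercised.

    #include <cassert>
    #include <variant>
    #include "content-address.hh"

    // Sketch: render a fixed-output content address and parse it back.
    void roundTrip(const nix::Hash & h /* assumed to be an htSHA256 hash */)
    {
        using namespace nix;
        ContentAddress ca = FixedOutputHash {
            .method = FileIngestionMethod::Recursive,
            .hash = h,
        };
        // The rendered form looks like "fixed:r:sha256:<base32 digest>".
        std::string s = renderContentAddress(ca);
        ContentAddress back = parseContentAddress(s);
        assert(std::get<FixedOutputHash>(back).hash == h);
    }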

View file

@ -0,0 +1,56 @@
#pragma once
#include <variant>
#include "hash.hh"
namespace nix {
enum struct FileIngestionMethod : uint8_t {
Flat = false,
Recursive = true
};
struct TextHash {
Hash hash;
};
/// Pair of a hash, and how the file system was ingested
struct FixedOutputHash {
FileIngestionMethod method;
Hash hash;
std::string printMethodAlgo() const;
};
/*
We've accumulated several types of content-addressed paths over the years;
fixed-output derivations support multiple hash algorithms and serialisation
methods (flat file vs NAR). Thus, ca has one of the following forms:
* text:sha256:<sha256 hash of file contents>: For paths
computed by makeTextPath() / addTextToStore().
* fixed:<r?>:<ht>:<h>: For paths computed by
makeFixedOutputPath() / addToStore().
*/
typedef std::variant<
TextHash, // for paths computed by makeTextPath() / addTextToStore
FixedOutputHash // for paths computed by makeFixedOutputPath
> ContentAddress;
/* Compute the prefix to the hash algorithm which indicates how the files were
ingested. */
std::string makeFileIngestionPrefix(const FileIngestionMethod m);
/* Compute the content-addressability assertion (ValidPathInfo::ca)
for paths created by makeFixedOutputPath() / addToStore(). */
std::string makeFixedOutputCA(FileIngestionMethod method, const Hash & hash);
std::string renderContentAddress(ContentAddress ca);
std::string renderContentAddress(std::optional<ContentAddress> ca);
ContentAddress parseContentAddress(std::string_view rawCa);
std::optional<ContentAddress> parseContentAddressOpt(std::string_view rawCaOpt);
}

View file

@ -78,10 +78,10 @@ struct TunnelLogger : public Logger
if (ei.level > verbosity) return; if (ei.level > verbosity) return;
std::stringstream oss; std::stringstream oss;
oss << ei; showErrorInfo(oss, ei, false);
StringSink buf; StringSink buf;
buf << STDERR_NEXT << oss.str() << "\n"; // (fs.s + "\n"); buf << STDERR_NEXT << oss.str() << "\n";
enqueueMsg(*buf.s); enqueueMsg(*buf.s);
} }
@ -347,6 +347,15 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
break; break;
} }
case wopQueryDerivationOutputMap: {
auto path = store->parseStorePath(readString(from));
logger->startWork();
OutputPathMap outputs = store->queryDerivationOutputMap(path);
logger->stopWork();
writeOutputPathMap(*store, to, outputs);
break;
}
case wopQueryDeriver: { case wopQueryDeriver: {
auto path = store->parseStorePath(readString(from)); auto path = store->parseStorePath(readString(from));
logger->startWork(); logger->startWork();
@ -652,7 +661,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
if (GET_PROTOCOL_MINOR(clientVersion) >= 16) { if (GET_PROTOCOL_MINOR(clientVersion) >= 16) {
to << info->ultimate to << info->ultimate
<< info->sigs << info->sigs
<< info->ca; << renderContentAddress(info->ca);
} }
} else { } else {
assert(GET_PROTOCOL_MINOR(clientVersion) >= 17); assert(GET_PROTOCOL_MINOR(clientVersion) >= 17);
@ -710,7 +719,8 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
info.references = readStorePaths<StorePathSet>(*store, from); info.references = readStorePaths<StorePathSet>(*store, from);
from >> info.registrationTime >> info.narSize >> info.ultimate; from >> info.registrationTime >> info.narSize >> info.ultimate;
info.sigs = readStrings<StringSet>(from); info.sigs = readStrings<StringSet>(from);
from >> info.ca >> repair >> dontCheckSigs; info.ca = parseContentAddressOpt(readString(from));
from >> repair >> dontCheckSigs;
if (!trusted && dontCheckSigs) if (!trusted && dontCheckSigs)
dontCheckSigs = false; dontCheckSigs = false;
if (!trusted) if (!trusted)

View file

@ -8,25 +8,6 @@
namespace nix { namespace nix {
void DerivationOutput::parseHashInfo(FileIngestionMethod & recursive, Hash & hash) const
{
recursive = FileIngestionMethod::Flat;
string algo = hashAlgo;
if (string(algo, 0, 2) == "r:") {
recursive = FileIngestionMethod::Recursive;
algo = string(algo, 2);
}
HashType hashType = parseHashType(algo);
if (hashType == htUnknown)
throw Error("unknown hash algorithm '%s'", algo);
hash = Hash(this->hash, hashType);
}
const StorePath & BasicDerivation::findOutput(const string & id) const const StorePath & BasicDerivation::findOutput(const string & id) const
{ {
auto i = outputs.find(id); auto i = outputs.find(id);
@ -120,6 +101,34 @@ static StringSet parseStrings(std::istream & str, bool arePaths)
} }
static DerivationOutput parseDerivationOutput(const Store & store, istringstream_nocopy & str)
{
expect(str, ","); auto path = store.parseStorePath(parsePath(str));
expect(str, ","); auto hashAlgo = parseString(str);
expect(str, ","); const auto hash = parseString(str);
expect(str, ")");
std::optional<FixedOutputHash> fsh;
if (hashAlgo != "") {
auto method = FileIngestionMethod::Flat;
if (string(hashAlgo, 0, 2) == "r:") {
method = FileIngestionMethod::Recursive;
hashAlgo = string(hashAlgo, 2);
}
const HashType hashType = parseHashType(hashAlgo);
fsh = FixedOutputHash {
.method = std::move(method),
.hash = Hash(hash, hashType),
};
}
return DerivationOutput {
.path = std::move(path),
.hash = std::move(fsh),
};
}
static Derivation parseDerivation(const Store & store, const string & s) static Derivation parseDerivation(const Store & store, const string & s)
{ {
Derivation drv; Derivation drv;
@ -129,15 +138,8 @@ static Derivation parseDerivation(const Store & store, const string & s)
/* Parse the list of outputs. */ /* Parse the list of outputs. */
while (!endOfList(str)) { while (!endOfList(str)) {
expect(str, "("); std::string id = parseString(str); expect(str, "("); std::string id = parseString(str);
expect(str, ","); auto path = store.parseStorePath(parsePath(str)); auto output = parseDerivationOutput(store, str);
expect(str, ","); auto hashAlgo = parseString(str); drv.outputs.emplace(std::move(id), std::move(output));
expect(str, ","); auto hash = parseString(str);
expect(str, ")");
drv.outputs.emplace(id, DerivationOutput {
.path = std::move(path),
.hashAlgo = std::move(hashAlgo),
.hash = std::move(hash)
});
} }
/* Parse the list of input derivations. */ /* Parse the list of input derivations. */
@ -263,8 +265,9 @@ string Derivation::unparse(const Store & store, bool maskOutputs,
if (first) first = false; else s += ','; if (first) first = false; else s += ',';
s += '('; printUnquotedString(s, i.first); s += '('; printUnquotedString(s, i.first);
s += ','; printUnquotedString(s, maskOutputs ? "" : store.printStorePath(i.second.path)); s += ','; printUnquotedString(s, maskOutputs ? "" : store.printStorePath(i.second.path));
s += ','; printUnquotedString(s, i.second.hashAlgo); s += ','; printUnquotedString(s, i.second.hash ? i.second.hash->printMethodAlgo() : "");
s += ','; printUnquotedString(s, i.second.hash); s += ','; printUnquotedString(s,
i.second.hash ? i.second.hash->hash.to_string(Base16, false) : "");
s += ')'; s += ')';
} }
@ -320,7 +323,7 @@ bool BasicDerivation::isFixedOutput() const
{ {
return outputs.size() == 1 && return outputs.size() == 1 &&
outputs.begin()->first == "out" && outputs.begin()->first == "out" &&
outputs.begin()->second.hash != ""; outputs.begin()->second.hash;
} }
@ -353,8 +356,8 @@ Hash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOutput
if (drv.isFixedOutput()) { if (drv.isFixedOutput()) {
DerivationOutputs::const_iterator i = drv.outputs.begin(); DerivationOutputs::const_iterator i = drv.outputs.begin();
return hashString(htSHA256, "fixed:out:" return hashString(htSHA256, "fixed:out:"
+ i->second.hashAlgo + ":" + i->second.hash->printMethodAlgo() + ":"
+ i->second.hash + ":" + i->second.hash->hash.to_string(Base16, false) + ":"
+ store.printStorePath(i->second.path)); + store.printStorePath(i->second.path));
} }
@ -397,6 +400,31 @@ StorePathSet BasicDerivation::outputPaths() const
return paths; return paths;
} }
static DerivationOutput readDerivationOutput(Source & in, const Store & store)
{
auto path = store.parseStorePath(readString(in));
auto hashAlgo = readString(in);
auto hash = readString(in);
std::optional<FixedOutputHash> fsh;
if (hashAlgo != "") {
auto method = FileIngestionMethod::Flat;
if (string(hashAlgo, 0, 2) == "r:") {
method = FileIngestionMethod::Recursive;
hashAlgo = string(hashAlgo, 2);
}
auto hashType = parseHashType(hashAlgo);
fsh = FixedOutputHash {
.method = std::move(method),
.hash = Hash(hash, hashType),
};
}
return DerivationOutput {
.path = std::move(path),
.hash = std::move(fsh),
};
}
StringSet BasicDerivation::outputNames() const StringSet BasicDerivation::outputNames() const
{ {
@ -413,14 +441,8 @@ Source & readDerivation(Source & in, const Store & store, BasicDerivation & drv)
auto nr = readNum<size_t>(in); auto nr = readNum<size_t>(in);
for (size_t n = 0; n < nr; n++) { for (size_t n = 0; n < nr; n++) {
auto name = readString(in); auto name = readString(in);
auto path = store.parseStorePath(readString(in)); auto output = readDerivationOutput(in, store);
auto hashAlgo = readString(in); drv.outputs.emplace(std::move(name), std::move(output));
auto hash = readString(in);
drv.outputs.emplace(name, DerivationOutput {
.path = std::move(path),
.hashAlgo = std::move(hashAlgo),
.hash = std::move(hash)
});
} }
drv.inputSrcs = readStorePaths<StorePathSet>(store, in); drv.inputSrcs = readStorePaths<StorePathSet>(store, in);
@ -441,8 +463,16 @@ Source & readDerivation(Source & in, const Store & store, BasicDerivation & drv)
void writeDerivation(Sink & out, const Store & store, const BasicDerivation & drv) void writeDerivation(Sink & out, const Store & store, const BasicDerivation & drv)
{ {
out << drv.outputs.size(); out << drv.outputs.size();
for (auto & i : drv.outputs) for (auto & i : drv.outputs) {
out << i.first << store.printStorePath(i.second.path) << i.second.hashAlgo << i.second.hash; out << i.first
<< store.printStorePath(i.second.path);
if (i.second.hash) {
out << i.second.hash->printMethodAlgo()
<< i.second.hash->hash.to_string(Base16, false);
} else {
out << "" << "";
}
}
writeStorePaths(store, out, drv.inputSrcs); writeStorePaths(store, out, drv.inputSrcs);
out << drv.platform << drv.builder << drv.args; out << drv.platform << drv.builder << drv.args;
out << drv.env.size(); out << drv.env.size();
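Not part of the commit: a brief illustration of what the new parse/read/write helpers above produce and consume. The store path and digest shown are placeholders.

    // In the ATerm and wire encodings each output stays a 4-tuple
    //   (<name>, <store path>, <method:algo>, <base-16 digest>)
    // e.g. ("out", "/nix/store/...-source", "r:sha256", "<64 hex chars>"),
    // which parseDerivationOutput()/readDerivationOutput() turn into
    //   DerivationOutput { .path = <parsed path>,
    //                      .hash = FixedOutputHash { .method = FileIngestionMethod::Recursive,
    //                                                .hash = Hash(<digest>, htSHA256) } };
    // An ordinary (non-fixed) output carries two empty strings instead and ends up
    // with .hash == std::nullopt, which writeDerivation() mirrors with out << "" << "".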

View file

@ -1,8 +1,9 @@
#pragma once #pragma once
#include "path.hh"
#include "types.hh" #include "types.hh"
#include "hash.hh" #include "hash.hh"
#include "store-api.hh" #include "content-address.hh"
#include <map> #include <map>
@ -15,9 +16,7 @@ namespace nix {
struct DerivationOutput struct DerivationOutput
{ {
StorePath path; StorePath path;
std::string hashAlgo; /* hash used for expected hash computation */ std::optional<FixedOutputHash> hash; /* hash used for expected hash computation */
std::string hash; /* expected hash, may be null */
void parseHashInfo(FileIngestionMethod & recursive, Hash & hash) const;
}; };
typedef std::map<string, DerivationOutput> DerivationOutputs; typedef std::map<string, DerivationOutput> DerivationOutputs;
@ -70,6 +69,7 @@ struct Derivation : BasicDerivation
class Store; class Store;
enum RepairFlag : bool { NoRepair = false, Repair = true };
/* Write a derivation to the Nix store, and return its path. */ /* Write a derivation to the Nix store, and return its path. */
StorePath writeDerivation(ref<Store> store, StorePath writeDerivation(ref<Store> store,

View file

@ -55,7 +55,7 @@ void Store::exportPath(const StorePath & path, Sink & sink)
filesystem corruption from spreading to other machines. filesystem corruption from spreading to other machines.
Don't complain if the stored hash is zero (unknown). */ Don't complain if the stored hash is zero (unknown). */
Hash hash = hashAndWriteSink.currentHash(); Hash hash = hashAndWriteSink.currentHash();
if (hash != info->narHash && info->narHash != Hash(info->narHash.type)) if (hash != info->narHash && info->narHash != Hash(*info->narHash.type))
throw Error("hash of path '%s' has changed from '%s' to '%s'!", throw Error("hash of path '%s' has changed from '%s' to '%s'!",
printStorePath(path), info->narHash.to_string(Base32, true), hash.to_string(Base32, true)); printStorePath(path), info->narHash.to_string(Base32, true), hash.to_string(Base32, true));

View file

@ -35,7 +35,7 @@ Settings::Settings()
, nixLibexecDir(canonPath(getEnv("NIX_LIBEXEC_DIR").value_or(NIX_LIBEXEC_DIR))) , nixLibexecDir(canonPath(getEnv("NIX_LIBEXEC_DIR").value_or(NIX_LIBEXEC_DIR)))
, nixBinDir(canonPath(getEnv("NIX_BIN_DIR").value_or(NIX_BIN_DIR))) , nixBinDir(canonPath(getEnv("NIX_BIN_DIR").value_or(NIX_BIN_DIR)))
, nixManDir(canonPath(NIX_MAN_DIR)) , nixManDir(canonPath(NIX_MAN_DIR))
, nixDaemonSocketFile(canonPath(nixStateDir + DEFAULT_SOCKET_PATH)) , nixDaemonSocketFile(canonPath(getEnv("NIX_DAEMON_SOCKET_PATH").value_or(nixStateDir + DEFAULT_SOCKET_PATH)))
{ {
buildUsersGroup = getuid() == 0 ? "nixbld" : ""; buildUsersGroup = getuid() == 0 ? "nixbld" : "";
lockCPU = getEnv("NIX_AFFINITY_HACK") == "1"; lockCPU = getEnv("NIX_AFFINITY_HACK") == "1";

View file

@ -196,10 +196,6 @@ public:
/* Whether to lock the Nix client and worker to the same CPU. */ /* Whether to lock the Nix client and worker to the same CPU. */
bool lockCPU; bool lockCPU;
/* Whether to show a stack trace if Nix evaluation fails. */
Setting<bool> showTrace{this, false, "show-trace",
"Whether to show a stack trace on evaluation errors."};
Setting<SandboxMode> sandboxMode{this, Setting<SandboxMode> sandboxMode{this,
#if __linux__ #if __linux__
smEnabled smEnabled

View file

@ -114,7 +114,7 @@ struct LegacySSHStore : public Store
if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 4) { if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 4) {
auto s = readString(conn->from); auto s = readString(conn->from);
info->narHash = s.empty() ? Hash() : Hash(s); info->narHash = s.empty() ? Hash() : Hash(s);
conn->from >> info->ca; info->ca = parseContentAddressOpt(readString(conn->from));
info->sigs = readStrings<StringSet>(conn->from); info->sigs = readStrings<StringSet>(conn->from);
} }
@ -146,7 +146,7 @@ struct LegacySSHStore : public Store
<< info.narSize << info.narSize
<< info.ultimate << info.ultimate
<< info.sigs << info.sigs
<< info.ca; << renderContentAddress(info.ca);
try { try {
copyNAR(source, conn->to); copyNAR(source, conn->to);
} catch (...) { } catch (...) {

View file

@ -561,10 +561,12 @@ void LocalStore::checkDerivationOutputs(const StorePath & drvPath, const Derivat
if (out == drv.outputs.end()) if (out == drv.outputs.end())
throw Error("derivation '%s' does not have an output named 'out'", printStorePath(drvPath)); throw Error("derivation '%s' does not have an output named 'out'", printStorePath(drvPath));
FileIngestionMethod method; Hash h; check(
out->second.parseHashInfo(method, h); makeFixedOutputPath(
out->second.hash->method,
check(makeFixedOutputPath(method, h, drvName), out->second.path, "out"); out->second.hash->hash,
drvName),
out->second.path, "out");
} }
else { else {
@ -578,7 +580,7 @@ void LocalStore::checkDerivationOutputs(const StorePath & drvPath, const Derivat
uint64_t LocalStore::addValidPath(State & state, uint64_t LocalStore::addValidPath(State & state,
const ValidPathInfo & info, bool checkOutputs) const ValidPathInfo & info, bool checkOutputs)
{ {
if (info.ca != "" && !info.isContentAddressed(*this)) if (info.ca.has_value() && !info.isContentAddressed(*this))
throw Error("cannot add path '%s' to the Nix store because it claims to be content-addressed but isn't", throw Error("cannot add path '%s' to the Nix store because it claims to be content-addressed but isn't",
printStorePath(info.path)); printStorePath(info.path));
@ -590,7 +592,7 @@ uint64_t LocalStore::addValidPath(State & state,
(info.narSize, info.narSize != 0) (info.narSize, info.narSize != 0)
(info.ultimate ? 1 : 0, info.ultimate) (info.ultimate ? 1 : 0, info.ultimate)
(concatStringsSep(" ", info.sigs), !info.sigs.empty()) (concatStringsSep(" ", info.sigs), !info.sigs.empty())
(info.ca, !info.ca.empty()) (renderContentAddress(info.ca), (bool) info.ca)
.exec(); .exec();
uint64_t id = sqlite3_last_insert_rowid(state.db); uint64_t id = sqlite3_last_insert_rowid(state.db);
@ -664,7 +666,7 @@ void LocalStore::queryPathInfoUncached(const StorePath & path,
if (s) info->sigs = tokenizeString<StringSet>(s, " "); if (s) info->sigs = tokenizeString<StringSet>(s, " ");
s = (const char *) sqlite3_column_text(state->stmtQueryPathInfo, 7); s = (const char *) sqlite3_column_text(state->stmtQueryPathInfo, 7);
if (s) info->ca = s; if (s) info->ca = parseContentAddressOpt(s);
/* Get the references. */ /* Get the references. */
auto useQueryReferences(state->stmtQueryReferences.use()(info->id)); auto useQueryReferences(state->stmtQueryReferences.use()(info->id));
@ -687,7 +689,7 @@ void LocalStore::updatePathInfo(State & state, const ValidPathInfo & info)
(info.narHash.to_string(Base16, true)) (info.narHash.to_string(Base16, true))
(info.ultimate ? 1 : 0, info.ultimate) (info.ultimate ? 1 : 0, info.ultimate)
(concatStringsSep(" ", info.sigs), !info.sigs.empty()) (concatStringsSep(" ", info.sigs), !info.sigs.empty())
(info.ca, !info.ca.empty()) (renderContentAddress(info.ca), (bool) info.ca)
(printStorePath(info.path)) (printStorePath(info.path))
.exec(); .exec();
} }
@ -772,17 +774,20 @@ StorePathSet LocalStore::queryValidDerivers(const StorePath & path)
} }
StorePathSet LocalStore::queryDerivationOutputs(const StorePath & path) OutputPathMap LocalStore::queryDerivationOutputMap(const StorePath & path)
{ {
return retrySQLite<StorePathSet>([&]() { return retrySQLite<OutputPathMap>([&]() {
auto state(_state.lock()); auto state(_state.lock());
auto useQueryDerivationOutputs(state->stmtQueryDerivationOutputs.use() auto useQueryDerivationOutputs(state->stmtQueryDerivationOutputs.use()
(queryValidPathId(*state, path))); (queryValidPathId(*state, path)));
StorePathSet outputs; OutputPathMap outputs;
while (useQueryDerivationOutputs.next()) while (useQueryDerivationOutputs.next())
outputs.insert(parseStorePath(useQueryDerivationOutputs.getStr(1))); outputs.emplace(
useQueryDerivationOutputs.getStr(0),
parseStorePath(useQueryDerivationOutputs.getStr(1))
);
return outputs; return outputs;
}); });
@ -983,15 +988,15 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
deletePath(realPath); deletePath(realPath);
if (info.ca != "" && // text hashing has long been allowed to have non-self-references because it is used for drv files.
!((hasPrefix(info.ca, "text:") && !info.references.count(info.path)) bool refersToSelf = info.references.count(info.path) > 0;
|| info.references.empty())) if (info.ca.has_value() && !info.references.empty() && !(std::holds_alternative<TextHash>(*info.ca) && !refersToSelf))
settings.requireExperimentalFeature("ca-references"); settings.requireExperimentalFeature("ca-references");
/* While restoring the path from the NAR, compute the hash /* While restoring the path from the NAR, compute the hash
of the NAR. */ of the NAR. */
std::unique_ptr<AbstractHashSink> hashSink; std::unique_ptr<AbstractHashSink> hashSink;
if (info.ca == "" || !info.references.count(info.path)) if (!info.ca.has_value() || !info.references.count(info.path))
hashSink = std::make_unique<HashSink>(htSHA256); hashSink = std::make_unique<HashSink>(htSHA256);
else else
hashSink = std::make_unique<HashModuloSink>(htSHA256, std::string(info.path.hashPart())); hashSink = std::make_unique<HashModuloSink>(htSHA256, std::string(info.path.hashPart()));
@ -1077,7 +1082,7 @@ StorePath LocalStore::addToStoreFromDump(const string & dump, const string & nam
ValidPathInfo info(dstPath); ValidPathInfo info(dstPath);
info.narHash = hash.first; info.narHash = hash.first;
info.narSize = hash.second; info.narSize = hash.second;
info.ca = makeFixedOutputCA(method, h); info.ca = FixedOutputHash { .method = method, .hash = h };
registerValidPath(info); registerValidPath(info);
} }
@ -1141,7 +1146,7 @@ StorePath LocalStore::addTextToStore(const string & name, const string & s,
info.narHash = narHash; info.narHash = narHash;
info.narSize = sink.s->size(); info.narSize = sink.s->size();
info.references = references; info.references = references;
info.ca = "text:" + hash.to_string(Base32, true); info.ca = TextHash { .hash = hash };
registerValidPath(info); registerValidPath(info);
} }
@ -1252,10 +1257,10 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
printMsg(lvlTalkative, "checking contents of '%s'", printStorePath(i)); printMsg(lvlTalkative, "checking contents of '%s'", printStorePath(i));
std::unique_ptr<AbstractHashSink> hashSink; std::unique_ptr<AbstractHashSink> hashSink;
if (info->ca == "" || !info->references.count(info->path)) if (!info->ca || !info->references.count(info->path))
hashSink = std::make_unique<HashSink>(info->narHash.type); hashSink = std::make_unique<HashSink>(*info->narHash.type);
else else
hashSink = std::make_unique<HashModuloSink>(info->narHash.type, std::string(info->path.hashPart())); hashSink = std::make_unique<HashModuloSink>(*info->narHash.type, std::string(info->path.hashPart()));
dumpPath(Store::toRealPath(i), *hashSink); dumpPath(Store::toRealPath(i), *hashSink);
auto current = hashSink->finish(); auto current = hashSink->finish();

View file

@ -133,7 +133,7 @@ public:
StorePathSet queryValidDerivers(const StorePath & path) override; StorePathSet queryValidDerivers(const StorePath & path) override;
StorePathSet queryDerivationOutputs(const StorePath & path) override; OutputPathMap queryDerivationOutputMap(const StorePath & path) override;
std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override; std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override;

View file

@ -203,7 +203,7 @@ public:
narInfo->deriver = StorePath(queryNAR.getStr(9)); narInfo->deriver = StorePath(queryNAR.getStr(9));
for (auto & sig : tokenizeString<Strings>(queryNAR.getStr(10), " ")) for (auto & sig : tokenizeString<Strings>(queryNAR.getStr(10), " "))
narInfo->sigs.insert(sig); narInfo->sigs.insert(sig);
narInfo->ca = queryNAR.getStr(11); narInfo->ca = parseContentAddressOpt(queryNAR.getStr(11));
return {oValid, narInfo}; return {oValid, narInfo};
}); });
@ -237,7 +237,7 @@ public:
(concatStringsSep(" ", info->shortRefs())) (concatStringsSep(" ", info->shortRefs()))
(info->deriver ? std::string(info->deriver->to_string()) : "", (bool) info->deriver) (info->deriver ? std::string(info->deriver->to_string()) : "", (bool) info->deriver)
(concatStringsSep(" ", info->sigs)) (concatStringsSep(" ", info->sigs))
(info->ca) (renderContentAddress(info->ca))
(time(0)).exec(); (time(0)).exec();
} else { } else {

View file

@ -67,8 +67,9 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &
else if (name == "Sig") else if (name == "Sig")
sigs.insert(value); sigs.insert(value);
else if (name == "CA") { else if (name == "CA") {
if (!ca.empty()) corrupt(); if (ca) corrupt();
ca = value; // FIXME: allow blank ca or require skipping field?
ca = parseContentAddressOpt(value);
} }
pos = eol + 1; pos = eol + 1;
@ -104,8 +105,8 @@ std::string NarInfo::to_string(const Store & store) const
for (auto sig : sigs) for (auto sig : sigs)
res += "Sig: " + sig + "\n"; res += "Sig: " + sig + "\n";
if (!ca.empty()) if (ca)
res += "CA: " + ca + "\n"; res += "CA: " + renderContentAddress(*ca) + "\n";
return res; return res;
} }

View file

@ -1,4 +1,4 @@
#include "derivations.hh" #include "store-api.hh"
#include <nlohmann/json_fwd.hpp> #include <nlohmann/json_fwd.hpp>

View file

@ -1,5 +1,6 @@
#pragma once #pragma once
#include "content-address.hh"
#include "types.hh" #include "types.hh"
namespace nix { namespace nix {
@ -61,15 +62,11 @@ public:
typedef std::set<StorePath> StorePathSet; typedef std::set<StorePath> StorePathSet;
typedef std::vector<StorePath> StorePaths; typedef std::vector<StorePath> StorePaths;
typedef std::map<string, StorePath> OutputPathMap;
/* Extension of derivations in the Nix store. */ /* Extension of derivations in the Nix store. */
const std::string drvExtension = ".drv"; const std::string drvExtension = ".drv";
enum struct FileIngestionMethod : uint8_t {
Flat = false,
Recursive = true
};
struct StorePathWithOutputs struct StorePathWithOutputs
{ {
StorePath path; StorePath path;

View file

@ -8,6 +8,7 @@
#include "derivations.hh" #include "derivations.hh"
#include "pool.hh" #include "pool.hh"
#include "finally.hh" #include "finally.hh"
#include "logging.hh"
#include <sys/types.h> #include <sys/types.h>
#include <sys/stat.h> #include <sys/stat.h>
@ -38,6 +39,29 @@ void writeStorePaths(const Store & store, Sink & out, const StorePathSet & paths
out << store.printStorePath(i); out << store.printStorePath(i);
} }
std::map<string, StorePath> readOutputPathMap(const Store & store, Source & from)
{
std::map<string, StorePath> pathMap;
auto rawInput = readStrings<Strings>(from);
if (rawInput.size() % 2)
throw Error("got an odd number of elements from the daemon when trying to read a output path map");
auto curInput = rawInput.begin();
while (curInput != rawInput.end()) {
auto thisKey = *curInput++;
auto thisValue = *curInput++;
pathMap.emplace(thisKey, store.parseStorePath(thisValue));
}
return pathMap;
}
void writeOutputPathMap(const Store & store, Sink & out, const std::map<string, StorePath> & pathMap)
{
out << 2*pathMap.size();
for (auto & i : pathMap) {
out << i.first;
out << store.printStorePath(i.second);
}
}
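Not part of the commit: a small illustration of the wire format implemented by the two helpers above; the output name and path are hypothetical.

    // writeOutputPathMap() sends the map as one flat string list whose length is
    // 2 * map.size(), alternating output name and printed store path. For the
    // (hypothetical) map { "out" -> "/nix/store/aaaa-hello" } the stream carries:
    //
    //     2, "out", "/nix/store/aaaa-hello"
    //
    // readOutputPathMap() reads that list back, pairs up consecutive entries and
    // throws if the element count is odd.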
/* TODO: Separate these store impls into different files, give them better names */ /* TODO: Separate these store impls into different files, give them better names */
RemoteStore::RemoteStore(const Params & params) RemoteStore::RemoteStore(const Params & params)
@ -197,7 +221,7 @@ void RemoteStore::setOptions(Connection & conn)
overrides.erase(settings.maxSilentTime.name); overrides.erase(settings.maxSilentTime.name);
overrides.erase(settings.buildCores.name); overrides.erase(settings.buildCores.name);
overrides.erase(settings.useSubstitutes.name); overrides.erase(settings.useSubstitutes.name);
overrides.erase(settings.showTrace.name); overrides.erase(loggerSettings.showTrace.name);
conn.to << overrides.size(); conn.to << overrides.size();
for (auto & i : overrides) for (auto & i : overrides)
conn.to << i.first << i.second.value; conn.to << i.first << i.second.value;
@ -381,7 +405,7 @@ void RemoteStore::queryPathInfoUncached(const StorePath & path,
if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 16) { if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 16) {
conn->from >> info->ultimate; conn->from >> info->ultimate;
info->sigs = readStrings<StringSet>(conn->from); info->sigs = readStrings<StringSet>(conn->from);
conn->from >> info->ca; info->ca = parseContentAddressOpt(readString(conn->from));
} }
} }
callback(std::move(info)); callback(std::move(info));
@ -412,12 +436,24 @@ StorePathSet RemoteStore::queryValidDerivers(const StorePath & path)
StorePathSet RemoteStore::queryDerivationOutputs(const StorePath & path) StorePathSet RemoteStore::queryDerivationOutputs(const StorePath & path)
{ {
auto conn(getConnection()); auto conn(getConnection());
if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 0x16) {
return Store::queryDerivationOutputs(path);
}
conn->to << wopQueryDerivationOutputs << printStorePath(path); conn->to << wopQueryDerivationOutputs << printStorePath(path);
conn.processStderr(); conn.processStderr();
return readStorePaths<StorePathSet>(*this, conn->from); return readStorePaths<StorePathSet>(*this, conn->from);
} }
OutputPathMap RemoteStore::queryDerivationOutputMap(const StorePath & path)
{
auto conn(getConnection());
conn->to << wopQueryDerivationOutputMap << printStorePath(path);
conn.processStderr();
return readOutputPathMap(*this, conn->from);
}
std::optional<StorePath> RemoteStore::queryPathFromHashPart(const std::string & hashPart) std::optional<StorePath> RemoteStore::queryPathFromHashPart(const std::string & hashPart)
{ {
auto conn(getConnection()); auto conn(getConnection());
@ -465,7 +501,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source,
<< info.narHash.to_string(Base16, false); << info.narHash.to_string(Base16, false);
writeStorePaths(*this, conn->to, info.references); writeStorePaths(*this, conn->to, info.references);
conn->to << info.registrationTime << info.narSize conn->to << info.registrationTime << info.narSize
<< info.ultimate << info.sigs << info.ca << info.ultimate << info.sigs << renderContentAddress(info.ca)
<< repair << !checkSigs; << repair << !checkSigs;
bool tunnel = GET_PROTOCOL_MINOR(conn->daemonVersion) >= 21; bool tunnel = GET_PROTOCOL_MINOR(conn->daemonVersion) >= 21;
if (!tunnel) copyNAR(source, conn->to); if (!tunnel) copyNAR(source, conn->to);

View file

@ -51,6 +51,7 @@ public:
StorePathSet queryDerivationOutputs(const StorePath & path) override; StorePathSet queryDerivationOutputs(const StorePath & path) override;
OutputPathMap queryDerivationOutputMap(const StorePath & path) override;
std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override; std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override;
StorePathSet querySubstitutablePaths(const StorePathSet & paths) override; StorePathSet querySubstitutablePaths(const StorePathSet & paths) override;

View file

@ -173,20 +173,20 @@ static std::string makeType(
StorePath Store::makeFixedOutputPath( StorePath Store::makeFixedOutputPath(
FileIngestionMethod recursive, FileIngestionMethod method,
const Hash & hash, const Hash & hash,
std::string_view name, std::string_view name,
const StorePathSet & references, const StorePathSet & references,
bool hasSelfReference) const bool hasSelfReference) const
{ {
if (hash.type == htSHA256 && recursive == FileIngestionMethod::Recursive) { if (hash.type == htSHA256 && method == FileIngestionMethod::Recursive) {
return makeStorePath(makeType(*this, "source", references, hasSelfReference), hash, name); return makeStorePath(makeType(*this, "source", references, hasSelfReference), hash, name);
} else { } else {
assert(references.empty()); assert(references.empty());
return makeStorePath("output:out", return makeStorePath("output:out",
hashString(htSHA256, hashString(htSHA256,
"fixed:out:" "fixed:out:"
+ (recursive == FileIngestionMethod::Recursive ? (string) "r:" : "") + makeFileIngestionPrefix(method)
+ hash.to_string(Base16, true) + ":"), + hash.to_string(Base16, true) + ":"),
name); name);
} }
@ -242,6 +242,16 @@ bool Store::PathInfoCacheValue::isKnownNow()
return std::chrono::steady_clock::now() < time_point + ttl; return std::chrono::steady_clock::now() < time_point + ttl;
} }
StorePathSet Store::queryDerivationOutputs(const StorePath & path)
{
auto outputMap = this->queryDerivationOutputMap(path);
StorePathSet outputPaths;
for (auto & i: outputMap) {
outputPaths.emplace(std::move(i.second));
}
return outputPaths;
}
bool Store::isValidPath(const StorePath & storePath) bool Store::isValidPath(const StorePath & storePath)
{ {
std::string hashPart(storePath.hashPart()); std::string hashPart(storePath.hashPart());
@ -471,8 +481,8 @@ void Store::pathInfoToJSON(JSONPlaceholder & jsonOut, const StorePathSet & store
jsonRefs.elem(printStorePath(ref)); jsonRefs.elem(printStorePath(ref));
} }
if (info->ca != "") if (info->ca)
jsonPath.attr("ca", info->ca); jsonPath.attr("ca", renderContentAddress(info->ca));
std::pair<uint64_t, uint64_t> closureSizes; std::pair<uint64_t, uint64_t> closureSizes;
@ -757,41 +767,35 @@ void ValidPathInfo::sign(const Store & store, const SecretKey & secretKey)
sigs.insert(secretKey.signDetached(fingerprint(store))); sigs.insert(secretKey.signDetached(fingerprint(store)));
} }
// FIXME Put this somewhere?
template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;
bool ValidPathInfo::isContentAddressed(const Store & store) const bool ValidPathInfo::isContentAddressed(const Store & store) const
{ {
auto warn = [&]() { if (! ca) return false;
logWarning(
ErrorInfo{
.name = "Path not content-addressed",
.hint = hintfmt("path '%s' claims to be content-addressed but isn't", store.printStorePath(path))
});
};
if (hasPrefix(ca, "text:")) { auto caPath = std::visit(overloaded {
Hash hash(ca.substr(5)); [&](TextHash th) {
if (store.makeTextPath(path.name(), hash, references) == path) return store.makeTextPath(path.name(), th.hash, references);
return true; },
else [&](FixedOutputHash fsh) {
warn();
}
else if (hasPrefix(ca, "fixed:")) {
FileIngestionMethod recursive { ca.compare(6, 2, "r:") == 0 };
Hash hash(ca.substr(recursive == FileIngestionMethod::Recursive ? 8 : 6));
auto refs = references; auto refs = references;
bool hasSelfReference = false; bool hasSelfReference = false;
if (refs.count(path)) { if (refs.count(path)) {
hasSelfReference = true; hasSelfReference = true;
refs.erase(path); refs.erase(path);
} }
if (store.makeFixedOutputPath(recursive, hash, path.name(), refs, hasSelfReference) == path) return store.makeFixedOutputPath(fsh.method, fsh.hash, path.name(), refs, hasSelfReference);
return true;
else
warn();
} }
}, *ca);
return false; bool res = caPath == path;
if (!res)
printError("warning: path '%s' claims to be content-addressed but isn't", store.printStorePath(path));
return res;
} }
@ -822,14 +826,6 @@ Strings ValidPathInfo::shortRefs() const
} }
std::string makeFixedOutputCA(FileIngestionMethod recursive, const Hash & hash)
{
return "fixed:"
+ (recursive == FileIngestionMethod::Recursive ? (std::string) "r:" : "")
+ hash.to_string(Base32, true);
}
} }
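Not part of the commit: a sketch of the two branches of makeFixedOutputPath() after the rename from `recursive` to `method`; the hash values mentioned are placeholders.

    // Recursive ingestion with SHA-256 yields an ordinary "source" path:
    //     makeStorePath(makeType(*this, "source", references, hasSelfReference), hash, name)
    // Every other combination first folds the content hash into a fixed string and
    // hashes that again, with no references allowed:
    //     inner = "fixed:out:" + makeFileIngestionPrefix(method)   // "" or "r:"
    //           + hash.to_string(Base16, true) + ":";              // e.g. "sha1:<hex>:"
    //     makeStorePath("output:out", hashString(htSHA256, inner), name);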

View file

@ -2,12 +2,14 @@
#include "path.hh" #include "path.hh"
#include "hash.hh" #include "hash.hh"
#include "content-address.hh"
#include "serialise.hh" #include "serialise.hh"
#include "crypto.hh" #include "crypto.hh"
#include "lru-cache.hh" #include "lru-cache.hh"
#include "sync.hh" #include "sync.hh"
#include "globals.hh" #include "globals.hh"
#include "config.hh" #include "config.hh"
#include "derivations.hh"
#include <atomic> #include <atomic>
#include <limits> #include <limits>
@ -17,6 +19,7 @@
#include <memory> #include <memory>
#include <string> #include <string>
#include <chrono> #include <chrono>
#include <variant>
namespace nix { namespace nix {
@ -31,15 +34,12 @@ MakeError(SubstituterDisabled, Error);
MakeError(NotInStore, Error); MakeError(NotInStore, Error);
struct BasicDerivation;
struct Derivation;
class FSAccessor; class FSAccessor;
class NarInfoDiskCache; class NarInfoDiskCache;
class Store; class Store;
class JSONPlaceholder; class JSONPlaceholder;
enum RepairFlag : bool { NoRepair = false, Repair = true };
enum CheckSigsFlag : bool { NoCheckSigs = false, CheckSigs = true }; enum CheckSigsFlag : bool { NoCheckSigs = false, CheckSigs = true };
enum SubstituteFlag : bool { NoSubstitute = false, Substitute = true }; enum SubstituteFlag : bool { NoSubstitute = false, Substitute = true };
enum AllowInvalidFlag : bool { DisallowInvalid = false, AllowInvalid = true }; enum AllowInvalidFlag : bool { DisallowInvalid = false, AllowInvalid = true };
@ -111,7 +111,6 @@ struct SubstitutablePathInfo
typedef std::map<StorePath, SubstitutablePathInfo> SubstitutablePathInfos; typedef std::map<StorePath, SubstitutablePathInfo> SubstitutablePathInfos;
struct ValidPathInfo struct ValidPathInfo
{ {
StorePath path; StorePath path;
@ -140,21 +139,11 @@ struct ValidPathInfo
that a particular output path was produced by a derivation; the that a particular output path was produced by a derivation; the
path then implies the contents.) path then implies the contents.)
Ideally, the content-addressability assertion would just be a Ideally, the content-addressability assertion would just be a Boolean,
Boolean, and the store path would be computed from and the store path would be computed from the name component, narHash
the name component, narHash and references. However, and references. However, we support many types of content addresses.
1) we've accumulated several types of content-addressed paths
over the years; and 2) fixed-output derivations support
multiple hash algorithms and serialisation methods (flat file
vs NAR). Thus, ca has one of the following forms:
* text:sha256:<sha256 hash of file contents>: For paths
computed by makeTextPath() / addTextToStore().
* fixed:<r?>:<ht>:<h>: For paths computed by
makeFixedOutputPath() / addToStore().
*/ */
std::string ca; std::optional<ContentAddress> ca;
bool operator == (const ValidPathInfo & i) const bool operator == (const ValidPathInfo & i) const
{ {
@ -189,9 +178,10 @@ struct ValidPathInfo
Strings shortRefs() const; Strings shortRefs() const;
ValidPathInfo(const StorePath & path) : path(path) { } ValidPathInfo(const ValidPathInfo & other) = default;
ValidPathInfo(StorePath && path) : path(std::move(path)) { } ValidPathInfo(StorePath && path) : path(std::move(path)) { };
ValidPathInfo(const StorePath & path) : path(path) { };
virtual ~ValidPathInfo() { } virtual ~ValidPathInfo() { }
}; };
@ -428,8 +418,11 @@ public:
virtual StorePathSet queryValidDerivers(const StorePath & path) { return {}; }; virtual StorePathSet queryValidDerivers(const StorePath & path) { return {}; };
/* Query the outputs of the derivation denoted by `path'. */ /* Query the outputs of the derivation denoted by `path'. */
virtual StorePathSet queryDerivationOutputs(const StorePath & path) virtual StorePathSet queryDerivationOutputs(const StorePath & path);
{ unsupported("queryDerivationOutputs"); }
/* Query the mapping outputName=>outputPath for the given derivation */
virtual OutputPathMap queryDerivationOutputMap(const StorePath & path)
{ unsupported("queryDerivationOutputMap"); }
/* Query the full store path given the hash part of a valid store /* Query the full store path given the hash part of a valid store
path, or empty if the path doesn't exist. */ path, or empty if the path doesn't exist. */
@ -838,12 +831,6 @@ std::optional<ValidPathInfo> decodeValidPathInfo(
std::istream & str, std::istream & str,
bool hashGiven = false); bool hashGiven = false);
/* Compute the content-addressability assertion (ValidPathInfo::ca)
for paths created by makeFixedOutputPath() / addToStore(). */
std::string makeFixedOutputCA(FileIngestionMethod method, const Hash & hash);
/* Split URI into protocol+hierarchy part and its parameter set. */ /* Split URI into protocol+hierarchy part and its parameter set. */
std::pair<std::string, Store::Params> splitUriAndParams(const std::string & uri); std::pair<std::string, Store::Params> splitUriAndParams(const std::string & uri);

View file

@ -6,7 +6,7 @@ namespace nix {
#define WORKER_MAGIC_1 0x6e697863 #define WORKER_MAGIC_1 0x6e697863
#define WORKER_MAGIC_2 0x6478696f #define WORKER_MAGIC_2 0x6478696f
#define PROTOCOL_VERSION 0x115 #define PROTOCOL_VERSION 0x116
#define GET_PROTOCOL_MAJOR(x) ((x) & 0xff00) #define GET_PROTOCOL_MAJOR(x) ((x) & 0xff00)
#define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff) #define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff)
@ -30,7 +30,7 @@ typedef enum {
wopSetOptions = 19, wopSetOptions = 19,
wopCollectGarbage = 20, wopCollectGarbage = 20,
wopQuerySubstitutablePathInfo = 21, wopQuerySubstitutablePathInfo = 21,
wopQueryDerivationOutputs = 22, wopQueryDerivationOutputs = 22, // obsolete
wopQueryAllValidPaths = 23, wopQueryAllValidPaths = 23,
wopQueryFailedPaths = 24, wopQueryFailedPaths = 24,
wopClearFailedPaths = 25, wopClearFailedPaths = 25,
@ -49,6 +49,7 @@ typedef enum {
wopNarFromPath = 38, wopNarFromPath = 38,
wopAddToStoreNar = 39, wopAddToStoreNar = 39,
wopQueryMissing = 40, wopQueryMissing = 40,
wopQueryDerivationOutputMap = 41,
} WorkerOp; } WorkerOp;
@ -69,5 +70,6 @@ template<class T> T readStorePaths(const Store & store, Source & from);
void writeStorePaths(const Store & store, Sink & out, const StorePathSet & paths); void writeStorePaths(const Store & store, Sink & out, const StorePathSet & paths);
void writeOutputPathMap(const Store & store, Sink & out, const OutputPathMap & paths);
} }

View file

@ -262,7 +262,7 @@ static void parse(ParseSink & sink, Source & source, const Path & path)
names[name] = 0; names[name] = 0;
} }
} else if (s == "node") { } else if (s == "node") {
if (s.empty()) throw badArchive("entry name missing"); if (name.empty()) throw badArchive("entry name missing");
parse(sink, source, path + "/" + name); parse(sink, source, path + "/" + name);
} else } else
throw badArchive("unknown field " + s); throw badArchive("unknown field " + s);

View file

@ -162,8 +162,18 @@ Args::Flag Args::Flag::mkHashTypeFlag(std::string && longName, HashType * ht)
.labels = {"hash-algo"}, .labels = {"hash-algo"},
.handler = {[ht](std::string s) { .handler = {[ht](std::string s) {
*ht = parseHashType(s); *ht = parseHashType(s);
if (*ht == htUnknown) }}
throw UsageError("unknown hash type '%1%'", s); };
}
Args::Flag Args::Flag::mkHashTypeOptFlag(std::string && longName, std::optional<HashType> * oht)
{
return Flag {
.longName = std::move(longName),
.description = "hash algorithm ('md5', 'sha1', 'sha256', or 'sha512'). Optional as can also be gotten from SRI hash itself.",
.labels = {"hash-algo"},
.handler = {[oht](std::string s) {
*oht = std::optional<HashType> { parseHashType(s) };
}} }}
}; };
} }

View file

@ -85,6 +85,7 @@ protected:
Handler handler; Handler handler;
static Flag mkHashTypeFlag(std::string && longName, HashType * ht); static Flag mkHashTypeFlag(std::string && longName, HashType * ht);
static Flag mkHashTypeOptFlag(std::string && longName, std::optional<HashType> * oht);
}; };
std::map<std::string, Flag::ptr> longFlags; std::map<std::string, Flag::ptr> longFlags;

View file

@ -7,14 +7,11 @@
namespace nix { namespace nix {
const std::string nativeSystem = SYSTEM; const std::string nativeSystem = SYSTEM;
// addPrefix is used for show-trace. Strings added with addPrefix BaseError & BaseError::addTrace(std::optional<ErrPos> e, hintformat hint)
// will print ahead of the error itself.
BaseError & BaseError::addPrefix(const FormatOrString & fs)
{ {
prefix_ = fs.s + prefix_; err.traces.push_front(Trace { .pos = e, .hint = hint});
return *this; return *this;
} }
@ -28,7 +25,7 @@ const string& BaseError::calcWhat() const
err.name = sname(); err.name = sname();
std::ostringstream oss; std::ostringstream oss;
oss << err; showErrorInfo(oss, err, false);
what_ = oss.str(); what_ = oss.str();
return *what_; return *what_;
@ -56,28 +53,114 @@ string showErrPos(const ErrPos &errPos)
} }
} }
// if nixCode contains lines of code, print them to the ostream, indicating the error column. std::optional<LinesOfCode> getCodeLines(const ErrPos &errPos)
void printCodeLines(std::ostream &out, const string &prefix, const NixCode &nixCode) {
if (errPos.line <= 0)
return std::nullopt;
if (errPos.origin == foFile) {
LinesOfCode loc;
try {
AutoCloseFD fd = open(errPos.file.c_str(), O_RDONLY | O_CLOEXEC);
if (!fd) {
logError(SysError("opening file '%1%'", errPos.file).info());
return std::nullopt;
}
else
{
// count the newlines.
int count = 0;
string line;
int pl = errPos.line - 1;
do
{
line = readLine(fd.get());
++count;
if (count < pl)
{
;
}
else if (count == pl) {
loc.prevLineOfCode = line;
} else if (count == pl + 1) {
loc.errLineOfCode = line;
} else if (count == pl + 2) {
loc.nextLineOfCode = line;
break;
}
} while (true);
return loc;
}
}
catch (EndOfFile &eof) {
if (loc.errLineOfCode.has_value())
return loc;
else
return std::nullopt;
}
catch (std::exception &e) {
printError("error reading nix file: %s\n%s", errPos.file, e.what());
return std::nullopt;
}
} else {
std::istringstream iss(errPos.file);
// count the newlines.
int count = 0;
string line;
int pl = errPos.line - 1;
LinesOfCode loc;
do
{
std::getline(iss, line);
++count;
if (count < pl)
{
;
}
else if (count == pl) {
loc.prevLineOfCode = line;
} else if (count == pl + 1) {
loc.errLineOfCode = line;
} else if (count == pl + 2) {
loc.nextLineOfCode = line;
break;
}
if (!iss.good())
break;
} while (true);
return loc;
}
}
// print lines of code to the ostream, indicating the error column.
void printCodeLines(std::ostream &out,
const string &prefix,
const ErrPos &errPos,
const LinesOfCode &loc)
{ {
// previous line of code. // previous line of code.
if (nixCode.prevLineOfCode.has_value()) { if (loc.prevLineOfCode.has_value()) {
out << std::endl out << std::endl
<< fmt("%1% %|2$5d|| %3%", << fmt("%1% %|2$5d|| %3%",
prefix, prefix,
(nixCode.errPos.line - 1), (errPos.line - 1),
*nixCode.prevLineOfCode); *loc.prevLineOfCode);
} }
if (nixCode.errLineOfCode.has_value()) { if (loc.errLineOfCode.has_value()) {
// line of code containing the error. // line of code containing the error.
out << std::endl out << std::endl
<< fmt("%1% %|2$5d|| %3%", << fmt("%1% %|2$5d|| %3%",
prefix, prefix,
(nixCode.errPos.line), (errPos.line),
*nixCode.errLineOfCode); *loc.errLineOfCode);
// error arrows for the column range. // error arrows for the column range.
if (nixCode.errPos.column > 0) { if (errPos.column > 0) {
int start = nixCode.errPos.column; int start = errPos.column;
std::string spaces; std::string spaces;
for (int i = 0; i < start; ++i) { for (int i = 0; i < start; ++i) {
spaces.append(" "); spaces.append(" ");
@ -94,16 +177,42 @@ void printCodeLines(std::ostream &out, const string &prefix, const NixCode &nixC
} }
// next line of code. // next line of code.
if (nixCode.nextLineOfCode.has_value()) { if (loc.nextLineOfCode.has_value()) {
out << std::endl out << std::endl
<< fmt("%1% %|2$5d|| %3%", << fmt("%1% %|2$5d|| %3%",
prefix, prefix,
(nixCode.errPos.line + 1), (errPos.line + 1),
*nixCode.nextLineOfCode); *loc.nextLineOfCode);
} }
} }
std::ostream& operator<<(std::ostream &out, const ErrorInfo &einfo) void printAtPos(const string &prefix, const ErrPos &pos, std::ostream &out)
{
if (pos)
{
switch (pos.origin) {
case foFile: {
out << prefix << ANSI_BLUE << "at: " << ANSI_YELLOW << showErrPos(pos) <<
ANSI_BLUE << " in file: " << ANSI_NORMAL << pos.file;
break;
}
case foString: {
out << prefix << ANSI_BLUE << "at: " << ANSI_YELLOW << showErrPos(pos) <<
ANSI_BLUE << " from string" << ANSI_NORMAL;
break;
}
case foStdin: {
out << prefix << ANSI_BLUE << "at: " << ANSI_YELLOW << showErrPos(pos) <<
ANSI_BLUE << " from stdin" << ANSI_NORMAL;
break;
}
default:
throw Error("invalid FileOrigin in errPos");
}
}
}
std::ostream& showErrorInfo(std::ostream &out, const ErrorInfo &einfo, bool showTrace)
{ {
auto errwidth = std::max<size_t>(getWindowSize().second, 20); auto errwidth = std::max<size_t>(getWindowSize().second, 20);
string prefix = ""; string prefix = "";
@ -158,8 +267,12 @@ std::ostream& operator<<(std::ostream &out, const ErrorInfo &einfo)
} }
} }
auto ndl = prefix.length() + levelString.length() + 3 + einfo.name.length() + einfo.programName.value_or("").length(); auto ndl = prefix.length()
auto dashwidth = ndl > (errwidth - 3) ? 3 : errwidth - ndl; + filterANSIEscapes(levelString, true).length()
+ 7
+ einfo.name.length()
+ einfo.programName.value_or("").length();
auto dashwidth = std::max<int>(errwidth - ndl, 3);
std::string dashes(dashwidth, '-'); std::string dashes(dashwidth, '-');
@ -179,16 +292,9 @@ std::ostream& operator<<(std::ostream &out, const ErrorInfo &einfo)
einfo.programName.value_or("")); einfo.programName.value_or(""));
bool nl = false; // intersperse newline between sections. bool nl = false; // intersperse newline between sections.
if (einfo.nixCode.has_value()) { if (einfo.errPos.has_value() && (*einfo.errPos)) {
if (einfo.nixCode->errPos.file != "") { out << prefix << std::endl;
// filename, line, column. printAtPos(prefix, *einfo.errPos, out);
out << std::endl << fmt("%1%in file: " ANSI_BLUE "%2% %3%" ANSI_NORMAL,
prefix,
einfo.nixCode->errPos.file,
showErrPos(einfo.nixCode->errPos));
} else {
out << std::endl << fmt("%1%from command line argument", prefix);
}
nl = true; nl = true;
} }
@ -200,13 +306,17 @@ std::ostream& operator<<(std::ostream &out, const ErrorInfo &einfo)
nl = true; nl = true;
} }
if (einfo.errPos.has_value() && (*einfo.errPos)) {
auto loc = getCodeLines(*einfo.errPos);
// lines of code. // lines of code.
if (einfo.nixCode.has_value() && einfo.nixCode->errLineOfCode.has_value()) { if (loc.has_value()) {
if (nl) if (nl)
out << std::endl << prefix; out << std::endl << prefix;
printCodeLines(out, prefix, *einfo.nixCode); printCodeLines(out, prefix, *einfo.errPos, *loc);
nl = true; nl = true;
} }
}
// hint // hint
if (einfo.hint.has_value()) { if (einfo.hint.has_value()) {
@ -216,6 +326,59 @@ std::ostream& operator<<(std::ostream &out, const ErrorInfo &einfo)
nl = true; nl = true;
} }
// traces
if (showTrace && !einfo.traces.empty())
{
const string tracetitle(" show-trace ");
int fill = errwidth - tracetitle.length();
int lw = 0;
int rw = 0;
const int min_dashes = 3;
if (fill > min_dashes * 2) {
if (fill % 2 != 0) {
lw = fill / 2;
rw = lw + 1;
}
else
{
lw = rw = fill / 2;
}
}
else
lw = rw = min_dashes;
if (nl)
out << std::endl << prefix;
out << ANSI_BLUE << std::string(lw, '-') << tracetitle << std::string(rw, '-') << ANSI_NORMAL;
for (auto iter = einfo.traces.rbegin(); iter != einfo.traces.rend(); ++iter)
{
try {
out << std::endl << prefix;
out << ANSI_BLUE << "trace: " << ANSI_NORMAL << iter->hint.str();
nl = true;
if (*iter->pos) {
auto pos = iter->pos.value();
out << std::endl << prefix;
printAtPos(prefix, pos, out);
auto loc = getCodeLines(pos);
if (loc.has_value())
{
out << std::endl << prefix;
printCodeLines(out, prefix, pos, *loc);
out << std::endl << prefix;
}
}
} catch(const std::bad_optional_access& e) {
out << iter->hint.str() << std::endl;
}
}
}
return out; return out;
} }
} }

View file

@ -25,20 +25,20 @@ namespace nix {
/* /*
This file defines two main structs/classes used in nix error handling. This file defines two main structs/classes used in nix error handling.
ErrorInfo provides a standard payload of error information, with conversion to string ErrorInfo provides a standard payload of error information, with conversion to string
happening in the logger rather than at the call site. happening in the logger rather than at the call site.
BaseError is the ancestor of nix specific exceptions (and Interrupted), and contains BaseError is the ancestor of nix specific exceptions (and Interrupted), and contains
an ErrorInfo. an ErrorInfo.
ErrorInfo structs are sent to the logger as part of an exception, or directly with the ErrorInfo structs are sent to the logger as part of an exception, or directly with the
logError or logWarning macros. logError or logWarning macros.
See the error-demo.cc program for usage examples. See the error-demo.cc program for usage examples.
*/ */
typedef enum { typedef enum {
lvlError = 0, lvlError = 0,
@ -50,11 +50,25 @@ typedef enum {
lvlVomit lvlVomit
} Verbosity; } Verbosity;
typedef enum {
foFile,
foStdin,
foString
} FileOrigin;
// the lines of code surrounding an error.
struct LinesOfCode {
std::optional<string> prevLineOfCode;
std::optional<string> errLineOfCode;
std::optional<string> nextLineOfCode;
};
// ErrPos indicates the location of an error in a nix file. // ErrPos indicates the location of an error in a nix file.
struct ErrPos { struct ErrPos {
int line = 0; int line = 0;
int column = 0; int column = 0;
string file; string file;
FileOrigin origin;
operator bool() const operator bool() const
{ {
@ -65,9 +79,14 @@ struct ErrPos {
template <class P> template <class P>
ErrPos& operator=(const P &pos) ErrPos& operator=(const P &pos)
{ {
origin = pos.origin;
line = pos.line; line = pos.line;
column = pos.column; column = pos.column;
// is file symbol null?
if (pos.file.set())
file = pos.file; file = pos.file;
else
file = "";
return *this; return *this;
} }
@ -78,11 +97,9 @@ struct ErrPos {
} }
}; };
struct NixCode { struct Trace {
ErrPos errPos; std::optional<ErrPos> pos;
std::optional<string> prevLineOfCode; hintformat hint;
std::optional<string> errLineOfCode;
std::optional<string> nextLineOfCode;
}; };
struct ErrorInfo { struct ErrorInfo {
@ -90,19 +107,19 @@ struct ErrorInfo {
string name; string name;
string description; string description;
std::optional<hintformat> hint; std::optional<hintformat> hint;
std::optional<NixCode> nixCode; std::optional<ErrPos> errPos;
std::list<Trace> traces;
static std::optional<string> programName; static std::optional<string> programName;
}; };
std::ostream& operator<<(std::ostream &out, const ErrorInfo &einfo); std::ostream& showErrorInfo(std::ostream &out, const ErrorInfo &einfo, bool showTrace);
/* BaseError should generally not be caught, as it has Interrupted as /* BaseError should generally not be caught, as it has Interrupted as
a subclass. Catch Error instead. */ a subclass. Catch Error instead. */
class BaseError : public std::exception class BaseError : public std::exception
{ {
protected: protected:
string prefix_; // used for location traces etc.
mutable ErrorInfo err; mutable ErrorInfo err;
mutable std::optional<string> what_; mutable std::optional<string> what_;
@ -113,7 +130,7 @@ public:
template<typename... Args> template<typename... Args>
BaseError(unsigned int status, const Args & ... args) BaseError(unsigned int status, const Args & ... args)
: err { .level = lvlError, : err {.level = lvlError,
.hint = hintfmt(args...) .hint = hintfmt(args...)
} }
, status(status) , status(status)
@ -121,13 +138,13 @@ public:
template<typename... Args> template<typename... Args>
BaseError(const std::string & fs, const Args & ... args) BaseError(const std::string & fs, const Args & ... args)
: err { .level = lvlError, : err {.level = lvlError,
.hint = hintfmt(fs, args...) .hint = hintfmt(fs, args...)
} }
{ } { }
BaseError(hintformat hint) BaseError(hintformat hint)
: err { .level = lvlError, : err {.level = lvlError,
.hint = hint .hint = hint
} }
{ } { }
@ -150,10 +167,17 @@ public:
#endif #endif
const string & msg() const { return calcWhat(); } const string & msg() const { return calcWhat(); }
const string & prefix() const { return prefix_; }
BaseError & addPrefix(const FormatOrString & fs);
const ErrorInfo & info() { calcWhat(); return err; } const ErrorInfo & info() { calcWhat(); return err; }
template<typename... Args>
BaseError & addTrace(std::optional<ErrPos> e, const string &fs, const Args & ... args)
{
return addTrace(e, hintfmt(fs, args...));
}
BaseError & addTrace(std::optional<ErrPos> e, hintformat hint);
bool hasTrace() const { return !err.traces.empty(); }
}; };
#define MakeError(newClass, superClass) \ #define MakeError(newClass, superClass) \


@ -1,6 +1,7 @@
#pragma once #pragma once
#include <boost/format.hpp> #include <boost/format.hpp>
#include <boost/algorithm/string/replace.hpp>
#include <string> #include <string>
#include "ansicolor.hh" #include "ansicolor.hh"
@ -103,7 +104,9 @@ class hintformat
public: public:
hintformat(const string &format) :fmt(format) hintformat(const string &format) :fmt(format)
{ {
fmt.exceptions(boost::io::all_error_bits ^ boost::io::too_many_args_bit); fmt.exceptions(boost::io::all_error_bits ^
boost::io::too_many_args_bit ^
boost::io::too_few_args_bit);
} }
hintformat(const hintformat &hf) hintformat(const hintformat &hf)
@ -117,6 +120,13 @@ public:
return *this; return *this;
} }
template<class T>
hintformat& operator%(const normaltxt<T> &value)
{
fmt % value.value;
return *this;
}
std::string str() const std::string str() const
{ {
return fmt.str(); return fmt.str();
@ -136,4 +146,9 @@ inline hintformat hintfmt(const std::string & fs, const Args & ... args)
return f; return f;
} }
inline hintformat hintfmt(std::string plain_string)
{
// we won't be receiving any args in this case, so just print the original string
return hintfmt("%s", normaltxt(plain_string));
}
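// Rough illustration of the relaxed behaviour above (mirrors the unit tests
// later in this change; output descriptions are paraphrased, not authoritative):
//
//   hintfmt("this is 100%s correct!").str();
//       // plain string, no args: passed through unchanged
//   hintfmt("only one arg %1% %2%", "fulfilled").str();
//       // missing %2% no longer throws; it is simply left empty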
} }


@ -4,6 +4,7 @@
#include <openssl/md5.h> #include <openssl/md5.h>
#include <openssl/sha.h> #include <openssl/sha.h>
#include "args.hh"
#include "hash.hh" #include "hash.hh"
#include "archive.hh" #include "archive.hh"
#include "util.hh" #include "util.hh"
@ -18,11 +19,13 @@ namespace nix {
void Hash::init() void Hash::init()
{ {
if (type == htMD5) hashSize = md5HashSize; assert(type);
else if (type == htSHA1) hashSize = sha1HashSize; switch (*type) {
else if (type == htSHA256) hashSize = sha256HashSize; case htMD5: hashSize = md5HashSize; break;
else if (type == htSHA512) hashSize = sha512HashSize; case htSHA1: hashSize = sha1HashSize; break;
else abort(); case htSHA256: hashSize = sha256HashSize; break;
case htSHA512: hashSize = sha512HashSize; break;
}
assert(hashSize <= maxHashSize); assert(hashSize <= maxHashSize);
memset(hash, 0, maxHashSize); memset(hash, 0, maxHashSize);
} }
@ -98,15 +101,22 @@ static string printHash32(const Hash & hash)
string printHash16or32(const Hash & hash) string printHash16or32(const Hash & hash)
{ {
assert(hash.type);
return hash.to_string(hash.type == htMD5 ? Base16 : Base32, false); return hash.to_string(hash.type == htMD5 ? Base16 : Base32, false);
} }
HashType assertInitHashType(const Hash & h)
{
assert(h.type);
return *h.type;
}
std::string Hash::to_string(Base base, bool includeType) const std::string Hash::to_string(Base base, bool includeType) const
{ {
std::string s; std::string s;
if (base == SRI || includeType) { if (base == SRI || includeType) {
s += printHashType(type); s += printHashType(assertInitHashType(*this));
s += base == SRI ? '-' : ':'; s += base == SRI ? '-' : ':';
} }
switch (base) { switch (base) {
@ -124,8 +134,10 @@ std::string Hash::to_string(Base base, bool includeType) const
return s; return s;
} }
Hash::Hash(std::string_view s, HashType type) : Hash(s, std::optional { type }) { }
Hash::Hash(std::string_view s) : Hash(s, std::optional<HashType>{}) { }
Hash::Hash(std::string_view s, HashType type) Hash::Hash(std::string_view s, std::optional<HashType> type)
: type(type) : type(type)
{ {
size_t pos = 0; size_t pos = 0;
@ -136,17 +148,17 @@ Hash::Hash(std::string_view s, HashType type)
sep = s.find('-'); sep = s.find('-');
if (sep != string::npos) { if (sep != string::npos) {
isSRI = true; isSRI = true;
} else if (type == htUnknown) } else if (! type)
throw BadHash("hash '%s' does not include a type", s); throw BadHash("hash '%s' does not include a type", s);
} }
if (sep != string::npos) { if (sep != string::npos) {
string hts = string(s, 0, sep); string hts = string(s, 0, sep);
this->type = parseHashType(hts); this->type = parseHashType(hts);
if (this->type == htUnknown) if (!this->type)
throw BadHash("unknown hash type '%s'", hts); throw BadHash("unknown hash type '%s'", hts);
if (type != htUnknown && type != this->type) if (type && type != this->type)
throw BadHash("hash '%s' should have type '%s'", s, printHashType(type)); throw BadHash("hash '%s' should have type '%s'", s, printHashType(*type));
pos = sep + 1; pos = sep + 1;
} }
@ -202,13 +214,15 @@ Hash::Hash(std::string_view s, HashType type)
} }
else else
throw BadHash("hash '%s' has wrong length for hash type '%s'", s, printHashType(type)); throw BadHash("hash '%s' has wrong length for hash type '%s'", s, printHashType(*type));
} }
Hash newHashAllowEmpty(std::string hashStr, HashType ht) Hash newHashAllowEmpty(std::string hashStr, std::optional<HashType> ht)
{ {
if (hashStr.empty()) { if (hashStr.empty()) {
Hash h(ht); if (!ht)
throw BadHash("empty hash requires explicit hash type");
Hash h(*ht);
warn("found empty hash, assuming '%s'", h.to_string(SRI, true)); warn("found empty hash, assuming '%s'", h.to_string(SRI, true));
return h; return h;
} else } else
@ -328,24 +342,36 @@ Hash compressHash(const Hash & hash, unsigned int newSize)
} }
HashType parseHashType(const string & s) std::optional<HashType> parseHashTypeOpt(const string & s)
{ {
if (s == "md5") return htMD5; if (s == "md5") return htMD5;
else if (s == "sha1") return htSHA1; else if (s == "sha1") return htSHA1;
else if (s == "sha256") return htSHA256; else if (s == "sha256") return htSHA256;
else if (s == "sha512") return htSHA512; else if (s == "sha512") return htSHA512;
else return htUnknown; else return std::optional<HashType> {};
} }
HashType parseHashType(const string & s)
{
auto opt_h = parseHashTypeOpt(s);
if (opt_h)
return *opt_h;
else
throw UsageError("unknown hash algorithm '%1%'", s);
}
string printHashType(HashType ht) string printHashType(HashType ht)
{ {
if (ht == htMD5) return "md5"; switch (ht) {
else if (ht == htSHA1) return "sha1"; case htMD5: return "md5";
else if (ht == htSHA256) return "sha256"; case htSHA1: return "sha1";
else if (ht == htSHA512) return "sha512"; case htSHA256: return "sha256";
else abort(); case htSHA512: return "sha512";
default:
// illegal hash type enum value internally, as opposed to external input
// which should be validated with a nice error message.
assert(false);
}
} }
} }

View file

@ -10,7 +10,7 @@ namespace nix {
MakeError(BadHash, Error); MakeError(BadHash, Error);
enum HashType : char { htUnknown, htMD5, htSHA1, htSHA256, htSHA512 }; enum HashType : char { htMD5 = 42, htSHA1, htSHA256, htSHA512 };
const int md5HashSize = 16; const int md5HashSize = 16;
@ -29,7 +29,7 @@ struct Hash
unsigned int hashSize = 0; unsigned int hashSize = 0;
unsigned char hash[maxHashSize] = {}; unsigned char hash[maxHashSize] = {};
HashType type = htUnknown; std::optional<HashType> type = {};
/* Create an unset hash object. */ /* Create an unset hash object. */
Hash() { }; Hash() { };
@ -40,14 +40,18 @@ struct Hash
/* Initialize the hash from a string representation, in the format /* Initialize the hash from a string representation, in the format
"[<type>:]<base16|base32|base64>" or "<type>-<base64>" (a "[<type>:]<base16|base32|base64>" or "<type>-<base64>" (a
Subresource Integrity hash expression). If the 'type' argument Subresource Integrity hash expression). If the 'type' argument
is htUnknown, then the hash type must be specified in the is not present, then the hash type must be specified in the
string. */ string. */
Hash(std::string_view s, HashType type = htUnknown); Hash(std::string_view s, std::optional<HashType> type);
// type must be provided
Hash(std::string_view s, HashType type);
// hash type must be part of string
Hash(std::string_view s);
void init(); void init();
/* Check whether a hash is set. */ /* Check whether a hash is set. */
operator bool () const { return type != htUnknown; } operator bool () const { return (bool) type; }
/* Check whether two hash are equal. */ /* Check whether two hash are equal. */
bool operator == (const Hash & h2) const; bool operator == (const Hash & h2) const;
@ -95,7 +99,7 @@ struct Hash
}; };
/* Helper that defaults empty hashes to the 0 hash. */ /* Helper that defaults empty hashes to the 0 hash. */
Hash newHashAllowEmpty(std::string hashStr, HashType ht); Hash newHashAllowEmpty(std::string hashStr, std::optional<HashType> ht);
/* Print a hash in base-16 if it's MD5, or base-32 otherwise. */ /* Print a hash in base-16 if it's MD5, or base-32 otherwise. */
string printHash16or32(const Hash & hash); string printHash16or32(const Hash & hash);
@ -118,6 +122,8 @@ Hash compressHash(const Hash & hash, unsigned int newSize);
/* Parse a string representing a hash type. */ /* Parse a string representing a hash type. */
HashType parseHashType(const string & s); HashType parseHashType(const string & s);
/* Will return nothing on parse error */
std::optional<HashType> parseHashTypeOpt(const string & s);
/* And the reverse. */ /* And the reverse. */
string printHashType(HashType ht); string printHashType(HashType ht);
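// Rough sketch of the optional-type API above (digests are placeholders,
// illustrative only):
//
//   Hash h1("sha256:<base32 digest>");           // type parsed from the string itself
//   Hash h2("<base16 digest>", htMD5);           // type supplied by the caller
//   auto ht  = parseHashTypeOpt("sha512");       // std::optional holding htSHA512
//   auto bad = parseHashTypeOpt("not-a-hash");   // std::nullopt rather than a throw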


@ -1,5 +1,6 @@
#include "logging.hh" #include "logging.hh"
#include "util.hh" #include "util.hh"
#include "config.hh"
#include <atomic> #include <atomic>
#include <nlohmann/json.hpp> #include <nlohmann/json.hpp>
@ -7,6 +8,10 @@
namespace nix { namespace nix {
LoggerSettings loggerSettings;
static GlobalConfig::Register r1(&loggerSettings);
static thread_local ActivityId curActivity = 0; static thread_local ActivityId curActivity = 0;
ActivityId getCurActivity() ActivityId getCurActivity()
@ -72,11 +77,12 @@ public:
void logEI(const ErrorInfo & ei) override void logEI(const ErrorInfo & ei) override
{ {
std::stringstream oss; std::stringstream oss;
oss << ei; showErrorInfo(oss, ei, loggerSettings.showTrace.get());
log(ei.level, oss.str()); log(ei.level, oss.str());
} }
void startActivity(ActivityId act, Verbosity lvl, ActivityType type, void startActivity(ActivityId act, Verbosity lvl, ActivityType type,
const std::string & s, const Fields & fields, ActivityId parent) const std::string & s, const Fields & fields, ActivityId parent)
override override
@ -173,7 +179,7 @@ struct JSONLogger : Logger {
void logEI(const ErrorInfo & ei) override void logEI(const ErrorInfo & ei) override
{ {
std::ostringstream oss; std::ostringstream oss;
oss << ei; showErrorInfo(oss, ei, loggerSettings.showTrace.get());
nlohmann::json json; nlohmann::json json;
json["action"] = "msg"; json["action"] = "msg";


@ -2,6 +2,7 @@
#include "types.hh" #include "types.hh"
#include "error.hh" #include "error.hh"
#include "config.hh"
namespace nix { namespace nix {
@ -34,6 +35,16 @@ typedef enum {
typedef uint64_t ActivityId; typedef uint64_t ActivityId;
struct LoggerSettings : Config
{
Setting<bool> showTrace{this,
false,
"show-trace",
"Whether to show a stack trace on evaluation errors."};
};
extern LoggerSettings loggerSettings;
class Logger class Logger
{ {
friend struct Activity; friend struct Activity;


@ -0,0 +1,78 @@
#include "compression.hh"
#include <gtest/gtest.h>
namespace nix {
/* ----------------------------------------------------------------------------
* compress / decompress
* --------------------------------------------------------------------------*/
TEST(compress, compressWithUnknownMethod) {
ASSERT_THROW(compress("invalid-method", "something-to-compress"), UnknownCompressionMethod);
}
TEST(compress, noneMethodDoesNothingToTheInput) {
ref<std::string> o = compress("none", "this-is-a-test");
ASSERT_EQ(*o, "this-is-a-test");
}
TEST(decompress, decompressXzCompressed) {
auto method = "xz";
auto str = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf";
ref<std::string> o = decompress(method, *compress(method, str));
ASSERT_EQ(*o, str);
}
TEST(decompress, decompressBzip2Compressed) {
auto method = "bzip2";
auto str = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf";
ref<std::string> o = decompress(method, *compress(method, str));
ASSERT_EQ(*o, str);
}
TEST(decompress, decompressBrCompressed) {
auto method = "br";
auto str = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf";
ref<std::string> o = decompress(method, *compress(method, str));
ASSERT_EQ(*o, str);
}
TEST(decompress, decompressInvalidInputThrowsCompressionError) {
auto method = "bzip2";
auto str = "this is a string that does not qualify as valid bzip2 data";
ASSERT_THROW(decompress(method, str), CompressionError);
}
/* ----------------------------------------------------------------------------
* compression sinks
* --------------------------------------------------------------------------*/
TEST(makeCompressionSink, noneSinkDoesNothingToInput) {
StringSink strSink;
auto inputString = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf";
auto sink = makeCompressionSink("none", strSink);
(*sink)(inputString);
sink->finish();
ASSERT_STREQ((*strSink.s).c_str(), inputString);
}
TEST(makeCompressionSink, compressAndDecompress) {
StringSink strSink;
auto inputString = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf";
auto decompressionSink = makeDecompressionSink("bzip2", strSink);
auto sink = makeCompressionSink("bzip2", *decompressionSink);
(*sink)(inputString);
sink->finish();
decompressionSink->finish();
ASSERT_STREQ((*strSink.s).c_str(), inputString);
}
}


@ -72,9 +72,4 @@ namespace nix {
"7299aeadb6889018501d289e4900f7e4331b99dec4b5433a" "7299aeadb6889018501d289e4900f7e4331b99dec4b5433a"
"c7d329eeb6dd26545e96e55b874be909"); "c7d329eeb6dd26545e96e55b874be909");
} }
TEST(hashString, hashingWithUnknownAlgoExits) {
auto s = "unknown";
ASSERT_DEATH(hashString(HashType::htUnknown, s), "");
}
} }


@ -1,6 +1,7 @@
#include "logging.hh" #include "logging.hh"
#include "nixexpr.hh" #include "nixexpr.hh"
#include "util.hh" #include "util.hh"
#include <fstream>
#include <gtest/gtest.h> #include <gtest/gtest.h>
@ -10,6 +11,13 @@ namespace nix {
* logEI * logEI
* --------------------------------------------------------------------------*/ * --------------------------------------------------------------------------*/
const char *test_file =
"previous line of code\n"
"this is the problem line of code\n"
"next line of code\n";
const char *one_liner =
"this is the other problem line of code";
TEST(logEI, capturesBasicProperties) { TEST(logEI, capturesBasicProperties) {
MakeError(TestError, Error); MakeError(TestError, Error);
@ -42,7 +50,7 @@ namespace nix {
logger->logEI(ei); logger->logEI(ei);
auto str = testing::internal::GetCapturedStderr(); auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- TestError --- error-unit-test\x1B[0m\n\x1B[33;1m\x1B[0minitial error\x1B[0m; subsequent error message.\n"); ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- TestError --- error-unit-test\x1B[0m\ninitial error; subsequent error message.\n");
} }
} }
@ -60,8 +68,7 @@ namespace nix {
logError(e.info()); logError(e.info());
auto str = testing::internal::GetCapturedStderr(); auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- SysError --- error-unit-test\x1B[0m\n\x1B[33;1m\x1B[0mstatting file\x1B[0m: \x1B[33;1mBad file descriptor\x1B[0m\n"); ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- SysError --- error-unit-test\x1B[0m\nstatting file: \x1B[33;1mBad file descriptor\x1B[0m\n");
} }
} }
@ -137,7 +144,6 @@ namespace nix {
* logError * logError
* --------------------------------------------------------------------------*/ * --------------------------------------------------------------------------*/
TEST(logError, logErrorWithoutHintOrCode) { TEST(logError, logErrorWithoutHintOrCode) {
testing::internal::CaptureStderr(); testing::internal::CaptureStderr();
@ -152,7 +158,7 @@ namespace nix {
TEST(logError, logErrorWithPreviousAndNextLinesOfCode) { TEST(logError, logErrorWithPreviousAndNextLinesOfCode) {
SymbolTable testTable; SymbolTable testTable;
auto problem_file = testTable.create("myfile.nix"); auto problem_file = testTable.create(test_file);
testing::internal::CaptureStderr(); testing::internal::CaptureStderr();
@ -162,21 +168,16 @@ namespace nix {
.hint = hintfmt("this hint has %1% templated %2%!!", .hint = hintfmt("this hint has %1% templated %2%!!",
"yellow", "yellow",
"values"), "values"),
.nixCode = NixCode { .errPos = Pos(foString, problem_file, 02, 13),
.errPos = Pos(problem_file, 40, 13), });
.prevLineOfCode = "previous line of code",
.errLineOfCode = "this is the problem line of code",
.nextLineOfCode = "next line of code",
}});
auto str = testing::internal::GetCapturedStderr(); auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- error name --- error-unit-test\x1B[0m\nin file: \x1B[34;1mmyfile.nix (40:13)\x1B[0m\n\nerror with code lines\n\n 39| previous line of code\n 40| this is the problem line of code\n | \x1B[31;1m^\x1B[0m\n 41| next line of code\n\nthis hint has \x1B[33;1myellow\x1B[0m templated \x1B[33;1mvalues\x1B[0m!!\n"); ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- error name --- error-unit-test\x1B[0m\n\x1B[34;1mat: \x1B[33;1m(2:13)\x1B[34;1m from string\x1B[0m\n\nerror with code lines\n\n 1| previous line of code\n 2| this is the problem line of code\n | \x1B[31;1m^\x1B[0m\n 3| next line of code\n\nthis hint has \x1B[33;1myellow\x1B[0m templated \x1B[33;1mvalues\x1B[0m!!\n");
} }
TEST(logError, logErrorWithoutLinesOfCode) { TEST(logError, logErrorWithInvalidFile) {
SymbolTable testTable; SymbolTable testTable;
auto problem_file = testTable.create("myfile.nix"); auto problem_file = testTable.create("invalid filename");
testing::internal::CaptureStderr(); testing::internal::CaptureStderr();
logError({ logError({
@ -185,28 +186,23 @@ namespace nix {
.hint = hintfmt("this hint has %1% templated %2%!!", .hint = hintfmt("this hint has %1% templated %2%!!",
"yellow", "yellow",
"values"), "values"),
.nixCode = NixCode { .errPos = Pos(foFile, problem_file, 02, 13)
.errPos = Pos(problem_file, 40, 13) });
}});
auto str = testing::internal::GetCapturedStderr(); auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- error name --- error-unit-test\x1B[0m\nin file: \x1B[34;1mmyfile.nix (40:13)\x1B[0m\n\nerror without any code lines.\n\nthis hint has \x1B[33;1myellow\x1B[0m templated \x1B[33;1mvalues\x1B[0m!!\n"); ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- SysError --- error-unit-test\x1B[0m\nopening file '\x1B[33;1minvalid filename\x1B[0m': \x1B[33;1mNo such file or directory\x1B[0m\n\x1B[31;1merror:\x1B[0m\x1B[34;1m --- error name --- error-unit-test\x1B[0m\n\x1B[34;1mat: \x1B[33;1m(2:13)\x1B[34;1m in file: \x1B[0minvalid filename\n\nerror without any code lines.\n\nthis hint has \x1B[33;1myellow\x1B[0m templated \x1B[33;1mvalues\x1B[0m!!\n");
} }
TEST(logError, logErrorWithOnlyHintAndName) { TEST(logError, logErrorWithOnlyHintAndName) {
SymbolTable testTable;
auto problem_file = testTable.create("myfile.nix");
testing::internal::CaptureStderr(); testing::internal::CaptureStderr();
logError({ logError({
.name = "error name", .name = "error name",
.hint = hintfmt("hint %1%", "only"), .hint = hintfmt("hint %1%", "only"),
.nixCode = NixCode { });
.errPos = Pos(problem_file, 40, 13)
}});
auto str = testing::internal::GetCapturedStderr(); auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- error name --- error-unit-test\x1B[0m\nin file: \x1B[34;1mmyfile.nix (40:13)\x1B[0m\n\nhint \x1B[33;1monly\x1B[0m\n"); ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- error name --- error-unit-test\x1B[0m\nhint \x1B[33;1monly\x1B[0m\n");
} }
@ -219,18 +215,18 @@ namespace nix {
logWarning({ logWarning({
.name = "name", .name = "name",
.description = "error description", .description = "warning description",
.hint = hintfmt("there was a %1%", "warning"), .hint = hintfmt("there was a %1%", "warning"),
}); });
auto str = testing::internal::GetCapturedStderr(); auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[33;1mwarning:\x1B[0m\x1B[34;1m --- name --- error-unit-test\x1B[0m\nerror description\n\nthere was a \x1B[33;1mwarning\x1B[0m\n"); ASSERT_STREQ(str.c_str(), "\x1B[33;1mwarning:\x1B[0m\x1B[34;1m --- name --- error-unit-test\x1B[0m\nwarning description\n\nthere was a \x1B[33;1mwarning\x1B[0m\n");
} }
TEST(logWarning, logWarningWithFileLineNumAndCode) { TEST(logWarning, logWarningWithFileLineNumAndCode) {
SymbolTable testTable; SymbolTable testTable;
auto problem_file = testTable.create("myfile.nix"); auto problem_file = testTable.create(test_file);
testing::internal::CaptureStderr(); testing::internal::CaptureStderr();
@ -240,16 +236,123 @@ namespace nix {
.hint = hintfmt("this hint has %1% templated %2%!!", .hint = hintfmt("this hint has %1% templated %2%!!",
"yellow", "yellow",
"values"), "values"),
.nixCode = NixCode { .errPos = Pos(foStdin, problem_file, 2, 13),
.errPos = Pos(problem_file, 40, 13), });
.prevLineOfCode = std::nullopt,
.errLineOfCode = "this is the problem line of code",
.nextLineOfCode = std::nullopt
}});
auto str = testing::internal::GetCapturedStderr(); auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[33;1mwarning:\x1B[0m\x1B[34;1m --- warning name --- error-unit-test\x1B[0m\nin file: \x1B[34;1mmyfile.nix (40:13)\x1B[0m\n\nwarning description\n\n 40| this is the problem line of code\n | \x1B[31;1m^\x1B[0m\n\nthis hint has \x1B[33;1myellow\x1B[0m templated \x1B[33;1mvalues\x1B[0m!!\n"); ASSERT_STREQ(str.c_str(), "\x1B[33;1mwarning:\x1B[0m\x1B[34;1m --- warning name --- error-unit-test\x1B[0m\n\x1B[34;1mat: \x1B[33;1m(2:13)\x1B[34;1m from stdin\x1B[0m\n\nwarning description\n\n 1| previous line of code\n 2| this is the problem line of code\n | \x1B[31;1m^\x1B[0m\n 3| next line of code\n\nthis hint has \x1B[33;1myellow\x1B[0m templated \x1B[33;1mvalues\x1B[0m!!\n");
}
/* ----------------------------------------------------------------------------
* traces
* --------------------------------------------------------------------------*/
TEST(addTrace, showTracesWithShowTrace) {
SymbolTable testTable;
auto problem_file = testTable.create(test_file);
auto oneliner_file = testTable.create(one_liner);
auto e = AssertionError(ErrorInfo {
.name = "wat",
.description = "a well-known problem occurred",
.hint = hintfmt("it has been %1% days since our last error", "zero"),
.errPos = Pos(foString, problem_file, 2, 13),
});
e.addTrace(Pos(foStdin, oneliner_file, 1, 19), "while trying to compute %1%", 42);
e.addTrace(std::nullopt, "while doing something without a %1%", "pos");
testing::internal::CaptureStderr();
loggerSettings.showTrace.assign(true);
logError(e.info());
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- AssertionError --- error-unit-test\x1B[0m\n\x1B[34;1mat: \x1B[33;1m(2:13)\x1B[34;1m from string\x1B[0m\n\na well-known problem occurred\n\n 1| previous line of code\n 2| this is the problem line of code\n | \x1B[31;1m^\x1B[0m\n 3| next line of code\n\nit has been \x1B[33;1mzero\x1B[0m days since our last error\n\x1B[34;1m---- show-trace ----\x1B[0m\n\x1B[34;1mtrace: \x1B[0mwhile trying to compute \x1B[33;1m42\x1B[0m\n\x1B[34;1mat: \x1B[33;1m(1:19)\x1B[34;1m from stdin\x1B[0m\n\n 1| this is the other problem line of code\n | \x1B[31;1m^\x1B[0m\n\n\x1B[34;1mtrace: \x1B[0mwhile doing something without a \x1B[33;1mpos\x1B[0m\n");
}
TEST(addTrace, hideTracesWithoutShowTrace) {
SymbolTable testTable;
auto problem_file = testTable.create(test_file);
auto oneliner_file = testTable.create(one_liner);
auto e = AssertionError(ErrorInfo {
.name = "wat",
.description = "a well-known problem occurred",
.hint = hintfmt("it has been %1% days since our last error", "zero"),
.errPos = Pos(foString, problem_file, 2, 13),
});
e.addTrace(Pos(foStdin, oneliner_file, 1, 19), "while trying to compute %1%", 42);
e.addTrace(std::nullopt, "while doing something without a %1%", "pos");
testing::internal::CaptureStderr();
loggerSettings.showTrace.assign(false);
logError(e.info());
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- AssertionError --- error-unit-test\x1B[0m\n\x1B[34;1mat: \x1B[33;1m(2:13)\x1B[34;1m from string\x1B[0m\n\na well-known problem occurred\n\n 1| previous line of code\n 2| this is the problem line of code\n | \x1B[31;1m^\x1B[0m\n 3| next line of code\n\nit has been \x1B[33;1mzero\x1B[0m days since our last error\n");
}
/* ----------------------------------------------------------------------------
* hintfmt
* --------------------------------------------------------------------------*/
TEST(hintfmt, percentStringWithoutArgs) {
const char *teststr = "this is 100%s correct!";
ASSERT_STREQ(
hintfmt(teststr).str().c_str(),
teststr);
}
TEST(hintfmt, fmtToHintfmt) {
ASSERT_STREQ(
hintfmt(fmt("the color of this text is %1%", "not yellow")).str().c_str(),
"the color of this text is not yellow");
}
TEST(hintfmt, tooFewArguments) {
ASSERT_STREQ(
hintfmt("only one arg %1% %2%", "fulfilled").str().c_str(),
"only one arg " ANSI_YELLOW "fulfilled" ANSI_NORMAL " ");
}
TEST(hintfmt, tooManyArguments) {
ASSERT_STREQ(
hintfmt("what about this %1% %2%", "%3%", "one", "two").str().c_str(),
"what about this " ANSI_YELLOW "%3%" ANSI_NORMAL " " ANSI_YELLOW "one" ANSI_NORMAL);
}
/* ----------------------------------------------------------------------------
* ErrPos
* --------------------------------------------------------------------------*/
TEST(errpos, invalidPos) {
// contains an invalid symbol, which we should not dereference!
Pos invalid;
// constructing without access violation.
ErrPos ep(invalid);
// assignment without access violation.
ep = invalid;
} }
} }


@ -593,7 +593,7 @@ static void upgradeDerivations(Globals & globals,
} else newElems.push_back(i); } else newElems.push_back(i);
} catch (Error & e) { } catch (Error & e) {
e.addPrefix(fmt("while trying to find an upgrade for '%s':\n", i.queryName())); e.addTrace(std::nullopt, "while trying to find an upgrade for '%s'", i.queryName());
throw; throw;
} }
} }
@ -1185,7 +1185,7 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs)
} catch (AssertionError & e) { } catch (AssertionError & e) {
printMsg(lvlTalkative, "skipping derivation named '%1%' which gives an assertion failure", i.queryName()); printMsg(lvlTalkative, "skipping derivation named '%1%' which gives an assertion failure", i.queryName());
} catch (Error & e) { } catch (Error & e) {
e.addPrefix(fmt("while querying the derivation named '%1%':\n", i.queryName())); e.addTrace(std::nullopt, "while querying the derivation named '%1%'", i.queryName());
throw; throw;
} }
} }


@ -72,8 +72,6 @@ static int _main(int argc, char * * argv)
else if (*arg == "--type") { else if (*arg == "--type") {
string s = getArg(*arg, arg, end); string s = getArg(*arg, arg, end);
ht = parseHashType(s); ht = parseHashType(s);
if (ht == htUnknown)
throw UsageError("unknown hash type '%1%'", s);
} }
else if (*arg == "--print-path") else if (*arg == "--print-path")
printPath = true; printPath = true;


@ -725,7 +725,7 @@ static void opVerifyPath(Strings opFlags, Strings opArgs)
auto path = store->followLinksToStorePath(i); auto path = store->followLinksToStorePath(i);
printMsg(lvlTalkative, "checking path '%s'...", store->printStorePath(path)); printMsg(lvlTalkative, "checking path '%s'...", store->printStorePath(path));
auto info = store->queryPathInfo(path); auto info = store->queryPathInfo(path);
HashSink sink(info->narHash.type); HashSink sink(*info->narHash.type);
store->narFromPath(path, sink); store->narFromPath(path, sink);
auto current = sink.finish(); auto current = sink.finish();
if (current.first != info->narHash) { if (current.first != info->narHash) {
@ -864,7 +864,7 @@ static void opServe(Strings opFlags, Strings opArgs)
out << info->narSize // downloadSize out << info->narSize // downloadSize
<< info->narSize; << info->narSize;
if (GET_PROTOCOL_MINOR(clientVersion) >= 4) if (GET_PROTOCOL_MINOR(clientVersion) >= 4)
out << (info->narHash ? info->narHash.to_string(Base32, true) : "") << info->ca << info->sigs; out << (info->narHash ? info->narHash.to_string(Base32, true) : "") << renderContentAddress(info->ca) << info->sigs;
} catch (InvalidPath &) { } catch (InvalidPath &) {
} }
} }
@ -952,7 +952,7 @@ static void opServe(Strings opFlags, Strings opArgs)
info.references = readStorePaths<StorePathSet>(*store, in); info.references = readStorePaths<StorePathSet>(*store, in);
in >> info.registrationTime >> info.narSize >> info.ultimate; in >> info.registrationTime >> info.narSize >> info.ultimate;
info.sigs = readStrings<StringSet>(in); info.sigs = readStrings<StringSet>(in);
in >> info.ca; info.ca = parseContentAddressOpt(readString(in));
if (info.narSize == 0) if (info.narSize == 0)
throw Error("narInfo is too old and missing the narSize field"); throw Error("narInfo is too old and missing the narSize field");


@ -48,7 +48,10 @@ struct CmdAddToStore : MixDryRun, StoreCommand
ValidPathInfo info(store->makeFixedOutputPath(FileIngestionMethod::Recursive, narHash, *namePart)); ValidPathInfo info(store->makeFixedOutputPath(FileIngestionMethod::Recursive, narHash, *namePart));
info.narHash = narHash; info.narHash = narHash;
info.narSize = sink.s->size(); info.narSize = sink.s->size();
info.ca = makeFixedOutputCA(FileIngestionMethod::Recursive, info.narHash); info.ca = std::optional { FixedOutputHash {
.method = FileIngestionMethod::Recursive,
.hash = info.narHash,
} };
if (!dryRun) { if (!dryRun) {
auto source = StringSource { *sink.s }; auto source = StringSource { *sink.s };


@ -50,7 +50,7 @@ BuildEnvironment readEnvironment(const Path & path)
R"re((?:\$?'(?:[^'\\]|\\[abeEfnrtv\\'"?])*'))re"; R"re((?:\$?'(?:[^'\\]|\\[abeEfnrtv\\'"?])*'))re";
static std::string indexedArrayRegex = static std::string indexedArrayRegex =
R"re((?:\(( *\[[0-9]+]="(?:[^"\\]|\\.)*")**\)))re"; R"re((?:\(( *\[[0-9]+\]="(?:[^"\\]|\\.)*")*\)))re";
static std::regex varRegex( static std::regex varRegex(
"^(" + varNameRegex + ")=(" + simpleStringRegex + "|" + quotedStringRegex + "|" + indexedArrayRegex + ")\n"); "^(" + varNameRegex + ")=(" + simpleStringRegex + "|" + quotedStringRegex + "|" + indexedArrayRegex + ")\n");
@ -135,7 +135,7 @@ StorePath getDerivationEnvironment(ref<Store> store, const StorePath & drvPath)
drv.inputSrcs.insert(std::move(getEnvShPath)); drv.inputSrcs.insert(std::move(getEnvShPath));
Hash h = hashDerivationModulo(*store, drv, true); Hash h = hashDerivationModulo(*store, drv, true);
auto shellOutPath = store->makeOutputPath("out", h, drvName); auto shellOutPath = store->makeOutputPath("out", h, drvName);
drv.outputs.insert_or_assign("out", DerivationOutput { shellOutPath, "", "" }); drv.outputs.insert_or_assign("out", DerivationOutput { .path = shellOutPath });
drv.env["out"] = store->printStorePath(shellOutPath); drv.env["out"] = store->printStorePath(shellOutPath);
auto shellDrvPath2 = writeDerivation(store, drv, drvName); auto shellDrvPath2 = writeDerivation(store, drv, drvName);


@ -1,5 +1,6 @@
#include "command.hh" #include "command.hh"
#include "hash.hh" #include "hash.hh"
#include "content-address.hh"
#include "legacy.hh" #include "legacy.hh"
#include "shared.hh" #include "shared.hh"
#include "references.hh" #include "references.hh"
@ -79,12 +80,12 @@ static RegisterCommand r2("hash-path", [](){ return make_ref<CmdHash>(FileIngest
struct CmdToBase : Command struct CmdToBase : Command
{ {
Base base; Base base;
HashType ht = htUnknown; std::optional<HashType> ht;
std::vector<std::string> args; std::vector<std::string> args;
CmdToBase(Base base) : base(base) CmdToBase(Base base) : base(base)
{ {
addFlag(Flag::mkHashTypeFlag("type", &ht)); addFlag(Flag::mkHashTypeOptFlag("type", &ht));
expectArgs("strings", &args); expectArgs("strings", &args);
} }
@ -132,8 +133,6 @@ static int compatNixHash(int argc, char * * argv)
else if (*arg == "--type") { else if (*arg == "--type") {
string s = getArg(*arg, arg, end); string s = getArg(*arg, arg, end);
ht = parseHashType(s); ht = parseHashType(s);
if (ht == htUnknown)
throw UsageError("unknown hash type '%1%'", s);
} }
else if (*arg == "--to-base16") op = opTo16; else if (*arg == "--to-base16") op = opTo16;
else if (*arg == "--to-base32") op = opTo32; else if (*arg == "--to-base32") op = opTo32;


@ -82,7 +82,10 @@ struct CmdMakeContentAddressable : StorePathsCommand, MixJSON
if (hasSelfReference) info.references.insert(info.path); if (hasSelfReference) info.references.insert(info.path);
info.narHash = narHash; info.narHash = narHash;
info.narSize = sink.s->size(); info.narSize = sink.s->size();
info.ca = makeFixedOutputCA(FileIngestionMethod::Recursive, info.narHash); info.ca = FixedOutputHash {
.method = FileIngestionMethod::Recursive,
.hash = info.narHash,
};
if (!json) if (!json)
printInfo("rewrote '%s' to '%s'", pathS, store->printStorePath(info.path)); printInfo("rewrote '%s' to '%s'", pathS, store->printStorePath(info.path));


@ -115,7 +115,7 @@ struct CmdPathInfo : StorePathsCommand, MixJSON
std::cout << '\t'; std::cout << '\t';
Strings ss; Strings ss;
if (info->ultimate) ss.push_back("ultimate"); if (info->ultimate) ss.push_back("ultimate");
if (info->ca != "") ss.push_back("ca:" + info->ca); if (info->ca) ss.push_back("ca:" + renderContentAddress(*info->ca));
for (auto & sig : info->sigs) ss.push_back(sig); for (auto & sig : info->sigs) ss.push_back(sig);
std::cout << concatStringsSep(" ", ss); std::cout << concatStringsSep(" ", ss);
} }


@ -211,12 +211,12 @@ void NixRepl::mainLoop(const std::vector<std::string> & files)
// input without clearing the input so far. // input without clearing the input so far.
continue; continue;
} else { } else {
printMsg(lvlError, error + "%1%%2%", (settings.showTrace ? e.prefix() : ""), e.msg()); printMsg(lvlError, e.msg());
} }
} catch (Error & e) { } catch (Error & e) {
printMsg(lvlError, error + "%1%%2%", (settings.showTrace ? e.prefix() : ""), e.msg()); printMsg(lvlError, e.msg());
} catch (Interrupted & e) { } catch (Interrupted & e) {
printMsg(lvlError, error + "%1%%2%", (settings.showTrace ? e.prefix() : ""), e.msg()); printMsg(lvlError, e.msg());
} }
// We handled the current input fully, so we should clear it // We handled the current input fully, so we should clear it


@ -216,7 +216,7 @@ struct CmdSearch : SourceExprCommand, MixJSON
} catch (AssertionError & e) { } catch (AssertionError & e) {
} catch (Error & e) { } catch (Error & e) {
if (!toplevel) { if (!toplevel) {
e.addPrefix(fmt("While evaluating the attribute '%s':\n", attrPath)); e.addTrace(std::nullopt, "While evaluating the attribute '%s'", attrPath);
throw; throw;
} }
} }


@ -70,9 +70,9 @@ struct CmdShowDerivation : InstallablesCommand
for (auto & output : drv.outputs) { for (auto & output : drv.outputs) {
auto outputObj(outputsObj.object(output.first)); auto outputObj(outputsObj.object(output.first));
outputObj.attr("path", store->printStorePath(output.second.path)); outputObj.attr("path", store->printStorePath(output.second.path));
if (output.second.hash != "") { if (output.second.hash) {
outputObj.attr("hashAlgo", output.second.hashAlgo); outputObj.attr("hashAlgo", output.second.hash->printMethodAlgo());
outputObj.attr("hash", output.second.hash); outputObj.attr("hash", output.second.hash->hash.to_string(Base16, false));
} }
} }
} }

View file

@ -87,10 +87,10 @@ struct CmdVerify : StorePathsCommand
if (!noContents) { if (!noContents) {
std::unique_ptr<AbstractHashSink> hashSink; std::unique_ptr<AbstractHashSink> hashSink;
if (info->ca == "") if (!info->ca)
hashSink = std::make_unique<HashSink>(info->narHash.type); hashSink = std::make_unique<HashSink>(*info->narHash.type);
else else
hashSink = std::make_unique<HashModuloSink>(info->narHash.type, std::string(info->path.hashPart())); hashSink = std::make_unique<HashModuloSink>(*info->narHash.type, std::string(info->path.hashPart()));
store->narFromPath(info->path, *hashSink); store->narFromPath(info->path, *hashSink);


@ -1,23 +1,39 @@
{ busybox }:
with import ./config.nix; with import ./config.nix;
let let
mkDerivation = args:
derivation ({
inherit system;
builder = busybox;
args = ["sh" "-e" args.builder or (builtins.toFile "builder-${args.name}.sh" "if [ -e .attrs.sh ]; then source .attrs.sh; fi; eval \"$buildCommand\"")];
} // removeAttrs args ["builder" "meta"])
// { meta = args.meta or {}; };
input1 = mkDerivation { input1 = mkDerivation {
name = "build-hook-input-1"; shell = busybox;
buildCommand = "mkdir $out; echo FOO > $out/foo"; name = "build-remote-input-1";
buildCommand = "echo FOO > $out";
requiredSystemFeatures = ["foo"]; requiredSystemFeatures = ["foo"];
}; };
input2 = mkDerivation { input2 = mkDerivation {
name = "build-hook-input-2"; shell = busybox;
buildCommand = "mkdir $out; echo BAR > $out/bar"; name = "build-remote-input-2";
buildCommand = "echo BAR > $out";
}; };
in in
mkDerivation { mkDerivation {
name = "build-hook"; shell = busybox;
builder = ./dependencies.builder0.sh; name = "build-remote";
input1 = " " + input1 + "/."; buildCommand =
input2 = " ${input2}/."; ''
read x < ${input1}
read y < ${input2}
echo $x$y > $out
'';
} }


@ -3,22 +3,29 @@ source common.sh
clearStore clearStore
if ! canUseSandbox; then exit; fi if ! canUseSandbox; then exit; fi
if [[ ! $SHELL =~ /nix/store ]]; then exit; fi if ! [[ $busybox =~ busybox ]]; then exit; fi
chmod -R u+w $TEST_ROOT/store0 || true chmod -R u+w $TEST_ROOT/machine0 || true
chmod -R u+w $TEST_ROOT/store1 || true chmod -R u+w $TEST_ROOT/machine1 || true
rm -rf $TEST_ROOT/store0 $TEST_ROOT/store1 chmod -R u+w $TEST_ROOT/machine2 || true
rm -rf $TEST_ROOT/machine0 $TEST_ROOT/machine1 $TEST_ROOT/machine2
rm -f $TEST_ROOT/result
nix build -f build-hook.nix -o $TEST_ROOT/result --max-jobs 0 \ unset NIX_STORE_DIR
--sandbox-paths /nix/store --sandbox-build-dir /build-tmp \ unset NIX_STATE_DIR
--builders "$TEST_ROOT/store0; $TEST_ROOT/store1 - - 1 1 foo" \
# Note: ssh://localhost bypasses ssh, directly invoking nix-store as a
# child process. This allows us to test LegacySSHStore::buildDerivation().
nix build -L -v -f build-hook.nix -o $TEST_ROOT/result --max-jobs 0 \
--arg busybox $busybox \
--store $TEST_ROOT/machine0 \
--builders "ssh://localhost?remote-store=$TEST_ROOT/machine1; $TEST_ROOT/machine2 - - 1 1 foo" \
--system-features foo --system-features foo
outPath=$TEST_ROOT/result outPath=$(readlink -f $TEST_ROOT/result)
cat $outPath/foobar | grep FOOBAR cat $TEST_ROOT/machine0/$outPath | grep FOOBAR
# Ensure that input1 was built on store1 due to the required feature. # Ensure that input1 was built on store2 due to the required feature.
p=$(readlink -f $outPath/input-2) (! nix path-info --store $TEST_ROOT/machine1 --all | grep builder-build-remote-input-1.sh)
(! nix path-info --store $TEST_ROOT/store0 --all | grep builder-build-hook-input-1.sh) nix path-info --store $TEST_ROOT/machine2 --all | grep builder-build-remote-input-1.sh
nix path-info --store $TEST_ROOT/store1 --all | grep builder-build-hook-input-1.sh


@ -1,6 +1,6 @@
set -e set -e
export TEST_ROOT=$(realpath ${TMPDIR:-/tmp}/nix-test) export TEST_ROOT=$(realpath ${TMPDIR:-/tmp}/nix-test)/${TEST_NAME:-default}
export NIX_STORE_DIR export NIX_STORE_DIR
if ! NIX_STORE_DIR=$(readlink -f $TEST_ROOT/store 2> /dev/null); then if ! NIX_STORE_DIR=$(readlink -f $TEST_ROOT/store 2> /dev/null); then
# Maybe the build directory is symlinked. # Maybe the build directory is symlinked.
@ -11,6 +11,7 @@ export NIX_LOCALSTATE_DIR=$TEST_ROOT/var
export NIX_LOG_DIR=$TEST_ROOT/var/log/nix export NIX_LOG_DIR=$TEST_ROOT/var/log/nix
export NIX_STATE_DIR=$TEST_ROOT/var/nix export NIX_STATE_DIR=$TEST_ROOT/var/nix
export NIX_CONF_DIR=$TEST_ROOT/etc export NIX_CONF_DIR=$TEST_ROOT/etc
export NIX_DAEMON_SOCKET_PATH=$TEST_ROOT/daemon-socket
unset NIX_USER_CONF_FILES unset NIX_USER_CONF_FILES
export _NIX_TEST_SHARED=$TEST_ROOT/shared export _NIX_TEST_SHARED=$TEST_ROOT/shared
if [[ -n $NIX_STORE ]]; then if [[ -n $NIX_STORE ]]; then
@ -35,6 +36,7 @@ export xmllint="@xmllint@"
export SHELL="@bash@" export SHELL="@bash@"
export PAGER=cat export PAGER=cat
export HAVE_SODIUM="@HAVE_SODIUM@" export HAVE_SODIUM="@HAVE_SODIUM@"
export busybox="@sandbox_shell@"
export version=@PACKAGE_VERSION@ export version=@PACKAGE_VERSION@
export system=@system@ export system=@system@
@ -75,7 +77,7 @@ startDaemon() {
rm -f $NIX_STATE_DIR/daemon-socket/socket rm -f $NIX_STATE_DIR/daemon-socket/socket
nix-daemon & nix-daemon &
for ((i = 0; i < 30; i++)); do for ((i = 0; i < 30; i++)); do
if [ -e $NIX_STATE_DIR/daemon-socket/socket ]; then break; fi if [ -e $NIX_DAEMON_SOCKET_PATH ]; then break; fi
sleep 1 sleep 1
done done
pidDaemon=$! pidDaemon=$!


@ -13,24 +13,32 @@ fake_free=$TEST_ROOT/fake-free
export _NIX_TEST_FREE_SPACE_FILE=$fake_free export _NIX_TEST_FREE_SPACE_FILE=$fake_free
echo 1100 > $fake_free echo 1100 > $fake_free
fifoLock=$TEST_ROOT/fifoLock
mkfifo "$fifoLock"
expr=$(cat <<EOF expr=$(cat <<EOF
with import ./config.nix; mkDerivation { with import ./config.nix; mkDerivation {
name = "gc-A"; name = "gc-A";
buildCommand = '' buildCommand = ''
set -x set -x
[[ \$(ls \$NIX_STORE/*-garbage? | wc -l) = 3 ]] [[ \$(ls \$NIX_STORE/*-garbage? | wc -l) = 3 ]]
mkdir \$out mkdir \$out
echo foo > \$out/bar echo foo > \$out/bar
echo 1...
sleep 2 # Pretend that we run out of space
echo 200 > ${fake_free}.tmp1 echo 100 > ${fake_free}.tmp1
mv ${fake_free}.tmp1 $fake_free mv ${fake_free}.tmp1 $fake_free
echo 2...
sleep 2 # Wait for the GC to run
echo 3... for i in {1..20}; do
sleep 2 echo ''\${i}...
echo 4... if [[ \$(ls \$NIX_STORE/*-garbage? | wc -l) = 1 ]]; then
[[ \$(ls \$NIX_STORE/*-garbage? | wc -l) = 1 ]] exit 0
fi
sleep 1
done
exit 1
''; '';
} }
EOF EOF
@ -43,15 +51,9 @@ with import ./config.nix; mkDerivation {
set -x set -x
mkdir \$out mkdir \$out
echo foo > \$out/bar echo foo > \$out/bar
echo 1...
sleep 2 # Wait for the first build to finish
echo 200 > ${fake_free}.tmp2 cat "$fifoLock"
mv ${fake_free}.tmp2 $fake_free
echo 2...
sleep 2
echo 3...
sleep 2
echo 4...
''; '';
} }
EOF EOF
@ -59,12 +61,19 @@ EOF
nix build -v -o $TEST_ROOT/result-A -L "($expr)" \ nix build -v -o $TEST_ROOT/result-A -L "($expr)" \
--min-free 1000 --max-free 2000 --min-free-check-interval 1 & --min-free 1000 --max-free 2000 --min-free-check-interval 1 &
pid=$! pid1=$!
nix build -v -o $TEST_ROOT/result-B -L "($expr2)" \ nix build -v -o $TEST_ROOT/result-B -L "($expr2)" \
--min-free 1000 --max-free 2000 --min-free-check-interval 1 --min-free 1000 --max-free 2000 --min-free-check-interval 1 &
pid2=$!
wait "$pid" # Once the first build is done, unblock the second one.
# If the first build fails, we need to postpone the failure to still allow
# the second one to finish
wait "$pid1" || FIRSTBUILDSTATUS=$?
echo "unlock" > $fifoLock
( exit ${FIRSTBUILDSTATUS:-0} )
wait "$pid2"
[[ foo = $(cat $TEST_ROOT/result-A/bar) ]] [[ foo = $(cat $TEST_ROOT/result-A/bar) ]]
[[ foo = $(cat $TEST_ROOT/result-B/bar) ]] [[ foo = $(cat $TEST_ROOT/result-B/bar) ]]


@ -1,7 +1,10 @@
echo "Build started" > "$lockFifo"
mkdir $out mkdir $out
echo $(cat $input1/foo)$(cat $input2/bar) > $out/foobar echo $(cat $input1/foo)$(cat $input2/bar) > $out/foobar
sleep 10 # Wait for someone to write on the fifo
cat "$lockFifo"
# $out should not have been GC'ed while we were sleeping, but just in # $out should not have been GC'ed while we were sleeping, but just in
# case... # case...


@ -1,5 +1,7 @@
with import ./config.nix; with import ./config.nix;
{ lockFifo ? null }:
rec { rec {
input1 = mkDerivation { input1 = mkDerivation {
@ -16,6 +18,7 @@ rec {
name = "gc-concurrent"; name = "gc-concurrent";
builder = ./gc-concurrent.builder.sh; builder = ./gc-concurrent.builder.sh;
inherit input1 input2; inherit input1 input2;
inherit lockFifo;
}; };
test2 = mkDerivation { test2 = mkDerivation {


@ -2,7 +2,10 @@ source common.sh
clearStore clearStore
drvPath1=$(nix-instantiate gc-concurrent.nix -A test1) lockFifo1=$TEST_ROOT/test1.fifo
mkfifo "$lockFifo1"
drvPath1=$(nix-instantiate gc-concurrent.nix -A test1 --argstr lockFifo "$lockFifo1")
outPath1=$(nix-store -q $drvPath1) outPath1=$(nix-store -q $drvPath1)
drvPath2=$(nix-instantiate gc-concurrent.nix -A test2) drvPath2=$(nix-instantiate gc-concurrent.nix -A test2)
@ -22,19 +25,16 @@ ln -s $outPath3 "$NIX_STATE_DIR"/gcroots/foo2
nix-store -rvv "$drvPath1" & nix-store -rvv "$drvPath1" &
pid1=$! pid1=$!
# Start build #2 in the background after 10 seconds. # Wait for the build of $drvPath1 to start
(sleep 10 && nix-store -rvv "$drvPath2") & cat $lockFifo1
pid2=$!
# Run the garbage collector while the build is running. # Run the garbage collector while the build is running.
sleep 6
nix-collect-garbage nix-collect-garbage
# Wait for build #1/#2 to finish. # Unlock the build of $drvPath1
echo "" > $lockFifo1
echo waiting for pid $pid1 to finish... echo waiting for pid $pid1 to finish...
wait $pid1 wait $pid1
echo waiting for pid $pid2 to finish...
wait $pid2
# Check that the root of build #1 and its dependencies haven't been # Check that the root of build #1 and its dependencies haven't been
# deleted. They should not be deleted by the GC because they were # deleted. They should not be deleted by the GC because they were
@ -42,8 +42,9 @@ wait $pid2
cat $outPath1/foobar cat $outPath1/foobar
cat $outPath1/input-2/bar cat $outPath1/input-2/bar
# Check that build #2 has succeeded. It should succeed because the # Check that the build of $drvPath2 succeeds.
# derivation is a GC root. # It should succeed because the derivation is a GC root.
nix-store -rvv "$drvPath2"
cat $outPath2/foobar cat $outPath2/foobar
rm -f "$NIX_STATE_DIR"/gcroots/foo* rm -f "$NIX_STATE_DIR"/gcroots/foo*


@ -3,5 +3,3 @@ echo $(cat $input1/foo)$(cat $input2/bar)xyzzy > $out/foobar
# Check that the GC hasn't deleted the lock on our output. # Check that the GC hasn't deleted the lock on our output.
test -e "$out.lock" test -e "$out.lock"
sleep 6


@ -18,6 +18,7 @@ build-users-group =
keep-derivations = false keep-derivations = false
sandbox = false sandbox = false
experimental-features = nix-command flakes experimental-features = nix-command flakes
gc-reserved-space = 0
include nix.conf.extra include nix.conf.extra
EOF EOF


@ -40,4 +40,4 @@ tests-environment = NIX_REMOTE= $(bash) -e
clean-files += $(d)/common.sh clean-files += $(d)/common.sh
installcheck: $(d)/common.sh $(d)/config.nix $(d)/plugins/libplugintest.$(SO_EXT) test-deps += tests/common.sh tests/config.nix tests/plugins/libplugintest.$(SO_EXT)


@ -16,6 +16,11 @@ nix-env --foo 2>&1 | grep "no operation"
nix-env -q --foo 2>&1 | grep "unknown flag" nix-env -q --foo 2>&1 | grep "unknown flag"
# Eval Errors. # Eval Errors.
eval_res=$(nix-instantiate --eval -E 'let a = {} // a; in a.foo' 2>&1 || true) eval_arg_res=$(nix-instantiate --eval -E 'let a = {} // a; in a.foo' 2>&1 || true)
echo $eval_res | grep "(string) (1:15)" echo $eval_arg_res | grep "at: (1:15) from string"
echo $eval_res | grep "infinite recursion encountered" echo $eval_arg_res | grep "infinite recursion encountered"
eval_stdin_res=$(echo 'let a = {} // a; in a.foo' | nix-instantiate --eval -E - 2>&1 || true)
echo $eval_stdin_res | grep "at: (1:15) from stdin"
echo $eval_stdin_res | grep "infinite recursion encountered"


@ -55,3 +55,10 @@ chmod a+rx $TEST_ROOT/shell.shebang.rb
output=$($TEST_ROOT/shell.shebang.rb abc ruby) output=$($TEST_ROOT/shell.shebang.rb abc ruby)
[ "$output" = '-e load("'"$TEST_ROOT"'/shell.shebang.rb") -- abc ruby' ] [ "$output" = '-e load("'"$TEST_ROOT"'/shell.shebang.rb") -- abc ruby' ]
# Test 'nix develop'.
nix develop -f shell.nix shellDrv -c bash -c '[[ -n $stdenv ]]'
# Test 'nix print-dev-env'.
source <(nix print-dev-env -f shell.nix shellDrv)
[[ -n $stdenv ]]


@ -2,6 +2,8 @@ source common.sh
clearStore clearStore
rm -f $TEST_ROOT/result
export REMOTE_STORE=$TEST_ROOT/remote_store export REMOTE_STORE=$TEST_ROOT/remote_store
# Build the dependencies and push them to the remote store # Build the dependencies and push them to the remote store


@ -5,6 +5,8 @@ if [[ $(uname) != Linux ]]; then exit; fi
clearStore clearStore
rm -f $TEST_ROOT/result
export unreachable=$(nix add-to-store ./recursive.sh) export unreachable=$(nix add-to-store ./recursive.sh)
nix --experimental-features 'nix-command recursive-nix' build -o $TEST_ROOT/result -L '( nix --experimental-features 'nix-command recursive-nix' build -o $TEST_ROOT/result -L '(


@ -2,6 +2,8 @@ source common.sh
clearStore clearStore
rm -f $TEST_ROOT/result
nix-build structured-attrs.nix -A all -o $TEST_ROOT/result nix-build structured-attrs.nix -A all -o $TEST_ROOT/result
[[ $(cat $TEST_ROOT/result/foo) = bar ]] [[ $(cat $TEST_ROOT/result/foo) = bar ]]