Merge branch 'master' into caveman-LOCs

Ben Burdette 2020-06-18 13:07:53 -06:00
commit e6f93b94fc
124 changed files with 1435 additions and 1315 deletions

.gitignore vendored

@ -49,6 +49,9 @@ perl/Makefile.config
# /src/libstore/
*.gen.*
# /src/libutil/
/src/libutil/tests/libutil-tests
/src/nix/nix
# /src/nix-env/


@ -1,7 +1,6 @@
makefiles = \
mk/precompiled-headers.mk \
local.mk \
nix-rust/local.mk \
src/libutil/local.mk \
src/libutil/tests/local.mk \
src/libstore/local.mk \


@ -70,7 +70,7 @@ path just built.</para>
<screen>
$ nix-build ./deterministic.nix -A stable
these derivations will be built:
this derivation will be built:
/nix/store/z98fasz2jqy9gs0xbvdj939p27jwda38-stable.drv
building '/nix/store/z98fasz2jqy9gs0xbvdj939p27jwda38-stable.drv'...
/nix/store/yyxlzw3vqaas7wfp04g0b1xg51f2czgq-stable
@ -85,7 +85,7 @@ checking outputs of '/nix/store/z98fasz2jqy9gs0xbvdj939p27jwda38-stable.drv'...
<screen>
$ nix-build ./deterministic.nix -A unstable
these derivations will be built:
this derivation will be built:
/nix/store/cgl13lbj1w368r5z8gywipl1ifli7dhk-unstable.drv
building '/nix/store/cgl13lbj1w368r5z8gywipl1ifli7dhk-unstable.drv'...
/nix/store/krpqk0l9ib0ibi1d2w52z293zw455cap-unstable
@ -193,7 +193,7 @@ repeat = 1
An example output of this configuration:
<screen>
$ nix-build ./test.nix -A unstable
these derivations will be built:
this derivation will be built:
/nix/store/ch6llwpr2h8c3jmnf3f2ghkhx59aa97f-unstable.drv
building '/nix/store/ch6llwpr2h8c3jmnf3f2ghkhx59aa97f-unstable.drv' (round 1/2)...
building '/nix/store/ch6llwpr2h8c3jmnf3f2ghkhx59aa97f-unstable.drv' (round 2/2)...


@ -122,7 +122,7 @@ post-build-hook = /etc/nix/upload-to-cache.sh
<screen>
$ nix-build -E '(import &lt;nixpkgs&gt; {}).writeText "example" (builtins.toString builtins.currentTime)'
these derivations will be built:
this derivation will be built:
/nix/store/s4pnfbkalzy5qz57qs6yybna8wylkig6-example.drv
building '/nix/store/s4pnfbkalzy5qz57qs6yybna8wylkig6-example.drv'...
running post-build-hook '/home/grahamc/projects/github.com/NixOS/nix/post-hook.sh'...


@ -516,7 +516,7 @@ source:
$ nix-env -f '&lt;nixpkgs>' -iA hello --dry-run
(dry run; not doing anything)
installing hello-2.10
these paths will be fetched (0.04 MiB download, 0.19 MiB unpacked):
this path will be fetched (0.04 MiB download, 0.19 MiB unpacked):
/nix/store/wkhdf9jinag5750mqlax6z2zbwhqb76n-hello-2.10
<replaceable>...</replaceable></screen>


@ -11,6 +11,10 @@
<arg>
<arg choice='plain'><option>--quiet</option></arg>
</arg>
<arg>
<option>--log-format</option>
<replaceable>format</replaceable>
</arg>
<arg>
<group choice='plain'>
<arg choice='plain'><option>--no-build-output</option></arg>


@ -92,6 +92,37 @@
</varlistentry>
<varlistentry xml:id="opt-log-format"><term><option>--log-format</option> <replaceable>format</replaceable></term>
<listitem>
<para>This option can be used to change the format of the log output, with
<replaceable>format</replaceable> being one of:</para>
<variablelist>
<varlistentry><term>raw</term>
<listitem><para>This is the raw format, as output by nix-build.</para></listitem>
</varlistentry>
<varlistentry><term>internal-json</term>
<listitem><para>Outputs the logs in a structured manner. Note: the JSON schema is not guaranteed to be stable between releases.</para></listitem>
</varlistentry>
<varlistentry><term>bar</term>
<listitem><para>Only display a progress bar during the builds.</para></listitem>
</varlistentry>
<varlistentry><term>bar-with-logs</term>
<listitem><para>Display the raw logs, with the progress bar at the bottom.</para></listitem>
</varlistentry>
</variablelist>
</listitem>
</varlistentry>
<varlistentry><term><option>--no-build-output</option> / <option>-Q</option></term>
<listitem><para>By default, output written by builders to standard


@ -39,7 +39,7 @@ bundle.</para>
<step><para>Set the environment variable and install Nix</para>
<screen>
$ export NIX_SSL_CERT_FILE=/etc/ssl/my-certificate-bundle.crt
$ sh &lt;(curl https://nixos.org/nix/install)
$ sh &lt;(curl -L https://nixos.org/nix/install)
</screen></step>
<step><para>In the shell profile and rc files (for example,


@ -12,7 +12,7 @@
</para>
<screen>
$ sh &lt;(curl https://nixos.org/nix/install)
$ sh &lt;(curl -L https://nixos.org/nix/install)
</screen>
<para>
@ -39,7 +39,7 @@
To explicitly select a single-user installation on your system:
<screen>
sh &lt;(curl https://nixos.org/nix/install) --no-daemon
sh &lt;(curl -L https://nixos.org/nix/install) --no-daemon
</screen>
</para>


@ -15,7 +15,7 @@ to subsequent chapters.</para>
<step><para>Install single-user Nix by running the following:
<screen>
$ bash &lt;(curl https://nixos.org/nix/install)
$ bash &lt;(curl -L https://nixos.org/nix/install)
</screen>
This will install Nix in <filename>/nix</filename>. The install script


@ -142,8 +142,12 @@ $oldName =~ s/"//g;
sub getStorePath {
my ($jobName) = @_;
my $buildInfo = decode_json(fetch("$evalUrl/job/$jobName", 'application/json'));
die unless $buildInfo->{buildproducts}->{1}->{type} eq "nix-build";
return $buildInfo->{buildproducts}->{1}->{path};
for my $product (values %{$buildInfo->{buildproducts}}) {
next unless $product->{type} eq "nix-build";
next if $product->{path} =~ /[a-z]+$/;
return $product->{path};
}
die;
}
write_file("$nixpkgsDir/nixos/modules/installer/tools/nix-fallback-paths.nix",


@ -125,7 +125,8 @@ define build-library
$(1)_PATH := $$(_d)/$$($(1)_NAME).a
$$($(1)_PATH): $$($(1)_OBJS) | $$(_d)/
$(trace-ar) $(AR) crs $$@ $$?
$(trace-ld) $(LD) -Ur -o $$(_d)/$$($(1)_NAME).o $$?
$(trace-ar) $(AR) crs $$@ $$(_d)/$$($(1)_NAME).o
$(1)_LDFLAGS_USE += $$($(1)_PATH) $$($(1)_LDFLAGS)


@ -80,7 +80,7 @@ SV * queryReferences(char * path)
SV * queryPathHash(char * path)
PPCODE:
try {
auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash.to_string();
auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash.to_string(Base32, true);
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
} catch (Error & e) {
croak("%s", e.what());
@ -106,7 +106,7 @@ SV * queryPathInfo(char * path, int base32)
XPUSHs(&PL_sv_undef);
else
XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(*info->deriver).c_str(), 0)));
auto s = info->narHash.to_string(base32 ? Base32 : Base16);
auto s = info->narHash.to_string(base32 ? Base32 : Base16, true);
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
mXPUSHi(info->registrationTime);
mXPUSHi(info->narSize);
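
The extra boolean that this commit threads through the narHash.to_string() calls appears to select whether the printed hash carries its algorithm name as a prefix. A small self-contained sketch of that kind of two-parameter formatting API; the enum, function name, and prefix syntax here are assumptions for illustration, not Nix's actual declarations:

#include <iostream>
#include <string>

// Stand-in for a Hash::to_string(Base, bool includeType)-style API.
// The second argument decides whether the algorithm name is prepended.
enum Base { Base16, Base32, SRI };

std::string formatHash(const std::string & algo, const std::string & digest,
                       Base base, bool includeType)
{
    if (base == SRI)
        return algo + "-" + digest;                    // the SRI form always names the algorithm
    return includeType ? algo + ":" + digest : digest; // e.g. "sha256:..." vs the bare digest
}

int main()
{
    std::string digest = "0000000000000000000000000000000000000000000000000000";
    std::cout << formatHash("sha256", digest, Base32, true)  << "\n";
    std::cout << formatHash("sha256", digest, Base32, false) << "\n";
}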


@ -50,7 +50,6 @@ rec {
libarchive
boost
nlohmann_json
rustc cargo
# Tests
git


@ -12,64 +12,8 @@ let
builtins.readFile ./.version
+ (if officialRelease then "" else "pre${toString nix.revCount}_${nix.shortRev}");
# Create a "vendor" directory that contains the crates listed in
# Cargo.lock. This allows Nix to be built without network access.
vendoredCrates' =
let
lockFile = builtins.fromTOML (builtins.readFile nix-rust/Cargo.lock);
files = map (pkg: import <nix/fetchurl.nix> {
url = "https://crates.io/api/v1/crates/${pkg.name}/${pkg.version}/download";
sha256 = lockFile.metadata."checksum ${pkg.name} ${pkg.version} (registry+https://github.com/rust-lang/crates.io-index)";
}) (builtins.filter (pkg: pkg.source or "" == "registry+https://github.com/rust-lang/crates.io-index") lockFile.package);
in pkgs.runCommand "cargo-vendor-dir" {}
''
mkdir -p $out/vendor
cat > $out/vendor/config <<EOF
[source.crates-io]
replace-with = "vendored-sources"
[source.vendored-sources]
directory = "vendor"
EOF
${toString (builtins.map (file: ''
mkdir $out/vendor/tmp
tar xvf ${file} -C $out/vendor/tmp
dir=$(echo $out/vendor/tmp/*)
# Add just enough metadata to keep Cargo happy.
printf '{"files":{},"package":"${file.outputHash}"}' > "$dir/.cargo-checksum.json"
# Clean up some cruft from the winapi crates. FIXME: find
# a way to remove winapi* from our dependencies.
if [[ $dir =~ /winapi ]]; then
find $dir -name "*.a" -print0 | xargs -0 rm -f --
fi
mv "$dir" $out/vendor/
rm -rf $out/vendor/tmp
'') files)}
'';
jobs = rec {
vendoredCrates =
with pkgs;
runCommand "vendored-crates" {}
''
mkdir -p $out/nix-support
name=nix-vendored-crates-${version}
fn=$out/$name.tar.xz
tar cvfJ $fn -C ${vendoredCrates'} vendor \
--owner=0 --group=0 --mode=u+rw,uga+r \
--transform "s,vendor,$name,"
echo "file crates-tarball $fn" >> $out/nix-support/hydra-build-products
'';
build = pkgs.lib.genAttrs systems (system:
let pkgs = import nixpkgs { inherit system; }; in
@ -101,8 +45,6 @@ let
patchelf --set-rpath $out/lib:${stdenv.cc.cc.lib}/lib $out/lib/libboost_thread.so.*
''}
ln -sfn ${vendoredCrates'}/vendor/ nix-rust/vendor
(cd perl; autoreconf --install --force --verbose)
'';
@ -247,11 +189,6 @@ let
src = nix;
preConfigure =
''
ln -sfn ${vendoredCrates'}/vendor/ nix-rust/vendor
'';
enableParallelBuilding = true;
buildInputs = buildDeps ++ propagatedDeps;


@ -7,7 +7,7 @@ with import ./release-common.nix { inherit pkgs; };
(if useClang then clangStdenv else stdenv).mkDerivation {
name = "nix";
buildInputs = buildDeps ++ propagatedDeps ++ perlDeps ++ [ pkgs.rustfmt ];
buildInputs = buildDeps ++ propagatedDeps ++ perlDeps;
inherit configureFlags;


@ -244,7 +244,7 @@ connected:
uploadLock = -1;
BasicDerivation drv(readDerivation(*store, store->realStoreDir + "/" + std::string(drvPath->to_string())));
auto drv = store->readDerivation(*drvPath);
drv.inputSrcs = store->parseStorePathSet(inputs);
auto result = sshStore->buildDerivation(*drvPath, drv);


@ -6,11 +6,11 @@
namespace nix {
static Strings parseAttrPath(const string & s)
static Strings parseAttrPath(std::string_view s)
{
Strings res;
string cur;
string::const_iterator i = s.begin();
auto i = s.begin();
while (i != s.end()) {
if (*i == '.') {
res.push_back(cur);
@ -32,6 +32,15 @@ static Strings parseAttrPath(const string & s)
}
std::vector<Symbol> parseAttrPath(EvalState & state, std::string_view s)
{
std::vector<Symbol> res;
for (auto & a : parseAttrPath(s))
res.push_back(state.symbols.create(a));
return res;
}
std::pair<Value *, Pos> findAlongAttrPath(EvalState & state, const string & attrPath,
Bindings & autoArgs, Value & vIn)
{
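
The loop above splits a dotted attribute path, and the new overload then interns each component as a Symbol. A simplified stand-alone sketch of the splitting step only; the real parseAttrPath also handles quoted segments such as a."b.c".d, which this sketch omits:

#include <iostream>
#include <string>
#include <string_view>
#include <vector>

// Simplified split of "a.b.c" into components; quoted segments are not supported here.
static std::vector<std::string> splitAttrPath(std::string_view s)
{
    std::vector<std::string> res;
    std::string cur;
    for (auto i = s.begin(); i != s.end(); ++i) {
        if (*i == '.') {
            res.push_back(cur);
            cur.clear();
        } else
            cur += *i;
    }
    if (!cur.empty()) res.push_back(cur);
    return res;
}

int main()
{
    // The Symbol-returning overload would run state.symbols.create() over each
    // component; here we just print them.
    for (auto & a : splitAttrPath("legacyPackages.x86_64-linux.hello"))
        std::cout << a << "\n";
}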


@ -16,4 +16,6 @@ std::pair<Value *, Pos> findAlongAttrPath(EvalState & state, const string & attr
/* Heuristic to find the filename and lineno of a nix value. */
Pos findDerivationFilename(EvalState & state, Value & v, std::string what);
std::vector<Symbol> parseAttrPath(EvalState & state, std::string_view s);
}


@ -76,11 +76,10 @@ public:
{
auto a = get(name);
if (!a)
throw Error(
ErrorInfo {
.hint = hintfmt("attribute '%s' missing", name),
.nixCode = NixCode { .errPos = pos }
});
throw Error({
.hint = hintfmt("attribute '%s' missing", name),
.nixCode = NixCode { .errPos = pos }
});
return *a;
}
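
This is the first of many hunks in the commit that drop the spelled-out ErrorInfo { ... } and pass a braced list straight to the exception constructor. That works because the constructor takes an ErrorInfo, so the designated-initializer list builds that parameter directly. A minimal sketch of the mechanism with stand-in types; the field names and constructor are illustrative, not Nix's real Error hierarchy:

#include <iostream>
#include <stdexcept>
#include <string>

// Stand-ins for Nix's ErrorInfo / EvalError; the fields are illustrative only.
struct ErrorInfo {
    std::string hint;
    int line = 0;
};

struct EvalError : std::runtime_error {
    EvalError(const ErrorInfo & info)
        : std::runtime_error(info.hint + " (line " + std::to_string(info.line) + ")") { }
};

int main()
{
    try {
        // Before: throw EvalError(ErrorInfo { .hint = ..., .line = ... });
        // After:  the braced list initializes the ErrorInfo parameter directly.
        // (Designated initializers need C++20 or a compiler extension.)
        throw EvalError({ .hint = "attribute 'foo' missing", .line = 42 });
    } catch (const EvalError & e) {
        std::cout << e.what() << "\n";
    }
}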


@ -9,11 +9,10 @@ namespace nix {
LocalNoInlineNoReturn(void throwEvalError(const Pos & pos, const char * s))
{
throw EvalError(
ErrorInfo {
.hint = hintfmt(s),
.nixCode = NixCode { .errPos = pos }
});
throw EvalError({
.hint = hintfmt(s),
.nixCode = NixCode { .errPos = pos }
});
}
LocalNoInlineNoReturn(void throwTypeError(const char * s, const Value & v))
@ -24,11 +23,10 @@ LocalNoInlineNoReturn(void throwTypeError(const char * s, const Value & v))
LocalNoInlineNoReturn(void throwTypeError(const Pos & pos, const char * s, const Value & v))
{
throw TypeError(
ErrorInfo {
.hint = hintfmt(s, showType(v)),
.nixCode = NixCode { .errPos = pos }
});
throw TypeError({
.hint = hintfmt(s, showType(v)),
.nixCode = NixCode { .errPos = pos }
});
}


@ -161,12 +161,12 @@ const Value *getPrimOp(const Value &v) {
}
string showType(const Value & v)
string showType(ValueType type)
{
switch (v.type) {
switch (type) {
case tInt: return "an integer";
case tBool: return "a boolean";
case tString: return v.string.context ? "a string with context" : "a string";
case tBool: return "a Boolean";
case tString: return "a string";
case tPath: return "a path";
case tNull: return "null";
case tAttrs: return "a set";
@ -175,14 +175,27 @@ string showType(const Value & v)
case tApp: return "a function application";
case tLambda: return "a function";
case tBlackhole: return "a black hole";
case tPrimOp: return "a built-in function";
case tPrimOpApp: return "a partially applied built-in function";
case tExternal: return "an external value";
case tFloat: return "a float";
}
abort();
}
string showType(const Value & v)
{
switch (v.type) {
case tString: return v.string.context ? "a string with context" : "a string";
case tPrimOp:
return fmt("the built-in function '%s'", string(v.primOp->name));
case tPrimOpApp:
return fmt("the partially applied built-in function '%s'", string(getPrimOp(v)->primOp->name));
case tExternal: return v.external->showType();
case tFloat: return "a float";
default:
return showType(v.type);
}
abort();
}
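
The refactor above splits showType() in two: an overload on the bare ValueType for the generic names, and a Value overload that adds extra detail (string context, primop name) and otherwise delegates to the first. A compact sketch of that delegate-to-the-enum-overload pattern, using made-up stand-ins rather than Nix's real Value:

#include <iostream>
#include <string>

// Stand-ins for Nix's ValueType / Value; not the real definitions.
enum ValueType { tInt, tBool, tString, tLambda };

struct Value {
    ValueType type;
    bool hasContext = false;   // only meaningful for tString
};

std::string showType(ValueType type)
{
    switch (type) {
        case tInt:    return "an integer";
        case tBool:   return "a Boolean";
        case tString: return "a string";
        case tLambda: return "a function";
    }
    return "an unknown type";
}

// The Value overload adds detail where it has more information,
// and otherwise delegates to the ValueType overload.
std::string showType(const Value & v)
{
    switch (v.type) {
        case tString: return v.hasContext ? "a string with context" : "a string";
        default:      return showType(v.type);
    }
}

int main()
{
    std::cout << showType(tBool) << "\n";
    std::cout << showType(Value{tString, true}) << "\n";
}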
@ -323,6 +336,7 @@ EvalState::EvalState(const Strings & _searchPath, ref<Store> store)
, sOutputHash(symbols.create("outputHash"))
, sOutputHashAlgo(symbols.create("outputHashAlgo"))
, sOutputHashMode(symbols.create("outputHashMode"))
, sRecurseForDerivations(symbols.create("recurseForDerivations"))
, repair(NoRepair)
, store(store)
, baseEnv(allocEnv(128))
@ -471,14 +485,21 @@ Value * EvalState::addConstant(const string & name, Value & v)
Value * EvalState::addPrimOp(const string & name,
size_t arity, PrimOpFun primOp)
{
auto name2 = string(name, 0, 2) == "__" ? string(name, 2) : name;
Symbol sym = symbols.create(name2);
/* Hack to make constants lazy: turn them into an application of
the primop to a dummy value. */
if (arity == 0) {
auto vPrimOp = allocValue();
vPrimOp->type = tPrimOp;
vPrimOp->primOp = new PrimOp(primOp, 1, sym);
Value v;
primOp(*this, noPos, nullptr, v);
mkApp(v, *vPrimOp, *vPrimOp);
return addConstant(name, v);
}
Value * v = allocValue();
string name2 = string(name, 0, 2) == "__" ? string(name, 2) : name;
Symbol sym = symbols.create(name2);
v->type = tPrimOp;
v->primOp = new PrimOp(primOp, arity, sym);
staticBaseEnv.vars[symbols.create(name)] = baseEnvDispl;
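
The new arity == 0 branch above makes "constant" primops lazy by wrapping them in an application to a dummy value, so the underlying function only runs when the result is actually forced. The following is only a rough stand-alone analogue of that deferred-until-forced idea (a memoized thunk), not the evaluator's mkApp machinery:

#include <functional>
#include <iostream>

// Rough analogue of a lazy constant: the computation is stored,
// and only runs (once) when the value is first forced.
struct LazyConst {
    std::function<long()> compute;
    bool forced = false;
    long value = 0;

    long force() {
        if (!forced) {
            value = compute();
            forced = true;
        }
        return value;
    }
};

int main()
{
    LazyConst c{[] {
        std::cout << "computing the constant...\n"; // happens lazily, once
        return 40L + 2L;
    }};
    std::cout << "constant registered, nothing computed yet\n";
    std::cout << c.force() << "\n";
    std::cout << c.force() << "\n"; // cached; no recomputation
}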
@ -506,11 +527,10 @@ LocalNoInlineNoReturn(void throwEvalError(const char * s, const string & s2))
LocalNoInlineNoReturn(void throwEvalError(const Pos & pos, const char * s, const string & s2))
{
throw EvalError(
ErrorInfo {
.hint = hintfmt(s, s2),
.nixCode = NixCode { .errPos = pos }
});
throw EvalError({
.hint = hintfmt(s, s2),
.nixCode = NixCode { .errPos = pos }
});
}
LocalNoInlineNoReturn(void throwEvalError(const char * s, const string & s2, const string & s3))
@ -520,30 +540,27 @@ LocalNoInlineNoReturn(void throwEvalError(const char * s, const string & s2, con
LocalNoInlineNoReturn(void throwEvalError(const Pos & pos, const char * s, const string & s2, const string & s3))
{
throw EvalError(
ErrorInfo {
.hint = hintfmt(s, s2, s3),
.nixCode = NixCode { .errPos = pos }
});
throw EvalError({
.hint = hintfmt(s, s2, s3),
.nixCode = NixCode { .errPos = pos }
});
}
LocalNoInlineNoReturn(void throwEvalError(const Pos & p1, const char * s, const Symbol & sym, const Pos & p2))
{
// p1 is where the error occurred; p2 is a position mentioned in the message.
throw EvalError(
ErrorInfo {
.hint = hintfmt(s, sym, p2),
.nixCode = NixCode { .errPos = p1 }
});
throw EvalError({
.hint = hintfmt(s, sym, p2),
.nixCode = NixCode { .errPos = p1 }
});
}
LocalNoInlineNoReturn(void throwTypeError(const Pos & pos, const char * s))
{
throw TypeError(
ErrorInfo {
.hint = hintfmt(s),
.nixCode = NixCode { .errPos = pos }
});
throw TypeError({
.hint = hintfmt(s),
.nixCode = NixCode { .errPos = pos }
});
}
LocalNoInlineNoReturn(void throwTypeError(const char * s, const string & s1))
@ -553,29 +570,26 @@ LocalNoInlineNoReturn(void throwTypeError(const char * s, const string & s1))
LocalNoInlineNoReturn(void throwTypeError(const Pos & pos, const char * s, const ExprLambda & fun, const Symbol & s2))
{
throw TypeError(
ErrorInfo {
.hint = hintfmt(s, fun.showNamePos(), s2),
.nixCode = NixCode { .errPos = pos }
});
throw TypeError({
.hint = hintfmt(s, fun.showNamePos(), s2),
.nixCode = NixCode { .errPos = pos }
});
}
LocalNoInlineNoReturn(void throwAssertionError(const Pos & pos, const char * s, const string & s1))
{
throw AssertionError(
ErrorInfo {
.hint = hintfmt(s, s1),
.nixCode = NixCode { .errPos = pos }
});
throw AssertionError({
.hint = hintfmt(s, s1),
.nixCode = NixCode { .errPos = pos }
});
}
LocalNoInlineNoReturn(void throwUndefinedVarError(const Pos & pos, const char * s, const string & s1))
{
throw UndefinedVarError(
ErrorInfo {
.hint = hintfmt(s, s1),
.nixCode = NixCode { .errPos = pos }
});
throw UndefinedVarError({
.hint = hintfmt(s, s1),
.nixCode = NixCode { .errPos = pos }
});
}
LocalNoInline(void addErrorPrefix(Error & e, const char * s, const string & s2))
@ -1920,11 +1934,10 @@ void EvalState::printStats()
string ExternalValueBase::coerceToString(const Pos & pos, PathSet & context, bool copyMore, bool copyToStore) const
{
throw TypeError(
ErrorInfo {
.hint = hintfmt("cannot coerce %1% to a string", showType()),
.nixCode = NixCode { .errPos = pos }
});
throw TypeError({
.hint = hintfmt("cannot coerce %1% to a string", showType()),
.nixCode = NixCode { .errPos = pos }
});
}


@ -18,7 +18,7 @@ namespace nix {
class Store;
class EvalState;
struct StorePath;
class StorePath;
enum RepairFlag : bool;
@ -74,7 +74,8 @@ public:
sSystem, sOverrides, sOutputs, sOutputName, sIgnoreNulls,
sFile, sLine, sColumn, sFunctor, sToString,
sRight, sWrong, sStructuredAttrs, sBuilder, sArgs,
sOutputHash, sOutputHashAlgo, sOutputHashMode;
sOutputHash, sOutputHashAlgo, sOutputHashMode,
sRecurseForDerivations;
Symbol sDerivationNix;
/* If set, force copying files to the Nix store even if they
@ -324,6 +325,7 @@ private:
/* Return a string representing the type of the value `v'. */
string showType(ValueType type);
string showType(const Value & v);
/* Decode a context string !<name>!<path> into a pair <path,


@ -348,7 +348,7 @@ static void getDerivations(EvalState & state, Value & vIn,
should we recurse into it? => Only if it has a
`recurseForDerivations = true' attribute. */
if (i->value->type == tAttrs) {
Bindings::iterator j = i->value->attrs->find(state.symbols.create("recurseForDerivations"));
Bindings::iterator j = i->value->attrs->find(state.sRecurseForDerivations);
if (j != i->value->attrs->end() && state.forceBool(*j->value, *j->pos))
getDerivations(state, *i->value, pathPrefix2, autoArgs, drvs, done, ignoreAssertionFailures);
}
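
The one-line change above replaces a symbols.create("recurseForDerivations") call inside the attribute-walking loop with the sRecurseForDerivations symbol that EvalState now interns once at construction. A toy illustration of why hoisting the interning out of the loop matters; the symbol table below is a stand-in, not Nix's SymbolTable:

#include <iostream>
#include <string>
#include <unordered_set>

// Toy interning table: every create() pays a hash lookup (and possibly an insert).
struct SymbolTable {
    std::unordered_set<std::string> table;
    size_t lookups = 0;

    const std::string * create(const std::string & s) {
        ++lookups;
        return &*table.insert(s).first;
    }
};

int main()
{
    SymbolTable symbols;

    // Old pattern: re-intern the attribute name for every set visited.
    for (int i = 0; i < 1000; ++i)
        symbols.create("recurseForDerivations");
    std::cout << "per-visit interning: " << symbols.lookups << " lookups\n";

    // New pattern: intern once up front (like sRecurseForDerivations)
    // and reuse the resulting symbol inside the loop.
    symbols.lookups = 0;
    const std::string * sRecurseForDerivations = symbols.create("recurseForDerivations");
    for (int i = 0; i < 1000; ++i)
        (void) sRecurseForDerivations; // no table lookup here
    std::cout << "cached symbol: " << symbols.lookups << " lookup\n";
}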


@ -219,4 +219,3 @@ or { return OR_KW; }
}
%%


@ -8,7 +8,7 @@ libexpr_SOURCES := $(wildcard $(d)/*.cc) $(wildcard $(d)/primops/*.cc) $(d)/lexe
libexpr_CXXFLAGS += -I src/libutil -I src/libstore -I src/libfetchers -I src/libmain -I src/libexpr
libexpr_LIBS = libutil libstore libfetchers libnixrust
libexpr_LIBS = libutil libstore libfetchers
libexpr_LDFLAGS =
ifneq ($(OS), FreeBSD)


@ -283,11 +283,10 @@ void ExprVar::bindVars(const StaticEnv & env)
enclosing `with'. If there is no `with', then we can issue an
"undefined variable" error now. */
if (withLevel == -1)
throw UndefinedVarError(
ErrorInfo {
.hint = hintfmt("undefined variable '%1%'", name),
.nixCode = NixCode { .errPos = pos }
});
throw UndefinedVarError({
.hint = hintfmt("undefined variable '%1%'", name),
.nixCode = NixCode { .errPos = pos }
});
fromWith = true;
this->level = withLevel;
}


@ -237,11 +237,10 @@ struct ExprLambda : Expr
: pos(pos), arg(arg), matchAttrs(matchAttrs), formals(formals), body(body)
{
if (!arg.empty() && formals && formals->argNames.find(arg) != formals->argNames.end())
throw ParseError(
ErrorInfo {
.hint = hintfmt("duplicate formal function argument '%1%'", arg),
.nixCode = NixCode { .errPos = pos }
});
throw ParseError({
.hint = hintfmt("duplicate formal function argument '%1%'", arg),
.nixCode = NixCode { .errPos = pos }
});
};
void setName(Symbol & name);
string showNamePos() const;


@ -65,23 +65,20 @@ namespace nix {
static void dupAttr(const AttrPath & attrPath, const Pos & pos, const Pos & prevPos)
{
throw ParseError(
ErrorInfo {
.hint = hintfmt("attribute '%1%' already defined at %2%",
showAttrPath(attrPath), prevPos),
.nixCode = NixCode { .errPos = pos },
});
throw ParseError({
.hint = hintfmt("attribute '%1%' already defined at %2%",
showAttrPath(attrPath), prevPos),
.nixCode = NixCode { .errPos = pos },
});
}
static void dupAttr(Symbol attr, const Pos & pos, const Pos & prevPos)
{
throw ParseError(
ErrorInfo {
.hint = hintfmt("attribute '%1%' already defined at %2%",
attr, prevPos),
.nixCode = NixCode { .errPos = pos },
});
throw ParseError({
.hint = hintfmt("attribute '%1%' already defined at %2%", attr, prevPos),
.nixCode = NixCode { .errPos = pos },
});
}
@ -149,12 +146,11 @@ static void addAttr(ExprAttrs * attrs, AttrPath & attrPath,
static void addFormal(const Pos & pos, Formals * formals, const Formal & formal)
{
if (!formals->argNames.insert(formal.name).second)
throw ParseError(
ErrorInfo {
.hint = hintfmt("duplicate formal function argument '%1%'",
formal.name),
.nixCode = NixCode { .errPos = pos },
});
throw ParseError({
.hint = hintfmt("duplicate formal function argument '%1%'",
formal.name),
.nixCode = NixCode { .errPos = pos },
});
formals->formals.push_front(formal);
}
@ -262,10 +258,10 @@ static inline Pos makeCurPos(const YYLTYPE & loc, ParseData * data)
void yyerror(YYLTYPE * loc, yyscan_t scanner, ParseData * data, const char * error)
{
data->error = ErrorInfo {
data->error = {
.hint = hintfmt(error),
.nixCode = NixCode { .errPos = makeCurPos(*loc, data) }
};
};
}
@ -342,11 +338,10 @@ expr_function
{ $$ = new ExprWith(CUR_POS, $2, $4); }
| LET binds IN expr_function
{ if (!$2->dynamicAttrs.empty())
throw ParseError(
ErrorInfo {
.hint = hintfmt("dynamic attributes not allowed in let"),
.nixCode = NixCode { .errPos = CUR_POS },
});
throw ParseError({
.hint = hintfmt("dynamic attributes not allowed in let"),
.nixCode = NixCode { .errPos = CUR_POS },
});
$$ = new ExprLet($2, $4);
}
| expr_if
@ -423,11 +418,10 @@ expr_simple
| URI {
static bool noURLLiterals = settings.isExperimentalFeatureEnabled("no-url-literals");
if (noURLLiterals)
throw ParseError(
ErrorInfo {
.hint = hintfmt("URL literals are disabled"),
.nixCode = NixCode { .errPos = CUR_POS }
});
throw ParseError({
.hint = hintfmt("URL literals are disabled"),
.nixCode = NixCode { .errPos = CUR_POS }
});
$$ = new ExprString(data->symbols.create($1));
}
| '(' expr ')' { $$ = $2; }
@ -497,11 +491,10 @@ attrs
$$->push_back(AttrName(str->s));
delete str;
} else
throw ParseError(
ErrorInfo {
.hint = hintfmt("dynamic attributes not allowed in inherit"),
.nixCode = NixCode { .errPos = makeCurPos(@2, data) },
});
throw ParseError({
.hint = hintfmt("dynamic attributes not allowed in inherit"),
.nixCode = NixCode { .errPos = makeCurPos(@2, data) },
});
}
| { $$ = new AttrPath; }
;
@ -707,11 +700,13 @@ Path EvalState::findFile(SearchPath & searchPath, const string & path, const Pos
Path res = r.second + suffix;
if (pathExists(res)) return canonPath(res);
}
throw ThrownError(
ErrorInfo {
.hint = hintfmt("file '%1%' was not found in the Nix search path (add it using $NIX_PATH or -I)", path),
.nixCode = NixCode { .errPos = pos }
});
throw ThrownError({
.hint = hintfmt(evalSettings.pureEval
? "cannot look up '<%s>' in pure evaluation mode (use '--impure' to override)"
: "file '%s' was not found in the Nix search path (add it using $NIX_PATH or -I)",
path),
.nixCode = NixCode { .errPos = pos }
});
}
@ -727,10 +722,9 @@ std::pair<bool, std::string> EvalState::resolveSearchPathElem(const SearchPathEl
res = { true, store->toRealPath(fetchers::downloadTarball(
store, resolveUri(elem.second), "source", false).storePath) };
} catch (FileTransferError & e) {
logWarning(
ErrorInfo {
.name = "Entry download",
.hint = hintfmt("Nix search path entry '%1%' cannot be downloaded, ignoring", elem.second)
logWarning({
.name = "Entry download",
.hint = hintfmt("Nix search path entry '%1%' cannot be downloaded, ignoring", elem.second)
});
res = { false, "" };
}
@ -739,10 +733,9 @@ std::pair<bool, std::string> EvalState::resolveSearchPathElem(const SearchPathEl
if (pathExists(path))
res = { true, path };
else {
logWarning(
ErrorInfo {
.name = "Entry not found",
.hint = hintfmt("warning: Nix search path entry '%1%' does not exist, ignoring", elem.second)
logWarning({
.name = "Entry not found",
.hint = hintfmt("warning: Nix search path entry '%1%' does not exist, ignoring", elem.second)
});
res = { false, "" };
}


@ -50,20 +50,20 @@ void EvalState::realiseContext(const PathSet & context)
std::vector<StorePathWithOutputs> drvs;
for (auto & i : context) {
std::pair<string, string> decoded = decodeContext(i);
auto ctx = store->parseStorePath(decoded.first);
auto [ctxS, outputName] = decodeContext(i);
auto ctx = store->parseStorePath(ctxS);
if (!store->isValidPath(ctx))
throw InvalidPathError(store->printStorePath(ctx));
if (!decoded.second.empty() && ctx.isDerivation()) {
drvs.push_back(StorePathWithOutputs{ctx.clone(), {decoded.second}});
if (!outputName.empty() && ctx.isDerivation()) {
drvs.push_back(StorePathWithOutputs{ctx, {outputName}});
/* Add the output of this derivation to the allowed
paths. */
if (allowedPaths) {
auto drv = store->derivationFromPath(store->parseStorePath(decoded.first));
DerivationOutputs::iterator i = drv.outputs.find(decoded.second);
auto drv = store->derivationFromPath(ctx);
DerivationOutputs::iterator i = drv.outputs.find(outputName);
if (i == drv.outputs.end())
throw Error("derivation '%s' does not have an output named '%s'", decoded.first, decoded.second);
throw Error("derivation '%s' does not have an output named '%s'", ctxS, outputName);
allowedPaths->insert(store->printStorePath(i->second.path));
}
}
@ -79,6 +79,7 @@ void EvalState::realiseContext(const PathSet & context)
StorePathSet willBuild, willSubstitute, unknown;
unsigned long long downloadSize, narSize;
store->queryMissing(drvs, willBuild, willSubstitute, unknown, downloadSize, narSize);
store->buildPaths(drvs);
}
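
The hunk above replaces pair-member access (decoded.first, decoded.second) with a structured binding over the decoded context string. Per the comment in eval.hh further down, a context string has the shape !<name>!<path>; the sketch below decodes that shape into named bindings instead of .first/.second. The helper is simplified, assumes well-formed input, and is not Nix's real decodeContext():

#include <iostream>
#include <string>
#include <string_view>
#include <utility>

// Simplified decode of "!<name>!<path>" into <path, name>.
static std::pair<std::string, std::string> decodeContext(std::string_view s)
{
    if (!s.empty() && s[0] == '!') {
        auto sep = s.find('!', 1);
        return { std::string(s.substr(sep + 1)),      // store path
                 std::string(s.substr(1, sep - 1)) }; // output name
    }
    return { std::string(s), "" };
}

int main()
{
    // Before: decoded.first / decoded.second; after: named bindings.
    auto [ctxS, outputName] = decodeContext("!out!/nix/store/xxxxxxxx-example.drv");
    std::cout << ctxS << " (output: " << outputName << ")\n";
}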
@ -93,12 +94,10 @@ static void prim_scopedImport(EvalState & state, const Pos & pos, Value * * args
try {
state.realiseContext(context);
} catch (InvalidPathError & e) {
throw EvalError(
ErrorInfo {
.hint = hintfmt("cannot import '%1%', since path '%2%' is not valid",
path, e.path),
.nixCode = NixCode { .errPos = pos }
});
throw EvalError({
.hint = hintfmt("cannot import '%1%', since path '%2%' is not valid", path, e.path),
.nixCode = NixCode { .errPos = pos }
});
}
Path realPath = state.checkSourcePath(state.toRealPath(path, context));
@ -174,13 +173,12 @@ void prim_importNative(EvalState & state, const Pos & pos, Value * * args, Value
try {
state.realiseContext(context);
} catch (InvalidPathError & e) {
throw EvalError(
ErrorInfo {
.hint = hintfmt(
"cannot import '%1%', since path '%2%' is not valid",
path, e.path),
.nixCode = NixCode { .errPos = pos }
});
throw EvalError({
.hint = hintfmt(
"cannot import '%1%', since path '%2%' is not valid",
path, e.path),
.nixCode = NixCode { .errPos = pos }
});
}
path = state.checkSourcePath(path);
@ -215,11 +213,10 @@ void prim_exec(EvalState & state, const Pos & pos, Value * * args, Value & v)
auto elems = args[0]->listElems();
auto count = args[0]->listSize();
if (count == 0) {
throw EvalError(
ErrorInfo {
.hint = hintfmt("at least one argument to 'exec' required"),
.nixCode = NixCode { .errPos = pos }
});
throw EvalError({
.hint = hintfmt("at least one argument to 'exec' required"),
.nixCode = NixCode { .errPos = pos }
});
}
PathSet context;
auto program = state.coerceToString(pos, *elems[0], context, false, false);
@ -230,12 +227,12 @@ void prim_exec(EvalState & state, const Pos & pos, Value * * args, Value & v)
try {
state.realiseContext(context);
} catch (InvalidPathError & e) {
throw EvalError(
ErrorInfo {
.hint = hintfmt("cannot execute '%1%', since path '%2%' is not valid",
program, e.path),
.nixCode = NixCode { .errPos = pos }
});}
throw EvalError({
.hint = hintfmt("cannot execute '%1%', since path '%2%' is not valid",
program, e.path),
.nixCode = NixCode { .errPos = pos }
});
}
auto output = runProgram(program, true, commandArgs);
Expr * parsed;
@ -386,11 +383,10 @@ static void prim_genericClosure(EvalState & state, const Pos & pos, Value * * ar
Bindings::iterator startSet =
args[0]->attrs->find(state.symbols.create("startSet"));
if (startSet == args[0]->attrs->end())
throw EvalError(
ErrorInfo {
.hint = hintfmt("attribute 'startSet' required"),
.nixCode = NixCode { .errPos = pos }
});
throw EvalError({
.hint = hintfmt("attribute 'startSet' required"),
.nixCode = NixCode { .errPos = pos }
});
state.forceList(*startSet->value, pos);
ValueList workSet;
@ -401,11 +397,10 @@ static void prim_genericClosure(EvalState & state, const Pos & pos, Value * * ar
Bindings::iterator op =
args[0]->attrs->find(state.symbols.create("operator"));
if (op == args[0]->attrs->end())
throw EvalError(
ErrorInfo {
.hint = hintfmt("attribute 'operator' required"),
.nixCode = NixCode { .errPos = pos }
});
throw EvalError({
.hint = hintfmt("attribute 'operator' required"),
.nixCode = NixCode { .errPos = pos }
});
state.forceValue(*op->value, pos);
/* Construct the closure by applying the operator to element of
@ -424,11 +419,10 @@ static void prim_genericClosure(EvalState & state, const Pos & pos, Value * * ar
Bindings::iterator key =
e->attrs->find(state.symbols.create("key"));
if (key == e->attrs->end())
throw EvalError(
ErrorInfo {
.hint = hintfmt("attribute 'key' required"),
.nixCode = NixCode { .errPos = pos }
});
throw EvalError({
.hint = hintfmt("attribute 'key' required"),
.nixCode = NixCode { .errPos = pos }
});
state.forceValue(*key->value, pos);
if (!doneKeys.insert(key->value).second) continue;
@ -560,11 +554,10 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
/* Figure out the name first (for stack backtraces). */
Bindings::iterator attr = args[0]->attrs->find(state.sName);
if (attr == args[0]->attrs->end())
throw EvalError(
ErrorInfo {
.hint = hintfmt("required attribute 'name' missing"),
.nixCode = NixCode { .errPos = pos }
});
throw EvalError({
.hint = hintfmt("required attribute 'name' missing"),
.nixCode = NixCode { .errPos = pos }
});
string drvName;
Pos & posDrvName(*attr->pos);
try {
@ -608,41 +601,37 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
if (s == "recursive") ingestionMethod = FileIngestionMethod::Recursive;
else if (s == "flat") ingestionMethod = FileIngestionMethod::Flat;
else
throw EvalError(
ErrorInfo {
.hint = hintfmt("invalid value '%s' for 'outputHashMode' attribute", s),
.nixCode = NixCode { .errPos = posDrvName }
});
throw EvalError({
.hint = hintfmt("invalid value '%s' for 'outputHashMode' attribute", s),
.nixCode = NixCode { .errPos = posDrvName }
});
};
auto handleOutputs = [&](const Strings & ss) {
outputs.clear();
for (auto & j : ss) {
if (outputs.find(j) != outputs.end())
throw EvalError(
ErrorInfo {
.hint = hintfmt("duplicate derivation output '%1%'", j),
.nixCode = NixCode { .errPos = posDrvName }
});
throw EvalError({
.hint = hintfmt("duplicate derivation output '%1%'", j),
.nixCode = NixCode { .errPos = posDrvName }
});
/* !!! Check whether j is a valid attribute
name. */
/* Derivations cannot be named drv, because
then we'd have an attribute drvPath in
the resulting set. */
if (j == "drv")
throw EvalError(
ErrorInfo {
.hint = hintfmt("invalid derivation output name 'drv'" ),
.nixCode = NixCode { .errPos = posDrvName }
});
throw EvalError({
.hint = hintfmt("invalid derivation output name 'drv'" ),
.nixCode = NixCode { .errPos = posDrvName }
});
outputs.insert(j);
}
if (outputs.empty())
throw EvalError(
ErrorInfo {
.hint = hintfmt("derivation cannot have an empty set of outputs"),
.nixCode = NixCode { .errPos = posDrvName }
});
throw EvalError({
.hint = hintfmt("derivation cannot have an empty set of outputs"),
.nixCode = NixCode { .errPos = posDrvName }
});
};
try {
@ -735,9 +724,9 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
StorePathSet refs;
state.store->computeFSClosure(state.store->parseStorePath(std::string_view(path).substr(1)), refs);
for (auto & j : refs) {
drv.inputSrcs.insert(j.clone());
drv.inputSrcs.insert(j);
if (j.isDerivation())
drv.inputDrvs[j.clone()] = state.store->queryDerivationOutputNames(j);
drv.inputDrvs[j] = state.store->readDerivation(j).outputNames();
}
}
@ -754,38 +743,35 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
/* Do we have all required attributes? */
if (drv.builder == "")
throw EvalError(
ErrorInfo {
.hint = hintfmt("required attribute 'builder' missing"),
.nixCode = NixCode { .errPos = posDrvName }
});
throw EvalError({
.hint = hintfmt("required attribute 'builder' missing"),
.nixCode = NixCode { .errPos = posDrvName }
});
if (drv.platform == "")
throw EvalError(
ErrorInfo {
.hint = hintfmt("required attribute 'system' missing"),
.nixCode = NixCode { .errPos = posDrvName }
});
throw EvalError({
.hint = hintfmt("required attribute 'system' missing"),
.nixCode = NixCode { .errPos = posDrvName }
});
/* Check whether the derivation name is valid. */
if (isDerivation(drvName))
throw EvalError(
ErrorInfo {
.hint = hintfmt("derivation names are not allowed to end in '%s'", drvExtension),
.nixCode = NixCode { .errPos = posDrvName }
});
throw EvalError({
.hint = hintfmt("derivation names are not allowed to end in '%s'", drvExtension),
.nixCode = NixCode { .errPos = posDrvName }
});
if (outputHash) {
/* Handle fixed-output derivations. */
if (outputs.size() != 1 || *(outputs.begin()) != "out")
throw Error(
ErrorInfo {
.hint = hintfmt("multiple outputs are not supported in fixed-output derivations"),
.nixCode = NixCode { .errPos = posDrvName }
});
throw Error({
.hint = hintfmt("multiple outputs are not supported in fixed-output derivations"),
.nixCode = NixCode { .errPos = posDrvName }
});
HashType ht = outputHashAlgo.empty() ? htUnknown : parseHashType(outputHashAlgo);
Hash h(*outputHash, ht);
Hash h = newHashAllowEmpty(*outputHash, ht);
auto outPath = state.store->makeFixedOutputPath(ingestionMethod, h, drvName);
if (!jsonObject) drv.env["out"] = state.store->printStorePath(outPath);
@ -807,7 +793,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
for (auto & i : outputs) {
if (!jsonObject) drv.env[i] = "";
drv.outputs.insert_or_assign(i,
DerivationOutput(StorePath::dummy.clone(), "", ""));
DerivationOutput { StorePath::dummy, "", "" });
}
Hash h = hashDerivationModulo(*state.store, Derivation(drv), true);
@ -816,7 +802,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
auto outPath = state.store->makeOutputPath(i, h, drvName);
if (!jsonObject) drv.env[i] = state.store->printStorePath(outPath);
drv.outputs.insert_or_assign(i,
DerivationOutput(std::move(outPath), "", ""));
DerivationOutput { std::move(outPath), "", "" });
}
}
@ -829,7 +815,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
/* Optimisation, but required in read-only mode! because in that
case we don't actually write store derivations, so we can't
read them later. */
drvHashes.insert_or_assign(drvPath.clone(),
drvHashes.insert_or_assign(drvPath,
hashDerivationModulo(*state.store, Derivation(drv), false));
state.mkAttrs(v, 1 + drv.outputs.size());
@ -886,11 +872,10 @@ static void prim_storePath(EvalState & state, const Pos & pos, Value * * args, V
e.g. nix-push does the right thing. */
if (!state.store->isStorePath(path)) path = canonPath(path, true);
if (!state.store->isInStore(path))
throw EvalError(
ErrorInfo {
.hint = hintfmt("path '%1%' is not in the Nix store", path),
.nixCode = NixCode { .errPos = pos }
});
throw EvalError({
.hint = hintfmt("path '%1%' is not in the Nix store", path),
.nixCode = NixCode { .errPos = pos }
});
Path path2 = state.store->toStorePath(path);
if (!settings.readOnlyMode)
state.store->ensurePath(state.store->parseStorePath(path2));
@ -906,13 +891,12 @@ static void prim_pathExists(EvalState & state, const Pos & pos, Value * * args,
try {
state.realiseContext(context);
} catch (InvalidPathError & e) {
throw EvalError(
ErrorInfo {
.hint = hintfmt(
"cannot check the existence of '%1%', since path '%2%' is not valid",
path, e.path),
.nixCode = NixCode { .errPos = pos }
});
throw EvalError({
.hint = hintfmt(
"cannot check the existence of '%1%', since path '%2%' is not valid",
path, e.path),
.nixCode = NixCode { .errPos = pos }
});
}
try {
@ -955,13 +939,11 @@ static void prim_readFile(EvalState & state, const Pos & pos, Value * * args, Va
try {
state.realiseContext(context);
} catch (InvalidPathError & e) {
throw EvalError(
ErrorInfo {
.hint = hintfmt("cannot read '%1%', since path '%2%' is not valid"
, path, e.path),
.nixCode = NixCode { .errPos = pos }
});
}
throw EvalError({
.hint = hintfmt("cannot read '%1%', since path '%2%' is not valid", path, e.path),
.nixCode = NixCode { .errPos = pos }
});
}
string s = readFile(state.checkSourcePath(state.toRealPath(path, context)));
if (s.find((char) 0) != string::npos)
throw Error("the contents of the file '%1%' cannot be represented as a Nix string", path);
@ -988,11 +970,10 @@ static void prim_findFile(EvalState & state, const Pos & pos, Value * * args, Va
i = v2.attrs->find(state.symbols.create("path"));
if (i == v2.attrs->end())
throw EvalError(
ErrorInfo {
.hint = hintfmt("attribute 'path' missing"),
.nixCode = NixCode { .errPos = pos }
});
throw EvalError({
.hint = hintfmt("attribute 'path' missing"),
.nixCode = NixCode { .errPos = pos }
});
PathSet context;
string path = state.coerceToString(pos, *i->value, context, false, false);
@ -1000,12 +981,10 @@ static void prim_findFile(EvalState & state, const Pos & pos, Value * * args, Va
try {
state.realiseContext(context);
} catch (InvalidPathError & e) {
throw EvalError(
ErrorInfo {
.hint = hintfmt("cannot find '%1%', since path '%2%' is not valid",
path, e.path),
.nixCode = NixCode { .errPos = pos }
});
throw EvalError({
.hint = hintfmt("cannot find '%1%', since path '%2%' is not valid", path, e.path),
.nixCode = NixCode { .errPos = pos }
});
}
searchPath.emplace_back(prefix, path);
@ -1022,11 +1001,10 @@ static void prim_hashFile(EvalState & state, const Pos & pos, Value * * args, Va
string type = state.forceStringNoCtx(*args[0], pos);
HashType ht = parseHashType(type);
if (ht == htUnknown)
throw Error(
ErrorInfo {
.hint = hintfmt("unknown hash type '%1%'", type),
.nixCode = NixCode { .errPos = pos }
});
throw Error({
.hint = hintfmt("unknown hash type '%1%'", type),
.nixCode = NixCode { .errPos = pos }
});
PathSet context; // discarded
Path p = state.coerceToPath(pos, *args[1], context);
@ -1042,12 +1020,10 @@ static void prim_readDir(EvalState & state, const Pos & pos, Value * * args, Val
try {
state.realiseContext(ctx);
} catch (InvalidPathError & e) {
throw EvalError(
ErrorInfo {
.hint = hintfmt("cannot read '%1%', since path '%2%' is not valid",
path, e.path),
.nixCode = NixCode { .errPos = pos }
});
throw EvalError({
.hint = hintfmt("cannot read '%1%', since path '%2%' is not valid", path, e.path),
.nixCode = NixCode { .errPos = pos }
});
}
DirEntries entries = readDirectory(state.checkSourcePath(path));
@ -1117,15 +1093,13 @@ static void prim_toFile(EvalState & state, const Pos & pos, Value * * args, Valu
for (auto path : context) {
if (path.at(0) != '/')
throw EvalError(
ErrorInfo {
.hint = hintfmt(
"in 'toFile': the file named '%1%' must not contain a reference "
"to a derivation but contains (%2%)",
name,
path),
.nixCode = NixCode { .errPos = pos }
});
throw EvalError( {
.hint = hintfmt(
"in 'toFile': the file named '%1%' must not contain a reference "
"to a derivation but contains (%2%)",
name, path),
.nixCode = NixCode { .errPos = pos }
});
refs.insert(state.store->parseStorePath(path));
}
@ -1193,21 +1167,19 @@ static void prim_filterSource(EvalState & state, const Pos & pos, Value * * args
PathSet context;
Path path = state.coerceToPath(pos, *args[1], context);
if (!context.empty())
throw EvalError(
ErrorInfo {
.hint = hintfmt("string '%1%' cannot refer to other paths", path),
.nixCode = NixCode { .errPos = pos }
});
throw EvalError({
.hint = hintfmt("string '%1%' cannot refer to other paths", path),
.nixCode = NixCode { .errPos = pos }
});
state.forceValue(*args[0], pos);
if (args[0]->type != tLambda)
throw TypeError(
ErrorInfo {
.hint = hintfmt(
"first argument in call to 'filterSource' is not a function but %1%",
showType(*args[0])),
.nixCode = NixCode { .errPos = pos }
});
throw TypeError({
.hint = hintfmt(
"first argument in call to 'filterSource' is not a function but %1%",
showType(*args[0])),
.nixCode = NixCode { .errPos = pos }
});
addPath(state, pos, std::string(baseNameOf(path)), path, args[0], FileIngestionMethod::Recursive, Hash(), v);
}
@ -1227,12 +1199,10 @@ static void prim_path(EvalState & state, const Pos & pos, Value * * args, Value
PathSet context;
path = state.coerceToPath(*attr.pos, *attr.value, context);
if (!context.empty())
throw EvalError(
ErrorInfo {
.hint = hintfmt("string '%1%' cannot refer to other paths",
path),
.nixCode = NixCode { .errPos = *attr.pos }
});
throw EvalError({
.hint = hintfmt("string '%1%' cannot refer to other paths", path),
.nixCode = NixCode { .errPos = *attr.pos }
});
} else if (attr.name == state.sName)
name = state.forceStringNoCtx(*attr.value, *attr.pos);
else if (n == "filter") {
@ -1241,21 +1211,18 @@ static void prim_path(EvalState & state, const Pos & pos, Value * * args, Value
} else if (n == "recursive")
method = FileIngestionMethod { state.forceBool(*attr.value, *attr.pos) };
else if (n == "sha256")
expectedHash = Hash(state.forceStringNoCtx(*attr.value, *attr.pos), htSHA256);
expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, *attr.pos), htSHA256);
else
throw EvalError(
ErrorInfo {
.hint = hintfmt("unsupported argument '%1%' to 'addPath'",
attr.name),
.nixCode = NixCode { .errPos = *attr.pos }
});
throw EvalError({
.hint = hintfmt("unsupported argument '%1%' to 'addPath'", attr.name),
.nixCode = NixCode { .errPos = *attr.pos }
});
}
if (path.empty())
throw EvalError(
ErrorInfo {
.hint = hintfmt("'path' required"),
.nixCode = NixCode { .errPos = pos }
});
throw EvalError({
.hint = hintfmt("'path' required"),
.nixCode = NixCode { .errPos = pos }
});
if (name.empty())
name = baseNameOf(path);
@ -1313,11 +1280,10 @@ void prim_getAttr(EvalState & state, const Pos & pos, Value * * args, Value & v)
// !!! Should we create a symbol here or just do a lookup?
Bindings::iterator i = args[1]->attrs->find(state.symbols.create(attr));
if (i == args[1]->attrs->end())
throw EvalError(
ErrorInfo {
.hint = hintfmt("attribute '%1%' missing", attr),
.nixCode = NixCode { .errPos = pos }
});
throw EvalError({
.hint = hintfmt("attribute '%1%' missing", attr),
.nixCode = NixCode { .errPos = pos }
});
// !!! add to stack trace?
if (state.countCalls && i->pos) state.attrSelects[*i->pos]++;
state.forceValue(*i->value, pos);
@ -1397,22 +1363,20 @@ static void prim_listToAttrs(EvalState & state, const Pos & pos, Value * * args,
Bindings::iterator j = v2.attrs->find(state.sName);
if (j == v2.attrs->end())
throw TypeError(
ErrorInfo {
.hint = hintfmt("'name' attribute missing in a call to 'listToAttrs'"),
.nixCode = NixCode { .errPos = pos }
});
throw TypeError({
.hint = hintfmt("'name' attribute missing in a call to 'listToAttrs'"),
.nixCode = NixCode { .errPos = pos }
});
string name = state.forceStringNoCtx(*j->value, pos);
Symbol sym = state.symbols.create(name);
if (seen.insert(sym).second) {
Bindings::iterator j2 = v2.attrs->find(state.symbols.create(state.sValue));
if (j2 == v2.attrs->end())
throw TypeError(
ErrorInfo {
.hint = hintfmt("'value' attribute missing in a call to 'listToAttrs'"),
.nixCode = NixCode { .errPos = pos }
});
throw TypeError({
.hint = hintfmt("'value' attribute missing in a call to 'listToAttrs'"),
.nixCode = NixCode { .errPos = pos }
});
v.attrs->push_back(Attr(sym, j2->value, j2->pos));
}
}
@ -1485,11 +1449,10 @@ static void prim_functionArgs(EvalState & state, const Pos & pos, Value * * args
{
state.forceValue(*args[0], pos);
if (args[0]->type != tLambda)
throw TypeError(
ErrorInfo {
.hint = hintfmt("'functionArgs' requires a function"),
.nixCode = NixCode { .errPos = pos }
});
throw TypeError({
.hint = hintfmt("'functionArgs' requires a function"),
.nixCode = NixCode { .errPos = pos }
});
if (!args[0]->lambda.fun->matchAttrs) {
state.mkAttrs(v, 0);
@ -1542,11 +1505,10 @@ static void elemAt(EvalState & state, const Pos & pos, Value & list, int n, Valu
{
state.forceList(list, pos);
if (n < 0 || (unsigned int) n >= list.listSize())
throw Error(
ErrorInfo {
.hint = hintfmt("list index %1% is out of bounds", n),
.nixCode = NixCode { .errPos = pos }
});
throw Error({
.hint = hintfmt("list index %1% is out of bounds", n),
.nixCode = NixCode { .errPos = pos }
});
state.forceValue(*list.listElems()[n], pos);
v = *list.listElems()[n];
}
@ -1573,11 +1535,10 @@ static void prim_tail(EvalState & state, const Pos & pos, Value * * args, Value
{
state.forceList(*args[0], pos);
if (args[0]->listSize() == 0)
throw Error(
ErrorInfo {
.hint = hintfmt("'tail' called on an empty list"),
.nixCode = NixCode { .errPos = pos }
});
throw Error({
.hint = hintfmt("'tail' called on an empty list"),
.nixCode = NixCode { .errPos = pos }
});
state.mkList(v, args[0]->listSize() - 1);
for (unsigned int n = 0; n < v.listSize(); ++n)
@ -1719,12 +1680,10 @@ static void prim_genList(EvalState & state, const Pos & pos, Value * * args, Val
auto len = state.forceInt(*args[1], pos);
if (len < 0)
throw EvalError(
ErrorInfo {
.hint = hintfmt("cannot create list of size %1%", len),
.nixCode = NixCode { .errPos = pos }
});
throw EvalError({
.hint = hintfmt("cannot create list of size %1%", len),
.nixCode = NixCode { .errPos = pos }
});
state.mkList(v, len);
@ -1883,11 +1842,10 @@ static void prim_div(EvalState & state, const Pos & pos, Value * * args, Value &
NixFloat f2 = state.forceFloat(*args[1], pos);
if (f2 == 0)
throw EvalError(
ErrorInfo {
.hint = hintfmt("division by zero"),
.nixCode = NixCode { .errPos = pos }
});
throw EvalError({
.hint = hintfmt("division by zero"),
.nixCode = NixCode { .errPos = pos }
});
if (args[0]->type == tFloat || args[1]->type == tFloat) {
mkFloat(v, state.forceFloat(*args[0], pos) / state.forceFloat(*args[1], pos));
@ -1896,11 +1854,10 @@ static void prim_div(EvalState & state, const Pos & pos, Value * * args, Value &
NixInt i2 = state.forceInt(*args[1], pos);
/* Avoid division overflow as it might raise SIGFPE. */
if (i1 == std::numeric_limits<NixInt>::min() && i2 == -1)
throw EvalError(
ErrorInfo {
.hint = hintfmt("overflow in integer division"),
.nixCode = NixCode { .errPos = pos }
});
throw EvalError({
.hint = hintfmt("overflow in integer division"),
.nixCode = NixCode { .errPos = pos }
});
mkInt(v, i1 / i2);
}
@ -1958,11 +1915,10 @@ static void prim_substring(EvalState & state, const Pos & pos, Value * * args, V
string s = state.coerceToString(pos, *args[2], context);
if (start < 0)
throw EvalError(
ErrorInfo {
.hint = hintfmt("negative start position in 'substring'"),
.nixCode = NixCode { .errPos = pos }
});
throw EvalError({
.hint = hintfmt("negative start position in 'substring'"),
.nixCode = NixCode { .errPos = pos }
});
mkString(v, (unsigned int) start >= s.size() ? "" : string(s, start, len), context);
}
@ -1982,11 +1938,10 @@ static void prim_hashString(EvalState & state, const Pos & pos, Value * * args,
string type = state.forceStringNoCtx(*args[0], pos);
HashType ht = parseHashType(type);
if (ht == htUnknown)
throw Error(
ErrorInfo {
.hint = hintfmt("unknown hash type '%1%'", type),
.nixCode = NixCode { .errPos = pos }
});
throw Error({
.hint = hintfmt("unknown hash type '%1%'", type),
.nixCode = NixCode { .errPos = pos }
});
PathSet context; // discarded
string s = state.forceString(*args[1], context, pos);
@ -2029,17 +1984,15 @@ void prim_match(EvalState & state, const Pos & pos, Value * * args, Value & v)
} catch (std::regex_error &e) {
if (e.code() == std::regex_constants::error_space) {
// limit is _GLIBCXX_REGEX_STATE_LIMIT for libstdc++
throw EvalError(
ErrorInfo {
.hint = hintfmt("memory limit exceeded by regular expression '%s'", re),
.nixCode = NixCode { .errPos = pos }
});
throw EvalError({
.hint = hintfmt("memory limit exceeded by regular expression '%s'", re),
.nixCode = NixCode { .errPos = pos }
});
} else {
throw EvalError(
ErrorInfo {
.hint = hintfmt("invalid regular expression '%s'", re),
.nixCode = NixCode { .errPos = pos }
});
throw EvalError({
.hint = hintfmt("invalid regular expression '%s'", re),
.nixCode = NixCode { .errPos = pos }
});
}
}
}
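
Both regex hunks (here and in prim_split below) keep the same error handling: a std::regex_error whose code() is std::regex_constants::error_space means the pattern blew the regex engine's state/memory limit, and anything else is reported as an invalid expression. A small stand-alone example of that distinction using only the standard library; the sample pattern trips the "invalid" branch, since triggering error_space requires a genuinely pathological pattern:

#include <iostream>
#include <regex>

// Distinguish a resource-limit failure from a plain syntax error.
int main()
{
    try {
        std::regex re("(");              // invalid: unmatched parenthesis
        std::regex_search("input", re);
    } catch (const std::regex_error & e) {
        if (e.code() == std::regex_constants::error_space)
            std::cout << "memory limit exceeded by regular expression\n";
        else
            std::cout << "invalid regular expression: " << e.what() << "\n";
    }
}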
@ -2104,17 +2057,15 @@ static void prim_split(EvalState & state, const Pos & pos, Value * * args, Value
} catch (std::regex_error &e) {
if (e.code() == std::regex_constants::error_space) {
// limit is _GLIBCXX_REGEX_STATE_LIMIT for libstdc++
throw EvalError(
ErrorInfo {
.hint = hintfmt("memory limit exceeded by regular expression '%s'", re),
.nixCode = NixCode { .errPos = pos }
});
throw EvalError({
.hint = hintfmt("memory limit exceeded by regular expression '%s'", re),
.nixCode = NixCode { .errPos = pos }
});
} else {
throw EvalError(
ErrorInfo {
.hint = hintfmt("invalid regular expression '%s'", re),
.nixCode = NixCode { .errPos = pos }
});
throw EvalError({
.hint = hintfmt("invalid regular expression '%s'", re),
.nixCode = NixCode { .errPos = pos }
});
}
}
}
@ -2145,11 +2096,10 @@ static void prim_replaceStrings(EvalState & state, const Pos & pos, Value * * ar
state.forceList(*args[0], pos);
state.forceList(*args[1], pos);
if (args[0]->listSize() != args[1]->listSize())
throw EvalError(
ErrorInfo {
.hint = hintfmt("'from' and 'to' arguments to 'replaceStrings' have different lengths"),
.nixCode = NixCode { .errPos = pos }
});
throw EvalError({
.hint = hintfmt("'from' and 'to' arguments to 'replaceStrings' have different lengths"),
.nixCode = NixCode { .errPos = pos }
});
vector<string> from;
from.reserve(args[0]->listSize());
@ -2252,10 +2202,11 @@ static void prim_splitVersion(EvalState & state, const Pos & pos, Value * * args
RegisterPrimOp::PrimOps * RegisterPrimOp::primOps;
RegisterPrimOp::RegisterPrimOp(std::string name, size_t arity, PrimOpFun fun)
RegisterPrimOp::RegisterPrimOp(std::string name, size_t arity, PrimOpFun fun,
std::optional<std::string> requiredFeature)
{
if (!primOps) primOps = new PrimOps;
primOps->emplace_back(name, arity, fun);
primOps->push_back({name, arity, fun, requiredFeature});
}
@ -2447,7 +2398,8 @@ void EvalState::createBaseEnv()
if (RegisterPrimOp::primOps)
for (auto & primOp : *RegisterPrimOp::primOps)
addPrimOp(std::get<0>(primOp), std::get<1>(primOp), std::get<2>(primOp));
if (!primOp.requiredFeature || settings.isExperimentalFeatureEnabled(*primOp.requiredFeature))
addPrimOp(primOp.name, primOp.arity, primOp.primOp);
/* Now that we've added all primops, sort the `builtins' set,
because attribute lookups expect it to be sorted. */


@ -7,12 +7,25 @@ namespace nix {
struct RegisterPrimOp
{
typedef std::vector<std::tuple<std::string, size_t, PrimOpFun>> PrimOps;
struct Info
{
std::string name;
size_t arity;
PrimOpFun primOp;
std::optional<std::string> requiredFeature;
};
typedef std::vector<Info> PrimOps;
static PrimOps * primOps;
/* You can register a constant by passing an arity of 0. fun
will get called during EvalState initialization, so there
may be primops not yet added and builtins is not yet sorted. */
RegisterPrimOp(std::string name, size_t arity, PrimOpFun fun);
RegisterPrimOp(
std::string name,
size_t arity,
PrimOpFun fun,
std::optional<std::string> requiredFeature = {});
};
/* These primops are disabled without enableNativeCode, but plugins
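
The new Info record lets a primop registration carry an optional requiredFeature; the createBaseEnv() hunk earlier in this commit then only calls addPrimOp when that feature is enabled. A self-contained sketch of that register-then-filter pattern with stand-in types; the primop names and the "flakes" feature string are examples, not a claim about which Nix primops are gated:

#include <functional>
#include <iostream>
#include <optional>
#include <set>
#include <string>
#include <vector>

// Stand-ins for Nix's RegisterPrimOp machinery; names are illustrative.
using PrimOpFun = std::function<void()>;

struct RegisterPrimOp {
    struct Info {
        std::string name;
        size_t arity;
        PrimOpFun primOp;
        std::optional<std::string> requiredFeature;
    };
    static std::vector<Info> & primOps() {
        static std::vector<Info> v;
        return v;
    }
    RegisterPrimOp(std::string name, size_t arity, PrimOpFun fun,
                   std::optional<std::string> requiredFeature = {}) {
        primOps().push_back({std::move(name), arity, std::move(fun),
                             std::move(requiredFeature)});
    }
};

// Example registrations: one ungated, one gated behind a feature.
static RegisterPrimOp r1("__getEnv", 1, [] { std::cout << "getEnv called\n"; });
static RegisterPrimOp r2("__fetchTree", 1, [] { std::cout << "fetchTree called\n"; },
                         "flakes");

int main()
{
    std::set<std::string> enabledFeatures = {/* e.g. "flakes" */};
    for (auto & p : RegisterPrimOp::primOps())
        if (!p.requiredFeature || enabledFeatures.count(*p.requiredFeature))
            std::cout << "registering " << p.name << "\n";
        else
            std::cout << "skipping " << p.name
                      << " (needs feature '" << *p.requiredFeature << "')\n";
}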


@ -146,11 +146,10 @@ static void prim_appendContext(EvalState & state, const Pos & pos, Value * * arg
auto sAllOutputs = state.symbols.create("allOutputs");
for (auto & i : *args[1]->attrs) {
if (!state.store->isStorePath(i.name))
throw EvalError(
ErrorInfo {
.hint = hintfmt("Context key '%s' is not a store path", i.name),
.nixCode = NixCode { .errPos = *i.pos }
});
throw EvalError({
.hint = hintfmt("Context key '%s' is not a store path", i.name),
.nixCode = NixCode { .errPos = *i.pos }
});
if (!settings.readOnlyMode)
state.store->ensurePath(state.store->parseStorePath(i.name));
state.forceAttrs(*i.value, *i.pos);
@ -164,11 +163,10 @@ static void prim_appendContext(EvalState & state, const Pos & pos, Value * * arg
if (iter != i.value->attrs->end()) {
if (state.forceBool(*iter->value, *iter->pos)) {
if (!isDerivation(i.name)) {
throw EvalError(
ErrorInfo {
.hint = hintfmt("Tried to add all-outputs context of %s, which is not a derivation, to a string", i.name),
.nixCode = NixCode { .errPos = *i.pos }
});
throw EvalError({
.hint = hintfmt("Tried to add all-outputs context of %s, which is not a derivation, to a string", i.name),
.nixCode = NixCode { .errPos = *i.pos }
});
}
context.insert("=" + string(i.name));
}
@ -178,11 +176,10 @@ static void prim_appendContext(EvalState & state, const Pos & pos, Value * * arg
if (iter != i.value->attrs->end()) {
state.forceList(*iter->value, *iter->pos);
if (iter->value->listSize() && !isDerivation(i.name)) {
throw EvalError(
ErrorInfo {
.hint = hintfmt("Tried to add derivation output context of %s, which is not a derivation, to a string", i.name),
.nixCode = NixCode { .errPos = *i.pos }
});
throw EvalError({
.hint = hintfmt("Tried to add derivation output context of %s, which is not a derivation, to a string", i.name),
.nixCode = NixCode { .errPos = *i.pos }
});
}
for (unsigned int n = 0; n < iter->value->listSize(); ++n) {
auto name = state.forceStringNoCtx(*iter->value->listElems()[n], *iter->pos);


@ -35,19 +35,17 @@ static void prim_fetchGit(EvalState & state, const Pos & pos, Value * * args, Va
else if (n == "submodules")
fetchSubmodules = state.forceBool(*attr.value, *attr.pos);
else
throw EvalError(
ErrorInfo {
.hint = hintfmt("unsupported argument '%s' to 'fetchGit'", attr.name),
.nixCode = NixCode { .errPos = *attr.pos }
});
throw EvalError({
.hint = hintfmt("unsupported argument '%s' to 'fetchGit'", attr.name),
.nixCode = NixCode { .errPos = *attr.pos }
});
}
if (url.empty())
throw EvalError(
ErrorInfo {
.hint = hintfmt("'url' argument required"),
.nixCode = NixCode { .errPos = pos }
});
throw EvalError({
.hint = hintfmt("'url' argument required"),
.nixCode = NixCode { .errPos = pos }
});
} else
url = state.coerceToString(pos, *args[0], context, false, false);


@ -38,19 +38,17 @@ static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * ar
else if (n == "name")
name = state.forceStringNoCtx(*attr.value, *attr.pos);
else
throw EvalError(
ErrorInfo {
.hint = hintfmt("unsupported argument '%s' to 'fetchMercurial'", attr.name),
.nixCode = NixCode { .errPos = *attr.pos }
});
throw EvalError({
.hint = hintfmt("unsupported argument '%s' to 'fetchMercurial'", attr.name),
.nixCode = NixCode { .errPos = *attr.pos }
});
}
if (url.empty())
throw EvalError(
ErrorInfo {
.hint = hintfmt("'url' argument required"),
.nixCode = NixCode { .errPos = pos }
});
throw EvalError({
.hint = hintfmt("'url' argument required"),
.nixCode = NixCode { .errPos = pos }
});
} else
url = state.coerceToString(pos, *args[0], context, false, false);


@ -23,7 +23,7 @@ void emitTreeAttrs(
assert(tree.info.narHash);
mkString(*state.allocAttr(v, state.symbols.create("narHash")),
tree.info.narHash.to_string(SRI));
tree.info.narHash.to_string(SRI, true));
if (input->getRev()) {
mkString(*state.allocAttr(v, state.symbols.create("rev")), input->getRev()->gitRev());
@ -66,11 +66,10 @@ static void prim_fetchTree(EvalState & state, const Pos & pos, Value * * args, V
}
if (!attrs.count("type"))
throw Error(
ErrorInfo {
.hint = hintfmt("attribute 'type' is missing in call to 'fetchTree'"),
.nixCode = NixCode { .errPos = pos }
});
throw Error({
.hint = hintfmt("attribute 'type' is missing in call to 'fetchTree'"),
.nixCode = NixCode { .errPos = pos }
});
input = fetchers::inputFromAttrs(attrs);
} else
@ -107,24 +106,21 @@ static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
if (n == "url")
url = state.forceStringNoCtx(*attr.value, *attr.pos);
else if (n == "sha256")
expectedHash = Hash(state.forceStringNoCtx(*attr.value, *attr.pos), htSHA256);
expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, *attr.pos), htSHA256);
else if (n == "name")
name = state.forceStringNoCtx(*attr.value, *attr.pos);
else
throw EvalError(
ErrorInfo {
.hint = hintfmt("unsupported argument '%s' to '%s'",
attr.name, who),
.nixCode = NixCode { .errPos = *attr.pos }
});
throw EvalError({
.hint = hintfmt("unsupported argument '%s' to '%s'", attr.name, who),
.nixCode = NixCode { .errPos = *attr.pos }
});
}
if (!url)
throw EvalError(
ErrorInfo {
.hint = hintfmt("'url' argument required"),
.nixCode = NixCode { .errPos = pos }
});
throw EvalError({
.hint = hintfmt("'url' argument required"),
.nixCode = NixCode { .errPos = pos }
});
} else
url = state.forceStringNoCtx(*args[0], pos);
@ -151,7 +147,7 @@ static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
: hashFile(htSHA256, path);
if (hash != *expectedHash)
throw Error((unsigned int) 102, "hash mismatch in file downloaded from '%s':\n wanted: %s\n got: %s",
*url, expectedHash->to_string(), hash.to_string());
*url, expectedHash->to_string(Base32, true), hash.to_string(Base32, true));
}
if (state.allowedPaths)

View file

@ -81,11 +81,10 @@ static void prim_fromTOML(EvalState & state, const Pos & pos, Value * * args, Va
try {
visit(v, parser(tomlStream).parse());
} catch (std::runtime_error & e) {
throw EvalError(
ErrorInfo {
.hint = hintfmt("while parsing a TOML string: %s", e.what()),
.nixCode = NixCode { .errPos = pos }
});
throw EvalError({
.hint = hintfmt("while parsing a TOML string: %s", e.what()),
.nixCode = NixCode { .errPos = pos }
});
}
}

View file

@ -36,7 +36,7 @@ std::unique_ptr<Input> inputFromAttrs(const Attrs & attrs)
if (res) {
if (auto narHash = maybeGetStrAttr(attrs, "narHash"))
// FIXME: require SRI hash.
res->narHash = Hash(*narHash);
res->narHash = newHashAllowEmpty(*narHash, htUnknown);
return res;
}
}
@ -47,7 +47,7 @@ Attrs Input::toAttrs() const
{
auto attrs = toAttrsInternal();
if (narHash)
attrs.emplace("narHash", narHash->to_string(SRI));
attrs.emplace("narHash", narHash->to_string(SRI, true));
attrs.emplace("type", type());
return attrs;
}
@ -67,7 +67,7 @@ std::pair<Tree, std::shared_ptr<const Input>> Input::fetchTree(ref<Store> store)
if (narHash && narHash != input->narHash)
throw Error("NAR hash mismatch in input '%s' (%s), expected '%s', got '%s'",
to_string(), tree.actualPath, narHash->to_string(SRI), input->narHash->to_string(SRI));
to_string(), tree.actualPath, narHash->to_string(SRI, true), input->narHash->to_string(SRI, true));
return {std::move(tree), input};
}

View file

@ -76,7 +76,7 @@ struct GitHubInput : Input
readFile(
store->toRealPath(
downloadFile(store, url, "source", false).storePath)));
rev = Hash(json["sha"], htSHA1);
rev = Hash(std::string { json["sha"] }, htSHA1);
debug("HEAD revision for '%s' is %s", url, rev->gitRev());
}

View file

@ -8,4 +8,4 @@ libfetchers_SOURCES := $(wildcard $(d)/*.cc)
libfetchers_CXXFLAGS += -I src/libutil -I src/libstore
libfetchers_LIBS = libutil libstore libnixrust
libfetchers_LIBS = libutil libstore

View file

@ -196,9 +196,9 @@ struct TarballInput : Input
// NAR hashes are preferred over file hashes since tar/zip files
// don't have a canonical representation.
if (narHash)
url2.query.insert_or_assign("narHash", narHash->to_string(SRI));
url2.query.insert_or_assign("narHash", narHash->to_string(SRI, true));
else if (hash)
url2.query.insert_or_assign("hash", hash->to_string(SRI));
url2.query.insert_or_assign("hash", hash->to_string(SRI, true));
return url2;
}
@ -207,7 +207,7 @@ struct TarballInput : Input
Attrs attrs;
attrs.emplace("url", url.to_string());
if (hash)
attrs.emplace("hash", hash->to_string(SRI));
attrs.emplace("hash", hash->to_string(SRI, true));
return attrs;
}
@ -264,8 +264,7 @@ struct TarballInputScheme : InputScheme
auto input = std::make_unique<TarballInput>(parseURL(getStrAttr(attrs, "url")));
if (auto hash = maybeGetStrAttr(attrs, "hash"))
// FIXME: require SRI hash.
input->hash = Hash(*hash);
input->hash = newHashAllowEmpty(*hash, htUnknown);
return input;
}

View file

@ -1,5 +1,6 @@
#include "common-args.hh"
#include "globals.hh"
#include "loggers.hh"
namespace nix {
@ -38,6 +39,14 @@ MixCommonArgs::MixCommonArgs(const string & programName)
}},
});
addFlag({
.longName = "log-format",
.description = "format of log output; \"raw\", \"internal-json\", \"bar\" "
"or \"bar-with-logs\"",
.labels = {"format"},
.handler = {[](std::string format) { setLogFormat(format); }},
});
addFlag({
.longName = "max-jobs",
.shortName = 'j',

52
src/libmain/loggers.cc Normal file
View file

@ -0,0 +1,52 @@
#include "loggers.hh"
#include "progress-bar.hh"
namespace nix {
LogFormat defaultLogFormat = LogFormat::raw;
LogFormat parseLogFormat(const std::string & logFormatStr) {
if (logFormatStr == "raw")
return LogFormat::raw;
else if (logFormatStr == "raw-with-logs")
return LogFormat::rawWithLogs;
else if (logFormatStr == "internal-json")
return LogFormat::internalJson;
else if (logFormatStr == "bar")
return LogFormat::bar;
else if (logFormatStr == "bar-with-logs")
return LogFormat::barWithLogs;
throw Error("option 'log-format' has an invalid value '%s'", logFormatStr);
}
Logger * makeDefaultLogger() {
switch (defaultLogFormat) {
case LogFormat::raw:
return makeSimpleLogger(false);
case LogFormat::rawWithLogs:
return makeSimpleLogger(true);
case LogFormat::internalJson:
return makeJSONLogger(*makeSimpleLogger());
case LogFormat::bar:
return makeProgressBar();
case LogFormat::barWithLogs:
return makeProgressBar(true);
default:
abort();
}
}
void setLogFormat(const std::string & logFormatStr) {
setLogFormat(parseLogFormat(logFormatStr));
}
void setLogFormat(const LogFormat & logFormat) {
defaultLogFormat = logFormat;
createDefaultLogger();
}
void createDefaultLogger() {
logger = makeDefaultLogger();
}
}

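For orientation, here is a minimal sketch of how the new module above is meant to be used: a front end passes the value of --log-format to setLogFormat(), which parses it and installs the corresponding global logger. The wrapper function below is invented for illustration only and is not part of this change; it assumes it is compiled inside the Nix tree so that the header shown next is available.

#include <string>
#include "loggers.hh"   // the new header, shown in the next file

// Hypothetical wiring (not in the diff): translate a command-line value into
// a concrete logger. setLogFormat() throws if the value is not one of
// "raw", "raw-with-logs", "internal-json", "bar" or "bar-with-logs".
static void initLoggingFromCli(const std::string & value)
{
    nix::setLogFormat(value);   // parses the string and rebuilds the global logger
}
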
20
src/libmain/loggers.hh Normal file
View file

@ -0,0 +1,20 @@
#pragma once
#include "types.hh"
namespace nix {
enum class LogFormat {
raw,
rawWithLogs,
internalJson,
bar,
barWithLogs,
};
void setLogFormat(const std::string & logFormatStr);
void setLogFormat(const LogFormat & logFormat);
void createDefaultLogger();
}

View file

@ -106,19 +106,20 @@ public:
updateThread.join();
}
void stop()
void stop() override
{
auto state(state_.lock());
if (!state->active) return;
state->active = false;
std::string status = getStatus(*state);
writeToStderr("\r\e[K");
if (status != "")
writeToStderr("[" + status + "]\n");
updateCV.notify_one();
quitCV.notify_one();
}
bool isVerbose() override {
return printBuildLogs;
}
void log(Verbosity lvl, const FormatOrString & fs) override
{
auto state(state_.lock());
@ -152,7 +153,7 @@ public:
{
auto state(state_.lock());
if (lvl <= verbosity && !s.empty())
if (lvl <= verbosity && !s.empty() && type != actBuildWaiting)
log(*state, lvl, s + "...");
state->activities.emplace_back(ActInfo());
@ -164,7 +165,7 @@ public:
state->activitiesByType[type].its.emplace(act, i);
if (type == actBuild) {
auto name = storePathToName(getS(fields, 0));
std::string name(storePathToName(getS(fields, 0)));
if (hasSuffix(name, ".drv"))
name = name.substr(0, name.size() - 4);
i->s = fmt("building " ANSI_BOLD "%s" ANSI_NORMAL, name);
@ -467,11 +468,17 @@ public:
}
};
Logger * makeProgressBar(bool printBuildLogs)
{
return new ProgressBar(
printBuildLogs,
isatty(STDERR_FILENO) && getEnv("TERM").value_or("dumb") != "dumb"
);
}
void startProgressBar(bool printBuildLogs)
{
logger = new ProgressBar(
printBuildLogs,
isatty(STDERR_FILENO) && getEnv("TERM").value_or("dumb") != "dumb");
logger = makeProgressBar(printBuildLogs);
}
void stopProgressBar()

View file
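As a side note on the makeProgressBar() factory added above, the terminal test that decides whether the fancy bar is enabled reads well in isolation. The sketch below restates that condition as a free function with an invented name, assuming only POSIX isatty(); it is an illustration, not code from the tree.

#include <unistd.h>
#include <cstdlib>
#include <string>

// Illustration only: the same condition makeProgressBar() passes to the
// ProgressBar constructor -- stderr must be a terminal and TERM must not be "dumb".
static bool stderrSupportsProgressBar()
{
    const char * term = std::getenv("TERM");
    return isatty(STDERR_FILENO) && term && std::string(term) != "dumb";
}
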

@ -4,6 +4,8 @@
namespace nix {
Logger * makeProgressBar(bool printBuildLogs = false);
void startProgressBar(bool printBuildLogs = false);
void stopProgressBar();

View file

@ -2,6 +2,7 @@
#include "shared.hh"
#include "store-api.hh"
#include "util.hh"
#include "loggers.hh"
#include <algorithm>
#include <cctype>
@ -47,7 +48,10 @@ void printMissing(ref<Store> store, const StorePathSet & willBuild,
unsigned long long downloadSize, unsigned long long narSize, Verbosity lvl)
{
if (!willBuild.empty()) {
printMsg(lvl, "these derivations will be built:");
if (willBuild.size() == 1)
printMsg(lvl, fmt("this derivation will be built:"));
else
printMsg(lvl, fmt("these %d derivations will be built:", willBuild.size()));
auto sorted = store->topoSortPaths(willBuild);
reverse(sorted.begin(), sorted.end());
for (auto & i : sorted)
@ -55,9 +59,18 @@ void printMissing(ref<Store> store, const StorePathSet & willBuild,
}
if (!willSubstitute.empty()) {
printMsg(lvl, fmt("these paths will be fetched (%.2f MiB download, %.2f MiB unpacked):",
downloadSize / (1024.0 * 1024.0),
narSize / (1024.0 * 1024.0)));
const float downloadSizeMiB = downloadSize / (1024.f * 1024.f);
const float narSizeMiB = narSize / (1024.f * 1024.f);
if (willSubstitute.size() == 1) {
printMsg(lvl, fmt("this path will be fetched (%.2f MiB download, %.2f MiB unpacked):",
downloadSizeMiB,
narSizeMiB));
} else {
printMsg(lvl, fmt("these %d paths will be fetched (%.2f MiB download, %.2f MiB unpacked):",
willSubstitute.size(),
downloadSizeMiB,
narSizeMiB));
}
for (auto & i : willSubstitute)
printMsg(lvl, fmt(" %s", store->printStorePath(i)));
}
@ -169,7 +182,7 @@ LegacyArgs::LegacyArgs(const std::string & programName,
.longName = "no-build-output",
.shortName = 'Q',
.description = "do not show build output",
.handler = {&settings.verboseBuild, false},
.handler = {[&]() {setLogFormat(LogFormat::raw); }},
});
addFlag({
@ -289,7 +302,7 @@ int handleExceptions(const string & programName, std::function<void()> fun)
{
ReceiveInterrupts receiveInterrupts; // FIXME: need better place for this
ErrorInfo::programName = programName;
ErrorInfo::programName = baseNameOf(programName);
string error = ANSI_RED "error:" ANSI_NORMAL " ";
try {

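The singular/plural selection and the byte-to-MiB arithmetic added to printMissing() above are easy to check in isolation. The helper below is a standalone restatement with an invented name, not code from the tree; with one path it produces the "this path will be fetched (...)" form, otherwise the path count is interpolated into the plural form.

#include <cstdio>
#include <string>

// Illustration only: same message logic as printMissing() uses for substituted paths.
static std::string fetchMessage(size_t nPaths, unsigned long long downloadSize, unsigned long long narSize)
{
    const double downloadMiB = downloadSize / (1024.0 * 1024.0);   // bytes -> MiB
    const double narMiB = narSize / (1024.0 * 1024.0);
    char buf[160];
    if (nPaths == 1)
        std::snprintf(buf, sizeof buf,
            "this path will be fetched (%.2f MiB download, %.2f MiB unpacked):",
            downloadMiB, narMiB);
    else
        std::snprintf(buf, sizeof buf,
            "these %zu paths will be fetched (%.2f MiB download, %.2f MiB unpacked):",
            nPaths, downloadMiB, narMiB);
    return buf;
}
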
View file

@ -93,7 +93,7 @@ std::shared_ptr<std::string> BinaryCacheStore::getFile(const std::string & path)
std::string BinaryCacheStore::narInfoFileFor(const StorePath & storePath)
{
return storePathToHash(printStorePath(storePath)) + ".narinfo";
return std::string(storePath.hashPart()) + ".narinfo";
}
void BinaryCacheStore::writeNarInfo(ref<NarInfo> narInfo)
@ -102,7 +102,7 @@ void BinaryCacheStore::writeNarInfo(ref<NarInfo> narInfo)
upsertFile(narInfoFile, narInfo->to_string(*this), "text/x-nix-narinfo");
auto hashPart = storePathToHash(printStorePath(narInfo->path));
std::string hashPart(narInfo->path.hashPart());
{
auto state_(state.lock());
@ -164,7 +164,7 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource
}
}
upsertFile(storePathToHash(printStorePath(info.path)) + ".ls", jsonOut.str(), "application/json");
upsertFile(std::string(info.path.to_string()) + ".ls", jsonOut.str(), "application/json");
}
/* Compress the NAR. */
@ -360,7 +360,7 @@ StorePath BinaryCacheStore::addTextToStore(const string & name, const string & s
const StorePathSet & references, RepairFlag repair)
{
ValidPathInfo info(computeStorePathForText(name, s, references));
info.references = cloneStorePathSet(references);
info.references = references;
if (repair || !isValidPath(info.path)) {
StringSink sink;
@ -388,21 +388,19 @@ void BinaryCacheStore::addSignatures(const StorePath & storePath, const StringSe
narInfo->sigs.insert(sigs.begin(), sigs.end());
auto narInfoFile = narInfoFileFor(narInfo->path);
writeNarInfo(narInfo);
}
std::shared_ptr<std::string> BinaryCacheStore::getBuildLog(const StorePath & path)
{
auto drvPath = path.clone();
auto drvPath = path;
if (!path.isDerivation()) {
try {
auto info = queryPathInfo(path);
// FIXME: add a "Log" field to .narinfo
if (!info->deriver) return nullptr;
drvPath = info->deriver->clone();
drvPath = *info->deriver;
} catch (InvalidPath &) {
return nullptr;
}

View file

@ -86,7 +86,7 @@ struct HookInstance;
/* A pointer to a goal. */
class Goal;
struct Goal;
class DerivationGoal;
typedef std::shared_ptr<Goal> GoalPtr;
typedef std::weak_ptr<Goal> WeakGoalPtr;
@ -104,13 +104,10 @@ typedef std::map<StorePath, WeakGoalPtr> WeakGoalMap;
class Goal : public std::enable_shared_from_this<Goal>
struct Goal : public std::enable_shared_from_this<Goal>
{
public:
typedef enum {ecBusy, ecSuccess, ecFailed, ecNoSubstituters, ecIncompleteClosure} ExitCode;
protected:
/* Backlink to the worker. */
Worker & worker;
@ -138,6 +135,9 @@ protected:
/* Whether the goal is finished. */
ExitCode exitCode;
/* Exception containing an error message, if any. */
std::optional<Error> ex;
Goal(Worker & worker) : worker(worker)
{
nrFailed = nrNoSubstituters = nrIncompleteClosure = 0;
@ -149,7 +149,6 @@ protected:
trace("goal destroyed");
}
public:
virtual void work() = 0;
void addWaitee(GoalPtr waitee);
@ -173,21 +172,14 @@ public:
return name;
}
ExitCode getExitCode()
{
return exitCode;
}
/* Callback in case of a timeout. It should wake up its waiters,
get rid of any running child processes that are being monitored
by the worker (important!), etc. */
virtual void timedOut() = 0;
virtual void timedOut(Error && ex) = 0;
virtual string key() = 0;
protected:
virtual void amDone(ExitCode result);
void amDone(ExitCode result, std::optional<Error> ex = {});
};
@ -303,7 +295,7 @@ public:
/* Make a goal (with caching). */
GoalPtr makeDerivationGoal(const StorePath & drvPath, const StringSet & wantedOutputs, BuildMode buildMode = bmNormal);
std::shared_ptr<DerivationGoal> makeBasicDerivationGoal(StorePath && drvPath,
std::shared_ptr<DerivationGoal> makeBasicDerivationGoal(const StorePath & drvPath,
const BasicDerivation & drv, BuildMode buildMode = bmNormal);
GoalPtr makeSubstitutionGoal(const StorePath & storePath, RepairFlag repair = NoRepair);
@ -355,7 +347,7 @@ public:
contents. */
bool pathContentsGood(const StorePath & path);
void markContentsGood(StorePath && path);
void markContentsGood(const StorePath & path);
void updateProgress()
{
@ -392,8 +384,7 @@ void Goal::waiteeDone(GoalPtr waitee, ExitCode result)
assert(waitees.find(waitee) != waitees.end());
waitees.erase(waitee);
trace(format("waitee '%1%' done; %2% left") %
waitee->name % waitees.size());
trace(fmt("waitee '%s' done; %d left", waitee->name, waitees.size()));
if (result == ecFailed || result == ecNoSubstituters || result == ecIncompleteClosure) ++nrFailed;
@ -418,12 +409,20 @@ void Goal::waiteeDone(GoalPtr waitee, ExitCode result)
}
void Goal::amDone(ExitCode result)
void Goal::amDone(ExitCode result, std::optional<Error> ex)
{
trace("done");
assert(exitCode == ecBusy);
assert(result == ecSuccess || result == ecFailed || result == ecNoSubstituters || result == ecIncompleteClosure);
exitCode = result;
if (ex) {
if (!waiters.empty())
logError(ex->info());
else
this->ex = std::move(*ex);
}
for (auto & i : waiters) {
GoalPtr goal = i.lock();
if (goal) goal->waiteeDone(shared_from_this(), result);
@ -869,6 +868,9 @@ private:
std::unique_ptr<Activity> act;
/* Activity that denotes waiting for a lock. */
std::unique_ptr<Activity> actLock;
std::map<ActivityId, Activity> builderActivities;
/* The remote machine on which we're building. */
@ -898,16 +900,16 @@ private:
friend struct RestrictedStore;
public:
DerivationGoal(StorePath && drvPath, const StringSet & wantedOutputs,
DerivationGoal(const StorePath & drvPath, const StringSet & wantedOutputs,
Worker & worker, BuildMode buildMode = bmNormal);
DerivationGoal(StorePath && drvPath, const BasicDerivation & drv,
DerivationGoal(const StorePath & drvPath, const BasicDerivation & drv,
Worker & worker, BuildMode buildMode = bmNormal);
~DerivationGoal();
/* Whether we need to perform hash rewriting if there are valid output paths. */
bool needsHashRewrite();
void timedOut() override;
void timedOut(Error && ex) override;
string key() override
{
@ -922,7 +924,7 @@ public:
StorePath getDrvPath()
{
return drvPath.clone();
return drvPath;
}
/* Add wanted outputs to an already existing derivation goal. */
@ -1006,14 +1008,11 @@ private:
void repairClosure();
void amDone(ExitCode result) override
{
Goal::amDone(result);
}
void started();
void done(BuildResult::Status status, const string & msg = "");
void done(
BuildResult::Status status,
std::optional<Error> ex = {});
StorePathSet exportReferences(const StorePathSet & storePaths);
};
@ -1022,11 +1021,11 @@ private:
const Path DerivationGoal::homeDir = "/homeless-shelter";
DerivationGoal::DerivationGoal(StorePath && drvPath, const StringSet & wantedOutputs,
DerivationGoal::DerivationGoal(const StorePath & drvPath, const StringSet & wantedOutputs,
Worker & worker, BuildMode buildMode)
: Goal(worker)
, useDerivation(true)
, drvPath(std::move(drvPath))
, drvPath(drvPath)
, wantedOutputs(wantedOutputs)
, buildMode(buildMode)
{
@ -1039,11 +1038,11 @@ DerivationGoal::DerivationGoal(StorePath && drvPath, const StringSet & wantedOut
}
DerivationGoal::DerivationGoal(StorePath && drvPath, const BasicDerivation & drv,
DerivationGoal::DerivationGoal(const StorePath & drvPath, const BasicDerivation & drv,
Worker & worker, BuildMode buildMode)
: Goal(worker)
, useDerivation(false)
, drvPath(std::move(drvPath))
, drvPath(drvPath)
, buildMode(buildMode)
{
this->drv = std::make_unique<BasicDerivation>(BasicDerivation(drv));
@ -1107,10 +1106,10 @@ void DerivationGoal::killChild()
}
void DerivationGoal::timedOut()
void DerivationGoal::timedOut(Error && ex)
{
killChild();
done(BuildResult::TimedOut);
done(BuildResult::TimedOut, ex);
}
@ -1158,11 +1157,7 @@ void DerivationGoal::loadDerivation()
trace("loading derivation");
if (nrFailed != 0) {
logError({
.name = "missing derivation during build",
.hint = hintfmt("cannot build missing derivation '%s'", worker.store.printStorePath(drvPath))
});
done(BuildResult::MiscFailure);
done(BuildResult::MiscFailure, Error("cannot build missing derivation '%s'", worker.store.printStorePath(drvPath)));
return;
}
@ -1198,7 +1193,13 @@ void DerivationGoal::haveDerivation()
return;
}
parsedDrv = std::make_unique<ParsedDerivation>(drvPath.clone(), *drv);
parsedDrv = std::make_unique<ParsedDerivation>(drvPath, *drv);
if (parsedDrv->contentAddressed()) {
settings.requireExperimentalFeature("ca-derivations");
throw Error("ca-derivations isn't implemented yet");
}
/* We are first going to try to create the invalid output paths
through substitutes. If that doesn't work, we'll build
@ -1306,7 +1307,7 @@ void DerivationGoal::repairClosure()
if (i.isDerivation()) {
Derivation drv = worker.store.derivationFromPath(i);
for (auto & j : drv.outputs)
outputsToDrv.insert_or_assign(j.second.path.clone(), i.clone());
outputsToDrv.insert_or_assign(j.second.path, i);
}
/* Check each path (slow!). */
@ -1351,13 +1352,9 @@ void DerivationGoal::inputsRealised()
if (nrFailed != 0) {
if (!useDerivation)
throw Error("some dependencies of '%s' are missing", worker.store.printStorePath(drvPath));
logError({
.name = "Dependencies could not be built",
.hint = hintfmt(
"cannot build derivation '%s': %s dependencies couldn't be built",
worker.store.printStorePath(drvPath), nrFailed)
});
done(BuildResult::DependencyFailed);
done(BuildResult::DependencyFailed, Error(
"%s dependencies of derivation '%s' failed to build",
nrFailed, worker.store.printStorePath(drvPath)));
return;
}
@ -1439,10 +1436,15 @@ void DerivationGoal::tryToBuild()
lockFiles.insert(worker.store.Store::toRealPath(outPath));
if (!outputLocks.lockPaths(lockFiles, "", false)) {
if (!actLock)
actLock = std::make_unique<Activity>(*logger, lvlWarn, actBuildWaiting,
fmt("waiting for lock on %s", yellowtxt(showPaths(lockFiles))));
worker.waitForAWhile(shared_from_this());
return;
}
actLock.reset();
/* Now check again whether the outputs are valid. This is because
another process may have started building in parallel. After
it has finished and released the locks, we can (and should)
@ -1458,7 +1460,7 @@ void DerivationGoal::tryToBuild()
return;
}
missingPaths = cloneStorePathSet(drv->outputPaths());
missingPaths = drv->outputPaths();
if (buildMode != bmCheck)
for (auto & i : validPaths) missingPaths.erase(i);
@ -1481,6 +1483,7 @@ void DerivationGoal::tryToBuild()
case rpAccept:
/* Yes, it has started doing so. Wait until we get
EOF from the hook. */
actLock.reset();
result.startTime = time(0); // inexact
state = &DerivationGoal::buildDone;
started();
@ -1488,6 +1491,9 @@ void DerivationGoal::tryToBuild()
case rpPostpone:
/* Not now; wait until at least one child finishes or
the wake-up timeout expires. */
if (!actLock)
actLock = std::make_unique<Activity>(*logger, lvlWarn, actBuildWaiting,
fmt("waiting for a machine to build '%s'", yellowtxt(worker.store.printStorePath(drvPath))));
worker.waitForAWhile(shared_from_this());
outputLocks.unlock();
return;
@ -1497,6 +1503,8 @@ void DerivationGoal::tryToBuild()
}
}
actLock.reset();
/* Make sure that we are allowed to start a build. If this
derivation prefers to be done locally, do it even if
maxBuildJobs is 0. */
@ -1524,7 +1532,9 @@ void DerivationGoal::tryLocalBuild() {
uid. */
buildUser->kill();
} else {
debug("waiting for build users");
if (!actLock)
actLock = std::make_unique<Activity>(*logger, lvlWarn, actBuildWaiting,
fmt("waiting for UID to build '%s'", yellowtxt(worker.store.printStorePath(drvPath))));
worker.waitForAWhile(shared_from_this());
return;
}
@ -1535,17 +1545,18 @@ void DerivationGoal::tryLocalBuild() {
#endif
}
actLock.reset();
try {
/* Okay, we have to build. */
startBuilder();
} catch (BuildError & e) {
logError(e.info());
outputLocks.unlock();
buildUser.reset();
worker.permanentFailure = true;
done(BuildResult::InputRejected, e.msg());
done(BuildResult::InputRejected, e);
return;
}
@ -1657,10 +1668,10 @@ void DerivationGoal::buildDone()
}
auto msg = fmt("builder for '%s' %s",
worker.store.printStorePath(drvPath),
yellowtxt(worker.store.printStorePath(drvPath)),
statusToString(status));
if (!settings.verboseBuild && !logTail.empty()) {
if (!logger->isVerbose() && !logTail.empty()) {
msg += (format("; last %d log lines:") % logTail.size()).str();
for (auto & line : logTail)
msg += "\n " + line;
@ -1709,11 +1720,7 @@ void DerivationGoal::buildDone()
}
void flushLine() {
if (settings.verboseBuild) {
printError("post-build-hook: " + currentLine);
} else {
act.result(resPostBuildLogLine, currentLine);
}
act.result(resPostBuildLogLine, currentLine);
currentLine.clear();
}
@ -1762,8 +1769,6 @@ void DerivationGoal::buildDone()
outputLocks.unlock();
} catch (BuildError & e) {
logError(e.info());
outputLocks.unlock();
BuildResult::Status st = BuildResult::MiscFailure;
@ -1782,7 +1787,7 @@ void DerivationGoal::buildDone()
BuildResult::PermanentFailure;
}
done(st, e.msg());
done(st, e);
return;
}
@ -1902,14 +1907,14 @@ StorePathSet DerivationGoal::exportReferences(const StorePathSet & storePaths)
if (!inputPaths.count(storePath))
throw BuildError("cannot export references of path '%s' because it is not in the input closure of the derivation", worker.store.printStorePath(storePath));
worker.store.computeFSClosure(singleton(storePath), paths);
worker.store.computeFSClosure({storePath}, paths);
}
/* If there are derivations in the graph, then include their
outputs as well. This is useful if you want to do things
like passing all build-time dependencies of some path to a
derivation that builds a NixOS DVD image. */
auto paths2 = cloneStorePathSet(paths);
auto paths2 = paths;
for (auto & j : paths2) {
if (j.isDerivation()) {
@ -2038,7 +2043,7 @@ void DerivationGoal::startBuilder()
/* Write closure info to <fileName>. */
writeFile(tmpDir + "/" + fileName,
worker.store.makeValidityRegistration(
exportReferences(singleton(storePath)), false, false));
exportReferences({storePath}), false, false));
}
}
@ -2223,7 +2228,7 @@ void DerivationGoal::startBuilder()
for (auto & i : missingPaths)
if (worker.store.isValidPath(i) && pathExists(worker.store.printStorePath(i))) {
addHashRewrite(i);
redirectedBadOutputs.insert(i.clone());
redirectedBadOutputs.insert(i);
}
}
@ -2718,8 +2723,8 @@ struct RestrictedStore : public LocalFSStore
StorePathSet queryAllValidPaths() override
{
StorePathSet paths;
for (auto & p : goal.inputPaths) paths.insert(p.clone());
for (auto & p : goal.addedPaths) paths.insert(p.clone());
for (auto & p : goal.inputPaths) paths.insert(p);
for (auto & p : goal.addedPaths) paths.insert(p);
return paths;
}
@ -2748,9 +2753,6 @@ struct RestrictedStore : public LocalFSStore
StorePathSet queryDerivationOutputs(const StorePath & path) override
{ throw Error("queryDerivationOutputs"); }
StringSet queryDerivationOutputNames(const StorePath & path) override
{ throw Error("queryDerivationOutputNames"); }
std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override
{ throw Error("queryPathFromHashPart"); }
@ -2810,7 +2812,7 @@ struct RestrictedStore : public LocalFSStore
auto drv = derivationFromPath(path.path);
for (auto & output : drv.outputs)
if (wantOutput(output.first, path.outputs))
newPaths.insert(output.second.path.clone());
newPaths.insert(output.second.path);
} else if (!goal.isAllowed(path.path))
throw InvalidPath("cannot build unknown path '%s' in recursive Nix", printStorePath(path.path));
}
@ -2855,7 +2857,7 @@ struct RestrictedStore : public LocalFSStore
if (goal.isAllowed(path.path))
allowed.emplace_back(path);
else
unknown.insert(path.path.clone());
unknown.insert(path.path);
}
next->queryMissing(allowed, willBuild, willSubstitute,
@ -2950,7 +2952,7 @@ void DerivationGoal::addDependency(const StorePath & path)
{
if (isAllowed(path)) return;
addedPaths.insert(path.clone());
addedPaths.insert(path);
/* If we're doing a sandbox build, then we have to make the path
appear in the sandbox. */
@ -3572,7 +3574,7 @@ StorePathSet parseReferenceSpecifiers(Store & store, const BasicDerivation & drv
if (store.isStorePath(i))
result.insert(store.parseStorePath(i));
else if (drv.outputs.count(i))
result.insert(drv.outputs.find(i)->second.path.clone());
result.insert(drv.outputs.find(i)->second.path);
else throw BuildError("derivation contains an illegal reference specifier '%s'", i);
}
return result;
@ -3630,9 +3632,9 @@ void DerivationGoal::registerOutputs()
output paths, and any paths that have been built via recursive
Nix calls. */
StorePathSet referenceablePaths;
for (auto & p : inputPaths) referenceablePaths.insert(p.clone());
for (auto & i : drv->outputs) referenceablePaths.insert(i.second.path.clone());
for (auto & p : addedPaths) referenceablePaths.insert(p.clone());
for (auto & p : inputPaths) referenceablePaths.insert(p);
for (auto & i : drv->outputs) referenceablePaths.insert(i.second.path);
for (auto & p : addedPaths) referenceablePaths.insert(p);
/* Check whether the output paths were created, and grep each
output path to determine what other paths it references. Also make all
@ -3743,7 +3745,7 @@ void DerivationGoal::registerOutputs()
worker.hashMismatch = true;
delayedException = std::make_exception_ptr(
BuildError("hash mismatch in fixed-output derivation '%s':\n wanted: %s\n got: %s",
worker.store.printStorePath(dest), h.to_string(SRI), h2.to_string(SRI)));
worker.store.printStorePath(dest), h.to_string(SRI, true), h2.to_string(SRI, true)));
Path actualDest = worker.store.Store::toRealPath(dest);
@ -3831,7 +3833,7 @@ void DerivationGoal::registerOutputs()
info.narHash = hash.first;
info.narSize = hash.second;
info.references = std::move(references);
info.deriver = drvPath.clone();
info.deriver = drvPath;
info.ultimate = true;
info.ca = ca;
worker.store.signPathInfo(info);
@ -3875,7 +3877,6 @@ void DerivationGoal::registerOutputs()
.hint = hint
});
curRound = nrRounds; // we know enough, bail out early
}
}
@ -3947,23 +3948,23 @@ void DerivationGoal::checkOutputs(const std::map<Path, ValidPathInfo> & outputs)
uint64_t closureSize = 0;
StorePathSet pathsDone;
std::queue<StorePath> pathsLeft;
pathsLeft.push(path.clone());
pathsLeft.push(path);
while (!pathsLeft.empty()) {
auto path = pathsLeft.front().clone();
auto path = pathsLeft.front();
pathsLeft.pop();
if (!pathsDone.insert(path.clone()).second) continue;
if (!pathsDone.insert(path).second) continue;
auto i = outputsByPath.find(worker.store.printStorePath(path));
if (i != outputsByPath.end()) {
closureSize += i->second.narSize;
for (auto & ref : i->second.references)
pathsLeft.push(ref.clone());
pathsLeft.push(ref);
} else {
auto info = worker.store.queryPathInfo(path);
closureSize += info->narSize;
for (auto & ref : info->references)
pathsLeft.push(ref.clone());
pathsLeft.push(ref);
}
}
@ -3990,8 +3991,8 @@ void DerivationGoal::checkOutputs(const std::map<Path, ValidPathInfo> & outputs)
auto spec = parseReferenceSpecifiers(worker.store, *drv, *value);
auto used = recursive
? cloneStorePathSet(getClosure(info.path).first)
: cloneStorePathSet(info.references);
? getClosure(info.path).first
: info.references;
if (recursive && checks.ignoreSelfRefs)
used.erase(info.path);
@ -4001,10 +4002,10 @@ void DerivationGoal::checkOutputs(const std::map<Path, ValidPathInfo> & outputs)
for (auto & i : used)
if (allowed) {
if (!spec.count(i))
badPaths.insert(i.clone());
badPaths.insert(i);
} else {
if (spec.count(i))
badPaths.insert(i.clone());
badPaths.insert(i);
}
if (!badPaths.empty()) {
@ -4139,14 +4140,11 @@ void DerivationGoal::handleChildOutput(int fd, const string & data)
{
logSize += data.size();
if (settings.maxLogSize && logSize > settings.maxLogSize) {
logError({
.name = "Max log size exceeded",
.hint = hintfmt(
"%1% killed after writing more than %2% bytes of log output",
getName(), settings.maxLogSize)
});
killChild();
done(BuildResult::LogLimitExceeded);
done(
BuildResult::LogLimitExceeded,
Error("%s killed after writing more than %d bytes of log output",
getName(), settings.maxLogSize));
return;
}
@ -4188,13 +4186,8 @@ void DerivationGoal::flushLine()
;
else {
if (settings.verboseBuild &&
(settings.printRepeatedBuilds || curRound == 1))
printError(currentLogLine);
else {
logTail.push_back(currentLogLine);
if (logTail.size() > settings.logLines) logTail.pop_front();
}
logTail.push_back(currentLogLine);
if (logTail.size() > settings.logLines) logTail.pop_front();
act->result(resBuildLogLine, currentLogLine);
}
@ -4212,7 +4205,7 @@ StorePathSet DerivationGoal::checkPathValidity(bool returnValid, bool checkHash)
bool good =
worker.store.isValidPath(i.second.path) &&
(!checkHash || worker.pathContentsGood(i.second.path));
if (good == returnValid) result.insert(i.second.path.clone());
if (good == returnValid) result.insert(i.second.path);
}
return result;
}
@ -4228,15 +4221,16 @@ void DerivationGoal::addHashRewrite(const StorePath & path)
deletePath(worker.store.printStorePath(p));
inputRewrites[h1] = h2;
outputRewrites[h2] = h1;
redirectedOutputs.insert_or_assign(path.clone(), std::move(p));
redirectedOutputs.insert_or_assign(path, std::move(p));
}
void DerivationGoal::done(BuildResult::Status status, const string & msg)
void DerivationGoal::done(BuildResult::Status status, std::optional<Error> ex)
{
result.status = status;
result.errorMsg = msg;
amDone(result.success() ? ecSuccess : ecFailed);
if (ex)
result.errorMsg = ex->what();
amDone(result.success() ? ecSuccess : ecFailed, ex);
if (result.status == BuildResult::TimedOut)
worker.timedOut = true;
if (result.status == BuildResult::PermanentFailure)
@ -4302,10 +4296,10 @@ private:
GoalState state;
public:
SubstitutionGoal(StorePath && storePath, Worker & worker, RepairFlag repair = NoRepair);
SubstitutionGoal(const StorePath & storePath, Worker & worker, RepairFlag repair = NoRepair);
~SubstitutionGoal();
void timedOut() override { abort(); };
void timedOut(Error && ex) override { abort(); };
string key() override
{
@ -4328,18 +4322,13 @@ public:
void handleChildOutput(int fd, const string & data) override;
void handleEOF(int fd) override;
StorePath getStorePath() { return storePath.clone(); }
void amDone(ExitCode result) override
{
Goal::amDone(result);
}
StorePath getStorePath() { return storePath; }
};
SubstitutionGoal::SubstitutionGoal(StorePath && storePath, Worker & worker, RepairFlag repair)
SubstitutionGoal::SubstitutionGoal(const StorePath & storePath, Worker & worker, RepairFlag repair)
: Goal(worker)
, storePath(std::move(storePath))
, storePath(storePath)
, repair(repair)
{
state = &SubstitutionGoal::init;
@ -4573,7 +4562,7 @@ void SubstitutionGoal::finished()
return;
}
worker.markContentsGood(storePath.clone());
worker.markContentsGood(storePath);
printMsg(lvlChatty, "substitution of path '%s' succeeded", worker.store.printStorePath(storePath));
@ -4643,10 +4632,10 @@ Worker::~Worker()
GoalPtr Worker::makeDerivationGoal(const StorePath & path,
const StringSet & wantedOutputs, BuildMode buildMode)
{
GoalPtr goal = derivationGoals[path.clone()].lock(); // FIXME
GoalPtr goal = derivationGoals[path].lock(); // FIXME
if (!goal) {
goal = std::make_shared<DerivationGoal>(path.clone(), wantedOutputs, *this, buildMode);
derivationGoals.insert_or_assign(path.clone(), goal);
goal = std::make_shared<DerivationGoal>(path, wantedOutputs, *this, buildMode);
derivationGoals.insert_or_assign(path, goal);
wakeUp(goal);
} else
(dynamic_cast<DerivationGoal *>(goal.get()))->addWantedOutputs(wantedOutputs);
@ -4654,10 +4643,10 @@ GoalPtr Worker::makeDerivationGoal(const StorePath & path,
}
std::shared_ptr<DerivationGoal> Worker::makeBasicDerivationGoal(StorePath && drvPath,
std::shared_ptr<DerivationGoal> Worker::makeBasicDerivationGoal(const StorePath & drvPath,
const BasicDerivation & drv, BuildMode buildMode)
{
auto goal = std::make_shared<DerivationGoal>(std::move(drvPath), drv, *this, buildMode);
auto goal = std::make_shared<DerivationGoal>(drvPath, drv, *this, buildMode);
wakeUp(goal);
return goal;
}
@ -4665,10 +4654,10 @@ std::shared_ptr<DerivationGoal> Worker::makeBasicDerivationGoal(StorePath && drv
GoalPtr Worker::makeSubstitutionGoal(const StorePath & path, RepairFlag repair)
{
GoalPtr goal = substitutionGoals[path.clone()].lock(); // FIXME
GoalPtr goal = substitutionGoals[path].lock(); // FIXME
if (!goal) {
goal = std::make_shared<SubstitutionGoal>(path.clone(), *this, repair);
substitutionGoals.insert_or_assign(path.clone(), goal);
goal = std::make_shared<SubstitutionGoal>(path, *this, repair);
substitutionGoals.insert_or_assign(path, goal);
wakeUp(goal);
}
return goal;
@ -4697,7 +4686,7 @@ void Worker::removeGoal(GoalPtr goal)
topGoals.erase(goal);
/* If a top-level goal failed, then kill all other goals
(unless keepGoing was set). */
if (goal->getExitCode() == Goal::ecFailed && !settings.keepGoing)
if (goal->exitCode == Goal::ecFailed && !settings.keepGoing)
topGoals.clear();
}
@ -4875,8 +4864,6 @@ void Worker::waitForInput()
up after a few seconds at most. */
if (!waitingForAWhile.empty()) {
useTimeout = true;
if (lastWokenUp == steady_time_point::min())
printInfo("waiting for locks, build slots or build users...");
if (lastWokenUp == steady_time_point::min() || lastWokenUp > before) lastWokenUp = before;
timeout = std::max(1L,
(long) std::chrono::duration_cast<std::chrono::seconds>(
@ -4941,32 +4928,24 @@ void Worker::waitForInput()
}
}
if (goal->getExitCode() == Goal::ecBusy &&
if (goal->exitCode == Goal::ecBusy &&
0 != settings.maxSilentTime &&
j->respectTimeouts &&
after - j->lastOutput >= std::chrono::seconds(settings.maxSilentTime))
{
logError({
.name = "Silent build timeout",
.hint = hintfmt(
goal->timedOut(Error(
"%1% timed out after %2% seconds of silence",
goal->getName(), settings.maxSilentTime)
});
goal->timedOut();
goal->getName(), settings.maxSilentTime));
}
else if (goal->getExitCode() == Goal::ecBusy &&
else if (goal->exitCode == Goal::ecBusy &&
0 != settings.buildTimeout &&
j->respectTimeouts &&
after - j->timeStarted >= std::chrono::seconds(settings.buildTimeout))
{
logError({
.name = "Build timeout",
.hint = hintfmt(
goal->timedOut(Error(
"%1% timed out after %2% seconds",
goal->getName(), settings.buildTimeout)
});
goal->timedOut();
goal->getName(), settings.buildTimeout));
}
}
@ -5023,7 +5002,7 @@ bool Worker::pathContentsGood(const StorePath & path)
Hash nullHash(htSHA256);
res = info->narHash == nullHash || info->narHash == current.first;
}
pathContentsGoodCache.insert_or_assign(path.clone(), res);
pathContentsGoodCache.insert_or_assign(path, res);
if (!res)
logError({
.name = "Corrupted path",
@ -5033,9 +5012,9 @@ bool Worker::pathContentsGood(const StorePath & path)
}
void Worker::markContentsGood(StorePath && path)
void Worker::markContentsGood(const StorePath & path)
{
pathContentsGoodCache.insert_or_assign(std::move(path), true);
pathContentsGoodCache.insert_or_assign(path, true);
}
@ -5072,23 +5051,35 @@ void LocalStore::buildPaths(const std::vector<StorePathWithOutputs> & drvPaths,
worker.run(goals);
StorePathSet failed;
std::optional<Error> ex;
for (auto & i : goals) {
if (i->getExitCode() != Goal::ecSuccess) {
if (i->ex) {
if (ex)
logError(i->ex->info());
else
ex = i->ex;
}
if (i->exitCode != Goal::ecSuccess) {
DerivationGoal * i2 = dynamic_cast<DerivationGoal *>(i.get());
if (i2) failed.insert(i2->getDrvPath());
else failed.insert(dynamic_cast<SubstitutionGoal *>(i.get())->getStorePath());
}
}
if (!failed.empty())
if (failed.size() == 1 && ex) {
ex->status = worker.exitStatus();
throw *ex;
} else if (!failed.empty()) {
if (ex) logError(ex->info());
throw Error(worker.exitStatus(), "build of %s failed", showPaths(failed));
}
}
BuildResult LocalStore::buildDerivation(const StorePath & drvPath, const BasicDerivation & drv,
BuildMode buildMode)
{
Worker worker(*this);
auto goal = worker.makeBasicDerivationGoal(drvPath.clone(), drv, buildMode);
auto goal = worker.makeBasicDerivationGoal(drvPath, drv, buildMode);
BuildResult result;
@ -5109,7 +5100,7 @@ void LocalStore::ensurePath(const StorePath & path)
/* If the path is already valid, we're done. */
if (isValidPath(path)) return;
primeCache(*this, {StorePathWithOutputs(path)});
primeCache(*this, {{path}});
Worker worker(*this);
GoalPtr goal = worker.makeSubstitutionGoal(path);
@ -5117,8 +5108,13 @@ void LocalStore::ensurePath(const StorePath & path)
worker.run(goals);
if (goal->getExitCode() != Goal::ecSuccess)
throw Error(worker.exitStatus(), "path '%s' does not exist and cannot be created", printStorePath(path));
if (goal->exitCode != Goal::ecSuccess) {
if (goal->ex) {
goal->ex->status = worker.exitStatus();
throw *goal->ex;
} else
throw Error(worker.exitStatus(), "path '%s' does not exist and cannot be created", printStorePath(path));
}
}
@ -5130,7 +5126,7 @@ void LocalStore::repairPath(const StorePath & path)
worker.run(goals);
if (goal->getExitCode() != Goal::ecSuccess) {
if (goal->exitCode != Goal::ecSuccess) {
/* Since substituting the path didn't work, if we have a valid
deriver, then rebuild the deriver. */
auto info = queryPathInfo(path);

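The build.cc changes above thread the builder's Error object through Goal::amDone() so that LocalStore::buildPaths() can rethrow the precise error when exactly one goal failed, instead of the generic "build of ... failed" message. The fragment below is a self-contained sketch of that "keep the first error, log the rest" pattern; the types and names are invented for illustration and simplified (std::runtime_error stands in for nix's Error).

#include <optional>
#include <vector>
#include <stdexcept>
#include <string>
#include <iostream>

struct FakeGoal { std::optional<std::runtime_error> ex; bool failed = false; };

static void runAll(std::vector<FakeGoal> & goals)
{
    std::optional<std::runtime_error> first;
    size_t nFailed = 0;
    for (auto & g : goals) {
        if (!g.failed) continue;
        ++nFailed;
        if (g.ex) {
            if (first) std::cerr << g.ex->what() << "\n"; // log the extra errors
            else first = g.ex;                            // keep the first one
        }
    }
    if (nFailed == 1 && first) throw *first;              // precise error for a single failure
    if (nFailed > 0) {
        if (first) std::cerr << first->what() << "\n";    // still surface the kept error
        throw std::runtime_error(std::to_string(nFailed) + " goals failed");
    }
}
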
View file

@ -22,10 +22,9 @@ static void createLinks(State & state, const Path & srcDir, const Path & dstDir,
srcFiles = readDirectory(srcDir);
} catch (SysError & e) {
if (e.errNo == ENOTDIR) {
logWarning(
ErrorInfo {
.name = "Create links - directory",
.hint = hintfmt("not including '%s' in the user environment because it's not a directory", srcDir)
logWarning({
.name = "Create links - directory",
.hint = hintfmt("not including '%s' in the user environment because it's not a directory", srcDir)
});
return;
}
@ -45,10 +44,9 @@ static void createLinks(State & state, const Path & srcDir, const Path & dstDir,
throw SysError("getting status of '%1%'", srcFile);
} catch (SysError & e) {
if (e.errNo == ENOENT || e.errNo == ENOTDIR) {
logWarning(
ErrorInfo {
.name = "Create links - skipping symlink",
.hint = hintfmt("skipping dangling symlink '%s'", dstFile)
logWarning({
.name = "Create links - skipping symlink",
.hint = hintfmt("skipping dangling symlink '%s'", dstFile)
});
continue;
}

View file

@ -293,7 +293,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
auto path = store->parseStorePath(readString(from));
logger->startWork();
StorePathSet paths; // FIXME
paths.insert(path.clone());
paths.insert(path);
auto res = store->querySubstitutablePaths(paths);
logger->stopWork();
to << (res.count(path) != 0);
@ -327,7 +327,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
StorePathSet paths;
if (op == wopQueryReferences)
for (auto & i : store->queryPathInfo(path)->references)
paths.insert(i.clone());
paths.insert(i);
else if (op == wopQueryReferrers)
store->queryReferrers(path, paths);
else if (op == wopQueryValidDerivers)
@ -341,8 +341,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
case wopQueryDerivationOutputNames: {
auto path = store->parseStorePath(readString(from));
logger->startWork();
StringSet names;
names = store->queryDerivationOutputNames(path);
auto names = store->readDerivation(path).outputNames();
logger->stopWork();
to << names;
break;
@ -370,8 +369,10 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
std::string s, baseName;
FileIngestionMethod method;
{
bool fixed, recursive;
bool fixed; uint8_t recursive;
from >> baseName >> fixed /* obsolete */ >> recursive >> s;
if (recursive > (uint8_t) FileIngestionMethod::Recursive)
throw Error("unsupported FileIngestionMethod with value of %i; you may need to upgrade nix-daemon", recursive);
method = FileIngestionMethod { recursive };
/* Compatibility hack. */
if (!fixed) {
@ -592,9 +593,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
auto path = store->parseStorePath(readString(from));
logger->startWork();
SubstitutablePathInfos infos;
StorePathSet paths;
paths.insert(path.clone()); // FIXME
store->querySubstitutablePathInfos(paths, infos);
store->querySubstitutablePathInfos({path}, infos);
logger->stopWork();
auto i = infos.find(path);
if (i == infos.end())

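The daemon hunk above now reads the recursion flag from the wire as an integer and rejects values outside the known enum range before constructing a FileIngestionMethod. The sketch below restates that check in isolation; the enum definition here is a simplified stand-in (the underlying type in the Nix sources may differ), and parseIngestionMethod is an invented name.

#include <cstdint>
#include <stdexcept>
#include <string>

// Stand-in for nix's FileIngestionMethod; anything beyond Recursive is rejected.
enum struct FileIngestionMethod : uint8_t { Flat = 0, Recursive = 1 };

static FileIngestionMethod parseIngestionMethod(uint8_t raw)
{
    if (raw > static_cast<uint8_t>(FileIngestionMethod::Recursive))
        throw std::runtime_error(
            "unsupported FileIngestionMethod with value of " + std::to_string(raw)
            + "; you may need to upgrade nix-daemon");
    return FileIngestionMethod { raw };
}
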
View file

@ -27,28 +27,6 @@ void DerivationOutput::parseHashInfo(FileIngestionMethod & recursive, Hash & has
}
BasicDerivation::BasicDerivation(const BasicDerivation & other)
: platform(other.platform)
, builder(other.builder)
, args(other.args)
, env(other.env)
{
for (auto & i : other.outputs)
outputs.insert_or_assign(i.first,
DerivationOutput(i.second.path.clone(), std::string(i.second.hashAlgo), std::string(i.second.hash)));
for (auto & i : other.inputSrcs)
inputSrcs.insert(i.clone());
}
Derivation::Derivation(const Derivation & other)
: BasicDerivation(other)
{
for (auto & i : other.inputDrvs)
inputDrvs.insert_or_assign(i.first.clone(), i.second);
}
const StorePath & BasicDerivation::findOutput(const string & id) const
{
auto i = outputs.find(id);
@ -67,9 +45,9 @@ bool BasicDerivation::isBuiltin() const
StorePath writeDerivation(ref<Store> store,
const Derivation & drv, std::string_view name, RepairFlag repair)
{
auto references = cloneStorePathSet(drv.inputSrcs);
auto references = drv.inputSrcs;
for (auto & i : drv.inputDrvs)
references.insert(i.first.clone());
references.insert(i.first);
/* Note that the outputs of a derivation are *not* references
(that can be missing (of course) and should not necessarily be
held during a garbage collection). */
@ -155,7 +133,11 @@ static Derivation parseDerivation(const Store & store, const string & s)
expect(str, ","); auto hashAlgo = parseString(str);
expect(str, ","); auto hash = parseString(str);
expect(str, ")");
drv.outputs.emplace(id, DerivationOutput(std::move(path), std::move(hashAlgo), std::move(hash)));
drv.outputs.emplace(id, DerivationOutput {
.path = std::move(path),
.hashAlgo = std::move(hashAlgo),
.hash = std::move(hash)
});
}
/* Parse the list of input derivations. */
@ -204,6 +186,12 @@ Derivation readDerivation(const Store & store, const Path & drvPath)
Derivation Store::derivationFromPath(const StorePath & drvPath)
{
ensurePath(drvPath);
return readDerivation(drvPath);
}
Derivation Store::readDerivation(const StorePath & drvPath)
{
auto accessor = getFSAccessor();
try {
return parseDerivation(*this, accessor->readFile(printStorePath(drvPath)));
@ -377,8 +365,8 @@ Hash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOutput
auto h = drvHashes.find(i.first);
if (h == drvHashes.end()) {
assert(store.isValidPath(i.first));
h = drvHashes.insert_or_assign(i.first.clone(), hashDerivationModulo(store,
readDerivation(store, store.toRealPath(i.first)), false)).first;
h = drvHashes.insert_or_assign(i.first, hashDerivationModulo(store,
store.readDerivation(i.first), false)).first;
}
inputs2.insert_or_assign(h->second.to_string(Base16, false), i.second);
}
@ -405,11 +393,20 @@ StorePathSet BasicDerivation::outputPaths() const
{
StorePathSet paths;
for (auto & i : outputs)
paths.insert(i.second.path.clone());
paths.insert(i.second.path);
return paths;
}
StringSet BasicDerivation::outputNames() const
{
StringSet names;
for (auto & i : outputs)
names.insert(i.first);
return names;
}
Source & readDerivation(Source & in, const Store & store, BasicDerivation & drv)
{
drv.outputs.clear();
@ -419,7 +416,11 @@ Source & readDerivation(Source & in, const Store & store, BasicDerivation & drv)
auto path = store.parseStorePath(readString(in));
auto hashAlgo = readString(in);
auto hash = readString(in);
drv.outputs.emplace(name, DerivationOutput(std::move(path), std::move(hashAlgo), std::move(hash)));
drv.outputs.emplace(name, DerivationOutput {
.path = std::move(path),
.hashAlgo = std::move(hashAlgo),
.hash = std::move(hash)
});
}
drv.inputSrcs = readStorePaths<StorePathSet>(store, in);

View file

@ -17,11 +17,6 @@ struct DerivationOutput
StorePath path;
std::string hashAlgo; /* hash used for expected hash computation */
std::string hash; /* expected hash, may be null */
DerivationOutput(StorePath && path, std::string && hashAlgo, std::string && hash)
: path(std::move(path))
, hashAlgo(std::move(hashAlgo))
, hash(std::move(hash))
{ }
void parseHashInfo(FileIngestionMethod & recursive, Hash & hash) const;
};
@ -43,7 +38,6 @@ struct BasicDerivation
StringPairs env;
BasicDerivation() { }
explicit BasicDerivation(const BasicDerivation & other);
virtual ~BasicDerivation() { };
/* Return the path corresponding to the output identifier `id' in
@ -58,6 +52,8 @@ struct BasicDerivation
/* Return the output paths of a derivation. */
StorePathSet outputPaths() const;
/* Return the output names of a derivation. */
StringSet outputNames() const;
};
struct Derivation : BasicDerivation
@ -69,8 +65,6 @@ struct Derivation : BasicDerivation
std::map<std::string, StringSet> * actualInputs = nullptr) const;
Derivation() { }
Derivation(Derivation && other) = default;
explicit Derivation(const Derivation & other);
};

View file

@ -57,7 +57,7 @@ void Store::exportPath(const StorePath & path, Sink & sink)
Hash hash = hashAndWriteSink.currentHash();
if (hash != info->narHash && info->narHash != Hash(info->narHash.type))
throw Error("hash of path '%s' has changed from '%s' to '%s'!",
printStorePath(path), info->narHash.to_string(), hash.to_string());
printStorePath(path), info->narHash.to_string(Base32, true), hash.to_string(Base32, true));
hashAndWriteSink
<< exportMagic
@ -105,7 +105,7 @@ StorePaths Store::importPaths(Source & source, std::shared_ptr<FSAccessor> acces
auto source = StringSource { *tee.source.data };
addToStore(info, source, NoRepair, checkSigs, accessor);
res.push_back(info.path.clone());
res.push_back(info.path);
}
return res;

View file

@ -72,6 +72,18 @@ struct curlFileTransfer : public FileTransfer
curl_off_t writtenToSink = 0;
inline static const std::set<long> successfulStatuses {200, 201, 204, 206, 304, 0 /* other protocol */};
/* Get the HTTP status code, or 0 for other protocols. */
long getHTTPStatus()
{
long httpStatus = 0;
long protocol = 0;
curl_easy_getinfo(req, CURLINFO_PROTOCOL, &protocol);
if (protocol == CURLPROTO_HTTP || protocol == CURLPROTO_HTTPS)
curl_easy_getinfo(req, CURLINFO_RESPONSE_CODE, &httpStatus);
return httpStatus;
}
TransferItem(curlFileTransfer & fileTransfer,
const FileTransferRequest & request,
Callback<FileTransferResult> && callback)
@ -83,12 +95,11 @@ struct curlFileTransfer : public FileTransfer
, callback(std::move(callback))
, finalSink([this](const unsigned char * data, size_t len) {
if (this->request.dataCallback) {
long httpStatus = 0;
curl_easy_getinfo(req, CURLINFO_RESPONSE_CODE, &httpStatus);
auto httpStatus = getHTTPStatus();
/* Only write data to the sink if this is a
successful response. */
if (httpStatus == 0 || httpStatus == 200 || httpStatus == 201 || httpStatus == 206) {
if (successfulStatuses.count(httpStatus)) {
writtenToSink += len;
this->request.dataCallback((char *) data, len);
}
@ -316,8 +327,7 @@ struct curlFileTransfer : public FileTransfer
void finish(CURLcode code)
{
long httpStatus = 0;
curl_easy_getinfo(req, CURLINFO_RESPONSE_CODE, &httpStatus);
auto httpStatus = getHTTPStatus();
char * effectiveUriCStr;
curl_easy_getinfo(req, CURLINFO_EFFECTIVE_URL, &effectiveUriCStr);
@ -343,8 +353,7 @@ struct curlFileTransfer : public FileTransfer
if (writeException)
failEx(writeException);
else if (code == CURLE_OK &&
(httpStatus == 200 || httpStatus == 201 || httpStatus == 204 || httpStatus == 206 || httpStatus == 304 || httpStatus == 226 /* FTP */ || httpStatus == 0 /* other protocol */))
else if (code == CURLE_OK && successfulStatuses.count(httpStatus))
{
result.cached = httpStatus == 304;
act.progress(result.bodySize, result.bodySize);

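The filetransfer.cc refactor above replaces two hand-written chains of status-code comparisons with a single shared set plus a getHTTPStatus() helper. Below is the same idea reduced to a few lines; isSuccessfulStatus is an invented helper name, and the values are copied from the set introduced in the diff.

#include <set>

static const std::set<long> successfulStatuses {200, 201, 204, 206, 304, 0 /* other protocol */};

static bool isSuccessfulStatus(long httpStatus)
{
    return successfulStatuses.count(httpStatus) > 0;   // membership test instead of chained ==
}
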
View file

@ -128,13 +128,12 @@ Path LocalFSStore::addPermRoot(const StorePath & storePath,
gcroots directory. */
if (settings.checkRootReachability) {
auto roots = findRoots(false);
if (roots[storePath.clone()].count(gcRoot) == 0)
logWarning(
ErrorInfo {
.name = "GC root",
.hint = hintfmt("warning: '%1%' is not in a directory where the garbage collector looks for roots; "
"therefore, '%2%' might be removed by the garbage collector",
gcRoot, printStorePath(storePath))
if (roots[storePath].count(gcRoot) == 0)
logWarning({
.name = "GC root",
.hint = hintfmt("warning: '%1%' is not in a directory where the garbage collector looks for roots; "
"therefore, '%2%' might be removed by the garbage collector",
gcRoot, printStorePath(storePath))
});
}
@ -479,9 +478,9 @@ void LocalStore::findRuntimeRoots(Roots & roots, bool censor)
if (!isValidPath(path)) continue;
debug("got additional root '%1%'", pathS);
if (censor)
roots[path.clone()].insert(censored);
roots[path].insert(censored);
else
roots[path.clone()].insert(links.begin(), links.end());
roots[path].insert(links.begin(), links.end());
}
}
@ -593,11 +592,11 @@ bool LocalStore::canReachRoot(GCState & state, StorePathSet & visited, const Sto
if (state.roots.count(path)) {
debug("cannot delete '%1%' because it's a root", printStorePath(path));
state.alive.insert(path.clone());
state.alive.insert(path);
return true;
}
visited.insert(path.clone());
visited.insert(path);
if (!isValidPath(path)) return false;
@ -611,7 +610,7 @@ bool LocalStore::canReachRoot(GCState & state, StorePathSet & visited, const Sto
if (state.gcKeepDerivations && path.isDerivation()) {
for (auto & i : queryDerivationOutputs(path))
if (isValidPath(i) && queryPathInfo(i)->deriver == path)
incoming.insert(i.clone());
incoming.insert(i);
}
/* If keep-outputs is set, then don't delete this path if there
@ -619,13 +618,13 @@ bool LocalStore::canReachRoot(GCState & state, StorePathSet & visited, const Sto
if (state.gcKeepOutputs) {
auto derivers = queryValidDerivers(path);
for (auto & i : derivers)
incoming.insert(i.clone());
incoming.insert(i);
}
for (auto & i : incoming)
if (i != path)
if (canReachRoot(state, visited, i)) {
state.alive.insert(path.clone());
state.alive.insert(path);
return true;
}
@ -669,7 +668,7 @@ void LocalStore::tryToDelete(GCState & state, const Path & path)
nix-store --delete doesn't have the unexpected effect of
recursing into derivations and outputs. */
for (auto & i : visited)
state.dead.insert(i.clone());
state.dead.insert(i);
if (state.shouldDelete)
deletePathRecursive(state, path);
}
@ -755,7 +754,7 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results)
if (!options.ignoreLiveness)
findRootsNoTemp(rootMap, true);
for (auto & i : rootMap) state.roots.insert(i.first.clone());
for (auto & i : rootMap) state.roots.insert(i.first);
/* Read the temporary roots. This acquires read locks on all
per-process temporary root files. So after this point no paths
@ -764,8 +763,8 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results)
Roots tempRoots;
findTempRoots(fds, tempRoots, true);
for (auto & root : tempRoots) {
state.tempRoots.insert(root.first.clone());
state.roots.insert(root.first.clone());
state.tempRoots.insert(root.first);
state.roots.insert(root.first);
}
/* After this point the set of roots or temporary roots cannot

View file

@ -271,7 +271,7 @@ public:
"listed in 'trusted-public-keys'."};
Setting<StringSet> extraPlatforms{this,
std::string{SYSTEM} == "x86_64-linux" ? StringSet{"i686-linux"} : StringSet{},
std::string{SYSTEM} == "x86_64-linux" && !isWSL1() ? StringSet{"i686-linux"} : StringSet{},
"extra-platforms",
"Additional platforms that can be built on the local system. "
"These may be supported natively (e.g. armv7 on some aarch64 CPUs "

View file

@ -256,7 +256,7 @@ struct LegacySSHStore : public Store
conn->to.flush();
for (auto & i : readStorePaths<StorePathSet>(*this, conn->from))
out.insert(i.clone());
out.insert(i);
}
StorePathSet queryValidPaths(const StorePathSet & paths,

View file

@ -90,13 +90,13 @@ const string LocalFSStore::drvsLogDir = "drvs";
std::shared_ptr<std::string> LocalFSStore::getBuildLog(const StorePath & path_)
{
auto path = path_.clone();
auto path = path_;
if (!path.isDerivation()) {
try {
auto info = queryPathInfo(path);
if (!info->deriver) return nullptr;
path = info->deriver->clone();
path = *info->deriver;
} catch (InvalidPath &) {
return nullptr;
}

View file

@ -584,7 +584,7 @@ uint64_t LocalStore::addValidPath(State & state,
state.stmtRegisterValidPath.use()
(printStorePath(info.path))
(info.narHash.to_string(Base16))
(info.narHash.to_string(Base16, true))
(info.registrationTime == 0 ? time(0) : info.registrationTime)
(info.deriver ? printStorePath(*info.deriver) : "", (bool) info.deriver)
(info.narSize, info.narSize != 0)
@ -599,7 +599,7 @@ uint64_t LocalStore::addValidPath(State & state,
efficiently query whether a path is an output of some
derivation. */
if (info.path.isDerivation()) {
auto drv = readDerivation(*this, realStoreDir + "/" + std::string(info.path.to_string()));
auto drv = readDerivation(info.path);
/* Verify that the output paths in the derivation are correct
(i.e., follow the scheme for computing output paths from
@ -619,7 +619,7 @@ uint64_t LocalStore::addValidPath(State & state,
{
auto state_(Store::state.lock());
state_->pathInfoCache.upsert(storePathToHash(printStorePath(info.path)),
state_->pathInfoCache.upsert(std::string(info.path.hashPart()),
PathInfoCacheValue{ .value = std::make_shared<const ValidPathInfo>(info) });
}
@ -631,7 +631,7 @@ void LocalStore::queryPathInfoUncached(const StorePath & path,
Callback<std::shared_ptr<const ValidPathInfo>> callback) noexcept
{
try {
auto info = std::make_shared<ValidPathInfo>(path.clone());
auto info = std::make_shared<ValidPathInfo>(path);
callback(retrySQLite<std::shared_ptr<ValidPathInfo>>([&]() {
auto state(_state.lock());
@ -684,7 +684,7 @@ void LocalStore::updatePathInfo(State & state, const ValidPathInfo & info)
{
state.stmtUpdatePathInfo.use()
(info.narSize, info.narSize != 0)
(info.narHash.to_string(Base16))
(info.narHash.to_string(Base16, true))
(info.ultimate ? 1 : 0, info.ultimate)
(concatStringsSep(" ", info.sigs), !info.sigs.empty())
(info.ca, !info.ca.empty())
@ -721,7 +721,7 @@ StorePathSet LocalStore::queryValidPaths(const StorePathSet & paths, SubstituteF
{
StorePathSet res;
for (auto & i : paths)
if (isValidPath(i)) res.insert(i.clone());
if (isValidPath(i)) res.insert(i);
return res;
}
@ -789,26 +789,9 @@ StorePathSet LocalStore::queryDerivationOutputs(const StorePath & path)
}
StringSet LocalStore::queryDerivationOutputNames(const StorePath & path)
{
return retrySQLite<StringSet>([&]() {
auto state(_state.lock());
auto useQueryDerivationOutputs(state->stmtQueryDerivationOutputs.use()
(queryValidPathId(*state, path)));
StringSet outputNames;
while (useQueryDerivationOutputs.next())
outputNames.insert(useQueryDerivationOutputs.getStr(0));
return outputNames;
});
}
std::optional<StorePath> LocalStore::queryPathFromHashPart(const std::string & hashPart)
{
if (hashPart.size() != storePathHashLen) throw Error("invalid hash part");
if (hashPart.size() != StorePath::HashLen) throw Error("invalid hash part");
Path prefix = storeDir + "/" + hashPart;
@ -833,7 +816,7 @@ StorePathSet LocalStore::querySubstitutablePaths(const StorePathSet & paths)
StorePathSet remaining;
for (auto & i : paths)
remaining.insert(i.clone());
remaining.insert(i);
StorePathSet res;
@ -847,9 +830,9 @@ StorePathSet LocalStore::querySubstitutablePaths(const StorePathSet & paths)
StorePathSet remaining2;
for (auto & path : remaining)
if (valid.count(path))
res.insert(path.clone());
res.insert(path);
else
remaining2.insert(path.clone());
remaining2.insert(path);
std::swap(remaining, remaining2);
}
@ -871,9 +854,9 @@ void LocalStore::querySubstitutablePathInfos(const StorePathSet & paths,
auto info = sub->queryPathInfo(path);
auto narInfo = std::dynamic_pointer_cast<const NarInfo>(
std::shared_ptr<const ValidPathInfo>(info));
infos.insert_or_assign(path.clone(), SubstitutablePathInfo{
info->deriver ? info->deriver->clone() : std::optional<StorePath>(),
cloneStorePathSet(info->references),
infos.insert_or_assign(path, SubstitutablePathInfo{
info->deriver,
info->references,
narInfo ? narInfo->fileSize : 0,
info->narSize});
} catch (InvalidPath &) {
@ -917,7 +900,7 @@ void LocalStore::registerValidPaths(const ValidPathInfos & infos)
updatePathInfo(*state, i);
else
addValidPath(*state, i, false);
paths.insert(i.path.clone());
paths.insert(i.path);
}
for (auto & i : infos) {
@ -932,8 +915,7 @@ void LocalStore::registerValidPaths(const ValidPathInfos & infos)
for (auto & i : infos)
if (i.path.isDerivation()) {
// FIXME: inefficient; we already loaded the derivation in addValidPath().
checkDerivationOutputs(i.path,
readDerivation(*this, realStoreDir + "/" + std::string(i.path.to_string())));
checkDerivationOutputs(i.path, readDerivation(i.path));
}
/* Do a topological sort of the paths. This will throw an
@ -960,7 +942,7 @@ void LocalStore::invalidatePath(State & state, const StorePath & path)
{
auto state_(Store::state.lock());
state_->pathInfoCache.erase(storePathToHash(printStorePath(path)));
state_->pathInfoCache.erase(std::string(path.hashPart()));
}
}
@ -1012,7 +994,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
if (info.ca == "" || !info.references.count(info.path))
hashSink = std::make_unique<HashSink>(htSHA256);
else
hashSink = std::make_unique<HashModuloSink>(htSHA256, storePathToHash(printStorePath(info.path)));
hashSink = std::make_unique<HashModuloSink>(htSHA256, std::string(info.path.hashPart()));
LambdaSource wrapperSource([&](unsigned char * data, size_t len) -> size_t {
size_t n = source.read(data, len);
@ -1026,7 +1008,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
if (hashResult.first != info.narHash)
throw Error("hash mismatch importing path '%s';\n wanted: %s\n got: %s",
printStorePath(info.path), info.narHash.to_string(), hashResult.first.to_string());
printStorePath(info.path), info.narHash.to_string(Base32, true), hashResult.first.to_string(Base32, true));
if (hashResult.second != info.narSize)
throw Error("size mismatch importing path '%s';\n wanted: %s\n got: %s",
@ -1092,7 +1074,7 @@ StorePath LocalStore::addToStoreFromDump(const string & dump, const string & nam
optimisePath(realPath); // FIXME: combine with hashPath()
ValidPathInfo info(dstPath.clone());
ValidPathInfo info(dstPath);
info.narHash = hash.first;
info.narSize = hash.second;
info.ca = makeFixedOutputCA(method, h);
@ -1155,11 +1137,11 @@ StorePath LocalStore::addTextToStore(const string & name, const string & s,
optimisePath(realPath);
ValidPathInfo info(dstPath.clone());
ValidPathInfo info(dstPath);
info.narHash = narHash;
info.narSize = sink.s->size();
info.references = cloneStorePathSet(references);
info.ca = "text:" + hash.to_string();
info.references = references;
info.ca = "text:" + hash.to_string(Base32, true);
registerValidPath(info);
}
@ -1273,7 +1255,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
if (info->ca == "" || !info->references.count(info->path))
hashSink = std::make_unique<HashSink>(info->narHash.type);
else
hashSink = std::make_unique<HashModuloSink>(info->narHash.type, storePathToHash(printStorePath(info->path)));
hashSink = std::make_unique<HashModuloSink>(info->narHash.type, std::string(info->path.hashPart()));
dumpPath(Store::toRealPath(i), *hashSink);
auto current = hashSink->finish();
@ -1282,7 +1264,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
logError({
.name = "Invalid hash - path modified",
.hint = hintfmt("path '%s' was modified! expected hash '%s', got '%s'",
printStorePath(i), info->narHash.to_string(), current.first.to_string())
printStorePath(i), info->narHash.to_string(Base32, true), current.first.to_string(Base32, true))
});
if (repair) repairPath(i); else errors = true;
} else {

View file

@ -135,8 +135,6 @@ public:
StorePathSet queryDerivationOutputs(const StorePath & path) override;
StringSet queryDerivationOutputNames(const StorePath & path) override;
std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override;
StorePathSet querySubstitutablePaths(const StorePathSet & paths) override;

View file

@ -6,7 +6,7 @@ libstore_DIR := $(d)
libstore_SOURCES := $(wildcard $(d)/*.cc $(d)/builtins/*.cc)
libstore_LIBS = libutil libnixrust
libstore_LIBS = libutil
libstore_LDFLAGS = $(SQLITE3_LIBS) -lbz2 $(LIBCURL_LIBS) $(SODIUM_LIBS) -pthread
ifneq ($(OS), FreeBSD)

View file

@ -103,7 +103,7 @@ void Store::computeFSClosure(const StorePath & startPath,
StorePathSet & paths_, bool flipDirection, bool includeOutputs, bool includeDerivers)
{
StorePathSet paths;
paths.insert(startPath.clone());
paths.insert(startPath);
computeFSClosure(paths, paths_, flipDirection, includeOutputs, includeDerivers);
}
@ -141,11 +141,11 @@ void Store::queryMissing(const std::vector<StorePathWithOutputs> & targets,
auto mustBuildDrv = [&](const StorePath & drvPath, const Derivation & drv) {
{
auto state(state_.lock());
state->willBuild.insert(drvPath.clone());
state->willBuild.insert(drvPath);
}
for (auto & i : drv.inputDrvs)
pool.enqueue(std::bind(doPath, StorePathWithOutputs(i.first, i.second)));
pool.enqueue(std::bind(doPath, StorePathWithOutputs { i.first, i.second }));
};
auto checkOutput = [&](
@ -157,9 +157,7 @@ void Store::queryMissing(const std::vector<StorePathWithOutputs> & targets,
auto outPath = parseStorePath(outPathS);
SubstitutablePathInfos infos;
StorePathSet paths; // FIXME
paths.insert(outPath.clone());
querySubstitutablePathInfos(paths, infos);
querySubstitutablePathInfos({outPath}, infos);
if (infos.empty()) {
drvState_->lock()->done = true;
@ -170,10 +168,10 @@ void Store::queryMissing(const std::vector<StorePathWithOutputs> & targets,
if (drvState->done) return;
assert(drvState->left);
drvState->left--;
drvState->outPaths.insert(outPath.clone());
drvState->outPaths.insert(outPath);
if (!drvState->left) {
for (auto & path : drvState->outPaths)
pool.enqueue(std::bind(doPath, StorePathWithOutputs(path.clone())));
pool.enqueue(std::bind(doPath, StorePathWithOutputs { path } ));
}
}
}
@ -190,12 +188,12 @@ void Store::queryMissing(const std::vector<StorePathWithOutputs> & targets,
if (!isValidPath(path.path)) {
// FIXME: we could try to substitute the derivation.
auto state(state_.lock());
state->unknown.insert(path.path.clone());
state->unknown.insert(path.path);
return;
}
auto drv = make_ref<Derivation>(derivationFromPath(path.path));
ParsedDerivation parsedDrv(path.path.clone(), *drv);
ParsedDerivation parsedDrv(StorePath(path.path), *drv);
PathSet invalid;
for (auto & j : drv->outputs)
@ -216,13 +214,11 @@ void Store::queryMissing(const std::vector<StorePathWithOutputs> & targets,
if (isValidPath(path.path)) return;
SubstitutablePathInfos infos;
StorePathSet paths; // FIXME
paths.insert(path.path.clone());
querySubstitutablePathInfos(paths, infos);
querySubstitutablePathInfos({path.path}, infos);
if (infos.empty()) {
auto state(state_.lock());
state->unknown.insert(path.path.clone());
state->unknown.insert(path.path);
return;
}
@ -231,13 +227,13 @@ void Store::queryMissing(const std::vector<StorePathWithOutputs> & targets,
{
auto state(state_.lock());
state->willSubstitute.insert(path.path.clone());
state->willSubstitute.insert(path.path);
state->downloadSize += info->second.downloadSize;
state->narSize += info->second.narSize;
}
for (auto & ref : info->second.references)
pool.enqueue(std::bind(doPath, StorePathWithOutputs(ref)));
pool.enqueue(std::bind(doPath, StorePathWithOutputs { ref }));
}
};
@ -260,12 +256,12 @@ StorePaths Store::topoSortPaths(const StorePathSet & paths)
throw BuildError("cycle detected in the references of '%s' from '%s'",
printStorePath(path), printStorePath(*parent));
if (!visited.insert(path.clone()).second) return;
parents.insert(path.clone());
if (!visited.insert(path).second) return;
parents.insert(path);
StorePathSet references;
try {
references = cloneStorePathSet(queryPathInfo(path)->references);
references = queryPathInfo(path)->references;
} catch (InvalidPath &) {
}
@ -275,7 +271,7 @@ StorePaths Store::topoSortPaths(const StorePathSet & paths)
if (i != path && paths.count(i))
dfsVisit(i, &path);
sorted.push_back(path.clone());
sorted.push_back(path);
parents.erase(path);
};
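
topoSortPaths() orders store paths with a depth-first search, pushing a path only after its references and using a 'parents' set to detect reference cycles. A standalone sketch of the same pattern over plain strings (hypothetical types, not the Store API):

#include <functional>
#include <map>
#include <set>
#include <stdexcept>
#include <string>
#include <vector>

// Topological sort in the style of Store::topoSortPaths: visit the
// references of a node first, push the node afterwards, and use a
// 'parents' set to detect reference cycles.
std::vector<std::string> topoSort(
    const std::map<std::string, std::set<std::string>> & refs)
{
    std::vector<std::string> sorted;
    std::set<std::string> visited, parents;

    std::function<void(const std::string &)> dfsVisit = [&](const std::string & node) {
        if (parents.count(node))
            throw std::runtime_error("cycle detected at '" + node + "'");
        if (!visited.insert(node).second) return;
        parents.insert(node);
        auto i = refs.find(node);
        if (i != refs.end())
            for (auto & ref : i->second)
                if (ref != node && refs.count(ref))
                    dfsVisit(ref);
        sorted.push_back(node); // all of node's references are already in 'sorted'
        parents.erase(node);
    };

    for (auto & e : refs)
        dfsVisit(e.first);
    return sorted;
}
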

View file

@ -189,7 +189,7 @@ public:
return {oInvalid, 0};
auto namePart = queryNAR.getStr(1);
auto narInfo = make_ref<NarInfo>(StorePath::fromBaseName(hashPart + "-" + namePart));
auto narInfo = make_ref<NarInfo>(StorePath(hashPart + "-" + namePart));
narInfo->url = queryNAR.getStr(2);
narInfo->compression = queryNAR.getStr(3);
if (!queryNAR.isNull(4))
@ -198,9 +198,9 @@ public:
narInfo->narHash = Hash(queryNAR.getStr(6));
narInfo->narSize = queryNAR.getInt(7);
for (auto & r : tokenizeString<Strings>(queryNAR.getStr(8), " "))
narInfo->references.insert(StorePath::fromBaseName(r));
narInfo->references.insert(StorePath(r));
if (!queryNAR.isNull(9))
narInfo->deriver = StorePath::fromBaseName(queryNAR.getStr(9));
narInfo->deriver = StorePath(queryNAR.getStr(9));
for (auto & sig : tokenizeString<Strings>(queryNAR.getStr(10), " "))
narInfo->sigs.insert(sig);
narInfo->ca = queryNAR.getStr(11);
@ -230,9 +230,9 @@ public:
(std::string(info->path.name()))
(narInfo ? narInfo->url : "", narInfo != 0)
(narInfo ? narInfo->compression : "", narInfo != 0)
(narInfo && narInfo->fileHash ? narInfo->fileHash.to_string() : "", narInfo && narInfo->fileHash)
(narInfo && narInfo->fileHash ? narInfo->fileHash.to_string(Base32, true) : "", narInfo && narInfo->fileHash)
(narInfo ? narInfo->fileSize : 0, narInfo != 0 && narInfo->fileSize)
(info->narHash.to_string())
(info->narHash.to_string(Base32, true))
(info->narSize)
(concatStringsSep(" ", info->shortRefs()))
(info->deriver ? std::string(info->deriver->to_string()) : "", (bool) info->deriver)

View file

@ -4,7 +4,7 @@
namespace nix {
NarInfo::NarInfo(const Store & store, const std::string & s, const std::string & whence)
: ValidPathInfo(StorePath::dummy.clone()) // FIXME: hack
: ValidPathInfo(StorePath(StorePath::dummy)) // FIXME: hack
{
auto corrupt = [&]() {
throw Error("NAR info file '%1%' is corrupt", whence);
@ -56,11 +56,11 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &
auto refs = tokenizeString<Strings>(value, " ");
if (!references.empty()) corrupt();
for (auto & r : refs)
references.insert(StorePath::fromBaseName(r));
references.insert(StorePath(r));
}
else if (name == "Deriver") {
if (value != "unknown-deriver")
deriver = StorePath::fromBaseName(value);
deriver = StorePath(value);
}
else if (name == "System")
system = value;
@ -87,10 +87,10 @@ std::string NarInfo::to_string(const Store & store) const
assert(compression != "");
res += "Compression: " + compression + "\n";
assert(fileHash.type == htSHA256);
res += "FileHash: " + fileHash.to_string(Base32) + "\n";
res += "FileHash: " + fileHash.to_string(Base32, true) + "\n";
res += "FileSize: " + std::to_string(fileSize) + "\n";
assert(narHash.type == htSHA256);
res += "NarHash: " + narHash.to_string(Base32) + "\n";
res += "NarHash: " + narHash.to_string(Base32, true) + "\n";
res += "NarSize: " + std::to_string(narSize) + "\n";
res += "References: " + concatStringsSep(" ", shortRefs()) + "\n";

View file

@ -153,7 +153,7 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
contents of the symlink (i.e. the result of readlink()), not
the contents of the target (which may not even exist). */
Hash hash = hashPath(htSHA256, path).first;
debug(format("'%1%' has hash '%2%'") % path % hash.to_string());
debug(format("'%1%' has hash '%2%'") % path % hash.to_string(Base32, true));
/* Check if this is a known hash. */
Path linkPath = linksDir + "/" + hash.to_string(Base32, false);

View file

@ -4,8 +4,8 @@
namespace nix {
ParsedDerivation::ParsedDerivation(StorePath && drvPath, BasicDerivation & drv)
: drvPath(std::move(drvPath)), drv(drv)
ParsedDerivation::ParsedDerivation(const StorePath & drvPath, BasicDerivation & drv)
: drvPath(drvPath), drv(drv)
{
/* Parse the __json attribute, if any. */
auto jsonAttr = drv.env.find("__json");
@ -117,4 +117,9 @@ bool ParsedDerivation::substitutesAllowed() const
return getBoolAttr("allowSubstitutes", true);
}
bool ParsedDerivation::contentAddressed() const
{
return getBoolAttr("__contentAddressed", false);
}
}

View file

@ -12,7 +12,7 @@ class ParsedDerivation
public:
ParsedDerivation(StorePath && drvPath, BasicDerivation & drv);
ParsedDerivation(const StorePath & drvPath, BasicDerivation & drv);
~ParsedDerivation();
@ -34,6 +34,8 @@ public:
bool willBuildLocally() const;
bool substitutesAllowed() const;
bool contentAddressed() const;
};
}

View file

@ -2,38 +2,38 @@
namespace nix {
extern "C" {
rust::Result<StorePath> ffi_StorePath_new(rust::StringSlice path, rust::StringSlice storeDir);
rust::Result<StorePath> ffi_StorePath_new2(unsigned char hash[20], rust::StringSlice storeDir);
rust::Result<StorePath> ffi_StorePath_fromBaseName(rust::StringSlice baseName);
rust::String ffi_StorePath_to_string(const StorePath & _this);
StorePath ffi_StorePath_clone(const StorePath & _this);
rust::StringSlice ffi_StorePath_name(const StorePath & _this);
MakeError(BadStorePath, Error);
static void checkName(std::string_view path, std::string_view name)
{
if (name.empty())
throw BadStorePath("store path '%s' has an empty name", path);
if (name.size() > 211)
throw BadStorePath("store path '%s' has a name longer than 211 characters", path);
for (auto c : name)
if (!((c >= '0' && c <= '9')
|| (c >= 'a' && c <= 'z')
|| (c >= 'A' && c <= 'Z')
|| c == '+' || c == '-' || c == '.' || c == '_' || c == '?' || c == '='))
throw BadStorePath("store path '%s' contains illegal character '%s'", path, c);
}
StorePath StorePath::make(std::string_view path, std::string_view storeDir)
StorePath::StorePath(std::string_view _baseName)
: baseName(_baseName)
{
return ffi_StorePath_new((rust::StringSlice) path, (rust::StringSlice) storeDir).unwrap();
if (baseName.size() < HashLen + 1)
throw BadStorePath("'%s' is too short to be a valid store path", baseName);
for (auto c : hashPart())
if (c == 'e' || c == 'o' || c == 'u' || c == 't'
|| !((c >= '0' && c <= '9') || (c >= 'a' && c <= 'z')))
throw BadStorePath("store path '%s' contains illegal base-32 character '%s'", baseName, c);
checkName(baseName, name());
}
StorePath StorePath::make(unsigned char hash[20], std::string_view name)
StorePath::StorePath(const Hash & hash, std::string_view _name)
: baseName((hash.to_string(Base32, false) + "-").append(std::string(_name)))
{
return ffi_StorePath_new2(hash, (rust::StringSlice) name).unwrap();
}
StorePath StorePath::fromBaseName(std::string_view baseName)
{
return ffi_StorePath_fromBaseName((rust::StringSlice) baseName).unwrap();
}
rust::String StorePath::to_string() const
{
return ffi_StorePath_to_string(*this);
}
StorePath StorePath::clone() const
{
return ffi_StorePath_clone(*this);
checkName(baseName, name());
}
bool StorePath::isDerivation() const
@ -41,18 +41,14 @@ bool StorePath::isDerivation() const
return hasSuffix(name(), drvExtension);
}
std::string_view StorePath::name() const
{
return ffi_StorePath_name(*this);
}
StorePath StorePath::dummy(
StorePath::make(
(unsigned char *) "xxxxxxxxxxxxxxxxxxxx", "x"));
StorePath StorePath::dummy("ffffffffffffffffffffffffffffffff-x");
StorePath Store::parseStorePath(std::string_view path) const
{
return StorePath::make(path, storeDir);
auto p = canonPath(std::string(path));
if (dirOf(p) != storeDir)
throw BadStorePath("path '%s' is not in the Nix store", p);
return StorePath(baseNameOf(p));
}
std::optional<StorePath> Store::maybeParseStorePath(std::string_view path) const
@ -78,9 +74,7 @@ StorePathSet Store::parseStorePathSet(const PathSet & paths) const
std::string Store::printStorePath(const StorePath & path) const
{
auto s = storeDir + "/";
s += (std::string_view) path.to_string();
return s;
return (storeDir + "/").append(path.to_string());
}
PathSet Store::printStorePathSet(const StorePathSet & paths) const
@ -90,29 +84,6 @@ PathSet Store::printStorePathSet(const StorePathSet & paths) const
return res;
}
StorePathSet cloneStorePathSet(const StorePathSet & paths)
{
StorePathSet res;
for (auto & p : paths)
res.insert(p.clone());
return res;
}
StorePathSet storePathsToSet(const StorePaths & paths)
{
StorePathSet res;
for (auto & p : paths)
res.insert(p.clone());
return res;
}
StorePathSet singleton(const StorePath & path)
{
StorePathSet res;
res.insert(path.clone());
return res;
}
std::pair<std::string_view, StringSet> parsePathWithOutputs(std::string_view s)
{
size_t n = s.find("!");
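
The StorePath constructor now does the validation that used to live on the Rust side: the base name must start with a 32-character base-32 hash part (the alphabet excludes 'e', 'o', 'u' and 't'), followed by a name of at most 211 characters from a restricted alphabet. A rough standalone sketch of those checks (hypothetical helper, condensed from the constructor above):

#include <cstddef>
#include <stdexcept>
#include <string>

// Rough standalone version of the checks in the new StorePath constructor.
void checkStorePathBaseName(const std::string & baseName)
{
    constexpr size_t hashLen = 32; // i.e. 160 bits

    if (baseName.size() < hashLen + 1)
        throw std::invalid_argument("'" + baseName + "' is too short to be a valid store path");

    // Hash part: lowercase base-32, with 'e', 'o', 'u', 't' excluded.
    for (size_t i = 0; i < hashLen; ++i) {
        char c = baseName[i];
        if (c == 'e' || c == 'o' || c == 'u' || c == 't'
            || !((c >= '0' && c <= '9') || (c >= 'a' && c <= 'z')))
            throw std::invalid_argument("illegal base-32 character in hash part");
    }

    // Name part: non-empty, at most 211 characters, limited alphabet.
    std::string name = baseName.substr(hashLen + 1);
    if (name.empty())
        throw std::invalid_argument("store path has an empty name");
    if (name.size() > 211)
        throw std::invalid_argument("store path name is longer than 211 characters");
    for (char c : name)
        if (!((c >= '0' && c <= '9') || (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z')
                || c == '+' || c == '-' || c == '.' || c == '_' || c == '?' || c == '='))
            throw std::invalid_argument("illegal character in store path name");
}
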

View file

@ -1,59 +1,59 @@
#pragma once
#include "rust-ffi.hh"
#include "types.hh"
namespace nix {
/* See path.rs. */
struct StorePath;
class Store;
struct Hash;
extern "C" {
void ffi_StorePath_drop(void *);
bool ffi_StorePath_less_than(const StorePath & a, const StorePath & b);
bool ffi_StorePath_eq(const StorePath & a, const StorePath & b);
unsigned char * ffi_StorePath_hash_data(const StorePath & p);
}
struct StorePath : rust::Value<3 * sizeof(void *) + 24, ffi_StorePath_drop>
class StorePath
{
std::string baseName;
public:
/* Size of the hash part of store paths, in base-32 characters. */
constexpr static size_t HashLen = 32; // i.e. 160 bits
StorePath() = delete;
static StorePath make(std::string_view path, std::string_view storeDir);
StorePath(std::string_view baseName);
static StorePath make(unsigned char hash[20], std::string_view name);
StorePath(const Hash & hash, std::string_view name);
static StorePath fromBaseName(std::string_view baseName);
rust::String to_string() const;
std::string_view to_string() const
{
return baseName;
}
bool operator < (const StorePath & other) const
{
return ffi_StorePath_less_than(*this, other);
return baseName < other.baseName;
}
bool operator == (const StorePath & other) const
{
return ffi_StorePath_eq(*this, other);
return baseName == other.baseName;
}
bool operator != (const StorePath & other) const
{
return !(*this == other);
return baseName != other.baseName;
}
StorePath clone() const;
/* Check whether a file name ends with the extension for
derivations. */
bool isDerivation() const;
std::string_view name() const;
unsigned char * hashData() const
std::string_view name() const
{
return ffi_StorePath_hash_data(*this);
return std::string_view(baseName).substr(HashLen + 1);
}
std::string_view hashPart() const
{
return std::string_view(baseName).substr(0, HashLen);
}
static StorePath dummy;
@ -62,14 +62,6 @@ struct StorePath : rust::Value<3 * sizeof(void *) + 24, ffi_StorePath_drop>
typedef std::set<StorePath> StorePathSet;
typedef std::vector<StorePath> StorePaths;
StorePathSet cloneStorePathSet(const StorePathSet & paths);
StorePathSet storePathsToSet(const StorePaths & paths);
StorePathSet singleton(const StorePath & path);
/* Size of the hash part of store paths, in base-32 characters. */
const size_t storePathHashLen = 32; // i.e. 160 bits
/* Extension of derivations in the Nix store. */
const std::string drvExtension = ".drv";
@ -83,18 +75,6 @@ struct StorePathWithOutputs
StorePath path;
std::set<std::string> outputs;
StorePathWithOutputs(const StorePath & path, const std::set<std::string> & outputs = {})
: path(path.clone()), outputs(outputs)
{ }
StorePathWithOutputs(StorePath && path, std::set<std::string> && outputs)
: path(std::move(path)), outputs(std::move(outputs))
{ }
StorePathWithOutputs(const StorePathWithOutputs & other)
: path(other.path.clone()), outputs(other.outputs)
{ }
std::string to_string(const Store & store) const;
};
@ -107,7 +87,7 @@ namespace std {
template<> struct hash<nix::StorePath> {
std::size_t operator()(const nix::StorePath & path) const noexcept
{
return * (std::size_t *) path.hashData();
return * (std::size_t *) path.to_string().data();
}
};
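
With the Rust value type gone, a StorePath is just its base name, and hashPart() and name() are plain substring views: the first 32 characters, and everything after the separating dash. The std::hash specialisation above then reinterprets the leading bytes of that already well-distributed hash part. A tiny standalone sketch of the slicing (the base name is made up):

#include <cassert>
#include <cstddef>
#include <string>
#include <string_view>

int main()
{
    constexpr size_t hashLen = 32;

    // Hypothetical base name; a real hash part comes out of makeStorePath().
    std::string hashPartLit(hashLen, 'f');
    std::string baseName = hashPartLit + "-hello-2.10";

    std::string_view hashPart = std::string_view(baseName).substr(0, hashLen);
    std::string_view name = std::string_view(baseName).substr(hashLen + 1);

    assert(hashPart == hashPartLit);
    assert(name == "hello-2.10");
    return 0;
}
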

View file

@ -19,7 +19,7 @@ RemoteFSAccessor::RemoteFSAccessor(ref<Store> store, const Path & cacheDir)
Path RemoteFSAccessor::makeCacheFile(const Path & storePath, const std::string & ext)
{
assert(cacheDir != "");
return fmt("%s/%s.%s", cacheDir, storePathToHash(storePath), ext);
return fmt("%s/%s.%s", cacheDir, store->parseStorePath(storePath).hashPart(), ext);
}
void RemoteFSAccessor::addToCache(const Path & storePath, const std::string & nar,

View file

@ -228,7 +228,7 @@ struct ConnectionHandle
~ConnectionHandle()
{
if (!daemonException && std::uncaught_exception()) {
if (!daemonException && std::uncaught_exceptions()) {
handle.markBad();
debug("closing daemon connection because of an exception");
}
@ -268,7 +268,7 @@ StorePathSet RemoteStore::queryValidPaths(const StorePathSet & paths, Substitute
if (GET_PROTOCOL_MINOR(conn->daemonVersion) < 12) {
StorePathSet res;
for (auto & i : paths)
if (isValidPath(i)) res.insert(i.clone());
if (isValidPath(i)) res.insert(i);
return res;
} else {
conn->to << wopQueryValidPaths;
@ -296,7 +296,7 @@ StorePathSet RemoteStore::querySubstitutablePaths(const StorePathSet & paths)
for (auto & i : paths) {
conn->to << wopHasSubstitutes << printStorePath(i);
conn.processStderr();
if (readInt(conn->from)) res.insert(i.clone());
if (readInt(conn->from)) res.insert(i);
}
return res;
} else {
@ -329,7 +329,7 @@ void RemoteStore::querySubstitutablePathInfos(const StorePathSet & paths,
info.references = readStorePaths<StorePathSet>(*this, conn->from);
info.downloadSize = readLongLong(conn->from);
info.narSize = readLongLong(conn->from);
infos.insert_or_assign(i.clone(), std::move(info));
infos.insert_or_assign(i, std::move(info));
}
} else {
@ -372,7 +372,7 @@ void RemoteStore::queryPathInfoUncached(const StorePath & path,
bool valid; conn->from >> valid;
if (!valid) throw InvalidPath("path '%s' is not valid", printStorePath(path));
}
info = std::make_shared<ValidPathInfo>(path.clone());
info = std::make_shared<ValidPathInfo>(StorePath(path));
auto deriver = readString(conn->from);
if (deriver != "") info->deriver = parseStorePath(deriver);
info->narHash = Hash(readString(conn->from), htSHA256);
@ -396,7 +396,7 @@ void RemoteStore::queryReferrers(const StorePath & path,
conn->to << wopQueryReferrers << printStorePath(path);
conn.processStderr();
for (auto & i : readStorePaths<StorePathSet>(*this, conn->from))
referrers.insert(i.clone());
referrers.insert(i);
}
@ -418,15 +418,6 @@ StorePathSet RemoteStore::queryDerivationOutputs(const StorePath & path)
}
PathSet RemoteStore::queryDerivationOutputNames(const StorePath & path)
{
auto conn(getConnection());
conn->to << wopQueryDerivationOutputNames << printStorePath(path);
conn.processStderr();
return readStrings<PathSet>(conn->from);
}
std::optional<StorePath> RemoteStore::queryPathFromHashPart(const std::string & hashPart)
{
auto conn(getConnection());

View file

@ -51,8 +51,6 @@ public:
StorePathSet queryDerivationOutputs(const StorePath & path) override;
StringSet queryDerivationOutputNames(const StorePath & path) override;
std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override;
StorePathSet querySubstitutablePaths(const StorePathSet & paths) override;

View file

@ -204,10 +204,10 @@ void handleSQLiteBusy(const SQLiteBusy & e)
if (now > lastWarned + 10) {
lastWarned = now;
logWarning(
ErrorInfo { .name = "Sqlite busy",
.hint = hintfmt(e.what())
});
logWarning({
.name = "Sqlite busy",
.hint = hintfmt(e.what())
});
}
/* Sleep for a while since retrying the transaction right away

View file

@ -55,21 +55,13 @@ StorePath Store::followLinksToStorePath(std::string_view path) const
StorePathWithOutputs Store::followLinksToStorePathWithOutputs(std::string_view path) const
{
auto [path2, outputs] = nix::parsePathWithOutputs(path);
return StorePathWithOutputs(followLinksToStorePath(path2), std::move(outputs));
}
string storePathToHash(const Path & path)
{
auto base = baseNameOf(path);
assert(base.size() >= storePathHashLen);
return string(base, 0, storePathHashLen);
return StorePathWithOutputs { followLinksToStorePath(path2), std::move(outputs) };
}
/* Store paths have the following form:
<store>/<h>-<name>
<realized-path> = <store>/<h>-<name>
where
@ -93,11 +85,14 @@ string storePathToHash(const Path & path)
<type> = one of:
"text:<r1>:<r2>:...<rN>"
for plain text files written to the store using
addTextToStore(); <r1> ... <rN> are the references of the
path.
"source"
addTextToStore(); <r1> ... <rN> are the store paths referenced
by this path, in the form described by <realized-path>
"source:<r1>:<r2>:...:<rN>:self"
for paths copied to the store using addToStore() when recursive
= true and hashAlgo = "sha256"
= true and hashAlgo = "sha256". Just like in the text case, we
can have the store paths referenced by the path.
Additionally, an optional ":self" label can be appended to denote a
self-reference.
"output:<id>"
for either the outputs created by derivations, OR paths copied
to the store using addToStore() with recursive != true or
@ -125,6 +120,12 @@ string storePathToHash(const Path & path)
the contents of the path (or expected contents of the
path for fixed-output derivations)
Note that a derivation output always has type "output", while
something added by addToStore can have type "output" or "source"
depending on the hash. This means the same content can be hashed
differently, and so end up at a different store path, depending on
whether it is added via addToStore or produced by a derivation, in
the sha256/recursive case.
It would have been nicer to handle fixed-output derivations under
"source", e.g. have something like "source:<rec><algo>", but we're
stuck with this for now...
@ -142,9 +143,9 @@ StorePath Store::makeStorePath(const string & type,
const Hash & hash, std::string_view name) const
{
/* e.g., "source:sha256:1abc...:/nix/store:foo.tar.gz" */
string s = type + ":" + hash.to_string(Base16) + ":" + storeDir + ":" + std::string(name);
string s = type + ":" + hash.to_string(Base16, true) + ":" + storeDir + ":" + std::string(name);
auto h = compressHash(hashString(htSHA256, s), 20);
return StorePath::make(h.hash, name);
return StorePath(h, name);
}
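
makeStorePath() implements the scheme documented above: it hashes a fingerprint of the form "<type>:sha256:<inner hash, base-16>:<storeDir>:<name>" with SHA-256, compresses the digest to 160 bits, and base-32 encodes that in front of the name. A rough standalone sketch of the fingerprint assembly and of the folding step (the SHA-256 and Nix base-32 encoding are omitted, and compressHash() is assumed here to fold by XOR; all values are hypothetical):

#include <cstddef>
#include <cstdint>
#include <string>
#include <vector>

// The fingerprint that makeStorePath() feeds to SHA-256, e.g.
// "source:sha256:1abc...:/nix/store:foo.tar.gz".
std::string makeFingerprint(const std::string & type,
    const std::string & innerHashBase16,
    const std::string & storeDir,
    const std::string & name)
{
    return type + ":sha256:" + innerHashBase16 + ":" + storeDir + ":" + name;
}

// Sketch of the compression step: fold the 32-byte SHA-256 digest down to
// 20 bytes (160 bits) by XOR-ing each byte into the smaller buffer; the
// result is then base-32 encoded to form the hash part of the store path.
std::vector<uint8_t> compressTo20Bytes(const std::vector<uint8_t> & digest)
{
    std::vector<uint8_t> out(20, 0);
    for (size_t i = 0; i < digest.size(); ++i)
        out[i % out.size()] ^= digest[i];
    return out;
}
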
@ -186,7 +187,7 @@ StorePath Store::makeFixedOutputPath(
hashString(htSHA256,
"fixed:out:"
+ (recursive == FileIngestionMethod::Recursive ? (string) "r:" : "")
+ hash.to_string(Base16) + ":"),
+ hash.to_string(Base16, true) + ":"),
name);
}
}
@ -243,7 +244,7 @@ bool Store::PathInfoCacheValue::isKnownNow()
bool Store::isValidPath(const StorePath & storePath)
{
auto hashPart = storePathToHash(printStorePath(storePath));
std::string hashPart(storePath.hashPart());
{
auto state_(state.lock());
@ -311,7 +312,7 @@ void Store::queryPathInfo(const StorePath & storePath,
std::string hashPart;
try {
hashPart = storePathToHash(printStorePath(storePath));
hashPart = storePath.hashPart();
{
auto res = state.lock()->pathInfoCache.get(hashPart);
@ -461,7 +462,7 @@ void Store::pathInfoToJSON(JSONPlaceholder & jsonOut, const StorePathSet & store
auto info = queryPathInfo(storePath);
jsonPath
.attr("narHash", info->narHash.to_string(hashBase))
.attr("narHash", info->narHash.to_string(hashBase, true))
.attr("narSize", info->narSize);
{
@ -504,7 +505,7 @@ void Store::pathInfoToJSON(JSONPlaceholder & jsonOut, const StorePathSet & store
if (!narInfo->url.empty())
jsonPath.attr("url", narInfo->url);
if (narInfo->fileHash)
jsonPath.attr("downloadHash", narInfo->fileHash.to_string());
jsonPath.attr("downloadHash", narInfo->fileHash.to_string(Base32, true));
if (narInfo->fileSize)
jsonPath.attr("downloadSize", narInfo->fileSize);
if (showClosureSize)
@ -553,7 +554,7 @@ void Store::buildPaths(const std::vector<StorePathWithOutputs> & paths, BuildMod
for (auto & path : paths) {
if (path.path.isDerivation())
unsupported("buildPaths");
paths2.insert(path.path.clone());
paths2.insert(path.path);
}
if (queryValidPaths(paths2).size() != paths2.size())
@ -693,21 +694,6 @@ void copyClosure(ref<Store> srcStore, ref<Store> dstStore,
}
ValidPathInfo::ValidPathInfo(const ValidPathInfo & other)
: path(other.path.clone())
, deriver(other.deriver ? other.deriver->clone(): std::optional<StorePath>{})
, narHash(other.narHash)
, references(cloneStorePathSet(other.references))
, registrationTime(other.registrationTime)
, narSize(other.narSize)
, id(other.id)
, ultimate(other.ultimate)
, sigs(other.sigs)
, ca(other.ca)
{
}
std::optional<ValidPathInfo> decodeValidPathInfo(const Store & store, std::istream & str, bool hashGiven)
{
std::string path;
@ -760,7 +746,7 @@ std::string ValidPathInfo::fingerprint(const Store & store) const
store.printStorePath(path));
return
"1;" + store.printStorePath(path) + ";"
+ narHash.to_string(Base32) + ";"
+ narHash.to_string(Base32, true) + ";"
+ std::to_string(narSize) + ";"
+ concatStringsSep(",", store.printStorePathSet(references));
}
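
fingerprint() produces the string that path signatures are computed over: a "1;" version tag, the printed store path, the typed base-32 NAR hash, the NAR size and the comma-separated references. A trivial standalone sketch with placeholder values (not real store paths or hashes):

#include <cstdint>
#include <iostream>
#include <string>

int main()
{
    // Placeholder values; the real ones come from the ValidPathInfo fields above.
    std::string storePath = "/nix/store/<hash>-hello-2.10";
    std::string narHash = "sha256:<base-32 NAR hash>";
    uint64_t narSize = 123456;
    std::string references = "/nix/store/<hash>-some-dependency";

    std::string fingerprint =
        "1;" + storePath + ";" + narHash + ";" + std::to_string(narSize) + ";" + references;

    std::cout << fingerprint << "\n";
    return 0;
}
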
@ -783,7 +769,7 @@ bool ValidPathInfo::isContentAddressed(const Store & store) const
};
if (hasPrefix(ca, "text:")) {
Hash hash(std::string(ca, 5));
Hash hash(ca.substr(5));
if (store.makeTextPath(path.name(), hash, references) == path)
return true;
else
@ -792,8 +778,8 @@ bool ValidPathInfo::isContentAddressed(const Store & store) const
else if (hasPrefix(ca, "fixed:")) {
FileIngestionMethod recursive { ca.compare(6, 2, "r:") == 0 };
Hash hash(std::string(ca, recursive == FileIngestionMethod::Recursive ? 8 : 6));
auto refs = cloneStorePathSet(references);
Hash hash(ca.substr(recursive == FileIngestionMethod::Recursive ? 8 : 6));
auto refs = references;
bool hasSelfReference = false;
if (refs.count(path)) {
hasSelfReference = true;
@ -840,7 +826,7 @@ std::string makeFixedOutputCA(FileIngestionMethod recursive, const Hash & hash)
{
return "fixed:"
+ (recursive == FileIngestionMethod::Recursive ? (std::string) "r:" : "")
+ hash.to_string();
+ hash.to_string(Base32, true);
}
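
Here the 'ca' field is still a plain string: either "text:<hash>" or "fixed:[r:]<hash>", where the optional "r:" marks recursive (NAR) ingestion. A minimal standalone sketch of splitting such a string (hypothetical helper, mirroring isContentAddressed() and makeFixedOutputCA() above):

#include <optional>
#include <string>
#include <string_view>

struct ParsedCA {
    std::string method;   // "text", "flat" or "recursive"
    std::string hash;     // typed hash string, e.g. "sha256:..."
};

// Split a content-address string of the form "text:<hash>" or
// "fixed:[r:]<hash>" into its method and hash parts.
std::optional<ParsedCA> parseCA(std::string_view ca)
{
    auto hasPrefix = [](std::string_view s, std::string_view p) {
        return s.compare(0, p.size(), p) == 0;
    };
    if (hasPrefix(ca, "text:"))
        return ParsedCA{"text", std::string(ca.substr(5))};
    if (hasPrefix(ca, "fixed:")) {
        bool recursive = ca.compare(6, 2, "r:") == 0;
        return ParsedCA{recursive ? "recursive" : "flat",
            std::string(ca.substr(recursive ? 8 : 6))};
    }
    return std::nullopt;
}
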

View file

@ -189,8 +189,9 @@ struct ValidPathInfo
Strings shortRefs() const;
ValidPathInfo(const StorePath & path) : path(path) { }
ValidPathInfo(StorePath && path) : path(std::move(path)) { }
explicit ValidPathInfo(const ValidPathInfo & other);
virtual ~ValidPathInfo() { }
};
@ -430,10 +431,6 @@ public:
virtual StorePathSet queryDerivationOutputs(const StorePath & path)
{ unsupported("queryDerivationOutputs"); }
/* Query the output names of the derivation denoted by `path'. */
virtual StringSet queryDerivationOutputNames(const StorePath & path)
{ unsupported("queryDerivationOutputNames"); }
/* Query the full store path given the hash part of a valid store
path, or empty if the path doesn't exist. */
virtual std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) = 0;
@ -587,6 +584,9 @@ public:
ensurePath(). */
Derivation derivationFromPath(const StorePath & drvPath);
/* Read a derivation (which must already be valid). */
Derivation readDerivation(const StorePath & drvPath);
/* Place in `out' the set of all store paths in the file system
closure of `storePath'; that is, all paths than can be directly
or indirectly reached from it. `out' is not cleared. If
@ -732,10 +732,6 @@ public:
};
/* Extract the hash part of the given store path. */
string storePathToHash(const Path & path);
/* Copy a path from one store to another. */
void copyStorePath(ref<Store> srcStore, ref<Store> dstStore,
const StorePath & storePath, RepairFlag repair = NoRepair, CheckSigsFlag checkSigs = CheckSigs);

View file

@ -36,7 +36,7 @@ typedef enum {
wopClearFailedPaths = 25,
wopQueryPathInfo = 26,
wopImportPaths = 27, // obsolete
wopQueryDerivationOutputNames = 28,
wopQueryDerivationOutputNames = 28, // obsolete
wopQueryPathFromHashPart = 29,
wopQuerySubstitutablePathInfos = 30,
wopQueryValidPaths = 31,

View file

@ -11,5 +11,7 @@ namespace nix {
#define ANSI_GREEN "\e[32;1m"
#define ANSI_YELLOW "\e[33;1m"
#define ANSI_BLUE "\e[34;1m"
#define ANSI_MAGENTA "\e[35;1m"
#define ANSI_CYAN "\e[36;1m"
}

View file

@ -217,10 +217,15 @@ MultiCommand::MultiCommand(const Commands & commands)
{
expectedArgs.push_back(ExpectedArg{"command", 1, true, [=](std::vector<std::string> ss) {
assert(!command);
auto i = commands.find(ss[0]);
auto cmd = ss[0];
if (auto alias = get(deprecatedAliases, cmd)) {
warn("'%s' is a deprecated alias for '%s'", cmd, *alias);
cmd = *alias;
}
auto i = commands.find(cmd);
if (i == commands.end())
throw UsageError("'%s' is not a recognised command", ss[0]);
command = {ss[0], i->second()};
throw UsageError("'%s' is not a recognised command", cmd);
command = {cmd, i->second()};
}});
categories[Command::catDefault] = "Available commands";
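
Command dispatch now consults a deprecatedAliases table first: if the word the user typed is an alias, a warning is printed and the canonical command name is looked up instead. A standalone sketch of that lookup, with a local copy of the optional-returning get() helper declared in util.hh (the alias and command names here are made up):

#include <iostream>
#include <map>
#include <optional>
#include <string>

// Look a key up in a map, returning an empty optional when it is missing
// (the same shape as the get() helper in util.hh).
template<class T>
std::optional<typename T::mapped_type> get(const T & map, const typename T::key_type & key)
{
    auto i = map.find(key);
    if (i == map.end()) return {};
    return i->second;
}

int main()
{
    std::map<std::string, std::string> deprecatedAliases = {{"old-cmd", "new-cmd"}};

    std::string cmd = "old-cmd";
    if (auto alias = get(deprecatedAliases, cmd)) {
        std::cerr << "warning: '" << cmd << "' is a deprecated alias for '" << *alias << "'\n";
        cmd = *alias;   // dispatch continues with the canonical name
    }
    std::cout << "running command: " << cmd << "\n";
    return 0;
}
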

View file

@ -234,6 +234,8 @@ public:
std::map<Command::Category, std::string> categories;
std::map<std::string, std::string> deprecatedAliases;
// Selected command, if any.
std::optional<std::pair<std::string, ref<Command>>> command;

View file

@ -1,3 +1,4 @@
#include <cassert>
#include <map>
#include <set>

View file

@ -176,7 +176,7 @@ void printCodeLines(std::ostream &out, const string &prefix, const NixCode &nixC
std::ostream& operator<<(std::ostream &out, const ErrorInfo &einfo)
{
int errwidth = 80;
auto errwidth = std::max<size_t>(getWindowSize().second, 20);
string prefix = "";
string levelString;
@ -229,12 +229,10 @@ std::ostream& operator<<(std::ostream &out, const ErrorInfo &einfo)
}
}
int ndl = prefix.length() + levelString.length() + 3 + einfo.name.length() + einfo.programName.value_or("").length();
int dashwidth = ndl > (errwidth - 3) ? 3 : errwidth - ndl;
auto ndl = prefix.length() + levelString.length() + 3 + einfo.name.length() + einfo.programName.value_or("").length();
auto dashwidth = ndl > (errwidth - 3) ? 3 : errwidth - ndl;
string dashes;
for (int i = 0; i < dashwidth; ++i)
dashes.append("-");
std::string dashes(dashwidth, '-');
// divider.
if (einfo.name != "")

View file

@ -4,6 +4,7 @@
#include "ref.hh"
#include "types.hh"
#include <cstring>
#include <list>
#include <memory>
#include <map>
@ -127,9 +128,9 @@ public:
{ }
template<typename... Args>
BaseError(const Args & ... args)
BaseError(const std::string & fs, const Args & ... args)
: err { .level = lvlError,
.hint = hintfmt(args...)
.hint = hintfmt(fs, args...)
}
{ }
@ -139,7 +140,11 @@ public:
}
{ }
BaseError(ErrorInfo e)
BaseError(ErrorInfo && e)
: err(std::move(e))
{ }
BaseError(const ErrorInfo & e)
: err(e)
{ }

View file

@ -125,7 +125,7 @@ std::string Hash::to_string(Base base, bool includeType) const
}
Hash::Hash(const std::string & s, HashType type)
Hash::Hash(std::string_view s, HashType type)
: type(type)
{
size_t pos = 0;
@ -194,7 +194,7 @@ Hash::Hash(const std::string & s, HashType type)
}
else if (isSRI || size == base64Len()) {
auto d = base64Decode(std::string(s, pos));
auto d = base64Decode(s.substr(pos));
if (d.size() != hashSize)
throw BadHash("invalid %s hash '%s'", isSRI ? "SRI" : "base-64", s);
assert(hashSize);
@ -205,6 +205,16 @@ Hash::Hash(const std::string & s, HashType type)
throw BadHash("hash '%s' has wrong length for hash type '%s'", s, printHashType(type));
}
Hash newHashAllowEmpty(std::string hashStr, HashType ht)
{
if (hashStr.empty()) {
Hash h(ht);
warn("found empty hash, assuming '%s'", h.to_string(SRI, true));
return h;
} else
return Hash(hashStr, ht);
}
union Ctx
{

View file

@ -42,7 +42,7 @@ struct Hash
Subresource Integrity hash expression). If the 'type' argument
is htUnknown, then the hash type must be specified in the
string. */
Hash(const std::string & s, HashType type = htUnknown);
Hash(std::string_view s, HashType type = htUnknown);
void init();
@ -79,7 +79,7 @@ struct Hash
/* Return a string representation of the hash, in base-16, base-32
or base-64. If 'includeType' is true, the result is prefixed by
the hash type (e.g. "sha256:"). */
std::string to_string(Base base = Base32, bool includeType = true) const;
std::string to_string(Base base, bool includeType) const;
std::string gitRev() const
{
@ -94,6 +94,8 @@ struct Hash
}
};
/* Helper that defaults empty hashes to the 0 hash. */
Hash newHashAllowEmpty(std::string hashStr, HashType ht);
/* Print a hash in base-16 if it's MD5, or base-32 otherwise. */
string printHash16or32(const Hash & hash);
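
Hash::to_string() has lost its default arguments, so every call site now spells out both the base and whether the "<type>:" prefix is wanted, and newHashAllowEmpty() maps an empty hash string to the all-zero hash (with a warning) instead of failing. A small usage sketch, assuming hash.hh from this tree is on the include path (the hash value is hypothetical):

#include "hash.hh"

#include <iostream>
#include <string>

using namespace nix;

int main()
{
    // Hypothetical SHA-256 hash given in base-16 (64 hex digits of 'a').
    Hash h(std::string(64, 'a'), htSHA256);

    // Both arguments are now mandatory.
    std::cout << h.to_string(Base16, true)  << "\n";  // "sha256:aaaa..."
    std::cout << h.to_string(Base32, false) << "\n";  // bare base-32 digest

    // Empty strings fall back to the zero hash instead of throwing.
    Hash h2 = newHashAllowEmpty("", htSHA256);
    std::cout << h2.to_string(SRI, true) << "\n";
    return 0;
}
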

View file

@ -173,7 +173,7 @@ JSONObject JSONPlaceholder::object()
JSONPlaceholder::~JSONPlaceholder()
{
assert(!first || std::uncaught_exception());
assert(!first || std::uncaught_exceptions());
}
}

View file

@ -7,5 +7,3 @@ libutil_DIR := $(d)
libutil_SOURCES := $(wildcard $(d)/*.cc)
libutil_LDFLAGS = $(LIBLZMA_LIBS) -lbz2 -pthread $(OPENSSL_LIBS) $(LIBBROTLI_LIBS) $(LIBARCHIVE_LIBS) $(BOOST_LDFLAGS) -lboost_context
libutil_LIBS = libnixrust

View file

@ -18,7 +18,7 @@ void setCurActivity(const ActivityId activityId)
curActivity = activityId;
}
Logger * logger = makeDefaultLogger();
Logger * logger = makeSimpleLogger(true);
void Logger::warn(const std::string & msg)
{
@ -35,13 +35,19 @@ class SimpleLogger : public Logger
public:
bool systemd, tty;
bool printBuildLogs;
SimpleLogger()
SimpleLogger(bool printBuildLogs)
: printBuildLogs(printBuildLogs)
{
systemd = getEnv("IN_SYSTEMD") == "1";
tty = isatty(STDERR_FILENO);
}
bool isVerbose() override {
return printBuildLogs;
}
void log(Verbosity lvl, const FormatOrString & fs) override
{
if (lvl > verbosity) return;
@ -78,6 +84,18 @@ public:
if (lvl <= verbosity && !s.empty())
log(lvl, s + "...");
}
void result(ActivityId act, ResultType type, const Fields & fields) override
{
if (type == resBuildLogLine && printBuildLogs) {
auto lastLine = fields[0].s;
printError(lastLine);
}
else if (type == resPostBuildLogLine && printBuildLogs) {
auto lastLine = fields[0].s;
printError("post-build-hook: " + lastLine);
}
}
};
Verbosity verbosity = lvlInfo;
@ -102,9 +120,9 @@ void writeToStderr(const string & s)
}
}
Logger * makeDefaultLogger()
Logger * makeSimpleLogger(bool printBuildLogs)
{
return new SimpleLogger();
return new SimpleLogger(printBuildLogs);
}
std::atomic<uint64_t> nextId{(uint64_t) getpid() << 32};
@ -121,6 +139,10 @@ struct JSONLogger : Logger {
JSONLogger(Logger & prevLogger) : prevLogger(prevLogger) { }
bool isVerbose() override {
return true;
}
void addFields(nlohmann::json & json, const Fields & fields)
{
if (fields.empty()) return;

View file

@ -18,6 +18,7 @@ typedef enum {
actSubstitute = 108,
actQueryPathInfo = 109,
actPostBuildHook = 110,
actBuildWaiting = 111,
} ActivityType;
typedef enum {
@ -54,6 +55,11 @@ public:
virtual ~Logger() { }
virtual void stop() { };
// Whether the logger prints the whole build log
virtual bool isVerbose() { return false; }
virtual void log(Verbosity lvl, const FormatOrString & fs) = 0;
void log(const FormatOrString & fs)
@ -140,7 +146,7 @@ struct PushActivity
extern Logger * logger;
Logger * makeDefaultLogger();
Logger * makeSimpleLogger(bool printBuildLogs = true);
Logger * makeJSONLogger(Logger & prevLogger);
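
The logger interface gains isVerbose() and the build-log result types, and the default logger is now created by makeSimpleLogger(), whose flag controls whether resBuildLogLine output is echoed. A small usage sketch, assuming logging.hh from this tree is available:

#include "logging.hh"

#include <string>

using namespace nix;

int main()
{
    // Swap in a simple logger that does not echo per-line build output
    // (the default logger created above passes 'true').
    logger = makeSimpleLogger(false);
    logger->log(lvlInfo, std::string("hello from the simple logger"));

    // Wrap the current logger so that everything is emitted as structured
    // JSON records instead.
    logger = makeJSONLogger(*logger);
    logger->log(lvlInfo, std::string("hello from the JSON logger"));

    return 0;
}
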

View file

@ -1,3 +1,4 @@
#if 0
#include "logging.hh"
#include "rust-ffi.hh"
@ -20,3 +21,4 @@ std::ostream & operator << (std::ostream & str, const String & s)
}
}
#endif

View file

@ -1,4 +1,5 @@
#pragma once
#if 0
#include "serialise.hh"
@ -185,3 +186,4 @@ struct Result
};
}
#endif

View file

@ -52,10 +52,10 @@ size_t threshold = 256 * 1024 * 1024;
static void warnLargeDump()
{
logWarning(ErrorInfo {
logWarning({
.name = "Large path",
.description = "dumping very large path (> 256 MiB); this may run out of memory"
});
});
}

View file

@ -11,28 +11,28 @@ namespace nix {
// values taken from: https://tools.ietf.org/html/rfc1321
auto s1 = "";
auto hash = hashString(HashType::htMD5, s1);
ASSERT_EQ(hash.to_string(Base::Base16), "md5:d41d8cd98f00b204e9800998ecf8427e");
ASSERT_EQ(hash.to_string(Base::Base16, true), "md5:d41d8cd98f00b204e9800998ecf8427e");
}
TEST(hashString, testKnownMD5Hashes2) {
// values taken from: https://tools.ietf.org/html/rfc1321
auto s2 = "abc";
auto hash = hashString(HashType::htMD5, s2);
ASSERT_EQ(hash.to_string(Base::Base16), "md5:900150983cd24fb0d6963f7d28e17f72");
ASSERT_EQ(hash.to_string(Base::Base16, true), "md5:900150983cd24fb0d6963f7d28e17f72");
}
TEST(hashString, testKnownSHA1Hashes1) {
// values taken from: https://tools.ietf.org/html/rfc3174
auto s = "abc";
auto hash = hashString(HashType::htSHA1, s);
ASSERT_EQ(hash.to_string(Base::Base16),"sha1:a9993e364706816aba3e25717850c26c9cd0d89d");
ASSERT_EQ(hash.to_string(Base::Base16, true),"sha1:a9993e364706816aba3e25717850c26c9cd0d89d");
}
TEST(hashString, testKnownSHA1Hashes2) {
// values taken from: https://tools.ietf.org/html/rfc3174
auto s = "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq";
auto hash = hashString(HashType::htSHA1, s);
ASSERT_EQ(hash.to_string(Base::Base16),"sha1:84983e441c3bd26ebaae4aa1f95129e5e54670f1");
ASSERT_EQ(hash.to_string(Base::Base16, true),"sha1:84983e441c3bd26ebaae4aa1f95129e5e54670f1");
}
TEST(hashString, testKnownSHA256Hashes1) {
@ -40,7 +40,7 @@ namespace nix {
auto s = "abc";
auto hash = hashString(HashType::htSHA256, s);
ASSERT_EQ(hash.to_string(Base::Base16),
ASSERT_EQ(hash.to_string(Base::Base16, true),
"sha256:ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad");
}
@ -48,7 +48,7 @@ namespace nix {
// values taken from: https://tools.ietf.org/html/rfc4634
auto s = "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq";
auto hash = hashString(HashType::htSHA256, s);
ASSERT_EQ(hash.to_string(Base::Base16),
ASSERT_EQ(hash.to_string(Base::Base16, true),
"sha256:248d6a61d20638b8e5c026930c3e6039a33ce45964ff2167f6ecedd419db06c1");
}
@ -56,7 +56,7 @@ namespace nix {
// values taken from: https://tools.ietf.org/html/rfc4634
auto s = "abc";
auto hash = hashString(HashType::htSHA512, s);
ASSERT_EQ(hash.to_string(Base::Base16),
ASSERT_EQ(hash.to_string(Base::Base16, true),
"sha512:ddaf35a193617abacc417349ae20413112e6fa4e89a9"
"7ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd"
"454d4423643ce80e2a9ac94fa54ca49f");
@ -67,7 +67,7 @@ namespace nix {
auto s = "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu";
auto hash = hashString(HashType::htSHA512, s);
ASSERT_EQ(hash.to_string(Base::Base16),
ASSERT_EQ(hash.to_string(Base::Base16, true),
"sha512:8e959b75dae313da8cf4f72814fc143f8f7779c6eb9f7fa1"
"7299aeadb6889018501d289e4900f7e4331b99dec4b5433a"
"c7d329eeb6dd26545e96e55b874be909");

View file

@ -22,7 +22,7 @@ namespace nix {
logger->logEI(e.info());
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(),"\x1B[31;1merror:\x1B[0m\x1B[34;1m --- TestError ------------------------------------ error-unit-test\x1B[0m\nan error for testing purposes\n");
ASSERT_STREQ(str.c_str(),"\x1B[31;1merror:\x1B[0m\x1B[34;1m --- TestError --- error-unit-test\x1B[0m\nan error for testing purposes\n");
}
}
@ -42,7 +42,7 @@ namespace nix {
logger->logEI(ei);
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- TestError ------------------------------------ error-unit-test\x1B[0m\n\x1B[33;1m\x1B[0minitial error\x1B[0m; subsequent error message.\n");
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- TestError --- error-unit-test\x1B[0m\n\x1B[33;1m\x1B[0minitial error\x1B[0m; subsequent error message.\n");
}
}
@ -60,7 +60,7 @@ namespace nix {
logError(e.info());
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- SysError ------------------------------------- error-unit-test\x1B[0m\n\x1B[33;1m\x1B[0mstatting file\x1B[0m: \x1B[33;1mBad file descriptor\x1B[0m\n");
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- SysError --- error-unit-test\x1B[0m\n\x1B[33;1m\x1B[0mstatting file\x1B[0m: \x1B[33;1mBad file descriptor\x1B[0m\n");
}
}
@ -74,7 +74,7 @@ namespace nix {
});
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[32;1minfo:\x1B[0m\x1B[34;1m --- Info name ------------------------------------- error-unit-test\x1B[0m\nInfo description\n");
ASSERT_STREQ(str.c_str(), "\x1B[32;1minfo:\x1B[0m\x1B[34;1m --- Info name --- error-unit-test\x1B[0m\nInfo description\n");
}
TEST(logEI, loggingErrorOnTalkativeLevel) {
@ -88,7 +88,7 @@ namespace nix {
});
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[32;1mtalk:\x1B[0m\x1B[34;1m --- Talkative name -------------------------------- error-unit-test\x1B[0m\nTalkative description\n");
ASSERT_STREQ(str.c_str(), "\x1B[32;1mtalk:\x1B[0m\x1B[34;1m --- Talkative name --- error-unit-test\x1B[0m\nTalkative description\n");
}
TEST(logEI, loggingErrorOnChattyLevel) {
@ -102,7 +102,7 @@ namespace nix {
});
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[32;1mchat:\x1B[0m\x1B[34;1m --- Chatty name ----------------------------------- error-unit-test\x1B[0m\nTalkative description\n");
ASSERT_STREQ(str.c_str(), "\x1B[32;1mchat:\x1B[0m\x1B[34;1m --- Chatty name --- error-unit-test\x1B[0m\nTalkative description\n");
}
TEST(logEI, loggingErrorOnDebugLevel) {
@ -116,7 +116,7 @@ namespace nix {
});
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[33;1mdebug:\x1B[0m\x1B[34;1m --- Debug name ----------------------------------- error-unit-test\x1B[0m\nDebug description\n");
ASSERT_STREQ(str.c_str(), "\x1B[33;1mdebug:\x1B[0m\x1B[34;1m --- Debug name --- error-unit-test\x1B[0m\nDebug description\n");
}
TEST(logEI, loggingErrorOnVomitLevel) {
@ -130,7 +130,7 @@ namespace nix {
});
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[32;1mvomit:\x1B[0m\x1B[34;1m --- Vomit name ----------------------------------- error-unit-test\x1B[0m\nVomit description\n");
ASSERT_STREQ(str.c_str(), "\x1B[32;1mvomit:\x1B[0m\x1B[34;1m --- Vomit name --- error-unit-test\x1B[0m\nVomit description\n");
}
/* ----------------------------------------------------------------------------
@ -147,7 +147,7 @@ namespace nix {
});
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- name ----------------------------------------- error-unit-test\x1B[0m\nerror description\n");
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- name --- error-unit-test\x1B[0m\nerror description\n");
}
TEST(logError, logErrorWithPreviousAndNextLinesOfCode) {
@ -171,7 +171,7 @@ namespace nix {
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- error name ----------------------------------- error-unit-test\x1B[0m\nin file: \x1B[34;1mmyfile.nix (40:13)\x1B[0m\n\nerror with code lines\n\n 39| previous line of code\n 40| this is the problem line of code\n | \x1B[31;1m^\x1B[0m\n 41| next line of code\n\nthis hint has \x1B[33;1myellow\x1B[0m templated \x1B[33;1mvalues\x1B[0m!!\n");
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- error name --- error-unit-test\x1B[0m\nin file: \x1B[34;1mmyfile.nix (40:13)\x1B[0m\n\nerror with code lines\n\n 39| previous line of code\n 40| this is the problem line of code\n | \x1B[31;1m^\x1B[0m\n 41| next line of code\n\nthis hint has \x1B[33;1myellow\x1B[0m templated \x1B[33;1mvalues\x1B[0m!!\n");
}
TEST(logError, logErrorWithoutLinesOfCode) {
@ -190,7 +190,7 @@ namespace nix {
}});
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- error name ----------------------------------- error-unit-test\x1B[0m\nin file: \x1B[34;1mmyfile.nix (40:13)\x1B[0m\n\nerror without any code lines.\n\nthis hint has \x1B[33;1myellow\x1B[0m templated \x1B[33;1mvalues\x1B[0m!!\n");
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- error name --- error-unit-test\x1B[0m\nin file: \x1B[34;1mmyfile.nix (40:13)\x1B[0m\n\nerror without any code lines.\n\nthis hint has \x1B[33;1myellow\x1B[0m templated \x1B[33;1mvalues\x1B[0m!!\n");
}
TEST(logError, logErrorWithOnlyHintAndName) {
@ -206,7 +206,7 @@ namespace nix {
}});
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- error name ----------------------------------- error-unit-test\x1B[0m\nin file: \x1B[34;1mmyfile.nix (40:13)\x1B[0m\n\nhint \x1B[33;1monly\x1B[0m\n");
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- error name --- error-unit-test\x1B[0m\nin file: \x1B[34;1mmyfile.nix (40:13)\x1B[0m\n\nhint \x1B[33;1monly\x1B[0m\n");
}
@ -224,7 +224,7 @@ namespace nix {
});
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[33;1mwarning:\x1B[0m\x1B[34;1m --- name --------------------------------------- error-unit-test\x1B[0m\nerror description\n\nthere was a \x1B[33;1mwarning\x1B[0m\n");
ASSERT_STREQ(str.c_str(), "\x1B[33;1mwarning:\x1B[0m\x1B[34;1m --- name --- error-unit-test\x1B[0m\nerror description\n\nthere was a \x1B[33;1mwarning\x1B[0m\n");
}
TEST(logWarning, logWarningWithFileLineNumAndCode) {
@ -249,7 +249,7 @@ namespace nix {
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[33;1mwarning:\x1B[0m\x1B[34;1m --- warning name ------------------------------- error-unit-test\x1B[0m\nin file: \x1B[34;1mmyfile.nix (40:13)\x1B[0m\n\nwarning description\n\n 40| this is the problem line of code\n | \x1B[31;1m^\x1B[0m\n\nthis hint has \x1B[33;1myellow\x1B[0m templated \x1B[33;1mvalues\x1B[0m!!\n");
ASSERT_STREQ(str.c_str(), "\x1B[33;1mwarning:\x1B[0m\x1B[34;1m --- warning name --- error-unit-test\x1B[0m\nin file: \x1B[34;1mmyfile.nix (40:13)\x1B[0m\n\nwarning description\n\n 40| this is the problem line of code\n | \x1B[31;1m^\x1B[0m\n\nthis hint has \x1B[33;1myellow\x1B[0m templated \x1B[33;1mvalues\x1B[0m!!\n");
}
}

127
src/libutil/tests/pool.cc Normal file
View file

@ -0,0 +1,127 @@
#include "pool.hh"
#include <gtest/gtest.h>
namespace nix {
struct TestResource
{
TestResource() {
static int counter = 0;
num = counter++;
}
int dummyValue = 1;
bool good = true;
int num;
};
/* ----------------------------------------------------------------------------
* Pool
* --------------------------------------------------------------------------*/
TEST(Pool, freshPoolHasZeroCountAndSpecifiedCapacity) {
auto isGood = [](const ref<TestResource> & r) { return r->good; };
auto createResource = []() { return make_ref<TestResource>(); };
Pool<TestResource> pool = Pool<TestResource>((size_t)1, createResource, isGood);
ASSERT_EQ(pool.count(), 0);
ASSERT_EQ(pool.capacity(), 1);
}
TEST(Pool, freshPoolCanGetAResource) {
auto isGood = [](const ref<TestResource> & r) { return r->good; };
auto createResource = []() { return make_ref<TestResource>(); };
Pool<TestResource> pool = Pool<TestResource>((size_t)1, createResource, isGood);
ASSERT_EQ(pool.count(), 0);
TestResource r = *(pool.get());
ASSERT_EQ(pool.count(), 1);
ASSERT_EQ(pool.capacity(), 1);
ASSERT_EQ(r.dummyValue, 1);
ASSERT_EQ(r.good, true);
}
TEST(Pool, capacityCanBeIncremented) {
auto isGood = [](const ref<TestResource> & r) { return r->good; };
auto createResource = []() { return make_ref<TestResource>(); };
Pool<TestResource> pool = Pool<TestResource>((size_t)1, createResource, isGood);
ASSERT_EQ(pool.capacity(), 1);
pool.incCapacity();
ASSERT_EQ(pool.capacity(), 2);
}
TEST(Pool, capacityCanBeDecremented) {
auto isGood = [](const ref<TestResource> & r) { return r->good; };
auto createResource = []() { return make_ref<TestResource>(); };
Pool<TestResource> pool = Pool<TestResource>((size_t)1, createResource, isGood);
ASSERT_EQ(pool.capacity(), 1);
pool.decCapacity();
ASSERT_EQ(pool.capacity(), 0);
}
TEST(Pool, flushBadDropsOutOfScopeResources) {
auto isGood = [](const ref<TestResource> & r) { return false; };
auto createResource = []() { return make_ref<TestResource>(); };
Pool<TestResource> pool = Pool<TestResource>((size_t)1, createResource, isGood);
{
auto _r = pool.get();
ASSERT_EQ(pool.count(), 1);
}
pool.flushBad();
ASSERT_EQ(pool.count(), 0);
}
// Test that the resources we allocate are being reused when they are still good.
TEST(Pool, reuseResource) {
auto isGood = [](const ref<TestResource> & r) { return true; };
auto createResource = []() { return make_ref<TestResource>(); };
Pool<TestResource> pool = Pool<TestResource>((size_t)1, createResource, isGood);
// Compare the instance counter between the two handles. We expect them to be equal
// as the pool should hand out the same (still) good one again.
int counter = -1;
{
Pool<TestResource>::Handle h = pool.get();
counter = h->num;
} // the first handle goes out of scope
{ // the second handle should contain the same resource (with the same counter value)
Pool<TestResource>::Handle h = pool.get();
ASSERT_EQ(h->num, counter);
}
}
// Test that the resources we allocate are being thrown away when they are no longer good.
TEST(Pool, badResourceIsNotReused) {
auto isGood = [](const ref<TestResource> & r) { return false; };
auto createResource = []() { return make_ref<TestResource>(); };
Pool<TestResource> pool = Pool<TestResource>((size_t)1, createResource, isGood);
// Compare the instance counter between the two handles. We expect them
// to *not* be equal as the pool should hand out a new instance after
// the first one was returned.
int counter = -1;
{
Pool<TestResource>::Handle h = pool.get();
counter = h->num;
} // the first handle goes out of scope
{
// the second handle should contain a different resource (with a
// different counter value)
Pool<TestResource>::Handle h = pool.get();
ASSERT_NE(h->num, counter);
}
}
}

View file

@ -1,6 +1,5 @@
#pragma once
#include "ref.hh"
#include <list>
@ -25,7 +24,6 @@ typedef string Path;
typedef list<Path> Paths;
typedef set<Path> PathSet;
/* Helper class to run code at startup. */
template<typename T>
struct OnStartup
@ -33,5 +31,4 @@ struct OnStartup
OnStartup(T && t) { t(); }
};
}

View file

@ -35,7 +35,7 @@
#endif
extern char * * environ;
extern char * * environ __attribute__((weak));
namespace nix {
@ -972,7 +972,7 @@ pid_t startProcess(std::function<void()> fun, const ProcessOptions & options)
{
auto wrapper = [&]() {
if (!options.allowVfork)
logger = makeDefaultLogger();
logger = makeSimpleLogger();
try {
#if __linux__
if (options.dieWithParent && prctl(PR_SET_PDEATHSIG, SIGKILL) == -1)
@ -1199,7 +1199,7 @@ void _interrupted()
/* Block user interrupts while an exception is being handled.
Throwing an exception while another exception is being handled
kills the program! */
if (!interruptThrown && !std::uncaught_exception()) {
if (!interruptThrown && !std::uncaught_exceptions()) {
interruptThrown = true;
throw Interrupted("interrupted by the user");
}
@ -1297,7 +1297,7 @@ bool statusOk(int status)
}
bool hasPrefix(const string & s, const string & prefix)
bool hasPrefix(std::string_view s, std::string_view prefix)
{
return s.compare(0, prefix.size(), prefix) == 0;
}
@ -1391,7 +1391,7 @@ std::string filterANSIEscapes(const std::string & s, bool filterAll, unsigned in
static char base64Chars[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
string base64Encode(const string & s)
string base64Encode(std::string_view s)
{
string res;
int data = 0, nbits = 0;
@ -1412,7 +1412,7 @@ string base64Encode(const string & s)
}
string base64Decode(const string & s)
string base64Decode(std::string_view s)
{
bool init = false;
char decode[256];

View file

@ -417,7 +417,7 @@ template<class N> bool string2Float(const string & s, N & n)
/* Return true iff `s' starts with `prefix'. */
bool hasPrefix(const string & s, const string & prefix);
bool hasPrefix(std::string_view s, std::string_view prefix);
/* Return true iff `s' ends in `suffix'. */
@ -456,12 +456,11 @@ std::string filterANSIEscapes(const std::string & s,
/* Base64 encoding/decoding. */
string base64Encode(const string & s);
string base64Decode(const string & s);
string base64Encode(std::string_view s);
string base64Decode(std::string_view s);
/* Get a value for the specified key from an associative container, or a
default value if the key doesn't exist. */
/* Get a value for the specified key from an associative container. */
template <class T>
std::optional<typename T::mapped_type> get(const T & map, const typename T::key_type & key)
{

View file

@ -21,7 +21,7 @@
using namespace nix;
using namespace std::string_literals;
extern char * * environ;
extern char * * environ __attribute__((weak));
/* Recreate the effect of the perl shellwords function, breaking up a
* string into arguments like a shell word, including escapes
@ -363,17 +363,16 @@ static void _main(int argc, char * * argv)
if (!drv)
throw Error("the 'bashInteractive' attribute in <nixpkgs> did not evaluate to a derivation");
pathsToBuild.emplace_back(store->parseStorePath(drv->queryDrvPath()));
pathsToBuild.push_back({store->parseStorePath(drv->queryDrvPath())});
shell = drv->queryOutPath() + "/bin/bash";
} catch (Error & e) {
logWarning(
ErrorInfo {
.name = "bashInteractive",
.hint = hintfmt("%s; will use bash from your environment",
(e.info().hint ? e.info().hint->str() : ""))
});
logWarning({
.name = "bashInteractive",
.hint = hintfmt("%s; will use bash from your environment",
(e.info().hint ? e.info().hint->str() : ""))
});
shell = "bash";
}
}
@ -382,9 +381,9 @@ static void _main(int argc, char * * argv)
for (const auto & input : drv.inputDrvs)
if (std::all_of(envExclude.cbegin(), envExclude.cend(),
[&](const string & exclude) { return !std::regex_search(store->printStorePath(input.first), std::regex(exclude)); }))
pathsToBuild.emplace_back(input.first, input.second);
pathsToBuild.push_back({input.first, input.second});
for (const auto & src : drv.inputSrcs)
pathsToBuild.emplace_back(src);
pathsToBuild.push_back({src});
buildPaths(pathsToBuild);
@ -477,6 +476,8 @@ static void _main(int argc, char * * argv)
restoreSignals();
logger->stop();
execvp(shell->c_str(), argPtrs.data());
throw SysError("executing shell '%s'", *shell);
@ -498,7 +499,7 @@ static void _main(int argc, char * * argv)
if (outputName == "")
throw Error("derivation '%s' lacks an 'outputName' attribute", drvPath);
pathsToBuild.emplace_back(store->parseStorePath(drvPath), StringSet{outputName});
pathsToBuild.push_back({store->parseStorePath(drvPath), {outputName}});
std::string drvPrefix;
auto i = drvPrefixes.find(drvPath);
@ -526,6 +527,8 @@ static void _main(int argc, char * * argv)
if (auto store2 = store.dynamic_pointer_cast<LocalFSStore>())
store2->addPermRoot(store->parseStorePath(symlink.second), absPath(symlink.first), true);
logger->stop();
for (auto & path : outPaths)
std::cout << path << '\n';
}

Some files were not shown because too many files have changed in this diff.