diff --git a/doc/manual/command-ref/conf-file.xml b/doc/manual/command-ref/conf-file.xml
index 9c55526a3..d0f1b09ca 100644
--- a/doc/manual/command-ref/conf-file.xml
+++ b/doc/manual/command-ref/conf-file.xml
@@ -370,6 +370,33 @@ false.
+ hashed-mirrors
+
+ A list of web servers used by
+ builtins.fetchurl to obtain files by hash.
+ Given a hash type ht and a base-16 hash
+ h, Nix will try to download the file
+ from
+ hashed-mirror/ht/h.
+ This allows files to be downloaded even if they have disappeared
+ from their original URI. For example, given the hashed mirror
+ http://tarballs.example.com/, when building the
+ derivation
+
+
+builtins.fetchurl {
+ url = "https://example.org/foo-1.2.3.tar.xz";
+ sha256 = "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae";
+}
+
+
+ Nix will attempt to download this file from
+ http://tarballs.example.com/sha256/2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae
+ first. If it is not available there, it will try the original URI.
+
+
+
+
http-connections
The maximum number of parallel TCP connections
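
    The fetchurl built-in change later in this patch (src/libstore/builtins/fetchurl.cc)
    implements the lookup described above. As a minimal, hedged sketch of just the URL
    construction, assuming plain std::string inputs and a hypothetical helper name:

        // Sketch only: compose a hashed-mirror URL from a hash type name and a
        // base-16 hash, as documented above. Helper name and signature are
        // illustrative, not part of the patch.
        #include <string>

        std::string hashedMirrorUrl(std::string mirror,
            const std::string & hashTypeName,  // e.g. "sha256"
            const std::string & base16Hash)
        {
            if (mirror.empty() || mirror.back() != '/') mirror += '/';
            return mirror + hashTypeName + "/" + base16Hash;
        }

        // hashedMirrorUrl("http://tarballs.example.com/", "sha256", "2c26b4...")
        //   yields "http://tarballs.example.com/sha256/2c26b4..."
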
diff --git a/perl/lib/Nix/Store.xs b/perl/lib/Nix/Store.xs
index de60fb939..4e038866e 100644
--- a/perl/lib/Nix/Store.xs
+++ b/perl/lib/Nix/Store.xs
@@ -80,7 +80,7 @@ SV * queryReferences(char * path)
SV * queryPathHash(char * path)
PPCODE:
try {
- auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash->to_string(Base32, true);
+ auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash.to_string(Base32, true);
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
} catch (Error & e) {
croak("%s", e.what());
@@ -106,7 +106,7 @@ SV * queryPathInfo(char * path, int base32)
XPUSHs(&PL_sv_undef);
else
XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(*info->deriver).c_str(), 0)));
- auto s = info->narHash->to_string(base32 ? Base32 : Base16, true);
+ auto s = info->narHash.to_string(base32 ? Base32 : Base16, true);
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
mXPUSHi(info->registrationTime);
mXPUSHi(info->narSize);
@@ -303,10 +303,10 @@ SV * derivationFromPath(char * drvPath)
hash = newHV();
HV * outputs = newHV();
- for (auto & i : drv.outputs)
+ for (auto & i : drv.outputsAndPaths(*store()))
hv_store(
outputs, i.first.c_str(), i.first.size(),
- newSVpv(store()->printStorePath(i.second.path(*store(), drv.name)).c_str(), 0),
+ newSVpv(store()->printStorePath(i.second.second).c_str(), 0),
0);
hv_stores(hash, "outputs", newRV((SV *) outputs));
diff --git a/src/build-remote/build-remote.cc b/src/build-remote/build-remote.cc
index 5247cefa6..ce5127113 100644
--- a/src/build-remote/build-remote.cc
+++ b/src/build-remote/build-remote.cc
@@ -38,9 +38,9 @@ static AutoCloseFD openSlotLock(const Machine & m, uint64_t slot)
return openLockFile(fmt("%s/%s-%d", currentLoad, escapeUri(m.storeUri), slot), true);
}
-static bool allSupportedLocally(const std::set<std::string>& requiredFeatures) {
+static bool allSupportedLocally(Store & store, const std::set<std::string>& requiredFeatures) {
for (auto & feature : requiredFeatures)
- if (!settings.systemFeatures.get().count(feature)) return false;
+ if (!store.systemFeatures.get().count(feature)) return false;
return true;
}
@@ -106,7 +106,7 @@ static int _main(int argc, char * * argv)
auto canBuildLocally = amWilling
&& ( neededSystem == settings.thisSystem
|| settings.extraPlatforms.get().count(neededSystem) > 0)
- && allSupportedLocally(requiredFeatures);
+ && allSupportedLocally(*store, requiredFeatures);
/* Error ignored here, will be caught later */
mkdir(currentLoad.c_str(), 0777);
@@ -201,7 +201,7 @@ static int _main(int argc, char * * argv)
% concatStringsSep(", ", m.mandatoryFeatures);
}
- logError({
+ logErrorInfo(lvlInfo, {
.name = "Remote build",
.description = "Failed to find a machine for remote build!",
.hint = hint
@@ -224,15 +224,7 @@ static int _main(int argc, char * * argv)
Activity act(*logger, lvlTalkative, actUnknown, fmt("connecting to '%s'", bestMachine->storeUri));
- Store::Params storeParams;
- if (hasPrefix(bestMachine->storeUri, "ssh://")) {
- storeParams["max-connections"] = "1";
- storeParams["log-fd"] = "4";
- if (bestMachine->sshKey != "")
- storeParams["ssh-key"] = bestMachine->sshKey;
- }
-
- sshStore = openStore(bestMachine->storeUri, storeParams);
+ sshStore = bestMachine->openStore();
sshStore->connect();
storeUri = bestMachine->storeUri;
diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index 65d36ca0e..30f4c3529 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -113,9 +113,9 @@ static void prim_scopedImport(EvalState & state, const Pos & pos, Value * * args
state.mkList(*outputsVal, drv.outputs.size());
unsigned int outputs_index = 0;
- for (const auto & o : drv.outputs) {
+ for (const auto & o : drv.outputsAndPaths(*state.store)) {
v2 = state.allocAttr(w, state.symbols.create(o.first));
- mkString(*v2, state.store->printStorePath(o.second.path(*state.store, drv.name)), {"!" + o.first + "!" + path});
+ mkString(*v2, state.store->printStorePath(o.second.second), {"!" + o.first + "!" + path});
outputsVal->listElems()[outputs_index] = state.allocValue();
mkString(*(outputsVal->listElems()[outputs_index++]), o.first);
}
@@ -781,7 +781,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
Hash h = newHashAllowEmpty(*outputHash, ht);
auto outPath = state.store->makeFixedOutputPath(ingestionMethod, h, drvName);
- if (!jsonObject) drv.env["out"] = state.store->printStorePath(outPath);
+ drv.env["out"] = state.store->printStorePath(outPath);
drv.outputs.insert_or_assign("out", DerivationOutput {
.output = DerivationOutputCAFixed {
.hash = FixedOutputHash {
@@ -795,7 +795,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
else if (contentAddressed) {
HashType ht = parseHashType(outputHashAlgo);
for (auto & i : outputs) {
- if (!jsonObject) drv.env[i] = hashPlaceholder(i);
+ drv.env[i] = hashPlaceholder(i);
drv.outputs.insert_or_assign(i, DerivationOutput {
.output = DerivationOutputCAFloating {
.method = ingestionMethod,
@@ -813,7 +813,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
that changes in the set of output names do get reflected in
the hash. */
for (auto & i : outputs) {
- if (!jsonObject) drv.env[i] = "";
+ drv.env[i] = "";
drv.outputs.insert_or_assign(i,
DerivationOutput {
.output = DerivationOutputInputAddressed {
@@ -828,7 +828,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
for (auto & i : outputs) {
auto outPath = state.store->makeOutputPath(i, h, drvName);
- if (!jsonObject) drv.env[i] = state.store->printStorePath(outPath);
+ drv.env[i] = state.store->printStorePath(outPath);
drv.outputs.insert_or_assign(i,
DerivationOutput {
.output = DerivationOutputInputAddressed {
@@ -839,7 +839,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
}
/* Write the resulting term into the Nix store directory. */
- auto drvPath = writeDerivation(state.store, drv, drvName, state.repair);
+ auto drvPath = writeDerivation(state.store, drv, state.repair);
auto drvPathS = state.store->printStorePath(drvPath);
printMsg(lvlChatty, "instantiated '%1%' -> '%2%'", drvName, drvPathS);
@@ -852,9 +852,9 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
state.mkAttrs(v, 1 + drv.outputs.size());
mkString(*state.allocAttr(v, state.sDrvPath), drvPathS, {"=" + drvPathS});
- for (auto & i : drv.outputs) {
+ for (auto & i : drv.outputsAndPaths(*state.store)) {
mkString(*state.allocAttr(v, state.symbols.create(i.first)),
- state.store->printStorePath(i.second.path(*state.store, drv.name)), {"!" + i.first + "!" + drvPathS});
+ state.store->printStorePath(i.second.second), {"!" + i.first + "!" + drvPathS});
}
v.attrs->sort();
}
diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc
index cddcf0e59..0dbf4ae1d 100644
--- a/src/libexpr/primops/fetchTree.cc
+++ b/src/libexpr/primops/fetchTree.cc
@@ -212,7 +212,7 @@ static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
: hashFile(htSHA256, path);
if (hash != *expectedHash)
throw Error((unsigned int) 102, "hash mismatch in file downloaded from '%s':\n wanted: %s\n got: %s",
- *url, expectedHash->to_string(Base32, true), hash->to_string(Base32, true));
+ *url, expectedHash->to_string(Base32, true), hash.to_string(Base32, true));
}
if (state.allowedPaths)
diff --git a/src/libfetchers/fetchers.cc b/src/libfetchers/fetchers.cc
index 9c69fc564..eaa635595 100644
--- a/src/libfetchers/fetchers.cc
+++ b/src/libfetchers/fetchers.cc
@@ -130,12 +130,12 @@ std::pair<Tree, Input> Input::fetch(ref<Store> store) const
tree.actualPath = store->toRealPath(tree.storePath);
auto narHash = store->queryPathInfo(tree.storePath)->narHash;
- input.attrs.insert_or_assign("narHash", narHash->to_string(SRI, true));
+ input.attrs.insert_or_assign("narHash", narHash.to_string(SRI, true));
if (auto prevNarHash = getNarHash()) {
if (narHash != *prevNarHash)
throw Error((unsigned int) 102, "NAR hash mismatch in input '%s' (%s), expected '%s', got '%s'",
- to_string(), tree.actualPath, prevNarHash->to_string(SRI, true), narHash->to_string(SRI, true));
+ to_string(), tree.actualPath, prevNarHash->to_string(SRI, true), narHash.to_string(SRI, true));
}
if (auto prevLastModified = getLastModified()) {
diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc
index 8b6e047f1..5ca0f8521 100644
--- a/src/libfetchers/git.cc
+++ b/src/libfetchers/git.cc
@@ -269,7 +269,7 @@ struct GitInputScheme : InputScheme
// modified dirty file?
input.attrs.insert_or_assign(
"lastModified",
- haveCommits ? std::stoull(runProgram("git", true, { "-C", actualUrl, "log", "-1", "--format=%ct", "HEAD" })) : 0);
+ haveCommits ? std::stoull(runProgram("git", true, { "-C", actualUrl, "log", "-1", "--format=%ct", "--no-show-signature", "HEAD" })) : 0);
return {
Tree(store->printStorePath(storePath), std::move(storePath)),
@@ -421,7 +421,7 @@ struct GitInputScheme : InputScheme
auto storePath = store->addToStore(name, tmpDir, FileIngestionMethod::Recursive, htSHA256, filter);
- auto lastModified = std::stoull(runProgram("git", true, { "-C", repoDir, "log", "-1", "--format=%ct", input.getRev()->gitRev() }));
+ auto lastModified = std::stoull(runProgram("git", true, { "-C", repoDir, "log", "-1", "--format=%ct", "--no-show-signature", input.getRev()->gitRev() }));
Attrs infoAttrs({
{"rev", input.getRev()->gitRev()},
diff --git a/src/libfetchers/tarball.cc b/src/libfetchers/tarball.cc
index 55158cece..a2d16365e 100644
--- a/src/libfetchers/tarball.cc
+++ b/src/libfetchers/tarball.cc
@@ -67,8 +67,10 @@ DownloadFileResult downloadFile(
StringSink sink;
dumpString(*res.data, sink);
auto hash = hashString(htSHA256, *res.data);
- ValidPathInfo info(store->makeFixedOutputPath(FileIngestionMethod::Flat, hash, name));
- info.narHash = hashString(htSHA256, *sink.s);
+ ValidPathInfo info {
+ store->makeFixedOutputPath(FileIngestionMethod::Flat, hash, name),
+ hashString(htSHA256, *sink.s),
+ };
info.narSize = sink.s->size();
info.ca = FixedOutputHash {
.method = FileIngestionMethod::Flat,
diff --git a/src/libmain/progress-bar.cc b/src/libmain/progress-bar.cc
index 3f7d99a1d..be3c06a38 100644
--- a/src/libmain/progress-bar.cc
+++ b/src/libmain/progress-bar.cc
@@ -362,7 +362,7 @@ public:
auto width = getWindowSize().second;
if (width <= 0) width = std::numeric_limits<decltype(width)>::max();
- writeToStderr("\r" + filterANSIEscapes(line, false, width) + "\e[K");
+ writeToStderr("\r" + filterANSIEscapes(line, false, width) + ANSI_NORMAL + "\e[K");
}
std::string getStatus(State & state)
diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc
index 9682db730..5433fe50d 100644
--- a/src/libstore/binary-cache-store.cc
+++ b/src/libstore/binary-cache-store.cc
@@ -143,7 +143,7 @@ struct FileSource : FdSource
void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource,
RepairFlag repair, CheckSigsFlag checkSigs)
{
- assert(info.narHash && info.narSize);
+ assert(info.narSize);
if (!repair && isValidPath(info.path)) {
// FIXME: copyNAR -> null sink
@@ -219,7 +219,7 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource
}
}
- upsertFile(std::string(info.path.to_string()) + ".ls", jsonOut.str(), "application/json");
+ upsertFile(std::string(info.path.hashPart()) + ".ls", jsonOut.str(), "application/json");
}
/* Optionally maintain an index of DWARF debug info files
@@ -312,14 +312,10 @@ void BinaryCacheStore::narFromPath(const StorePath & storePath, Sink & sink)
{
auto info = queryPathInfo(storePath).cast<const NarInfo>();
- uint64_t narSize = 0;
+ LengthSink narSize;
+ TeeSink tee { sink, narSize };
- LambdaSink wrapperSink([&](const unsigned char * data, size_t len) {
- sink(data, len);
- narSize += len;
- });
-
- auto decompressor = makeDecompressionSink(info->compression, wrapperSink);
+ auto decompressor = makeDecompressionSink(info->compression, tee);
try {
getFile(info->url, *decompressor);
@@ -331,7 +327,7 @@ void BinaryCacheStore::narFromPath(const StorePath & storePath, Sink & sink)
stats.narRead++;
//stats.narReadCompressedBytes += nar->size(); // FIXME
- stats.narReadBytes += narSize;
+ stats.narReadBytes += narSize.length;
}
void BinaryCacheStore::queryPathInfoUncached(const StorePath & storePath,
@@ -385,7 +381,10 @@ StorePath BinaryCacheStore::addToStore(const string & name, const Path & srcPath
h = hashString(hashAlgo, s);
}
- ValidPathInfo info(makeFixedOutputPath(method, *h, name));
+ ValidPathInfo info {
+ makeFixedOutputPath(method, *h, name),
+ Hash::dummy, // Will be fixed in addToStore, which recomputes nar hash
+ };
auto source = StringSource { *sink.s };
addToStore(info, source, repair, CheckSigs);
@@ -396,7 +395,10 @@ StorePath BinaryCacheStore::addToStore(const string & name, const Path & srcPath
StorePath BinaryCacheStore::addTextToStore(const string & name, const string & s,
const StorePathSet & references, RepairFlag repair)
{
- ValidPathInfo info(computeStorePathForText(name, s, references));
+ ValidPathInfo info {
+ computeStorePathForText(name, s, references),
+ Hash::dummy, // Will be fixed in addToStore, which recomputes nar hash
+ };
info.references = references;
if (repair || !isValidPath(info.path)) {
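
    For the narFromPath() hunk above: the LambdaSink byte counter is replaced by a
    LengthSink wired through a TeeSink, so the decompressed NAR bytes are both forwarded
    and counted. A minimal counting sink in that spirit, assuming the Sink interface
    visible in the removed LambdaSink code (a sketch, not the library's LengthSink):

        #include <cstddef>
        #include <cstdint>

        // Stand-in for the library's abstract Sink interface (assumption).
        struct Sink
        {
            virtual ~Sink() { }
            virtual void operator () (const unsigned char * data, size_t len) = 0;
        };

        // Counts bytes written through it; a tee-style sink can forward the same
        // data to the real destination while this one records the NAR size.
        struct CountingSink : Sink
        {
            uint64_t length = 0;
            void operator () (const unsigned char *, size_t len) override
            {
                length += len;
            }
        };
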
diff --git a/src/libstore/build.cc b/src/libstore/build.cc
index d052d3c4c..092aa77ec 100644
--- a/src/libstore/build.cc
+++ b/src/libstore/build.cc
@@ -1181,8 +1181,8 @@ void DerivationGoal::haveDerivation()
retrySubstitution = false;
- for (auto & i : drv->outputs)
- worker.store.addTempRoot(i.second.path(worker.store, drv->name));
+ for (auto & i : drv->outputsAndPaths(worker.store))
+ worker.store.addTempRoot(i.second.second);
/* Check what outputs paths are not already valid. */
auto invalidOutputs = checkPathValidity(false, buildMode == bmRepair);
@@ -1288,14 +1288,14 @@ void DerivationGoal::repairClosure()
/* Get the output closure. */
StorePathSet outputClosure;
- for (auto & i : drv->outputs) {
+ for (auto & i : drv->outputsAndPaths(worker.store)) {
if (!wantOutput(i.first, wantedOutputs)) continue;
- worker.store.computeFSClosure(i.second.path(worker.store, drv->name), outputClosure);
+ worker.store.computeFSClosure(i.second.second, outputClosure);
}
/* Filter out our own outputs (which we have already checked). */
- for (auto & i : drv->outputs)
- outputClosure.erase(i.second.path(worker.store, drv->name));
+ for (auto & i : drv->outputsAndPaths(worker.store))
+ outputClosure.erase(i.second.second);
/* Get all dependencies of this derivation so that we know which
derivation is responsible for which path in the output
@@ -1306,8 +1306,8 @@ void DerivationGoal::repairClosure()
for (auto & i : inputClosure)
if (i.isDerivation()) {
Derivation drv = worker.store.derivationFromPath(i);
- for (auto & j : drv.outputs)
- outputsToDrv.insert_or_assign(j.second.path(worker.store, drv.name), i);
+ for (auto & j : drv.outputsAndPaths(worker.store))
+ outputsToDrv.insert_or_assign(j.second.second, i);
}
/* Check each path (slow!). */
@@ -1466,16 +1466,16 @@ void DerivationGoal::tryToBuild()
/* If any of the outputs already exist but are not valid, delete
them. */
- for (auto & i : drv->outputs) {
- if (worker.store.isValidPath(i.second.path(worker.store, drv->name))) continue;
- debug("removing invalid path '%s'", worker.store.printStorePath(i.second.path(worker.store, drv->name)));
- deletePath(worker.store.Store::toRealPath(i.second.path(worker.store, drv->name)));
+ for (auto & i : drv->outputsAndPaths(worker.store)) {
+ if (worker.store.isValidPath(i.second.second)) continue;
+ debug("removing invalid path '%s'", worker.store.printStorePath(i.second.second));
+ deletePath(worker.store.Store::toRealPath(i.second.second));
}
/* Don't do a remote build if the derivation has the attribute
`preferLocalBuild' set. Also, check and repair modes are only
supported for local builds. */
- bool buildLocally = buildMode != bmNormal || parsedDrv->willBuildLocally();
+ bool buildLocally = buildMode != bmNormal || parsedDrv->willBuildLocally(worker.store);
/* Is the build hook willing to accept this job? */
if (!buildLocally) {
@@ -1919,8 +1919,8 @@ StorePathSet DerivationGoal::exportReferences(const StorePathSet & storePaths)
for (auto & j : paths2) {
if (j.isDerivation()) {
Derivation drv = worker.store.derivationFromPath(j);
- for (auto & k : drv.outputs)
- worker.store.computeFSClosure(k.second.path(worker.store, drv.name), paths);
+ for (auto & k : drv.outputsAndPaths(worker.store))
+ worker.store.computeFSClosure(k.second.second, paths);
}
}
@@ -1964,13 +1964,13 @@ void linkOrCopy(const Path & from, const Path & to)
void DerivationGoal::startBuilder()
{
/* Right platform? */
- if (!parsedDrv->canBuildLocally())
+ if (!parsedDrv->canBuildLocally(worker.store))
throw Error("a '%s' with features {%s} is required to build '%s', but I am a '%s' with features {%s}",
drv->platform,
concatStringsSep(", ", parsedDrv->getRequiredSystemFeatures()),
worker.store.printStorePath(drvPath),
settings.thisSystem,
- concatStringsSep(", ", settings.systemFeatures));
+ concatStringsSep(", ", worker.store.systemFeatures));
if (drv->isBuiltin())
preloadNSS();
@@ -2014,8 +2014,8 @@ void DerivationGoal::startBuilder()
chownToBuilder(tmpDir);
/* Substitute output placeholders with the actual output paths. */
- for (auto & output : drv->outputs)
- inputRewrites[hashPlaceholder(output.first)] = worker.store.printStorePath(output.second.path(worker.store, drv->name));
+ for (auto & output : drv->outputsAndPaths(worker.store))
+ inputRewrites[hashPlaceholder(output.first)] = worker.store.printStorePath(output.second.second);
/* Construct the environment passed to the builder. */
initEnv();
@@ -2199,8 +2199,8 @@ void DerivationGoal::startBuilder()
rebuilding a path that is in settings.dirsInChroot
(typically the dependencies of /bin/sh). Throw them
out. */
- for (auto & i : drv->outputs)
- dirsInChroot.erase(worker.store.printStorePath(i.second.path(worker.store, drv->name)));
+ for (auto & i : drv->outputsAndPaths(worker.store))
+ dirsInChroot.erase(worker.store.printStorePath(i.second.second));
#elif __APPLE__
/* We don't really have any parent prep work to do (yet?)
@@ -2612,8 +2612,8 @@ void DerivationGoal::writeStructuredAttrs()
/* Add an "outputs" object containing the output paths. */
nlohmann::json outputs;
- for (auto & i : drv->outputs)
- outputs[i.first] = rewriteStrings(worker.store.printStorePath(i.second.path(worker.store, drv->name)), inputRewrites);
+ for (auto & i : drv->outputsAndPaths(worker.store))
+ outputs[i.first] = rewriteStrings(worker.store.printStorePath(i.second.second), inputRewrites);
json["outputs"] = outputs;
/* Handle exportReferencesGraph. */
@@ -2819,9 +2819,9 @@ struct RestrictedStore : public LocalFSStore
if (!goal.isAllowed(path.path))
throw InvalidPath("cannot build unknown path '%s' in recursive Nix", printStorePath(path.path));
auto drv = derivationFromPath(path.path);
- for (auto & output : drv.outputs)
+ for (auto & output : drv.outputsAndPaths(*this))
if (wantOutput(output.first, path.outputs))
- newPaths.insert(output.second.path(*this, drv.name));
+ newPaths.insert(output.second.second);
} else if (!goal.isAllowed(path.path))
throw InvalidPath("cannot build unknown path '%s' in recursive Nix", printStorePath(path.path));
}
@@ -2924,7 +2924,8 @@ void DerivationGoal::startDaemon()
FdSink to(remote.get());
try {
daemon::processConnection(store, from, to,
- daemon::NotTrusted, daemon::Recursive, "nobody", 65535);
+ daemon::NotTrusted, daemon::Recursive,
+ [&](Store & store) { store.createUser("nobody", 65535); });
debug("terminated daemon connection");
} catch (SysError &) {
ignoreException();
@@ -3183,7 +3184,7 @@ void DerivationGoal::runChild()
createDirs(chrootRootDir + "/dev/shm");
createDirs(chrootRootDir + "/dev/pts");
ss.push_back("/dev/full");
- if (settings.systemFeatures.get().count("kvm") && pathExists("/dev/kvm"))
+ if (worker.store.systemFeatures.get().count("kvm") && pathExists("/dev/kvm"))
ss.push_back("/dev/kvm");
ss.push_back("/dev/null");
ss.push_back("/dev/random");
@@ -3620,8 +3621,8 @@ void DerivationGoal::registerOutputs()
to do anything here. */
if (hook) {
bool allValid = true;
- for (auto & i : drv->outputs)
- if (!worker.store.isValidPath(i.second.path(worker.store, drv->name))) allValid = false;
+ for (auto & i : drv->outputsAndPaths(worker.store))
+ if (!worker.store.isValidPath(i.second.second)) allValid = false;
if (allValid) return;
}
@@ -3642,23 +3643,23 @@ void DerivationGoal::registerOutputs()
Nix calls. */
StorePathSet referenceablePaths;
for (auto & p : inputPaths) referenceablePaths.insert(p);
- for (auto & i : drv->outputs) referenceablePaths.insert(i.second.path(worker.store, drv->name));
+ for (auto & i : drv->outputsAndPaths(worker.store)) referenceablePaths.insert(i.second.second);
for (auto & p : addedPaths) referenceablePaths.insert(p);
/* Check whether the output paths were created, and grep each
output path to determine what other paths it references. Also make all
output paths read-only. */
- for (auto & i : drv->outputs) {
- auto path = worker.store.printStorePath(i.second.path(worker.store, drv->name));
- if (!missingPaths.count(i.second.path(worker.store, drv->name))) continue;
+ for (auto & i : drv->outputsAndPaths(worker.store)) {
+ auto path = worker.store.printStorePath(i.second.second);
+ if (!missingPaths.count(i.second.second)) continue;
Path actualPath = path;
if (needsHashRewrite()) {
- auto r = redirectedOutputs.find(i.second.path(worker.store, drv->name));
+ auto r = redirectedOutputs.find(i.second.second);
if (r != redirectedOutputs.end()) {
auto redirected = worker.store.Store::toRealPath(r->second);
if (buildMode == bmRepair
- && redirectedBadOutputs.count(i.second.path(worker.store, drv->name))
+ && redirectedBadOutputs.count(i.second.second)
&& pathExists(redirected))
replaceValidPath(path, redirected);
if (buildMode == bmCheck)
@@ -3725,7 +3726,7 @@ void DerivationGoal::registerOutputs()
hash). */
std::optional<ContentAddress> ca;
- if (! std::holds_alternative<DerivationOutputInputAddressed>(i.second.output)) {
+ if (! std::holds_alternative<DerivationOutputInputAddressed>(i.second.first.output)) {
DerivationOutputCAFloating outputHash;
std::visit(overloaded {
[&](DerivationOutputInputAddressed doi) {
@@ -3740,7 +3741,7 @@ void DerivationGoal::registerOutputs()
[&](DerivationOutputCAFloating dof) {
outputHash = dof;
},
- }, i.second.output);
+ }, i.second.first.output);
if (outputHash.method == FileIngestionMethod::Flat) {
/* The output path should be a regular file without execute permission. */
@@ -3757,12 +3758,12 @@ void DerivationGoal::registerOutputs()
? hashPath(outputHash.hashType, actualPath).first
: hashFile(outputHash.hashType, actualPath);
- auto dest = worker.store.makeFixedOutputPath(outputHash.method, h2, i.second.path(worker.store, drv->name).name());
+ auto dest = worker.store.makeFixedOutputPath(outputHash.method, h2, i.second.second.name());
// true if either floating CA, or incorrect fixed hash.
bool needsMove = true;
- if (auto p = std::get_if<DerivationOutputCAFixed>(& i.second.output)) {
+ if (auto p = std::get_if<DerivationOutputCAFixed>(& i.second.first.output)) {
Hash & h = p->hash.hash;
if (h != h2) {
@@ -3868,8 +3869,10 @@ void DerivationGoal::registerOutputs()
worker.markContentsGood(worker.store.parseStorePath(path));
}
- ValidPathInfo info(worker.store.parseStorePath(path));
- info.narHash = hash.first;
+ ValidPathInfo info {
+ worker.store.parseStorePath(path),
+ hash.first,
+ };
info.narSize = hash.second;
info.references = std::move(references);
info.deriver = drvPath;
@@ -3925,8 +3928,8 @@ void DerivationGoal::registerOutputs()
/* If this is the first round of several, then move the output out of the way. */
if (nrRounds > 1 && curRound == 1 && curRound < nrRounds && keepPreviousRound) {
- for (auto & i : drv->outputs) {
- auto path = worker.store.printStorePath(i.second.path(worker.store, drv->name));
+ for (auto & i : drv->outputsAndPaths(worker.store)) {
+ auto path = worker.store.printStorePath(i.second.second);
Path prev = path + checkSuffix;
deletePath(prev);
Path dst = path + checkSuffix;
@@ -3943,8 +3946,8 @@ void DerivationGoal::registerOutputs()
/* Remove the .check directories if we're done. FIXME: keep them
if the result was not deterministic? */
if (curRound == nrRounds) {
- for (auto & i : drv->outputs) {
- Path prev = worker.store.printStorePath(i.second.path(worker.store, drv->name)) + checkSuffix;
+ for (auto & i : drv->outputsAndPaths(worker.store)) {
+ Path prev = worker.store.printStorePath(i.second.second) + checkSuffix;
deletePath(prev);
}
}
@@ -4242,12 +4245,12 @@ void DerivationGoal::flushLine()
StorePathSet DerivationGoal::checkPathValidity(bool returnValid, bool checkHash)
{
StorePathSet result;
- for (auto & i : drv->outputs) {
+ for (auto & i : drv->outputsAndPaths(worker.store)) {
if (!wantOutput(i.first, wantedOutputs)) continue;
bool good =
- worker.store.isValidPath(i.second.path(worker.store, drv->name)) &&
- (!checkHash || worker.pathContentsGood(i.second.path(worker.store, drv->name)));
- if (good == returnValid) result.insert(i.second.path(worker.store, drv->name));
+ worker.store.isValidPath(i.second.second) &&
+ (!checkHash || worker.pathContentsGood(i.second.second));
+ if (good == returnValid) result.insert(i.second.second);
}
return result;
}
@@ -5074,7 +5077,7 @@ bool Worker::pathContentsGood(const StorePath & path)
if (!pathExists(store.printStorePath(path)))
res = false;
else {
- HashResult current = hashPath(info->narHash->type, store.printStorePath(path));
+ HashResult current = hashPath(info->narHash.type, store.printStorePath(path));
Hash nullHash(htSHA256);
res = info->narHash == nullHash || info->narHash == current.first;
}
diff --git a/src/libstore/builtins/fetchurl.cc b/src/libstore/builtins/fetchurl.cc
index 6585a480d..4fb5d8a06 100644
--- a/src/libstore/builtins/fetchurl.cc
+++ b/src/libstore/builtins/fetchurl.cc
@@ -58,6 +58,20 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData)
}
};
+ /* Try the hashed mirrors first. */
+ if (getAttr("outputHashMode") == "flat")
+ for (auto hashedMirror : settings.hashedMirrors.get())
+ try {
+ if (!hasSuffix(hashedMirror, "/")) hashedMirror += '/';
+ std::optional<HashType> ht = parseHashTypeOpt(getAttr("outputHashAlgo"));
+ Hash h = newHashAllowEmpty(getAttr("outputHash"), ht);
+ fetch(hashedMirror + printHashType(h.type) + "/" + h.to_string(Base16, false));
+ return;
+ } catch (Error & e) {
+ debug(e.what());
+ }
+
+ /* Otherwise try the specified URL. */
fetch(mainUrl);
}
diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc
index 148bd5cc9..a467f288e 100644
--- a/src/libstore/daemon.cc
+++ b/src/libstore/daemon.cc
@@ -289,7 +289,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
logger->startWork();
auto hash = store->queryPathInfo(path)->narHash;
logger->stopWork();
- to << hash->to_string(Base16, false);
+ to << hash.to_string(Base16, false);
break;
}
@@ -454,8 +454,46 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
readDerivation(from, *store, drv, Derivation::nameFromPath(drvPath));
BuildMode buildMode = (BuildMode) readInt(from);
logger->startWork();
- if (!trusted)
- throw Error("you are not privileged to build derivations");
+
+ /* Content-addressed derivations are trustless because their output paths
+ are verified by their content alone, so any derivation is free to
+ try to produce such a path.
+
+ Input-addressed derivation output paths, however, are calculated
+ from the derivation closure that produced them---even knowing the
+ root derivation is not enough. That the output data actually came
+ from those derivations is fundamentally unverifiable, but the daemon
+ trusts itself on that matter. The question instead is whether the
+ submitted plan has rights to the output paths it wants to fill, and
+ at least the derivation closure proves that.
+
+ It would have been nice if the input-addressing algorithm merely depended
+ on the build time closure, rather than depending on the derivation
+ closure. That would mean input-addressed paths used at build time
+ would just be trusted and not need their own evidence. This is in
+ fact fine as the same guarantees would hold *inductively*: either
+ the remote builder has those paths and already trusts them, or it
+ needs to build them too and thus their evidence must be provided in
+ turn. The advantage of this variant algorithm is that the evidence
+ for input-addressed paths which the remote builder already has
+ doesn't need to be sent again.
+
+ That said, now that we have floating CA derivations, it is better
+ that people just migrate to those which also solve this problem, and
+ others. It's the same migration difficulty with strictly more
+ benefit.
+
+ Lastly, do note that when we parse fixed-output content-addressed
+ derivations, we throw out the precomputed output paths and just
+ store the hashes, so there aren't two competing sources of truth an
+ attacker could exploit. */
+ if (drv.type() == DerivationType::InputAddressed && !trusted)
+ throw Error("you are not privileged to build input-addressed derivations");
+
+ /* Make sure that the non-input-addressed derivations that got this far
+ are in fact content-addressed if we don't trust them. */
+ assert(derivationIsCA(drv.type()) || trusted);
+
auto res = store->buildDerivation(drvPath, drv, buildMode);
logger->stopWork();
to << res.status << res.errorMsg;
@@ -638,7 +676,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
if (GET_PROTOCOL_MINOR(clientVersion) >= 17)
to << 1;
to << (info->deriver ? store->printStorePath(*info->deriver) : "")
- << info->narHash->to_string(Base16, false);
+ << info->narHash.to_string(Base16, false);
WorkerProto::write(*store, to, info->references);
to << info->registrationTime << info->narSize;
if (GET_PROTOCOL_MINOR(clientVersion) >= 16) {
@@ -688,17 +726,18 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
auto path = store->parseStorePath(readString(from));
logger->startWork();
logger->stopWork();
- dumpPath(store->printStorePath(path), to);
+ dumpPath(store->toRealPath(path), to);
break;
}
case wopAddToStoreNar: {
bool repair, dontCheckSigs;
- ValidPathInfo info(store->parseStorePath(readString(from)));
+ auto path = store->parseStorePath(readString(from));
auto deriver = readString(from);
+ auto narHash = Hash::parseAny(readString(from), htSHA256);
+ ValidPathInfo info { path, narHash };
if (deriver != "")
info.deriver = store->parseStorePath(deriver);
- info.narHash = Hash::parseAny(readString(from), htSHA256);
info.references = WorkerProto::read(*store, from);
from >> info.registrationTime >> info.narSize >> info.ultimate;
info.sigs = readStrings(from);
@@ -817,8 +856,7 @@ void processConnection(
FdSink & to,
TrustedFlag trusted,
RecursiveFlag recursive,
- const std::string & userName,
- uid_t userId)
+ std::function<void(Store &)> authHook)
{
auto monitor = !recursive ? std::make_unique<MonitorFdHup>(from.fd) : nullptr;
@@ -859,15 +897,7 @@ void processConnection(
/* If we can't accept clientVersion, then throw an error
*here* (not above). */
-
-#if 0
- /* Prevent users from doing something very dangerous. */
- if (geteuid() == 0 &&
- querySetting("build-users-group", "") == "")
- throw Error("if you run 'nix-daemon' as root, then you MUST set 'build-users-group'!");
-#endif
-
- store->createUser(userName, userId);
+ authHook(*store);
tunnelLogger->stopWork();
to.flush();
diff --git a/src/libstore/daemon.hh b/src/libstore/daemon.hh
index 266932013..841ace316 100644
--- a/src/libstore/daemon.hh
+++ b/src/libstore/daemon.hh
@@ -12,7 +12,10 @@ void processConnection(
FdSink & to,
TrustedFlag trusted,
RecursiveFlag recursive,
- const std::string & userName,
- uid_t userId);
+ /* Arbitrary hook to check authorization / initialize user data / whatever
+ after the protocol has been negotiated. The idea is that this function
+ and everything it calls don't know about this stuff, and the
+ `nix-daemon` handles that instead. */
+ std::function<void(Store &)> authHook);
}
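
    A hedged sketch of a call site for the reworked processConnection() interface (the
    wrapper function and connection objects are assumed; the lambda mirrors the
    recursive-Nix call site changed earlier in this patch):

        #include "daemon.hh"

        // Sketch only: the caller decides what authorization / user setup means,
        // and processConnection() just invokes the hook after version negotiation.
        void serveConnection(ref<Store> store, FdSource & from, FdSink & to)
        {
            daemon::processConnection(store, from, to,
                daemon::NotTrusted, daemon::Recursive,
                [&](Store & s) {
                    s.createUser("nobody", 65535);
                });
        }
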
diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc
index bf2758ae5..607763075 100644
--- a/src/libstore/derivations.cc
+++ b/src/libstore/derivations.cc
@@ -62,7 +62,7 @@ bool BasicDerivation::isBuiltin() const
StorePath writeDerivation(ref<Store> store,
- const Derivation & drv, std::string_view name, RepairFlag repair)
+ const Derivation & drv, RepairFlag repair)
{
auto references = drv.inputSrcs;
for (auto & i : drv.inputDrvs)
@@ -70,7 +70,7 @@ StorePath writeDerivation(ref<Store> store,
/* Note that the outputs of a derivation are *not* references
(that can be missing (of course) and should not necessarily be
held during a garbage collection). */
- auto suffix = std::string(name) + drvExtension;
+ auto suffix = std::string(drv.name) + drvExtension;
auto contents = drv.unparse(*store, false);
return settings.readOnlyMode
? store->computeStorePathForText(suffix, contents, references)
@@ -139,18 +139,14 @@ static StringSet parseStrings(std::istream & str, bool arePaths)
}
-static DerivationOutput parseDerivationOutput(const Store & store, std::istringstream & str)
+static DerivationOutput parseDerivationOutput(const Store & store,
+ StorePath path, std::string_view hashAlgo, std::string_view hash)
{
- expect(str, ","); auto path = store.parseStorePath(parsePath(str));
- expect(str, ","); auto hashAlgo = parseString(str);
- expect(str, ","); const auto hash = parseString(str);
- expect(str, ")");
-
if (hashAlgo != "") {
auto method = FileIngestionMethod::Flat;
if (string(hashAlgo, 0, 2) == "r:") {
method = FileIngestionMethod::Recursive;
- hashAlgo = string(hashAlgo, 2);
+ hashAlgo = hashAlgo.substr(2);
}
const HashType hashType = parseHashType(hashAlgo);
@@ -178,6 +174,16 @@ static DerivationOutput parseDerivationOutput(const Store & store, std::istringstream & str)
};
}
+static DerivationOutput parseDerivationOutput(const Store & store, std::istringstream & str)
+{
+ expect(str, ","); auto path = store.parseStorePath(parsePath(str));
+ expect(str, ","); const auto hashAlgo = parseString(str);
+ expect(str, ","); const auto hash = parseString(str);
+ expect(str, ")");
+
+ return parseDerivationOutput(store, std::move(path), hashAlgo, hash);
+}
+
static Derivation parseDerivation(const Store & store, std::string && s, std::string_view name)
{
@@ -474,12 +480,12 @@ DrvHashModulo hashDerivationModulo(Store & store, const Derivation & drv, bool m
throw Error("Regular input-addressed derivations are not yet allowed to depend on CA derivations");
case DerivationType::CAFixed: {
std::map<std::string, Hash> outputHashes;
- for (const auto & i : drv.outputs) {
- auto & dof = std::get<DerivationOutputCAFixed>(i.second.output);
+ for (const auto & i : drv.outputsAndPaths(store)) {
+ auto & dof = std::get<DerivationOutputCAFixed>(i.second.first.output);
auto hash = hashString(htSHA256, "fixed:out:"
+ dof.hash.printMethodAlgo() + ":"
+ dof.hash.hash.to_string(Base16, false) + ":"
- + store.printStorePath(i.second.path(store, drv.name)));
+ + store.printStorePath(i.second.second));
outputHashes.insert_or_assign(i.first, std::move(hash));
}
return outputHashes;
@@ -533,46 +539,18 @@ bool wantOutput(const string & output, const std::set<string> & wanted)
StorePathSet BasicDerivation::outputPaths(const Store & store) const
{
StorePathSet paths;
- for (auto & i : outputs)
- paths.insert(i.second.path(store, name));
+ for (auto & i : outputsAndPaths(store))
+ paths.insert(i.second.second);
return paths;
}
static DerivationOutput readDerivationOutput(Source & in, const Store & store)
{
auto path = store.parseStorePath(readString(in));
- auto hashAlgo = readString(in);
- auto hash = readString(in);
+ const auto hashAlgo = readString(in);
+ const auto hash = readString(in);
- if (hashAlgo != "") {
- auto method = FileIngestionMethod::Flat;
- if (string(hashAlgo, 0, 2) == "r:") {
- method = FileIngestionMethod::Recursive;
- hashAlgo = string(hashAlgo, 2);
- }
- auto hashType = parseHashType(hashAlgo);
- return hash != ""
- ? DerivationOutput {
- .output = DerivationOutputCAFixed {
- .hash = FixedOutputHash {
- .method = std::move(method),
- .hash = Hash::parseNonSRIUnprefixed(hash, hashType),
- },
- }
- }
- : (settings.requireExperimentalFeature("ca-derivations"),
- DerivationOutput {
- .output = DerivationOutputCAFloating {
- .method = std::move(method),
- .hashType = std::move(hashType),
- },
- });
- } else
- return DerivationOutput {
- .output = DerivationOutputInputAddressed {
- .path = std::move(path),
- }
- };
+ return parseDerivationOutput(store, std::move(path), hashAlgo, hash);
}
StringSet BasicDerivation::outputNames() const
@@ -583,6 +561,27 @@ StringSet BasicDerivation::outputNames() const
return names;
}
+DerivationOutputsAndPaths BasicDerivation::outputsAndPaths(const Store & store) const {
+ DerivationOutputsAndPaths outsAndPaths;
+ for (auto output : outputs)
+ outsAndPaths.insert(std::make_pair(
+ output.first,
+ std::make_pair(output.second, output.second.path(store, name))
+ )
+ );
+ return outsAndPaths;
+}
+
+DerivationOutputsAndOptPaths BasicDerivation::outputsAndOptPaths(const Store & store) const {
+ DerivationOutputsAndOptPaths outsAndOptPaths;
+ for (auto output : outputs)
+ outsAndOptPaths.insert(std::make_pair(
+ output.first,
+ std::make_pair(output.second, output.second.pathOpt(store, output.first))
+ )
+ );
+ return outsAndOptPaths;
+}
std::string_view BasicDerivation::nameFromPath(const StorePath & drvPath) {
auto nameWithSuffix = drvPath.name();
@@ -623,9 +622,9 @@ Source & readDerivation(Source & in, const Store & store, BasicDerivation & drv,
void writeDerivation(Sink & out, const Store & store, const BasicDerivation & drv)
{
out << drv.outputs.size();
- for (auto & i : drv.outputs) {
+ for (auto & i : drv.outputsAndPaths(store)) {
out << i.first
- << store.printStorePath(i.second.path(store, drv.name));
+ << store.printStorePath(i.second.second);
std::visit(overloaded {
[&](DerivationOutputInputAddressed doi) {
out << "" << "";
@@ -638,7 +637,7 @@ void writeDerivation(Sink & out, const Store & store, const BasicDerivation & dr
out << (makeFileIngestionPrefix(dof.method) + printHashType(dof.hashType))
<< "";
},
- }, i.second.output);
+ }, i.second.first.output);
}
WorkerProto::write(store, out, drv.inputSrcs);
out << drv.platform << drv.builder << drv.args;
diff --git a/src/libstore/derivations.hh b/src/libstore/derivations.hh
index 14e0e947a..3aae30ab2 100644
--- a/src/libstore/derivations.hh
+++ b/src/libstore/derivations.hh
@@ -47,6 +47,9 @@ struct DerivationOutput
DerivationOutputCAFloating
> output;
std::optional hashAlgoOpt(const Store & store) const;
+ /* Note, when you use this function you should make sure that you're passing
+ the right derivation name. When in doubt, you should use the safer
+ interface provided by BasicDerivation::outputsAndPaths */
std::optional<StorePath> pathOpt(const Store & store, std::string_view drvName) const;
/* DEPRECATED: Remove after CA drvs are fully implemented */
StorePath path(const Store & store, std::string_view drvName) const {
@@ -58,6 +61,15 @@ struct DerivationOutput
typedef std::map<string, DerivationOutput> DerivationOutputs;
+/* These are analogues to the previous DerivationOutputs data type, but they
+ also contain, for each output, the (optional) store path in which it would
+ be written. To calculate values of these types, see the corresponding
+ functions in BasicDerivation */
+typedef std::map<string, std::pair<DerivationOutput, StorePath>>
+ DerivationOutputsAndPaths;
+typedef std::map<string, std::pair<DerivationOutput, std::optional<StorePath>>>
+ DerivationOutputsAndOptPaths;
+
/* For inputs that are sub-derivations, we specify exactly which
output IDs we are interested in. */
typedef std::map<StorePath, StringSet> DerivationInputs;
@@ -107,6 +119,13 @@ struct BasicDerivation
/* Return the output names of a derivation. */
StringSet outputNames() const;
+ /* Calculates the maps that contain all the DerivationOutputs, but
+ augmented with knowledge of the Store paths they would be written into.
+ The first one of these functions will be removed when the CA work is
+ completed */
+ DerivationOutputsAndPaths outputsAndPaths(const Store & store) const;
+ DerivationOutputsAndOptPaths outputsAndOptPaths(const Store & store) const;
+
static std::string_view nameFromPath(const StorePath & storePath);
};
@@ -128,7 +147,7 @@ enum RepairFlag : bool { NoRepair = false, Repair = true };
/* Write a derivation to the Nix store, and return its path. */
StorePath writeDerivation(ref<Store> store,
- const Derivation & drv, std::string_view name, RepairFlag repair = NoRepair);
+ const Derivation & drv, RepairFlag repair = NoRepair);
/* Read a derivation from a file. */
Derivation readDerivation(const Store & store, const Path & drvPath, std::string_view name);
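
    A hedged sketch of how the new outputsAndPaths() map is meant to be consumed; the
    dump function is hypothetical, but the iteration pattern is the one used by the call
    sites rewritten throughout this patch:

        #include <iostream>
        #include "derivations.hh"
        #include "store-api.hh"

        // i.first is the output name, i.second.first the DerivationOutput,
        // and i.second.second the StorePath it would be written to.
        void dumpOutputs(const Store & store, const BasicDerivation & drv)
        {
            for (auto & i : drv.outputsAndPaths(store))
                std::cout << i.first << " -> "
                          << store.printStorePath(i.second.second) << "\n";
        }
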
diff --git a/src/libstore/export-import.cc b/src/libstore/export-import.cc
index e2e52b0af..b59b34dee 100644
--- a/src/libstore/export-import.cc
+++ b/src/libstore/export-import.cc
@@ -38,9 +38,9 @@ void Store::exportPath(const StorePath & path, Sink & sink)
filesystem corruption from spreading to other machines.
Don't complain if the stored hash is zero (unknown). */
Hash hash = hashSink.currentHash().first;
- if (hash != info->narHash && info->narHash != Hash(info->narHash->type))
+ if (hash != info->narHash && info->narHash != Hash(info->narHash.type))
throw Error("hash of path '%s' has changed from '%s' to '%s'!",
- printStorePath(path), info->narHash->to_string(Base32, true), hash.to_string(Base32, true));
+ printStorePath(path), info->narHash.to_string(Base32, true), hash.to_string(Base32, true));
teeSink
<< exportMagic
@@ -69,17 +69,18 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs)
if (magic != exportMagic)
throw Error("Nix archive cannot be imported; wrong format");
- ValidPathInfo info(parseStorePath(readString(source)));
+ auto path = parseStorePath(readString(source));
//Activity act(*logger, lvlInfo, format("importing path '%s'") % info.path);
- info.references = WorkerProto::read(*this, source);
-
+ auto references = WorkerProto::read(*this, source);
auto deriver = readString(source);
+ auto narHash = hashString(htSHA256, *saved.s);
+
+ ValidPathInfo info { path, narHash };
if (deriver != "")
info.deriver = parseStorePath(deriver);
-
- info.narHash = hashString(htSHA256, *saved.s);
+ info.references = references;
info.narSize = saved.s->size();
// Ignore optional legacy signature.
diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh
index 3406a9331..e3bb4cf84 100644
--- a/src/libstore/globals.hh
+++ b/src/libstore/globals.hh
@@ -335,6 +335,9 @@ public:
"setuid/setgid bits or with file capabilities."};
#endif
+ Setting<Strings> hashedMirrors{this, {}, "hashed-mirrors",
+ "A list of servers used by builtins.fetchurl to fetch files by hash."};
+
Setting minFree{this, 0, "min-free",
"Automatically run the garbage collector when free disk space drops below the specified amount."};
diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc
index ee07b7156..74e1a498e 100644
--- a/src/libstore/legacy-ssh-store.cc
+++ b/src/libstore/legacy-ssh-store.cc
@@ -93,6 +93,9 @@ struct LegacySSHStore : public Store
try {
auto conn(connections->get());
+ /* No longer support missing NAR hash */
+ assert(GET_PROTOCOL_MINOR(conn->remoteVersion) >= 4);
+
debug("querying remote host '%s' for info on '%s'", host, printStorePath(path));
conn->to << cmdQueryPathInfos << PathSet{printStorePath(path)};
@@ -100,8 +103,10 @@ struct LegacySSHStore : public Store
auto p = readString(conn->from);
if (p.empty()) return callback(nullptr);
- auto info = std::make_shared<ValidPathInfo>(parseStorePath(p));
- assert(path == info->path);
+ auto path2 = parseStorePath(p);
+ assert(path == path2);
+ /* Hash will be set below. FIXME construct ValidPathInfo at end. */
+ auto info = std::make_shared<ValidPathInfo>(path, Hash::dummy);
PathSet references;
auto deriver = readString(conn->from);
@@ -111,12 +116,14 @@ struct LegacySSHStore : public Store
readLongLong(conn->from); // download size
info->narSize = readLongLong(conn->from);
- if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 4) {
+ {
auto s = readString(conn->from);
- info->narHash = s.empty() ? std::optional<Hash>{} : Hash::parseAnyPrefixed(s);
- info->ca = parseContentAddressOpt(readString(conn->from));
- info->sigs = readStrings(conn->from);
+ if (s == "")
+ throw Error("NAR hash is now mandatory");
+ info->narHash = Hash::parseAnyPrefixed(s);
}
+ info->ca = parseContentAddressOpt(readString(conn->from));
+ info->sigs = readStrings(conn->from);
auto s = readString(conn->from);
assert(s == "");
@@ -138,7 +145,7 @@ struct LegacySSHStore : public Store
<< cmdAddToStoreNar
<< printStorePath(info.path)
<< (info.deriver ? printStorePath(*info.deriver) : "")
- << info.narHash->to_string(Base16, false);
+ << info.narHash.to_string(Base16, false);
WorkerProto::write(*this, conn->to, info.references);
conn->to
<< info.registrationTime
@@ -202,6 +209,24 @@ struct LegacySSHStore : public Store
const StorePathSet & references, RepairFlag repair) override
{ unsupported("addTextToStore"); }
+private:
+
+ void putBuildSettings(Connection & conn)
+ {
+ conn.to
+ << settings.maxSilentTime
+ << settings.buildTimeout;
+ if (GET_PROTOCOL_MINOR(conn.remoteVersion) >= 2)
+ conn.to
+ << settings.maxLogSize;
+ if (GET_PROTOCOL_MINOR(conn.remoteVersion) >= 3)
+ conn.to
+ << settings.buildRepeat
+ << settings.enforceDeterminism;
+ }
+
+public:
+
BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv,
BuildMode buildMode) override
{
@@ -211,16 +236,8 @@ struct LegacySSHStore : public Store
<< cmdBuildDerivation
<< printStorePath(drvPath);
writeDerivation(conn->to, *this, drv);
- conn->to
- << settings.maxSilentTime
- << settings.buildTimeout;
- if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 2)
- conn->to
- << settings.maxLogSize;
- if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 3)
- conn->to
- << settings.buildRepeat
- << settings.enforceDeterminism;
+
+ putBuildSettings(*conn);
conn->to.flush();
@@ -234,6 +251,29 @@ struct LegacySSHStore : public Store
return status;
}
+ void buildPaths(const std::vector<StorePathWithOutputs> & drvPaths, BuildMode buildMode) override
+ {
+ auto conn(connections->get());
+
+ conn->to << cmdBuildPaths;
+ Strings ss;
+ for (auto & p : drvPaths)
+ ss.push_back(p.to_string(*this));
+ conn->to << ss;
+
+ putBuildSettings(*conn);
+
+ conn->to.flush();
+
+ BuildResult result;
+ result.status = (BuildResult::Status) readInt(conn->from);
+
+ if (!result.success()) {
+ conn->from >> result.errorMsg;
+ throw Error(result.status, result.errorMsg);
+ }
+ }
+
void ensurePath(const StorePath & path) override
{ unsupported("ensurePath"); }
diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc
index e96091aae..8f29f90ef 100644
--- a/src/libstore/local-store.cc
+++ b/src/libstore/local-store.cc
@@ -594,7 +594,7 @@ uint64_t LocalStore::addValidPath(State & state,
state.stmtRegisterValidPath.use()
(printStorePath(info.path))
- (info.narHash->to_string(Base16, true))
+ (info.narHash.to_string(Base16, true))
(info.registrationTime == 0 ? time(0) : info.registrationTime)
(info.deriver ? printStorePath(*info.deriver) : "", (bool) info.deriver)
(info.narSize, info.narSize != 0)
@@ -618,11 +618,11 @@ uint64_t LocalStore::addValidPath(State & state,
registration above is undone. */
if (checkOutputs) checkDerivationOutputs(info.path, drv);
- for (auto & i : drv.outputs) {
+ for (auto & i : drv.outputsAndPaths(*this)) {
state.stmtAddDerivationOutput.use()
(id)
(i.first)
- (printStorePath(i.second.path(*this, drv.name)))
+ (printStorePath(i.second.second))
.exec();
}
}
@@ -641,25 +641,28 @@ void LocalStore::queryPathInfoUncached(const StorePath & path,
Callback<std::shared_ptr<const ValidPathInfo>> callback) noexcept
{
try {
- auto info = std::make_shared<ValidPathInfo>(path);
-
callback(retrySQLite<std::shared_ptr<const ValidPathInfo>>([&]() {
auto state(_state.lock());
/* Get the path info. */
- auto useQueryPathInfo(state->stmtQueryPathInfo.use()(printStorePath(info->path)));
+ auto useQueryPathInfo(state->stmtQueryPathInfo.use()(printStorePath(path)));
if (!useQueryPathInfo.next())
return std::shared_ptr<ValidPathInfo>();
- info->id = useQueryPathInfo.getInt(0);
+ auto id = useQueryPathInfo.getInt(0);
+ auto narHash = Hash::dummy;
try {
- info->narHash = Hash::parseAnyPrefixed(useQueryPathInfo.getStr(1));
+ narHash = Hash::parseAnyPrefixed(useQueryPathInfo.getStr(1));
} catch (BadHash & e) {
- throw Error("in valid-path entry for '%s': %s", printStorePath(path), e.what());
+ throw Error("invalid-path entry for '%s': %s", printStorePath(path), e.what());
}
+ auto info = std::make_shared<ValidPathInfo>(path, narHash);
+
+ info->id = id;
+
info->registrationTime = useQueryPathInfo.getInt(2);
auto s = (const char *) sqlite3_column_text(state->stmtQueryPathInfo, 3);
@@ -694,7 +697,7 @@ void LocalStore::updatePathInfo(State & state, const ValidPathInfo & info)
{
state.stmtUpdatePathInfo.use()
(info.narSize, info.narSize != 0)
- (info.narHash->to_string(Base16, true))
+ (info.narHash.to_string(Base16, true))
(info.ultimate ? 1 : 0, info.ultimate)
(concatStringsSep(" ", info.sigs), !info.sigs.empty())
(renderContentAddress(info.ca), (bool) info.ca)
@@ -924,7 +927,7 @@ void LocalStore::registerValidPaths(const ValidPathInfos & infos)
StorePathSet paths;
for (auto & i : infos) {
- assert(i.narHash && i.narHash->type == htSHA256);
+ assert(i.narHash.type == htSHA256);
if (isValidPath_(*state, i.path))
updatePathInfo(*state, i);
else
@@ -988,9 +991,6 @@ const PublicKeys & LocalStore::getPublicKeys()
void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
RepairFlag repair, CheckSigsFlag checkSigs)
{
- if (!info.narHash)
- throw Error("cannot add path '%s' because it lacks a hash", printStorePath(info.path));
-
if (requireSigs && checkSigs && !info.checkSignatures(*this, getPublicKeys()))
throw Error("cannot add path '%s' because it lacks a valid signature", printStorePath(info.path));
@@ -1025,11 +1025,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
else
hashSink = std::make_unique<HashModuloSink>(htSHA256, std::string(info.path.hashPart()));
- LambdaSource wrapperSource([&](unsigned char * data, size_t len) -> size_t {
- size_t n = source.read(data, len);
- (*hashSink)(data, n);
- return n;
- });
+ TeeSource wrapperSource { source, *hashSink };
restorePath(realPath, wrapperSource);
@@ -1037,7 +1033,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
if (hashResult.first != info.narHash)
throw Error("hash mismatch importing path '%s';\n wanted: %s\n got: %s",
- printStorePath(info.path), info.narHash->to_string(Base32, true), hashResult.first.to_string(Base32, true));
+ printStorePath(info.path), info.narHash.to_string(Base32, true), hashResult.first.to_string(Base32, true));
if (hashResult.second != info.narSize)
throw Error("size mismatch importing path '%s';\n wanted: %s\n got: %s",
@@ -1159,8 +1155,7 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, const string & name,
optimisePath(realPath);
- ValidPathInfo info(dstPath);
- info.narHash = narHash.first;
+ ValidPathInfo info { dstPath, narHash.first };
info.narSize = narHash.second;
info.ca = FixedOutputHash { .method = method, .hash = hash };
registerValidPath(info);
@@ -1203,8 +1198,7 @@ StorePath LocalStore::addTextToStore(const string & name, const string & s,
optimisePath(realPath);
- ValidPathInfo info(dstPath);
- info.narHash = narHash;
+ ValidPathInfo info { dstPath, narHash };
info.narSize = sink.s->size();
info.references = references;
info.ca = TextHash { .hash = hash };
@@ -1319,9 +1313,9 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
std::unique_ptr<AbstractHashSink> hashSink;
if (!info->ca || !info->references.count(info->path))
- hashSink = std::make_unique<HashSink>(info->narHash->type);
+ hashSink = std::make_unique<HashSink>(info->narHash.type);
else
- hashSink = std::make_unique<HashModuloSink>(info->narHash->type, std::string(info->path.hashPart()));
+ hashSink = std::make_unique<HashModuloSink>(info->narHash.type, std::string(info->path.hashPart()));
dumpPath(Store::toRealPath(i), *hashSink);
auto current = hashSink->finish();
@@ -1330,7 +1324,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
logError({
.name = "Invalid hash - path modified",
.hint = hintfmt("path '%s' was modified! expected hash '%s', got '%s'",
- printStorePath(i), info->narHash->to_string(Base32, true), current.first.to_string(Base32, true))
+ printStorePath(i), info->narHash.to_string(Base32, true), current.first.to_string(Base32, true))
});
if (repair) repairPath(i); else errors = true;
} else {
diff --git a/src/libstore/machines.cc b/src/libstore/machines.cc
index f848582da..7db2556f4 100644
--- a/src/libstore/machines.cc
+++ b/src/libstore/machines.cc
@@ -1,6 +1,7 @@
#include "machines.hh"
#include "util.hh"
#include "globals.hh"
+#include "store-api.hh"
#include <algorithm>
@@ -48,6 +49,29 @@ bool Machine::mandatoryMet(const std::set<string> & features) const {
});
}
+ref<Store> Machine::openStore() const {
+ Store::Params storeParams;
+ if (hasPrefix(storeUri, "ssh://")) {
+ storeParams["max-connections"] = "1";
+ storeParams["log-fd"] = "4";
+ if (sshKey != "")
+ storeParams["ssh-key"] = sshKey;
+ }
+ {
+ auto & fs = storeParams["system-features"];
+ auto append = [&](auto feats) {
+ for (auto & f : feats) {
+ if (fs.size() > 0) fs += ' ';
+ fs += f;
+ }
+ };
+ append(supportedFeatures);
+ append(mandatoryFeatures);
+ }
+
+ return nix::openStore(storeUri, storeParams);
+}
+
void parseMachines(const std::string & s, Machines & machines)
{
for (auto line : tokenizeString<std::vector<string>>(s, "\n;")) {
diff --git a/src/libstore/machines.hh b/src/libstore/machines.hh
index de92eb924..341d9bd97 100644
--- a/src/libstore/machines.hh
+++ b/src/libstore/machines.hh
@@ -4,6 +4,8 @@
namespace nix {
+class Store;
+
struct Machine {
const string storeUri;
@@ -28,6 +30,8 @@ struct Machine {
decltype(supportedFeatures) supportedFeatures,
decltype(mandatoryFeatures) mandatoryFeatures,
decltype(sshPublicHostKey) sshPublicHostKey);
+
+ ref<Store> openStore() const;
};
typedef std::vector<Machine> Machines;
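
    A hedged usage sketch for the new Machine::openStore(); the wrapper function is
    hypothetical, but the open-then-connect pattern matches the build-remote.cc hunk
    above:

        #include "machines.hh"
        #include "store-api.hh"

        // The Machine now encapsulates the max-connections, log-fd, ssh-key and
        // system-features parameters, so callers just open and connect.
        ref<Store> connectToBuilder(const Machine & machine)
        {
            auto store = machine.openStore();
            store->connect();
            return store;
        }
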
diff --git a/src/libstore/misc.cc b/src/libstore/misc.cc
index 0ae1ceaad..f6aa570bb 100644
--- a/src/libstore/misc.cc
+++ b/src/libstore/misc.cc
@@ -207,10 +207,10 @@ void Store::queryMissing(const std::vector<StorePathWithOutputs> & targets,
ParsedDerivation parsedDrv(StorePath(path.path), *drv);
PathSet invalid;
- for (auto & j : drv->outputs)
+ for (auto & j : drv->outputsAndPaths(*this))
if (wantOutput(j.first, path.outputs)
- && !isValidPath(j.second.path(*this, drv->name)))
- invalid.insert(printStorePath(j.second.path(*this, drv->name)));
+ && !isValidPath(j.second.second))
+ invalid.insert(printStorePath(j.second.second));
if (invalid.empty()) return;
if (settings.useSubstitutes && parsedDrv.substitutesAllowed()) {
diff --git a/src/libstore/nar-info-disk-cache.cc b/src/libstore/nar-info-disk-cache.cc
index 92da14e23..8541cc51f 100644
--- a/src/libstore/nar-info-disk-cache.cc
+++ b/src/libstore/nar-info-disk-cache.cc
@@ -189,13 +189,14 @@ public:
return {oInvalid, 0};
auto namePart = queryNAR.getStr(1);
- auto narInfo = make_ref<NarInfo>(StorePath(hashPart + "-" + namePart));
+ auto narInfo = make_ref<NarInfo>(
+ StorePath(hashPart + "-" + namePart),
+ Hash::parseAnyPrefixed(queryNAR.getStr(6)));
narInfo->url = queryNAR.getStr(2);
narInfo->compression = queryNAR.getStr(3);
if (!queryNAR.isNull(4))
narInfo->fileHash = Hash::parseAnyPrefixed(queryNAR.getStr(4));
narInfo->fileSize = queryNAR.getInt(5);
- narInfo->narHash = Hash::parseAnyPrefixed(queryNAR.getStr(6));
narInfo->narSize = queryNAR.getInt(7);
for (auto & r : tokenizeString(queryNAR.getStr(8), " "))
narInfo->references.insert(StorePath(r));
@@ -232,7 +233,7 @@ public:
(narInfo ? narInfo->compression : "", narInfo != 0)
(narInfo && narInfo->fileHash ? narInfo->fileHash->to_string(Base32, true) : "", narInfo && narInfo->fileHash)
(narInfo ? narInfo->fileSize : 0, narInfo != 0 && narInfo->fileSize)
- (info->narHash->to_string(Base32, true))
+ (info->narHash.to_string(Base32, true))
(info->narSize)
(concatStringsSep(" ", info->shortRefs()))
(info->deriver ? std::string(info->deriver->to_string()) : "", (bool) info->deriver)
diff --git a/src/libstore/nar-info.cc b/src/libstore/nar-info.cc
index 5812aa4ac..3454f34bb 100644
--- a/src/libstore/nar-info.cc
+++ b/src/libstore/nar-info.cc
@@ -1,10 +1,11 @@
#include "globals.hh"
#include "nar-info.hh"
+#include "store-api.hh"
namespace nix {
NarInfo::NarInfo(const Store & store, const std::string & s, const std::string & whence)
- : ValidPathInfo(StorePath(StorePath::dummy)) // FIXME: hack
+ : ValidPathInfo(StorePath(StorePath::dummy), Hash(Hash::dummy)) // FIXME: hack
{
auto corrupt = [&]() {
return Error("NAR info file '%1%' is corrupt", whence);
@@ -19,6 +20,7 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &
};
bool havePath = false;
+ bool haveNarHash = false;
size_t pos = 0;
while (pos < s.size()) {
@@ -46,8 +48,10 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &
else if (name == "FileSize") {
if (!string2Int(value, fileSize)) throw corrupt();
}
- else if (name == "NarHash")
+ else if (name == "NarHash") {
narHash = parseHashField(value);
+ haveNarHash = true;
+ }
else if (name == "NarSize") {
if (!string2Int(value, narSize)) throw corrupt();
}
@@ -76,7 +80,7 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &
if (compression == "") compression = "bzip2";
- if (!havePath || url.empty() || narSize == 0 || !narHash) throw corrupt();
+ if (!havePath || !haveNarHash || url.empty() || narSize == 0) throw corrupt();
}
std::string NarInfo::to_string(const Store & store) const
@@ -89,8 +93,8 @@ std::string NarInfo::to_string(const Store & store) const
assert(fileHash && fileHash->type == htSHA256);
res += "FileHash: " + fileHash->to_string(Base32, true) + "\n";
res += "FileSize: " + std::to_string(fileSize) + "\n";
- assert(narHash && narHash->type == htSHA256);
- res += "NarHash: " + narHash->to_string(Base32, true) + "\n";
+ assert(narHash.type == htSHA256);
+ res += "NarHash: " + narHash.to_string(Base32, true) + "\n";
res += "NarSize: " + std::to_string(narSize) + "\n";
res += "References: " + concatStringsSep(" ", shortRefs()) + "\n";
diff --git a/src/libstore/nar-info.hh b/src/libstore/nar-info.hh
index eff19f0ef..39ced76e5 100644
--- a/src/libstore/nar-info.hh
+++ b/src/libstore/nar-info.hh
@@ -2,10 +2,12 @@
#include "types.hh"
#include "hash.hh"
-#include "store-api.hh"
+#include "path-info.hh"
namespace nix {
+class Store;
+
struct NarInfo : ValidPathInfo
{
std::string url;
@@ -15,7 +17,7 @@ struct NarInfo : ValidPathInfo
std::string system;
NarInfo() = delete;
- NarInfo(StorePath && path) : ValidPathInfo(std::move(path)) { }
+ NarInfo(StorePath && path, Hash narHash) : ValidPathInfo(std::move(path), narHash) { }
NarInfo(const ValidPathInfo & info) : ValidPathInfo(info) { }
NarInfo(const Store & store, const std::string & s, const std::string & whence);
diff --git a/src/libstore/parsed-derivations.cc b/src/libstore/parsed-derivations.cc
index 24f848e46..e7b7202d4 100644
--- a/src/libstore/parsed-derivations.cc
+++ b/src/libstore/parsed-derivations.cc
@@ -94,7 +94,7 @@ StringSet ParsedDerivation::getRequiredSystemFeatures() const
return res;
}
-bool ParsedDerivation::canBuildLocally() const
+bool ParsedDerivation::canBuildLocally(Store & localStore) const
{
if (drv.platform != settings.thisSystem.get()
&& !settings.extraPlatforms.get().count(drv.platform)
@@ -102,14 +102,14 @@ bool ParsedDerivation::canBuildLocally() const
return false;
for (auto & feature : getRequiredSystemFeatures())
- if (!settings.systemFeatures.get().count(feature)) return false;
+ if (!localStore.systemFeatures.get().count(feature)) return false;
return true;
}
-bool ParsedDerivation::willBuildLocally() const
+bool ParsedDerivation::willBuildLocally(Store & localStore) const
{
- return getBoolAttr("preferLocalBuild") && canBuildLocally();
+ return getBoolAttr("preferLocalBuild") && canBuildLocally(localStore);
}
bool ParsedDerivation::substitutesAllowed() const
diff --git a/src/libstore/parsed-derivations.hh b/src/libstore/parsed-derivations.hh
index 6ee172d81..3fa09f34f 100644
--- a/src/libstore/parsed-derivations.hh
+++ b/src/libstore/parsed-derivations.hh
@@ -29,9 +29,9 @@ public:
StringSet getRequiredSystemFeatures() const;
- bool canBuildLocally() const;
+ bool canBuildLocally(Store & localStore) const;
- bool willBuildLocally() const;
+ bool willBuildLocally(Store & localStore) const;
bool substitutesAllowed() const;
};
diff --git a/src/libstore/path-info.hh b/src/libstore/path-info.hh
index 2a015ea3c..8ff5c466e 100644
--- a/src/libstore/path-info.hh
+++ b/src/libstore/path-info.hh
@@ -1,5 +1,6 @@
#pragma once
+#include "crypto.hh"
#include "path.hh"
#include "hash.hh"
#include "content-address.hh"
@@ -29,7 +30,7 @@ struct ValidPathInfo
StorePath path;
std::optional<StorePath> deriver;
// TODO document this
- std::optional<Hash> narHash;
+ Hash narHash;
StorePathSet references;
time_t registrationTime = 0;
uint64_t narSize = 0; // 0 = unknown
@@ -100,8 +101,8 @@ struct ValidPathInfo
ValidPathInfo(const ValidPathInfo & other) = default;
- ValidPathInfo(StorePath && path) : path(std::move(path)) { };
- ValidPathInfo(const StorePath & path) : path(path) { };
+ ValidPathInfo(StorePath && path, Hash narHash) : path(std::move(path)), narHash(narHash) { };
+ ValidPathInfo(const StorePath & path, Hash narHash) : path(path), narHash(narHash) { };
virtual ~ValidPathInfo() { }
};
diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc
index afd72ad61..07c19922c 100644
--- a/src/libstore/remote-store.cc
+++ b/src/libstore/remote-store.cc
@@ -416,10 +416,10 @@ void RemoteStore::queryPathInfoUncached(const StorePath & path,
bool valid; conn->from >> valid;
if (!valid) throw InvalidPath("path '%s' is not valid", printStorePath(path));
}
- info = std::make_shared<ValidPathInfo>(StorePath(path));
auto deriver = readString(conn->from);
+ auto narHash = Hash::parseAny(readString(conn->from), htSHA256);
+ info = std::make_shared<ValidPathInfo>(path, narHash);
if (deriver != "") info->deriver = parseStorePath(deriver);
- info->narHash = Hash::parseAny(readString(conn->from), htSHA256);
info->references = WorkerProto<StorePathSet>::read(*this, conn->from);
conn->from >> info->registrationTime >> info->narSize;
if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 16) {
@@ -517,7 +517,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source,
conn->to << wopAddToStoreNar
<< printStorePath(info.path)
<< (info.deriver ? printStorePath(*info.deriver) : "")
- << info.narHash->to_string(Base16, false);
+ << info.narHash.to_string(Base16, false);
WorkerProto<StorePathSet>::write(*this, conn->to, info.references);
conn->to << info.registrationTime << info.narSize
<< info.ultimate << info.sigs << renderContentAddress(info.ca)
diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc
index e66c04df4..52e515d87 100644
--- a/src/libstore/store-api.cc
+++ b/src/libstore/store-api.cc
@@ -320,8 +320,10 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
if (expectedCAHash && expectedCAHash != hash)
throw Error("hash mismatch for '%s'", srcPath);
- ValidPathInfo info(makeFixedOutputPath(method, hash, name));
- info.narHash = narHash;
+ ValidPathInfo info {
+ makeFixedOutputPath(method, hash, name),
+ narHash,
+ };
info.narSize = narSize;
info.ca = FixedOutputHash { .method = method, .hash = hash };
@@ -576,7 +578,7 @@ string Store::makeValidityRegistration(const StorePathSet & paths,
auto info = queryPathInfo(i);
if (showHash) {
- s += info->narHash->to_string(Base16, false) + "\n";
+ s += info->narHash.to_string(Base16, false) + "\n";
s += (format("%1%\n") % info->narSize).str();
}
@@ -608,7 +610,7 @@ void Store::pathInfoToJSON(JSONPlaceholder & jsonOut, const StorePathSet & store
auto info = queryPathInfo(storePath);
jsonPath
- .attr("narHash", info->narHash->to_string(hashBase, true))
+ .attr("narHash", info->narHash.to_string(hashBase, true))
.attr("narSize", info->narSize);
{
@@ -736,20 +738,6 @@ void copyStorePath(ref<Store> srcStore, ref<Store> dstStore,
info = info2;
}
- if (!info->narHash) {
- StringSink sink;
- srcStore->narFromPath({storePath}, sink);
- auto info2 = make_ref<ValidPathInfo>(*info);
- info2->narHash = hashString(htSHA256, *sink.s);
- if (!info->narSize) info2->narSize = sink.s->size();
- if (info->ultimate) info2->ultimate = false;
- info = info2;
-
- StringSource source(*sink.s);
- dstStore->addToStore(*info, source, repair, checkSigs);
- return;
- }
-
if (info->ultimate) {
auto info2 = make_ref<ValidPathInfo>(*info);
info2->ultimate = false;
@@ -757,12 +745,12 @@ void copyStorePath(ref srcStore, ref dstStore,
}
auto source = sinkToSource([&](Sink & sink) {
- LambdaSink wrapperSink([&](const unsigned char * data, size_t len) {
- sink(data, len);
+ LambdaSink progressSink([&](const unsigned char * data, size_t len) {
total += len;
act.progress(total, info->narSize);
});
- srcStore->narFromPath(storePath, wrapperSink);
+ TeeSink tee { sink, progressSink };
+ srcStore->narFromPath(storePath, tee);
}, [&]() {
throw EndOfFile("NAR for '%s' fetched from '%s' is incomplete", srcStore->printStorePath(storePath), srcStore->getUri());
});
@@ -874,19 +862,22 @@ void copyClosure(ref<Store> srcStore, ref<Store> dstStore,
}
-std::optional<ValidPathInfo> decodeValidPathInfo(const Store & store, std::istream & str, bool hashGiven)
+std::optional<ValidPathInfo> decodeValidPathInfo(const Store & store, std::istream & str, std::optional<HashResult> hashGiven)
{
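+ /* If hashGiven is supplied, the caller already knows the NAR hash and size
+    and the input omits those two lines; otherwise they are read from the
+    stream below. */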
std::string path;
getline(str, path);
if (str.eof()) { return {}; }
- ValidPathInfo info(store.parseStorePath(path));
- if (hashGiven) {
+ if (!hashGiven) {
string s;
getline(str, s);
- info.narHash = Hash::parseAny(s, htSHA256);
+ auto narHash = Hash::parseAny(s, htSHA256);
getline(str, s);
- if (!string2Int(s, info.narSize)) throw Error("number expected");
+ uint64_t narSize;
+ if (!string2Int(s, narSize)) throw Error("number expected");
+ hashGiven = { narHash, narSize };
}
+ ValidPathInfo info(store.parseStorePath(path), hashGiven->first);
+ info.narSize = hashGiven->second;
std::string deriver;
getline(str, deriver);
if (deriver != "") info.deriver = store.parseStorePath(deriver);
@@ -921,12 +912,12 @@ string showPaths(const PathSet & paths)
std::string ValidPathInfo::fingerprint(const Store & store) const
{
- if (narSize == 0 || !narHash)
- throw Error("cannot calculate fingerprint of path '%s' because its size/hash is not known",
+ if (narSize == 0)
+ throw Error("cannot calculate fingerprint of path '%s' because its size is not known",
store.printStorePath(path));
return
"1;" + store.printStorePath(path) + ";"
- + narHash->to_string(Base32, true) + ";"
+ + narHash.to_string(Base32, true) + ";"
+ std::to_string(narSize) + ";"
+ concatStringsSep(",", store.printStorePathSet(references));
}
diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh
index 9003ab541..041f72a35 100644
--- a/src/libstore/store-api.hh
+++ b/src/libstore/store-api.hh
@@ -4,7 +4,6 @@
#include "hash.hh"
#include "content-address.hh"
#include "serialise.hh"
-#include "crypto.hh"
#include "lru-cache.hh"
#include "sync.hh"
#include "globals.hh"
@@ -164,6 +163,10 @@ public:
Setting<bool> wantMassQuery{this, false, "want-mass-query", "whether this substituter can be queried efficiently for path validity"};
+ Setting<StringSet> systemFeatures{this, settings.systemFeatures,
+ "system-features",
+ "Optional features that the system this store builds on implements (like \"kvm\")."};
+
protected:
struct PathInfoCacheValue {
@@ -767,7 +770,7 @@ string showPaths(const PathSet & paths);
std::optional<ValidPathInfo> decodeValidPathInfo(
const Store & store,
std::istream & str,
- bool hashGiven = false);
+ std::optional<HashResult> hashGiven = std::nullopt);
/* Split URI into protocol+hierarchy part and its parameter set. */
std::pair<std::string, Store::Params> splitUriAndParams(const std::string & uri);
diff --git a/src/libstore/worker-protocol.hh b/src/libstore/worker-protocol.hh
index f3039dc98..2c8cb2e77 100644
--- a/src/libstore/worker-protocol.hh
+++ b/src/libstore/worker-protocol.hh
@@ -6,7 +6,7 @@ namespace nix {
#define WORKER_MAGIC_1 0x6e697863
#define WORKER_MAGIC_2 0x6478696f
-#define PROTOCOL_VERSION 0x117
+#define PROTOCOL_VERSION 0x118
#define GET_PROTOCOL_MAJOR(x) ((x) & 0xff00)
#define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff)
diff --git a/src/libutil/archive.cc b/src/libutil/archive.cc
index ce7cf9754..14399dea3 100644
--- a/src/libutil/archive.cc
+++ b/src/libutil/archive.cc
@@ -366,11 +366,7 @@ void copyNAR(Source & source, Sink & sink)
ParseSink parseSink; /* null sink; just parse the NAR */
- LambdaSource wrapper([&](unsigned char * data, size_t len) {
- auto n = source.read(data, len);
- sink(data, n);
- return n;
- });
+ TeeSource wrapper { source, sink };
parseDump(parseSink, wrapper);
}
diff --git a/src/libutil/hash.cc b/src/libutil/hash.cc
index dfb3668f1..4a94f0dfd 100644
--- a/src/libutil/hash.cc
+++ b/src/libutil/hash.cc
@@ -136,6 +136,8 @@ std::string Hash::to_string(Base base, bool includeType) const
return s;
}
+Hash Hash::dummy(htSHA256);
+
Hash Hash::parseSRI(std::string_view original) {
auto rest = original;
diff --git a/src/libutil/hash.hh b/src/libutil/hash.hh
index 00ce7bb6f..6d6eb70ca 100644
--- a/src/libutil/hash.hh
+++ b/src/libutil/hash.hh
@@ -59,9 +59,6 @@ private:
Hash(std::string_view s, HashType type, bool isSRI);
public:
- /* Check whether a hash is set. */
- operator bool () const { return (bool) type; }
-
/* Check whether two hash are equal. */
bool operator == (const Hash & h2) const;
@@ -105,6 +102,8 @@ public:
assert(type == htSHA1);
return std::string(to_string(Base16, false), 0, 7);
}
+
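+ /* A dummy SHA-256 hash, used as a placeholder where a Hash must exist
+    before its real value is known. */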
+ static Hash dummy;
};
/* Helper that defaults empty hashes to the 0 hash. */
diff --git a/src/libutil/serialise.hh b/src/libutil/serialise.hh
index c29c6b29b..69ae0874a 100644
--- a/src/libutil/serialise.hh
+++ b/src/libutil/serialise.hh
@@ -225,6 +225,17 @@ struct SizedSource : Source
}
};
+/* A sink that just counts the number of bytes given to it */
+struct LengthSink : Sink
+{
+ uint64_t length = 0;
+
+ virtual void operator () (const unsigned char * _, size_t len)
+ {
+ length += len;
+ }
+};
+
/* Convert a function into a sink. */
struct LambdaSink : Sink
{
diff --git a/src/nix-daemon/nix-daemon.cc b/src/nix-daemon/nix-daemon.cc
index bcb86cbce..cfa634a44 100644
--- a/src/nix-daemon/nix-daemon.cc
+++ b/src/nix-daemon/nix-daemon.cc
@@ -239,7 +239,15 @@ static void daemonLoop(char * * argv)
// Handle the connection.
FdSource from(remote.get());
FdSink to(remote.get());
- processConnection(openUncachedStore(), from, to, trusted, NotRecursive, user, peer.uid);
+ processConnection(openUncachedStore(), from, to, trusted, NotRecursive, [&](Store & store) {
+#if 0
+ /* Prevent users from doing something very dangerous. */
+ if (geteuid() == 0 &&
+ querySetting("build-users-group", "") == "")
+ throw Error("if you run 'nix-daemon' as root, then you MUST set 'build-users-group'!");
+#endif
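+ /* Give the store a chance to set up per-user state (e.g. per-user
+    profile directories) for the connecting client. */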
+ store.createUser(user, peer.uid);
+ });
exit(0);
}, options);
@@ -324,7 +332,10 @@ static int _main(int argc, char * * argv)
} else {
FdSource from(STDIN_FILENO);
FdSink to(STDOUT_FILENO);
- processConnection(openUncachedStore(), from, to, Trusted, NotRecursive, "root", 0);
+ /* Auth hook is empty because in this mode we blindly trust the
+ standard streams. Limiting access to those is explicitly
+ not `nix-daemon`'s responsibility. */
+ processConnection(openUncachedStore(), from, to, Trusted, NotRecursive, [&](Store & _){});
}
} else {
daemonLoop(argv);
diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc
index e59009d11..fb7a336e3 100644
--- a/src/nix-store/nix-store.cc
+++ b/src/nix-store/nix-store.cc
@@ -218,8 +218,8 @@ static StorePathSet maybeUseOutputs(const StorePath & storePath, bool useOutput,
if (useOutput && storePath.isDerivation()) {
auto drv = store->derivationFromPath(storePath);
StorePathSet outputs;
- for (auto & i : drv.outputs)
- outputs.insert(i.second.path(*store, drv.name));
+ for (auto & i : drv.outputsAndPaths(*store))
+ outputs.insert(i.second.second);
return outputs;
}
else return {storePath};
@@ -312,8 +312,8 @@ static void opQuery(Strings opFlags, Strings opArgs)
auto i2 = store->followLinksToStorePath(i);
if (forceRealise) realisePath({i2});
Derivation drv = store->derivationFromPath(i2);
- for (auto & j : drv.outputs)
- cout << fmt("%1%\n", store->printStorePath(j.second.path(*store, drv.name)));
+ for (auto & j : drv.outputsAndPaths(*store))
+ cout << fmt("%1%\n", store->printStorePath(j.second.second));
}
break;
}
@@ -372,8 +372,8 @@ static void opQuery(Strings opFlags, Strings opArgs)
for (auto & j : maybeUseOutputs(store->followLinksToStorePath(i), useOutput, forceRealise)) {
auto info = store->queryPathInfo(j);
if (query == qHash) {
- assert(info->narHash && info->narHash->type == htSHA256);
- cout << fmt("%s\n", info->narHash->to_string(Base32, true));
+ assert(info->narHash.type == htSHA256);
+ cout << fmt("%s\n", info->narHash.to_string(Base32, true));
} else if (query == qSize)
cout << fmt("%d\n", info->narSize);
}
@@ -495,7 +495,10 @@ static void registerValidity(bool reregister, bool hashGiven, bool canonicalise)
ValidPathInfos infos;
while (1) {
- auto info = decodeValidPathInfo(*store, cin, hashGiven);
+ // We pass a dummy value here because the real hash is computed and
+ // set below. FIXME: be correct by construction and avoid the dummy value.
+ auto hashResultOpt = !hashGiven ? std::optional<HashResult> { {Hash::dummy, -1} } : std::nullopt;
+ auto info = decodeValidPathInfo(*store, cin, hashResultOpt);
if (!info) break;
if (!store->isValidPath(info->path) || reregister) {
/* !!! races */
@@ -723,7 +726,7 @@ static void opVerifyPath(Strings opFlags, Strings opArgs)
auto path = store->followLinksToStorePath(i);
printMsg(lvlTalkative, "checking path '%s'...", store->printStorePath(path));
auto info = store->queryPathInfo(path);
- HashSink sink(info->narHash->type);
+ HashSink sink(info->narHash.type);
store->narFromPath(path, sink);
auto current = sink.finish();
if (current.first != info->narHash) {
@@ -732,7 +735,7 @@ static void opVerifyPath(Strings opFlags, Strings opArgs)
.hint = hintfmt(
"path '%s' was modified! expected hash '%s', got '%s'",
store->printStorePath(path),
- info->narHash->to_string(Base32, true),
+ info->narHash.to_string(Base32, true),
current.first.to_string(Base32, true))
});
status = 1;
@@ -862,7 +865,7 @@ static void opServe(Strings opFlags, Strings opArgs)
out << info->narSize // downloadSize
<< info->narSize;
if (GET_PROTOCOL_MINOR(clientVersion) >= 4)
- out << (info->narHash ? info->narHash->to_string(Base32, true) : "")
+ out << info->narHash.to_string(Base32, true)
<< renderContentAddress(info->ca)
<< info->sigs;
} catch (InvalidPath &) {
@@ -944,11 +947,13 @@ static void opServe(Strings opFlags, Strings opArgs)
if (!writeAllowed) throw Error("importing paths is not allowed");
auto path = readString(in);
- ValidPathInfo info(store->parseStorePath(path));
auto deriver = readString(in);
+ ValidPathInfo info {
+ store->parseStorePath(path),
+ Hash::parseAny(readString(in), htSHA256),
+ };
if (deriver != "")
info.deriver = store->parseStorePath(deriver);
- info.narHash = Hash::parseAny(readString(in), htSHA256);
info.references = WorkerProto<StorePathSet>::read(*store, in);
in >> info.registrationTime >> info.narSize >> info.ultimate;
info.sigs = readStrings<StringSet>(in);
diff --git a/src/nix/add-to-store.cc b/src/nix/add-to-store.cc
index e183cb8b5..713155840 100644
--- a/src/nix/add-to-store.cc
+++ b/src/nix/add-to-store.cc
@@ -60,8 +60,10 @@ struct CmdAddToStore : MixDryRun, StoreCommand
hash = hsink.finish().first;
}
- ValidPathInfo info(store->makeFixedOutputPath(ingestionMethod, hash, *namePart));
- info.narHash = narHash;
+ ValidPathInfo info {
+ store->makeFixedOutputPath(ingestionMethod, hash, *namePart),
+ narHash,
+ };
info.narSize = sink.s->size();
info.ca = std::optional { FixedOutputHash {
.method = ingestionMethod,
diff --git a/src/nix/develop.cc b/src/nix/develop.cc
index 12658078a..434088da7 100644
--- a/src/nix/develop.cc
+++ b/src/nix/develop.cc
@@ -124,10 +124,7 @@ StorePath getDerivationEnvironment(ref<Store> store, const StorePath & drvPath)
/* Rehash and write the derivation. FIXME: would be nice to use
'buildDerivation', but that's privileged. */
- auto drvName = std::string(drvPath.name());
- assert(hasSuffix(drvName, ".drv"));
- drvName.resize(drvName.size() - 4);
- drvName += "-env";
+ drv.name += "-env";
for (auto & output : drv.outputs)
drv.env.erase(output.first);
drv.outputs = {{"out", DerivationOutput { .output = DerivationOutputInputAddressed { .path = StorePath::dummy }}}};
@@ -136,12 +133,12 @@ StorePath getDerivationEnvironment(ref store, const StorePath & drvPath)
drv.env["outputs"] = "out";
drv.inputSrcs.insert(std::move(getEnvShPath));
Hash h = std::get<0>(hashDerivationModulo(*store, drv, true));
- auto shellOutPath = store->makeOutputPath("out", h, drvName);
+ auto shellOutPath = store->makeOutputPath("out", h, drv.name);
drv.outputs.insert_or_assign("out", DerivationOutput { .output = DerivationOutputInputAddressed {
.path = shellOutPath
} });
drv.env["out"] = store->printStorePath(shellOutPath);
- auto shellDrvPath2 = writeDerivation(store, drv, drvName);
+ auto shellDrvPath2 = writeDerivation(store, drv);
/* Build the derivation. */
store->buildPaths({{shellDrvPath2}});
diff --git a/src/nix/installables.cc b/src/nix/installables.cc
index ea152709f..d34f87982 100644
--- a/src/nix/installables.cc
+++ b/src/nix/installables.cc
@@ -304,8 +304,8 @@ struct InstallableStorePath : Installable
if (storePath.isDerivation()) {
std::map<std::string, StorePath> outputs;
auto drv = store->readDerivation(storePath);
- for (auto & [name, output] : drv.outputs)
- outputs.emplace(name, output.path(*store, drv.name));
+ for (auto & i : drv.outputsAndPaths(*store))
+ outputs.emplace(i.first, i.second.second);
return {
BuildableFromDrv {
.drvPath = storePath,
diff --git a/src/nix/make-content-addressable.cc b/src/nix/make-content-addressable.cc
index 2fe2e2fb2..38b60fc38 100644
--- a/src/nix/make-content-addressable.cc
+++ b/src/nix/make-content-addressable.cc
@@ -77,14 +77,16 @@ struct CmdMakeContentAddressable : StorePathsCommand, MixJSON
auto narHash = hashModuloSink.finish().first;
- ValidPathInfo info(store->makeFixedOutputPath(FileIngestionMethod::Recursive, narHash, path.name(), references, hasSelfReference));
+ ValidPathInfo info {
+ store->makeFixedOutputPath(FileIngestionMethod::Recursive, narHash, path.name(), references, hasSelfReference),
+ narHash,
+ };
info.references = std::move(references);
if (hasSelfReference) info.references.insert(info.path);
- info.narHash = narHash;
info.narSize = sink.s->size();
info.ca = FixedOutputHash {
.method = FileIngestionMethod::Recursive,
- .hash = *info.narHash,
+ .hash = info.narHash,
};
if (!json)
diff --git a/src/nix/profile.cc b/src/nix/profile.cc
index 7dcc0b6d4..cffc9ee44 100644
--- a/src/nix/profile.cc
+++ b/src/nix/profile.cc
@@ -129,11 +129,13 @@ struct ProfileManifest
auto narHash = hashString(htSHA256, *sink.s);
- ValidPathInfo info(store->makeFixedOutputPath(FileIngestionMethod::Recursive, narHash, "profile", references));
+ ValidPathInfo info {
+ store->makeFixedOutputPath(FileIngestionMethod::Recursive, narHash, "profile", references),
+ narHash,
+ };
info.references = std::move(references);
- info.narHash = narHash;
info.narSize = sink.s->size();
- info.ca = FixedOutputHash { .method = FileIngestionMethod::Recursive, .hash = *info.narHash };
+ info.ca = FixedOutputHash { .method = FileIngestionMethod::Recursive, .hash = info.narHash };
auto source = StringSource { *sink.s };
store->addToStore(info, source);
diff --git a/src/nix/repl.cc b/src/nix/repl.cc
index c3c9e54a8..a74655200 100644
--- a/src/nix/repl.cc
+++ b/src/nix/repl.cc
@@ -490,8 +490,8 @@ bool NixRepl::processLine(string line)
if (runProgram(settings.nixBinDir + "/nix", Strings{"build", "--no-link", drvPath}) == 0) {
auto drv = readDerivation(*state->store, drvPath, Derivation::nameFromPath(state->store->parseStorePath(drvPath)));
std::cout << std::endl << "this derivation produced the following outputs:" << std::endl;
- for (auto & i : drv.outputs)
- std::cout << fmt(" %s -> %s\n", i.first, state->store->printStorePath(i.second.path(*state->store, drv.name)));
+ for (auto & i : drv.outputsAndPaths(*state->store))
+ std::cout << fmt(" %s -> %s\n", i.first, state->store->printStorePath(i.second.second));
}
} else if (command == ":i") {
runProgram(settings.nixBinDir + "/nix-env", Strings{"-i", drvPath});
diff --git a/src/nix/show-derivation.cc b/src/nix/show-derivation.cc
index 1b51d114f..8c4bfb03e 100644
--- a/src/nix/show-derivation.cc
+++ b/src/nix/show-derivation.cc
@@ -67,9 +67,9 @@ struct CmdShowDerivation : InstallablesCommand
{
auto outputsObj(drvObj.object("outputs"));
- for (auto & output : drv.outputs) {
+ for (auto & output : drv.outputsAndPaths(*store)) {
auto outputObj(outputsObj.object(output.first));
- outputObj.attr("path", store->printStorePath(output.second.path(*store, drv.name)));
+ outputObj.attr("path", store->printStorePath(output.second.second));
std::visit(overloaded {
[&](DerivationOutputInputAddressed doi) {
@@ -81,7 +81,7 @@ struct CmdShowDerivation : InstallablesCommand
[&](DerivationOutputCAFloating dof) {
outputObj.attr("hashAlgo", makeFileIngestionPrefix(dof.method) + printHashType(dof.hashType));
},
- }, output.second.output);
+ }, output.second.first.output);
}
}
diff --git a/src/nix/verify.cc b/src/nix/verify.cc
index fc7a9765c..26f755fd9 100644
--- a/src/nix/verify.cc
+++ b/src/nix/verify.cc
@@ -91,15 +91,15 @@ struct CmdVerify : StorePathsCommand
std::unique_ptr<AbstractHashSink> hashSink;
if (!info->ca)
- hashSink = std::make_unique<HashSink>(info->narHash->type);
+ hashSink = std::make_unique<HashSink>(info->narHash.type);
else
- hashSink = std::make_unique<HashModuloSink>(info->narHash->type, std::string(info->path.hashPart()));
+ hashSink = std::make_unique<HashModuloSink>(info->narHash.type, std::string(info->path.hashPart()));
store->narFromPath(info->path, *hashSink);
auto hash = hashSink->finish();
- if (hash.first != *info->narHash) {
+ if (hash.first != info->narHash) {
corrupted++;
act2.result(resCorruptedPath, store->printStorePath(info->path));
logError({
@@ -107,7 +107,7 @@ struct CmdVerify : StorePathsCommand
.hint = hintfmt(
"path '%s' was modified! expected hash '%s', got '%s'",
store->printStorePath(info->path),
- info->narHash->to_string(Base32, true),
+ info->narHash.to_string(Base32, true),
hash.first.to_string(Base32, true))
});
}
diff --git a/tests/binary-cache.sh b/tests/binary-cache.sh
index b05a7d167..fe4ddec8d 100644
--- a/tests/binary-cache.sh
+++ b/tests/binary-cache.sh
@@ -219,7 +219,7 @@ outPath=$(nix-build --no-out-link -E '
nix copy --to file://$cacheDir?write-nar-listing=1 $outPath
diff -u \
- <(jq -S < $cacheDir/$(basename $outPath).ls) \
+ <(jq -S < $cacheDir/$(basename $outPath | cut -c1-32).ls) \
<(echo '{"version":1,"root":{"type":"directory","entries":{"bar":{"type":"regular","size":4,"narOffset":232},"link":{"type":"symlink","target":"xyzzy"}}}}' | jq -S)
diff --git a/tests/build-hook-ca.nix b/tests/build-hook-ca.nix
new file mode 100644
index 000000000..98db473fc
--- /dev/null
+++ b/tests/build-hook-ca.nix
@@ -0,0 +1,45 @@
+{ busybox }:
+
+with import ./config.nix;
+
+let
+
+ mkDerivation = args:
+ derivation ({
+ inherit system;
+ builder = busybox;
+ args = ["sh" "-e" args.builder or (builtins.toFile "builder-${args.name}.sh" "if [ -e .attrs.sh ]; then source .attrs.sh; fi; eval \"$buildCommand\"")];
+ outputHashMode = "recursive";
+ outputHashAlgo = "sha256";
+ } // removeAttrs args ["builder" "meta"])
+ // { meta = args.meta or {}; };
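+ # Every derivation below is fixed-output (it declares an outputHash), so its
+ # result is checked against that hash wherever it is built.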
+
+ input1 = mkDerivation {
+ shell = busybox;
+ name = "build-remote-input-1";
+ buildCommand = "echo FOO > $out";
+ requiredSystemFeatures = ["foo"];
+ outputHash = "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=";
+ };
+
+ input2 = mkDerivation {
+ shell = busybox;
+ name = "build-remote-input-2";
+ buildCommand = "echo BAR > $out";
+ requiredSystemFeatures = ["bar"];
+ outputHash = "sha256-XArauVH91AVwP9hBBQNlkX9ccuPpSYx9o0zeIHb6e+Q=";
+ };
+
+in
+
+ mkDerivation {
+ shell = busybox;
+ name = "build-remote";
+ buildCommand =
+ ''
+ read x < ${input1}
+ read y < ${input2}
+ echo "$x $y" > $out
+ '';
+ outputHash = "sha256-3YGhlOfbGUm9hiPn2teXXTT8M1NEpDFvfXkxMaJRld0=";
+ }
diff --git a/tests/build-hook.nix b/tests/build-hook.nix
index a19c10dde..eb16676f0 100644
--- a/tests/build-hook.nix
+++ b/tests/build-hook.nix
@@ -23,6 +23,17 @@ let
shell = busybox;
name = "build-remote-input-2";
buildCommand = "echo BAR > $out";
+ requiredSystemFeatures = ["bar"];
+ };
+
+ input3 = mkDerivation {
+ shell = busybox;
+ name = "build-remote-input-3";
+ buildCommand = ''
+ read x < ${input2}
+ echo $x BAZ > $out
+ '';
+ requiredSystemFeatures = ["baz"];
};
in
@@ -33,7 +44,7 @@ in
buildCommand =
''
read x < ${input1}
- read y < ${input2}
- echo $x$y > $out
+ read y < ${input3}
+ echo "$x $y" > $out
'';
}
diff --git a/tests/build-remote-content-addressed-fixed.sh b/tests/build-remote-content-addressed-fixed.sh
new file mode 100644
index 000000000..1408a19d5
--- /dev/null
+++ b/tests/build-remote-content-addressed-fixed.sh
@@ -0,0 +1,5 @@
+source common.sh
+
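+# Run the shared build-remote.sh test against the fixed-output (CA)
+# derivations in build-hook-ca.nix.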
+file=build-hook-ca.nix
+
+source build-remote.sh
diff --git a/tests/build-remote-input-addressed.sh b/tests/build-remote-input-addressed.sh
new file mode 100644
index 000000000..b34caa061
--- /dev/null
+++ b/tests/build-remote-input-addressed.sh
@@ -0,0 +1,5 @@
+source common.sh
+
+file=build-hook.nix
+
+source build-remote.sh
diff --git a/tests/build-remote.sh b/tests/build-remote.sh
index 4dfb753e1..ca6d1de09 100644
--- a/tests/build-remote.sh
+++ b/tests/build-remote.sh
@@ -1,31 +1,47 @@
-source common.sh
-
-clearStore
-
if ! canUseSandbox; then exit; fi
if ! [[ $busybox =~ busybox ]]; then exit; fi
-chmod -R u+w $TEST_ROOT/machine0 || true
-chmod -R u+w $TEST_ROOT/machine1 || true
-chmod -R u+w $TEST_ROOT/machine2 || true
-rm -rf $TEST_ROOT/machine0 $TEST_ROOT/machine1 $TEST_ROOT/machine2
-rm -f $TEST_ROOT/result
-
unset NIX_STORE_DIR
unset NIX_STATE_DIR
+function join_by { local d=$1; shift; echo -n "$1"; shift; printf "%s" "${@/#/$d}"; }
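+# e.g. join_by '; ' a b c  ->  "a; b; c"; used below to assemble the --builders list.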
+
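+# Each builder entry: URI systems ssh-key max-jobs speed-factor supported-features.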
+builders=(
+ # system-features will automatically be added to the outer URL, but not the
+ # inner remote-store URL.
+ "ssh://localhost?remote-store=$TEST_ROOT/machine1?system-features=foo - - 1 1 foo"
+ "$TEST_ROOT/machine2 - - 1 1 bar"
+ "ssh-ng://localhost?remote-store=$TEST_ROOT/machine3?system-features=baz - - 1 1 baz"
+)
+
# Note: ssh://localhost bypasses ssh, directly invoking nix-store as a
# child process. This allows us to test LegacySSHStore::buildDerivation().
-nix build -L -v -f build-hook.nix -o $TEST_ROOT/result --max-jobs 0 \
+# ssh-ng://... likewise allows us to test RemoteStore::buildDerivation().
+nix build -L -v -f $file -o $TEST_ROOT/result --max-jobs 0 \
--arg busybox $busybox \
--store $TEST_ROOT/machine0 \
- --builders "ssh://localhost?remote-store=$TEST_ROOT/machine1; $TEST_ROOT/machine2 - - 1 1 foo" \
- --system-features foo
+ --builders "$(join_by '; ' "${builders[@]}")"
outPath=$(readlink -f $TEST_ROOT/result)
-cat $TEST_ROOT/machine0/$outPath | grep FOOBAR
+grep 'FOO BAR BAZ' $TEST_ROOT/machine0/$outPath
-# Ensure that input1 was built on store2 due to the required feature.
-(! nix path-info --store $TEST_ROOT/machine1 --all | grep builder-build-remote-input-1.sh)
-nix path-info --store $TEST_ROOT/machine2 --all | grep builder-build-remote-input-1.sh
+set -o pipefail
+
+# Ensure that input1 was built on store1 due to the required feature.
+nix path-info --store $TEST_ROOT/machine1 --all \
+ | grep builder-build-remote-input-1.sh \
+ | grep -v builder-build-remote-input-2.sh \
+ | grep -v builder-build-remote-input-3.sh
+
+# Ensure that input2 was built on store2 due to the required feature.
+nix path-info --store $TEST_ROOT/machine2 --all \
+ | grep -v builder-build-remote-input-1.sh \
+ | grep builder-build-remote-input-2.sh \
+ | grep -v builder-build-remote-input-3.sh
+
+# Ensure that input3 was built on store3 due to the required feature.
+nix path-info --store $TEST_ROOT/machine3 --all \
+ | grep -v builder-build-remote-input-1.sh \
+ | grep -v builder-build-remote-input-2.sh \
+ | grep builder-build-remote-input-3.sh
diff --git a/tests/local.mk b/tests/local.mk
index 0f3bfe606..53035da41 100644
--- a/tests/local.mk
+++ b/tests/local.mk
@@ -1,5 +1,5 @@
nix_tests = \
- init.sh hash.sh lang.sh add.sh simple.sh dependencies.sh \
+ hash.sh lang.sh add.sh simple.sh dependencies.sh \
config.sh \
gc.sh \
gc-concurrent.sh \
@@ -14,7 +14,7 @@ nix_tests = \
placeholders.sh nix-shell.sh \
linux-sandbox.sh \
build-dry.sh \
- build-remote.sh \
+ build-remote-input-addressed.sh \
nar-access.sh \
structured-attrs.sh \
fetchGit.sh \
@@ -34,6 +34,7 @@ nix_tests = \
recursive.sh \
flakes.sh
# parallel.sh
+ # build-remote-content-addressed-fixed.sh \
install-tests += $(foreach x, $(nix_tests), tests/$(x))
diff --git a/tests/remote-store.sh b/tests/remote-store.sh
index 4cc73465a..3a61946f9 100644
--- a/tests/remote-store.sh
+++ b/tests/remote-store.sh
@@ -2,6 +2,9 @@ source common.sh
clearStore
+# Ensure "fake ssh" remote store works just as legacy fake ssh would.
+nix --store ssh-ng://localhost?remote-store=$TEST_ROOT/other-store doctor
+
startDaemon
storeCleared=1 NIX_REMOTE_=$NIX_REMOTE $SHELL ./user-envs.sh