Merge branch 'master' of github.com:NixOS/nix into templated-daemon-protocol

John Ericson 2020-08-19 03:17:41 +00:00
commit be0d429b95
55 changed files with 614 additions and 342 deletions


@@ -370,6 +370,33 @@ false</literal>.</para>
 </varlistentry>
+<varlistentry xml:id="conf-hashed-mirrors"><term><literal>hashed-mirrors</literal></term>
+<listitem><para>A list of web servers used by
+<function>builtins.fetchurl</function> to obtain files by hash.
+Given a hash type <replaceable>ht</replaceable> and a base-16 hash
+<replaceable>h</replaceable>, Nix will try to download the file
+from
+<literal>hashed-mirror/<replaceable>ht</replaceable>/<replaceable>h</replaceable></literal>.
+This allows files to be downloaded even if they have disappeared
+from their original URI. For example, given the hashed mirror
+<literal>http://tarballs.example.com/</literal>, when building the
+derivation
+<programlisting>
+builtins.fetchurl {
+  url = "https://example.org/foo-1.2.3.tar.xz";
+  sha256 = "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae";
+}
+</programlisting>
+Nix will attempt to download this file from
+<literal>http://tarballs.example.com/sha256/2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae</literal>
+first. If it is not available there, it will try the original URI.</para></listitem>
+</varlistentry>
 <varlistentry xml:id="conf-http-connections"><term><literal>http-connections</literal></term>
 <listitem><para>The maximum number of parallel TCP connections
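For orientation, the mirror URL described above is just the mirror prefix, the hash type, and the base-16 hash joined with slashes. A small standalone C++ sketch of that composition (the helper below is illustrative and not part of the Nix sources; the real logic appears in the builtins/fetchurl.cc hunk later in this diff):

#include <iostream>
#include <string>

// Compose <mirror>/<hash-type>/<base16-hash>, as described in the
// hashed-mirrors documentation above. Illustrative only.
std::string hashedMirrorUrl(std::string mirror,
                            const std::string & hashType,
                            const std::string & base16Hash)
{
    if (mirror.empty() || mirror.back() != '/') mirror += '/';
    return mirror + hashType + "/" + base16Hash;
}

int main()
{
    std::cout << hashedMirrorUrl(
        "http://tarballs.example.com",
        "sha256",
        "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae") << "\n";
    // Prints the URL Nix would try before falling back to the original URI.
}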


@@ -80,7 +80,7 @@ SV * queryReferences(char * path)
 SV * queryPathHash(char * path)
 PPCODE:
 try {
-auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash->to_string(Base32, true);
+auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash.to_string(Base32, true);
 XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
 } catch (Error & e) {
 croak("%s", e.what());
@@ -106,7 +106,7 @@ SV * queryPathInfo(char * path, int base32)
 XPUSHs(&PL_sv_undef);
 else
 XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(*info->deriver).c_str(), 0)));
-auto s = info->narHash->to_string(base32 ? Base32 : Base16, true);
+auto s = info->narHash.to_string(base32 ? Base32 : Base16, true);
 XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
 mXPUSHi(info->registrationTime);
 mXPUSHi(info->narSize);
@@ -303,10 +303,10 @@ SV * derivationFromPath(char * drvPath)
 hash = newHV();
 HV * outputs = newHV();
-for (auto & i : drv.outputs)
+for (auto & i : drv.outputsAndPaths(*store()))
 hv_store(
 outputs, i.first.c_str(), i.first.size(),
-newSVpv(store()->printStorePath(i.second.path(*store(), drv.name)).c_str(), 0),
+newSVpv(store()->printStorePath(i.second.second).c_str(), 0),
 0);
 hv_stores(hash, "outputs", newRV((SV *) outputs));


@@ -38,9 +38,9 @@ static AutoCloseFD openSlotLock(const Machine & m, uint64_t slot)
 return openLockFile(fmt("%s/%s-%d", currentLoad, escapeUri(m.storeUri), slot), true);
 }
-static bool allSupportedLocally(const std::set<std::string>& requiredFeatures) {
+static bool allSupportedLocally(Store & store, const std::set<std::string>& requiredFeatures) {
 for (auto & feature : requiredFeatures)
-if (!settings.systemFeatures.get().count(feature)) return false;
+if (!store.systemFeatures.get().count(feature)) return false;
 return true;
 }
@@ -106,7 +106,7 @@ static int _main(int argc, char * * argv)
 auto canBuildLocally = amWilling
 && ( neededSystem == settings.thisSystem
 || settings.extraPlatforms.get().count(neededSystem) > 0)
-&& allSupportedLocally(requiredFeatures);
+&& allSupportedLocally(*store, requiredFeatures);
 /* Error ignored here, will be caught later */
 mkdir(currentLoad.c_str(), 0777);
@@ -201,7 +201,7 @@ static int _main(int argc, char * * argv)
 % concatStringsSep<StringSet>(", ", m.mandatoryFeatures);
 }
-logError({
+logErrorInfo(lvlInfo, {
 .name = "Remote build",
 .description = "Failed to find a machine for remote build!",
 .hint = hint
@@ -224,15 +224,7 @@ static int _main(int argc, char * * argv)
 Activity act(*logger, lvlTalkative, actUnknown, fmt("connecting to '%s'", bestMachine->storeUri));
-Store::Params storeParams;
-if (hasPrefix(bestMachine->storeUri, "ssh://")) {
-storeParams["max-connections"] = "1";
-storeParams["log-fd"] = "4";
-if (bestMachine->sshKey != "")
-storeParams["ssh-key"] = bestMachine->sshKey;
-}
-sshStore = openStore(bestMachine->storeUri, storeParams);
+sshStore = bestMachine->openStore();
 sshStore->connect();
 storeUri = bestMachine->storeUri;
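The deleted block suggests that Machine::openStore() now owns the per-machine SSH store parameters that build-remote.cc used to set inline. A standalone sketch of that encapsulation, using a plain std::map in place of Store::Params and assuming the same three parameters as the removed code (an assumption about the helper, not its actual implementation):

#include <iostream>
#include <map>
#include <string>

// Stand-in for Store::Params; the real type lives in the Nix sources.
using StoreParams = std::map<std::string, std::string>;

struct Machine {
    std::string storeUri;
    std::string sshKey;

    // Sketch of the parameter setup an openStore()-style helper would
    // plausibly perform, mirroring the block removed above (assumed).
    StoreParams storeParams() const
    {
        StoreParams params;
        if (storeUri.rfind("ssh://", 0) == 0) {   // i.e. hasPrefix(storeUri, "ssh://")
            params["max-connections"] = "1";
            params["log-fd"] = "4";
            if (!sshKey.empty())
                params["ssh-key"] = sshKey;
        }
        return params;
    }
};

int main()
{
    // Hypothetical machine entry, for illustration only.
    Machine m{"ssh://builder.example.org", "/etc/nix/builder_key"};
    for (auto & kv : m.storeParams())
        std::cout << kv.first << " = " << kv.second << "\n";
}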


@@ -113,9 +113,9 @@ static void prim_scopedImport(EvalState & state, const Pos & pos, Value * * args
 state.mkList(*outputsVal, drv.outputs.size());
 unsigned int outputs_index = 0;
-for (const auto & o : drv.outputs) {
+for (const auto & o : drv.outputsAndPaths(*state.store)) {
 v2 = state.allocAttr(w, state.symbols.create(o.first));
-mkString(*v2, state.store->printStorePath(o.second.path(*state.store, drv.name)), {"!" + o.first + "!" + path});
+mkString(*v2, state.store->printStorePath(o.second.second), {"!" + o.first + "!" + path});
 outputsVal->listElems()[outputs_index] = state.allocValue();
 mkString(*(outputsVal->listElems()[outputs_index++]), o.first);
 }
@@ -781,7 +781,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
 Hash h = newHashAllowEmpty(*outputHash, ht);
 auto outPath = state.store->makeFixedOutputPath(ingestionMethod, h, drvName);
-if (!jsonObject) drv.env["out"] = state.store->printStorePath(outPath);
+drv.env["out"] = state.store->printStorePath(outPath);
 drv.outputs.insert_or_assign("out", DerivationOutput {
 .output = DerivationOutputCAFixed {
 .hash = FixedOutputHash {
@@ -795,7 +795,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
 else if (contentAddressed) {
 HashType ht = parseHashType(outputHashAlgo);
 for (auto & i : outputs) {
-if (!jsonObject) drv.env[i] = hashPlaceholder(i);
+drv.env[i] = hashPlaceholder(i);
 drv.outputs.insert_or_assign(i, DerivationOutput {
 .output = DerivationOutputCAFloating {
 .method = ingestionMethod,
@@ -813,7 +813,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
 that changes in the set of output names do get reflected in
 the hash. */
 for (auto & i : outputs) {
-if (!jsonObject) drv.env[i] = "";
+drv.env[i] = "";
 drv.outputs.insert_or_assign(i,
 DerivationOutput {
 .output = DerivationOutputInputAddressed {
@@ -828,7 +828,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
 for (auto & i : outputs) {
 auto outPath = state.store->makeOutputPath(i, h, drvName);
-if (!jsonObject) drv.env[i] = state.store->printStorePath(outPath);
+drv.env[i] = state.store->printStorePath(outPath);
 drv.outputs.insert_or_assign(i,
 DerivationOutput {
 .output = DerivationOutputInputAddressed {
@@ -839,7 +839,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
 }
 /* Write the resulting term into the Nix store directory. */
-auto drvPath = writeDerivation(state.store, drv, drvName, state.repair);
+auto drvPath = writeDerivation(state.store, drv, state.repair);
 auto drvPathS = state.store->printStorePath(drvPath);
 printMsg(lvlChatty, "instantiated '%1%' -> '%2%'", drvName, drvPathS);
@@ -852,9 +852,9 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
 state.mkAttrs(v, 1 + drv.outputs.size());
 mkString(*state.allocAttr(v, state.sDrvPath), drvPathS, {"=" + drvPathS});
-for (auto & i : drv.outputs) {
+for (auto & i : drv.outputsAndPaths(*state.store)) {
 mkString(*state.allocAttr(v, state.symbols.create(i.first)),
-state.store->printStorePath(i.second.path(*state.store, drv.name)), {"!" + i.first + "!" + drvPathS});
+state.store->printStorePath(i.second.second), {"!" + i.first + "!" + drvPathS});
 }
 v.attrs->sort();
 }


@@ -212,7 +212,7 @@ static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
 : hashFile(htSHA256, path);
 if (hash != *expectedHash)
 throw Error((unsigned int) 102, "hash mismatch in file downloaded from '%s':\n wanted: %s\n got: %s",
-*url, expectedHash->to_string(Base32, true), hash->to_string(Base32, true));
+*url, expectedHash->to_string(Base32, true), hash.to_string(Base32, true));
 }
 if (state.allowedPaths)


@@ -130,12 +130,12 @@ std::pair<Tree, Input> Input::fetch(ref<Store> store) const
 tree.actualPath = store->toRealPath(tree.storePath);
 auto narHash = store->queryPathInfo(tree.storePath)->narHash;
-input.attrs.insert_or_assign("narHash", narHash->to_string(SRI, true));
+input.attrs.insert_or_assign("narHash", narHash.to_string(SRI, true));
 if (auto prevNarHash = getNarHash()) {
 if (narHash != *prevNarHash)
 throw Error((unsigned int) 102, "NAR hash mismatch in input '%s' (%s), expected '%s', got '%s'",
-to_string(), tree.actualPath, prevNarHash->to_string(SRI, true), narHash->to_string(SRI, true));
+to_string(), tree.actualPath, prevNarHash->to_string(SRI, true), narHash.to_string(SRI, true));
 }
 if (auto prevLastModified = getLastModified()) {


@@ -269,7 +269,7 @@ struct GitInputScheme : InputScheme
 // modified dirty file?
 input.attrs.insert_or_assign(
 "lastModified",
-haveCommits ? std::stoull(runProgram("git", true, { "-C", actualUrl, "log", "-1", "--format=%ct", "HEAD" })) : 0);
+haveCommits ? std::stoull(runProgram("git", true, { "-C", actualUrl, "log", "-1", "--format=%ct", "--no-show-signature", "HEAD" })) : 0);
 return {
 Tree(store->printStorePath(storePath), std::move(storePath)),
@@ -421,7 +421,7 @@ struct GitInputScheme : InputScheme
 auto storePath = store->addToStore(name, tmpDir, FileIngestionMethod::Recursive, htSHA256, filter);
-auto lastModified = std::stoull(runProgram("git", true, { "-C", repoDir, "log", "-1", "--format=%ct", input.getRev()->gitRev() }));
+auto lastModified = std::stoull(runProgram("git", true, { "-C", repoDir, "log", "-1", "--format=%ct", "--no-show-signature", input.getRev()->gitRev() }));
 Attrs infoAttrs({
 {"rev", input.getRev()->gitRev()},


@@ -67,8 +67,10 @@ DownloadFileResult downloadFile(
 StringSink sink;
 dumpString(*res.data, sink);
 auto hash = hashString(htSHA256, *res.data);
-ValidPathInfo info(store->makeFixedOutputPath(FileIngestionMethod::Flat, hash, name));
-info.narHash = hashString(htSHA256, *sink.s);
+ValidPathInfo info {
+store->makeFixedOutputPath(FileIngestionMethod::Flat, hash, name),
+hashString(htSHA256, *sink.s),
+};
 info.narSize = sink.s->size();
 info.ca = FixedOutputHash {
 .method = FileIngestionMethod::Flat,
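This is the first of several hunks in the merge where ValidPathInfo moves from a one-argument constructor plus a later narHash assignment to brace-initialization that takes the NAR hash up front. A standalone sketch of the idea with stand-in types (the real class lives in libstore; names here are illustrative):

#include <cassert>
#include <cstdint>
#include <string>
#include <utility>

// Stand-ins for StorePath and Hash; only the shape matters here.
struct StorePath { std::string name; };
struct Hash { std::string rendered; };

// Requiring the hash at construction makes "no NAR hash yet" unrepresentable,
// which is why later hunks can simplify checks such as assert(info.narHash && info.narSize).
struct PathInfo {
    StorePath path;
    Hash narHash;            // mandatory, no longer optional
    uint64_t narSize = 0;

    PathInfo(StorePath p, Hash h)
        : path(std::move(p)), narHash(std::move(h)) {}
};

int main()
{
    PathInfo info { StorePath{"foo-1.2.3.tar.xz"}, Hash{"sha256:2c26b4..."} };
    info.narSize = 1234;                       // other fields stay assignable
    assert(!info.narHash.rendered.empty());    // always true by construction
}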


@@ -362,7 +362,7 @@ public:
 auto width = getWindowSize().second;
 if (width <= 0) width = std::numeric_limits<decltype(width)>::max();
-writeToStderr("\r" + filterANSIEscapes(line, false, width) + "\e[K");
+writeToStderr("\r" + filterANSIEscapes(line, false, width) + ANSI_NORMAL + "\e[K");
 }
 std::string getStatus(State & state)


@@ -143,7 +143,7 @@ struct FileSource : FdSource
 void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource,
 RepairFlag repair, CheckSigsFlag checkSigs)
 {
-assert(info.narHash && info.narSize);
+assert(info.narSize);
 if (!repair && isValidPath(info.path)) {
 // FIXME: copyNAR -> null sink
@@ -219,7 +219,7 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource
 }
 }
-upsertFile(std::string(info.path.to_string()) + ".ls", jsonOut.str(), "application/json");
+upsertFile(std::string(info.path.hashPart()) + ".ls", jsonOut.str(), "application/json");
 }
 /* Optionally maintain an index of DWARF debug info files
@@ -312,14 +312,10 @@ void BinaryCacheStore::narFromPath(const StorePath & storePath, Sink & sink)
 {
 auto info = queryPathInfo(storePath).cast<const NarInfo>();
-uint64_t narSize = 0;
-LambdaSink wrapperSink([&](const unsigned char * data, size_t len) {
-sink(data, len);
-narSize += len;
-});
-auto decompressor = makeDecompressionSink(info->compression, wrapperSink);
+LengthSink narSize;
+TeeSink tee { sink, narSize };
+auto decompressor = makeDecompressionSink(info->compression, tee);
 try {
 getFile(info->url, *decompressor);
@@ -331,7 +327,7 @@ void BinaryCacheStore::narFromPath(const StorePath & storePath, Sink & sink)
 stats.narRead++;
 //stats.narReadCompressedBytes += nar->size(); // FIXME
-stats.narReadBytes += narSize;
+stats.narReadBytes += narSize.length;
 }
 void BinaryCacheStore::queryPathInfoUncached(const StorePath & storePath,
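The narFromPath() hunk above replaces a hand-rolled LambdaSink byte counter with a LengthSink fed through a TeeSink. A standalone sketch of that sink pattern with simplified stand-in classes (the real Sink hierarchy lives in libutil; these types are illustrative):

#include <cstddef>
#include <cstdint>
#include <iostream>
#include <string>

// Minimal stand-in for the Sink interface.
struct Sink {
    virtual void operator()(const unsigned char * data, size_t len) = 0;
    virtual ~Sink() { }
};

// Counts how many bytes pass through it, like the assumed LengthSink.
struct CountingSink : Sink {
    uint64_t length = 0;
    void operator()(const unsigned char *, size_t len) override { length += len; }
};

// Forwards data to two sinks at once, like the assumed TeeSink.
struct TeeSink : Sink {
    Sink & a; Sink & b;
    TeeSink(Sink & a, Sink & b) : a(a), b(b) { }
    void operator()(const unsigned char * data, size_t len) override { a(data, len); b(data, len); }
};

// Plays the role of the downstream sink that receives the NAR.
struct PrintSink : Sink {
    void operator()(const unsigned char * data, size_t len) override {
        std::cout.write(reinterpret_cast<const char *>(data), len);
    }
};

int main()
{
    PrintSink out;
    CountingSink counter;
    TeeSink tee{out, counter};
    std::string payload = "hello nar\n";
    tee(reinterpret_cast<const unsigned char *>(payload.data()), payload.size());
    std::cerr << "bytes seen: " << counter.length << "\n";
}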
@@ -385,7 +381,10 @@ StorePath BinaryCacheStore::addToStore(const string & name, const Path & srcPath
 h = hashString(hashAlgo, s);
 }
-ValidPathInfo info(makeFixedOutputPath(method, *h, name));
+ValidPathInfo info {
+makeFixedOutputPath(method, *h, name),
+Hash::dummy, // Will be fixed in addToStore, which recomputes nar hash
+};
 auto source = StringSource { *sink.s };
 addToStore(info, source, repair, CheckSigs);
@@ -396,7 +395,10 @@ StorePath BinaryCacheStore::addTextToStore(const string & name, const string & s,
 StorePath BinaryCacheStore::addTextToStore(const string & name, const string & s,
 const StorePathSet & references, RepairFlag repair)
 {
-ValidPathInfo info(computeStorePathForText(name, s, references));
+ValidPathInfo info {
+computeStorePathForText(name, s, references),
+Hash::dummy, // Will be fixed in addToStore, which recomputes nar hash
+};
 info.references = references;
 if (repair || !isValidPath(info.path)) {


@@ -1181,8 +1181,8 @@ void DerivationGoal::haveDerivation()
 retrySubstitution = false;
-for (auto & i : drv->outputs)
-worker.store.addTempRoot(i.second.path(worker.store, drv->name));
+for (auto & i : drv->outputsAndPaths(worker.store))
+worker.store.addTempRoot(i.second.second);
 /* Check what outputs paths are not already valid. */
 auto invalidOutputs = checkPathValidity(false, buildMode == bmRepair);
@@ -1288,14 +1288,14 @@ void DerivationGoal::repairClosure()
 /* Get the output closure. */
 StorePathSet outputClosure;
-for (auto & i : drv->outputs) {
+for (auto & i : drv->outputsAndPaths(worker.store)) {
 if (!wantOutput(i.first, wantedOutputs)) continue;
-worker.store.computeFSClosure(i.second.path(worker.store, drv->name), outputClosure);
+worker.store.computeFSClosure(i.second.second, outputClosure);
 }
 /* Filter out our own outputs (which we have already checked). */
-for (auto & i : drv->outputs)
-outputClosure.erase(i.second.path(worker.store, drv->name));
+for (auto & i : drv->outputsAndPaths(worker.store))
+outputClosure.erase(i.second.second);
 /* Get all dependencies of this derivation so that we know which
 derivation is responsible for which path in the output
@@ -1306,8 +1306,8 @@ void DerivationGoal::repairClosure()
 for (auto & i : inputClosure)
 if (i.isDerivation()) {
 Derivation drv = worker.store.derivationFromPath(i);
-for (auto & j : drv.outputs)
-outputsToDrv.insert_or_assign(j.second.path(worker.store, drv.name), i);
+for (auto & j : drv.outputsAndPaths(worker.store))
+outputsToDrv.insert_or_assign(j.second.second, i);
 }
 /* Check each path (slow!). */
@@ -1466,16 +1466,16 @@ void DerivationGoal::tryToBuild()
 /* If any of the outputs already exist but are not valid, delete
 them. */
-for (auto & i : drv->outputs) {
-if (worker.store.isValidPath(i.second.path(worker.store, drv->name))) continue;
-debug("removing invalid path '%s'", worker.store.printStorePath(i.second.path(worker.store, drv->name)));
-deletePath(worker.store.Store::toRealPath(i.second.path(worker.store, drv->name)));
+for (auto & i : drv->outputsAndPaths(worker.store)) {
+if (worker.store.isValidPath(i.second.second)) continue;
+debug("removing invalid path '%s'", worker.store.printStorePath(i.second.second));
+deletePath(worker.store.Store::toRealPath(i.second.second));
 }
 /* Don't do a remote build if the derivation has the attribute
 `preferLocalBuild' set. Also, check and repair modes are only
 supported for local builds. */
-bool buildLocally = buildMode != bmNormal || parsedDrv->willBuildLocally();
+bool buildLocally = buildMode != bmNormal || parsedDrv->willBuildLocally(worker.store);
 /* Is the build hook willing to accept this job? */
 if (!buildLocally) {
@@ -1919,8 +1919,8 @@ StorePathSet DerivationGoal::exportReferences(const StorePathSet & storePaths)
 for (auto & j : paths2) {
 if (j.isDerivation()) {
 Derivation drv = worker.store.derivationFromPath(j);
-for (auto & k : drv.outputs)
-worker.store.computeFSClosure(k.second.path(worker.store, drv.name), paths);
+for (auto & k : drv.outputsAndPaths(worker.store))
+worker.store.computeFSClosure(k.second.second, paths);
 }
 }
@@ -1964,13 +1964,13 @@ void linkOrCopy(const Path & from, const Path & to)
 void DerivationGoal::startBuilder()
 {
 /* Right platform? */
-if (!parsedDrv->canBuildLocally())
+if (!parsedDrv->canBuildLocally(worker.store))
 throw Error("a '%s' with features {%s} is required to build '%s', but I am a '%s' with features {%s}",
 drv->platform,
 concatStringsSep(", ", parsedDrv->getRequiredSystemFeatures()),
 worker.store.printStorePath(drvPath),
 settings.thisSystem,
-concatStringsSep<StringSet>(", ", settings.systemFeatures));
+concatStringsSep<StringSet>(", ", worker.store.systemFeatures));
 if (drv->isBuiltin())
 preloadNSS();
@@ -2014,8 +2014,8 @@ void DerivationGoal::startBuilder()
 chownToBuilder(tmpDir);
 /* Substitute output placeholders with the actual output paths. */
-for (auto & output : drv->outputs)
-inputRewrites[hashPlaceholder(output.first)] = worker.store.printStorePath(output.second.path(worker.store, drv->name));
+for (auto & output : drv->outputsAndPaths(worker.store))
+inputRewrites[hashPlaceholder(output.first)] = worker.store.printStorePath(output.second.second);
 /* Construct the environment passed to the builder. */
 initEnv();
@@ -2199,8 +2199,8 @@ void DerivationGoal::startBuilder()
 rebuilding a path that is in settings.dirsInChroot
 (typically the dependencies of /bin/sh). Throw them
 out. */
-for (auto & i : drv->outputs)
-dirsInChroot.erase(worker.store.printStorePath(i.second.path(worker.store, drv->name)));
+for (auto & i : drv->outputsAndPaths(worker.store))
+dirsInChroot.erase(worker.store.printStorePath(i.second.second));
 #elif __APPLE__
 /* We don't really have any parent prep work to do (yet?)
@@ -2612,8 +2612,8 @@ void DerivationGoal::writeStructuredAttrs()
 /* Add an "outputs" object containing the output paths. */
 nlohmann::json outputs;
-for (auto & i : drv->outputs)
-outputs[i.first] = rewriteStrings(worker.store.printStorePath(i.second.path(worker.store, drv->name)), inputRewrites);
+for (auto & i : drv->outputsAndPaths(worker.store))
+outputs[i.first] = rewriteStrings(worker.store.printStorePath(i.second.second), inputRewrites);
 json["outputs"] = outputs;
 /* Handle exportReferencesGraph. */
@@ -2819,9 +2819,9 @@ struct RestrictedStore : public LocalFSStore
 if (!goal.isAllowed(path.path))
 throw InvalidPath("cannot build unknown path '%s' in recursive Nix", printStorePath(path.path));
 auto drv = derivationFromPath(path.path);
-for (auto & output : drv.outputs)
+for (auto & output : drv.outputsAndPaths(*this))
 if (wantOutput(output.first, path.outputs))
-newPaths.insert(output.second.path(*this, drv.name));
+newPaths.insert(output.second.second);
 } else if (!goal.isAllowed(path.path))
 throw InvalidPath("cannot build unknown path '%s' in recursive Nix", printStorePath(path.path));
 }
@@ -2924,7 +2924,8 @@ void DerivationGoal::startDaemon()
 FdSink to(remote.get());
 try {
 daemon::processConnection(store, from, to,
-daemon::NotTrusted, daemon::Recursive, "nobody", 65535);
+daemon::NotTrusted, daemon::Recursive,
+[&](Store & store) { store.createUser("nobody", 65535); });
 debug("terminated daemon connection");
 } catch (SysError &) {
 ignoreException();
@@ -3183,7 +3184,7 @@ void DerivationGoal::runChild()
 createDirs(chrootRootDir + "/dev/shm");
 createDirs(chrootRootDir + "/dev/pts");
 ss.push_back("/dev/full");
-if (settings.systemFeatures.get().count("kvm") && pathExists("/dev/kvm"))
+if (worker.store.systemFeatures.get().count("kvm") && pathExists("/dev/kvm"))
 ss.push_back("/dev/kvm");
 ss.push_back("/dev/null");
 ss.push_back("/dev/random");
@@ -3620,8 +3621,8 @@ void DerivationGoal::registerOutputs()
 to do anything here. */
 if (hook) {
 bool allValid = true;
-for (auto & i : drv->outputs)
-if (!worker.store.isValidPath(i.second.path(worker.store, drv->name))) allValid = false;
+for (auto & i : drv->outputsAndPaths(worker.store))
+if (!worker.store.isValidPath(i.second.second)) allValid = false;
 if (allValid) return;
 }
@@ -3642,23 +3643,23 @@ void DerivationGoal::registerOutputs()
 Nix calls. */
 StorePathSet referenceablePaths;
 for (auto & p : inputPaths) referenceablePaths.insert(p);
-for (auto & i : drv->outputs) referenceablePaths.insert(i.second.path(worker.store, drv->name));
+for (auto & i : drv->outputsAndPaths(worker.store)) referenceablePaths.insert(i.second.second);
 for (auto & p : addedPaths) referenceablePaths.insert(p);
 /* Check whether the output paths were created, and grep each
 output path to determine what other paths it references. Also make all
 output paths read-only. */
-for (auto & i : drv->outputs) {
-auto path = worker.store.printStorePath(i.second.path(worker.store, drv->name));
-if (!missingPaths.count(i.second.path(worker.store, drv->name))) continue;
+for (auto & i : drv->outputsAndPaths(worker.store)) {
+auto path = worker.store.printStorePath(i.second.second);
+if (!missingPaths.count(i.second.second)) continue;
 Path actualPath = path;
 if (needsHashRewrite()) {
-auto r = redirectedOutputs.find(i.second.path(worker.store, drv->name));
+auto r = redirectedOutputs.find(i.second.second);
 if (r != redirectedOutputs.end()) {
 auto redirected = worker.store.Store::toRealPath(r->second);
 if (buildMode == bmRepair
-&& redirectedBadOutputs.count(i.second.path(worker.store, drv->name))
+&& redirectedBadOutputs.count(i.second.second)
 && pathExists(redirected))
 replaceValidPath(path, redirected);
 if (buildMode == bmCheck)
@@ -3725,7 +3726,7 @@ void DerivationGoal::registerOutputs()
 hash). */
 std::optional<ContentAddress> ca;
-if (! std::holds_alternative<DerivationOutputInputAddressed>(i.second.output)) {
+if (! std::holds_alternative<DerivationOutputInputAddressed>(i.second.first.output)) {
 DerivationOutputCAFloating outputHash;
 std::visit(overloaded {
 [&](DerivationOutputInputAddressed doi) {
@@ -3740,7 +3741,7 @@ void DerivationGoal::registerOutputs()
 [&](DerivationOutputCAFloating dof) {
 outputHash = dof;
 },
-}, i.second.output);
+}, i.second.first.output);
 if (outputHash.method == FileIngestionMethod::Flat) {
 /* The output path should be a regular file without execute permission. */
@@ -3757,12 +3758,12 @@ void DerivationGoal::registerOutputs()
 ? hashPath(outputHash.hashType, actualPath).first
 : hashFile(outputHash.hashType, actualPath);
-auto dest = worker.store.makeFixedOutputPath(outputHash.method, h2, i.second.path(worker.store, drv->name).name());
+auto dest = worker.store.makeFixedOutputPath(outputHash.method, h2, i.second.second.name());
 // true if either floating CA, or incorrect fixed hash.
 bool needsMove = true;
-if (auto p = std::get_if<DerivationOutputCAFixed>(& i.second.output)) {
+if (auto p = std::get_if<DerivationOutputCAFixed>(& i.second.first.output)) {
 Hash & h = p->hash.hash;
 if (h != h2) {
@@ -3868,8 +3869,10 @@ void DerivationGoal::registerOutputs()
 worker.markContentsGood(worker.store.parseStorePath(path));
 }
-ValidPathInfo info(worker.store.parseStorePath(path));
-info.narHash = hash.first;
+ValidPathInfo info {
+worker.store.parseStorePath(path),
+hash.first,
+};
 info.narSize = hash.second;
 info.references = std::move(references);
 info.deriver = drvPath;
@@ -3925,8 +3928,8 @@ void DerivationGoal::registerOutputs()
 /* If this is the first round of several, then move the output out of the way. */
 if (nrRounds > 1 && curRound == 1 && curRound < nrRounds && keepPreviousRound) {
-for (auto & i : drv->outputs) {
-auto path = worker.store.printStorePath(i.second.path(worker.store, drv->name));
+for (auto & i : drv->outputsAndPaths(worker.store)) {
+auto path = worker.store.printStorePath(i.second.second);
 Path prev = path + checkSuffix;
 deletePath(prev);
 Path dst = path + checkSuffix;
@@ -3943,8 +3946,8 @@ void DerivationGoal::registerOutputs()
 /* Remove the .check directories if we're done. FIXME: keep them
 if the result was not determistic? */
 if (curRound == nrRounds) {
-for (auto & i : drv->outputs) {
-Path prev = worker.store.printStorePath(i.second.path(worker.store, drv->name)) + checkSuffix;
+for (auto & i : drv->outputsAndPaths(worker.store)) {
+Path prev = worker.store.printStorePath(i.second.second) + checkSuffix;
 deletePath(prev);
 }
 }
@@ -4242,12 +4245,12 @@ void DerivationGoal::flushLine()
 StorePathSet DerivationGoal::checkPathValidity(bool returnValid, bool checkHash)
 {
 StorePathSet result;
-for (auto & i : drv->outputs) {
+for (auto & i : drv->outputsAndPaths(worker.store)) {
 if (!wantOutput(i.first, wantedOutputs)) continue;
 bool good =
-worker.store.isValidPath(i.second.path(worker.store, drv->name)) &&
-(!checkHash || worker.pathContentsGood(i.second.path(worker.store, drv->name)));
-if (good == returnValid) result.insert(i.second.path(worker.store, drv->name));
+worker.store.isValidPath(i.second.second) &&
+(!checkHash || worker.pathContentsGood(i.second.second));
+if (good == returnValid) result.insert(i.second.second);
 }
 return result;
 }
@@ -5074,7 +5077,7 @@ bool Worker::pathContentsGood(const StorePath & path)
 if (!pathExists(store.printStorePath(path)))
 res = false;
 else {
-HashResult current = hashPath(info->narHash->type, store.printStorePath(path));
+HashResult current = hashPath(info->narHash.type, store.printStorePath(path));
 Hash nullHash(htSHA256);
 res = info->narHash == nullHash || info->narHash == current.first;
 }


@@ -58,6 +58,20 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData)
 }
 };
+/* Try the hashed mirrors first. */
+if (getAttr("outputHashMode") == "flat")
+for (auto hashedMirror : settings.hashedMirrors.get())
+try {
+if (!hasSuffix(hashedMirror, "/")) hashedMirror += '/';
+std::optional<HashType> ht = parseHashTypeOpt(getAttr("outputHashAlgo"));
+Hash h = newHashAllowEmpty(getAttr("outputHash"), ht);
+fetch(hashedMirror + printHashType(h.type) + "/" + h.to_string(Base16, false));
+return;
+} catch (Error & e) {
+debug(e.what());
+}
+/* Otherwise try the specified URL. */
 fetch(mainUrl);
 }


@@ -289,7 +289,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
 logger->startWork();
 auto hash = store->queryPathInfo(path)->narHash;
 logger->stopWork();
-to << hash->to_string(Base16, false);
+to << hash.to_string(Base16, false);
 break;
 }
@@ -454,8 +454,46 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
 readDerivation(from, *store, drv, Derivation::nameFromPath(drvPath));
 BuildMode buildMode = (BuildMode) readInt(from);
 logger->startWork();
-if (!trusted)
-throw Error("you are not privileged to build derivations");
+/* Content-addressed derivations are trustless because their output paths
+are verified by their content alone, so any derivation is free to
+try to produce such a path.
+Input-addressed derivation output paths, however, are calculated
+from the derivation closure that produced them---even knowing the
+root derivation is not enough. That the output data actually came
+from those derivations is fundamentally unverifiable, but the daemon
+trusts itself on that matter. The question instead is whether the
+submitted plan has rights to the output paths it wants to fill, and
+at least the derivation closure proves that.
+It would have been nice if input-address algorithm merely depended
+on the build time closure, rather than depending on the derivation
+closure. That would mean input-addressed paths used at build time
+would just be trusted and not need their own evidence. This is in
+fact fine as the same guarantees would hold *inductively*: either
+the remote builder has those paths and already trusts them, or it
+needs to build them too and thus their evidence must be provided in
+turn. The advantage of this variant algorithm is that the evidence
+for input-addressed paths which the remote builder already has
+doesn't need to be sent again.
+That said, now that we have floating CA derivations, it is better
+that people just migrate to those which also solve this problem, and
+others. It's the same migration difficulty with strictly more
+benefit.
+Lastly, do note that when we parse fixed-output content-addressed
+derivations, we throw out the precomputed output paths and just
+store the hashes, so there aren't two competing sources of truth an
+attacker could exploit. */
+if (drv.type() == DerivationType::InputAddressed && !trusted)
+throw Error("you are not privileged to build input-addressed derivations");
+/* Make sure that the non-input-addressed derivations that got this far
+are in fact content-addressed if we don't trust them. */
+assert(derivationIsCA(drv.type()) || trusted);
 auto res = store->buildDerivation(drvPath, drv, buildMode);
 logger->stopWork();
 to << res.status << res.errorMsg;
@@ -638,7 +676,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
 if (GET_PROTOCOL_MINOR(clientVersion) >= 17)
 to << 1;
 to << (info->deriver ? store->printStorePath(*info->deriver) : "")
-<< info->narHash->to_string(Base16, false);
+<< info->narHash.to_string(Base16, false);
 WorkerProto<StorePathSet>::write(*store, to, info->references);
 to << info->registrationTime << info->narSize;
 if (GET_PROTOCOL_MINOR(clientVersion) >= 16) {
@@ -688,17 +726,18 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
 auto path = store->parseStorePath(readString(from));
 logger->startWork();
 logger->stopWork();
-dumpPath(store->printStorePath(path), to);
+dumpPath(store->toRealPath(path), to);
 break;
 }
 case wopAddToStoreNar: {
 bool repair, dontCheckSigs;
-ValidPathInfo info(store->parseStorePath(readString(from)));
+auto path = store->parseStorePath(readString(from));
 auto deriver = readString(from);
+auto narHash = Hash::parseAny(readString(from), htSHA256);
+ValidPathInfo info { path, narHash };
 if (deriver != "")
 info.deriver = store->parseStorePath(deriver);
-info.narHash = Hash::parseAny(readString(from), htSHA256);
 info.references = WorkerProto<StorePathSet>::read(*store, from);
 from >> info.registrationTime >> info.narSize >> info.ultimate;
 info.sigs = readStrings<StringSet>(from);
@@ -817,8 +856,7 @@ void processConnection(
 FdSink & to,
 TrustedFlag trusted,
 RecursiveFlag recursive,
-const std::string & userName,
-uid_t userId)
+std::function<void(Store &)> authHook)
 {
 auto monitor = !recursive ? std::make_unique<MonitorFdHup>(from.fd) : nullptr;
@@ -859,15 +897,7 @@ void processConnection(
 /* If we can't accept clientVersion, then throw an error
 *here* (not above). */
-#if 0
-/* Prevent users from doing something very dangerous. */
-if (geteuid() == 0 &&
-querySetting("build-users-group", "") == "")
-throw Error("if you run 'nix-daemon' as root, then you MUST set 'build-users-group'!");
-#endif
-store->createUser(userName, userId);
+authHook(*store);
 tunnelLogger->stopWork();
 to.flush();


@@ -12,7 +12,10 @@ void processConnection(
 FdSink & to,
 TrustedFlag trusted,
 RecursiveFlag recursive,
-const std::string & userName,
-uid_t userId);
+/* Arbitrary hook to check authorization / initialize user data / whatever
+after the protocol has been negotiated. The idea is that this function
+and everything it calls doesn't know about this stuff, and the
+`nix-daemon` handles that instead. */
+std::function<void(Store &)> authHook);
 }
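To make the new authHook parameter concrete: the caller now injects authorization and user setup as a lambda that runs after the protocol handshake, exactly as the recursive-Nix call in the build.cc hunk above does. A standalone sketch of that calling pattern with a stand-in Store (illustrative, not the real nix::Store or daemon code):

#include <functional>
#include <iostream>
#include <string>

// Stand-in for nix::Store; only what the sketch needs.
struct Store {
    void createUser(const std::string & name, unsigned uid) {
        std::cout << "createUser(" << name << ", " << uid << ")\n";
    }
};

// Shape of the new processConnection(): authorization / user setup is a hook
// supplied by the caller and invoked once the handshake is done.
void processConnection(Store & store, std::function<void(Store &)> authHook)
{
    // ... protocol negotiation would happen here ...
    authHook(store);   // replaces the old hard-coded createUser(userName, userId)
    // ... serve requests ...
}

int main()
{
    Store store;
    processConnection(store,
        [](Store & s) { s.createUser("nobody", 65535); });
}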


@@ -62,7 +62,7 @@ bool BasicDerivation::isBuiltin() const
 StorePath writeDerivation(ref<Store> store,
-const Derivation & drv, std::string_view name, RepairFlag repair)
+const Derivation & drv, RepairFlag repair)
 {
 auto references = drv.inputSrcs;
 for (auto & i : drv.inputDrvs)
@@ -70,7 +70,7 @@ StorePath writeDerivation(ref<Store> store,
 /* Note that the outputs of a derivation are *not* references
 (that can be missing (of course) and should not necessarily be
 held during a garbage collection). */
-auto suffix = std::string(name) + drvExtension;
+auto suffix = std::string(drv.name) + drvExtension;
 auto contents = drv.unparse(*store, false);
 return settings.readOnlyMode
 ? store->computeStorePathForText(suffix, contents, references)
@@ -139,18 +139,14 @@ static StringSet parseStrings(std::istream & str, bool arePaths)
 }
-static DerivationOutput parseDerivationOutput(const Store & store, std::istringstream & str)
+static DerivationOutput parseDerivationOutput(const Store & store,
+StorePath path, std::string_view hashAlgo, std::string_view hash)
 {
-expect(str, ","); auto path = store.parseStorePath(parsePath(str));
-expect(str, ","); auto hashAlgo = parseString(str);
-expect(str, ","); const auto hash = parseString(str);
-expect(str, ")");
 if (hashAlgo != "") {
 auto method = FileIngestionMethod::Flat;
 if (string(hashAlgo, 0, 2) == "r:") {
 method = FileIngestionMethod::Recursive;
-hashAlgo = string(hashAlgo, 2);
+hashAlgo = hashAlgo.substr(2);
 }
 const HashType hashType = parseHashType(hashAlgo);
@@ -178,6 +174,16 @@ static DerivationOutput parseDerivationOutput(const Store & store, std::istrings
 };
 }
+static DerivationOutput parseDerivationOutput(const Store & store, std::istringstream & str)
+{
+expect(str, ","); auto path = store.parseStorePath(parsePath(str));
+expect(str, ","); const auto hashAlgo = parseString(str);
+expect(str, ","); const auto hash = parseString(str);
+expect(str, ")");
+return parseDerivationOutput(store, std::move(path), hashAlgo, hash);
+}
 static Derivation parseDerivation(const Store & store, std::string && s, std::string_view name)
 {
@@ -474,12 +480,12 @@ DrvHashModulo hashDerivationModulo(Store & store, const Derivation & drv, bool m
 throw Error("Regular input-addressed derivations are not yet allowed to depend on CA derivations");
 case DerivationType::CAFixed: {
 std::map<std::string, Hash> outputHashes;
-for (const auto & i : drv.outputs) {
-auto & dof = std::get<DerivationOutputCAFixed>(i.second.output);
+for (const auto & i : drv.outputsAndPaths(store)) {
+auto & dof = std::get<DerivationOutputCAFixed>(i.second.first.output);
 auto hash = hashString(htSHA256, "fixed:out:"
 + dof.hash.printMethodAlgo() + ":"
 + dof.hash.hash.to_string(Base16, false) + ":"
-+ store.printStorePath(i.second.path(store, drv.name)));
++ store.printStorePath(i.second.second));
 outputHashes.insert_or_assign(i.first, std::move(hash));
 }
 return outputHashes;
@@ -533,46 +539,18 @@ bool wantOutput(const string & output, const std::set<string> & wanted)
 StorePathSet BasicDerivation::outputPaths(const Store & store) const
 {
 StorePathSet paths;
-for (auto & i : outputs)
-paths.insert(i.second.path(store, name));
+for (auto & i : outputsAndPaths(store))
+paths.insert(i.second.second);
 return paths;
 }
 static DerivationOutput readDerivationOutput(Source & in, const Store & store)
 {
 auto path = store.parseStorePath(readString(in));
-auto hashAlgo = readString(in);
-auto hash = readString(in);
-if (hashAlgo != "") {
-auto method = FileIngestionMethod::Flat;
-if (string(hashAlgo, 0, 2) == "r:") {
-method = FileIngestionMethod::Recursive;
-hashAlgo = string(hashAlgo, 2);
-}
-auto hashType = parseHashType(hashAlgo);
-return hash != ""
-? DerivationOutput {
-.output = DerivationOutputCAFixed {
-.hash = FixedOutputHash {
-.method = std::move(method),
-.hash = Hash::parseNonSRIUnprefixed(hash, hashType),
-},
-}
-}
-: (settings.requireExperimentalFeature("ca-derivations"),
-DerivationOutput {
-.output = DerivationOutputCAFloating {
-.method = std::move(method),
-.hashType = std::move(hashType),
-},
-});
-} else
-return DerivationOutput {
-.output = DerivationOutputInputAddressed {
-.path = std::move(path),
-}
-};
+const auto hashAlgo = readString(in);
+const auto hash = readString(in);
+return parseDerivationOutput(store, std::move(path), hashAlgo, hash);
 }
 StringSet BasicDerivation::outputNames() const
@@ -583,6 +561,27 @@ StringSet BasicDerivation::outputNames() const
 return names;
 }
+DerivationOutputsAndPaths BasicDerivation::outputsAndPaths(const Store & store) const {
+DerivationOutputsAndPaths outsAndPaths;
+for (auto output : outputs)
+outsAndPaths.insert(std::make_pair(
+output.first,
+std::make_pair(output.second, output.second.path(store, name))
+)
+);
+return outsAndPaths;
+}
+DerivationOutputsAndOptPaths BasicDerivation::outputsAndOptPaths(const Store & store) const {
+DerivationOutputsAndOptPaths outsAndOptPaths;
+for (auto output : outputs)
+outsAndOptPaths.insert(std::make_pair(
+output.first,
+std::make_pair(output.second, output.second.pathOpt(store, output.first))
+)
+);
+return outsAndOptPaths;
+}
 std::string_view BasicDerivation::nameFromPath(const StorePath & drvPath) {
 auto nameWithSuffix = drvPath.name();
@@ -623,9 +622,9 @@ Source & readDerivation(Source & in, const Store & store, BasicDerivation & drv,
 void writeDerivation(Sink & out, const Store & store, const BasicDerivation & drv)
 {
 out << drv.outputs.size();
-for (auto & i : drv.outputs) {
+for (auto & i : drv.outputsAndPaths(store)) {
 out << i.first
-<< store.printStorePath(i.second.path(store, drv.name));
+<< store.printStorePath(i.second.second);
 std::visit(overloaded {
 [&](DerivationOutputInputAddressed doi) {
 out << "" << "";
@@ -638,7 +637,7 @@ void writeDerivation(Sink & out, const Store & store, const BasicDerivation & dr
 out << (makeFileIngestionPrefix(dof.method) + printHashType(dof.hashType))
 << "";
 },
-}, i.second.output);
+}, i.second.first.output);
 }
 WorkerProto<StorePathSet>::write(store, out, drv.inputSrcs);
 out << drv.platform << drv.builder << drv.args;


@@ -47,6 +47,9 @@ struct DerivationOutput
 DerivationOutputCAFloating
 > output;
 std::optional<HashType> hashAlgoOpt(const Store & store) const;
+/* Note, when you use this function you should make sure that you're passing
+the right derivation name. When in doubt, you should use the safer
+interface provided by BasicDerivation::outputsAndPaths */
 std::optional<StorePath> pathOpt(const Store & store, std::string_view drvName) const;
 /* DEPRECATED: Remove after CA drvs are fully implemented */
 StorePath path(const Store & store, std::string_view drvName) const {
@@ -58,6 +61,15 @@ struct DerivationOutput
 typedef std::map<string, DerivationOutput> DerivationOutputs;
+/* These are analogues to the previous DerivationOutputs data type, but they
+also contains, for each output, the (optional) store path in which it would
+be written. To calculate values of these types, see the corresponding
+functions in BasicDerivation */
+typedef std::map<string, std::pair<DerivationOutput, StorePath>>
+DerivationOutputsAndPaths;
+typedef std::map<string, std::pair<DerivationOutput, std::optional<StorePath>>>
+DerivationOutputsAndOptPaths;
 /* For inputs that are sub-derivations, we specify exactly which
 output IDs we are interested in. */
 typedef std::map<StorePath, StringSet> DerivationInputs;
@@ -107,6 +119,13 @@ struct BasicDerivation
 /* Return the output names of a derivation. */
 StringSet outputNames() const;
+/* Calculates the maps that contains all the DerivationOutputs, but
+augmented with knowledge of the Store paths they would be written into.
+The first one of these functions will be removed when the CA work is
+completed */
+DerivationOutputsAndPaths outputsAndPaths(const Store & store) const;
+DerivationOutputsAndOptPaths outputsAndOptPaths(const Store & store) const;
 static std::string_view nameFromPath(const StorePath & storePath);
 };
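The two new typedefs explain the i.second.first / i.second.second pattern seen throughout this merge: each map entry pairs an output description with its (possibly not-yet-known) store path. A standalone sketch of iterating such a map, with stand-in types and illustrative values only:

#include <iostream>
#include <map>
#include <optional>
#include <string>
#include <utility>

// Stand-ins; only the shape of the maps matters here.
struct DerivationOutput { std::string kind; };
using StorePath = std::string;

// Output name -> (output description, path), mirroring the new typedefs.
using OutputsAndPaths    = std::map<std::string, std::pair<DerivationOutput, StorePath>>;
using OutputsAndOptPaths = std::map<std::string, std::pair<DerivationOutput, std::optional<StorePath>>>;

int main()
{
    OutputsAndOptPaths outs {
        {"out", {DerivationOutput{"input-addressed"}, StorePath{"/nix/store/...-hello"}}},
        {"dev", {DerivationOutput{"ca-floating"}, std::nullopt}},   // path not known up front
    };

    // i.second.first is the output, i.second.second is its (optional) path.
    for (auto & i : outs) {
        std::cout << i.first << ": ";
        if (i.second.second) std::cout << *i.second.second << "\n";
        else std::cout << "(path not yet known)\n";
    }
}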
@@ -128,7 +147,7 @@ enum RepairFlag : bool { NoRepair = false, Repair = true };
 /* Write a derivation to the Nix store, and return its path. */
 StorePath writeDerivation(ref<Store> store,
-const Derivation & drv, std::string_view name, RepairFlag repair = NoRepair);
+const Derivation & drv, RepairFlag repair = NoRepair);
 /* Read a derivation from a file. */
 Derivation readDerivation(const Store & store, const Path & drvPath, std::string_view name);


@@ -38,9 +38,9 @@ void Store::exportPath(const StorePath & path, Sink & sink)
 filesystem corruption from spreading to other machines.
 Don't complain if the stored hash is zero (unknown). */
 Hash hash = hashSink.currentHash().first;
-if (hash != info->narHash && info->narHash != Hash(info->narHash->type))
+if (hash != info->narHash && info->narHash != Hash(info->narHash.type))
 throw Error("hash of path '%s' has changed from '%s' to '%s'!",
-printStorePath(path), info->narHash->to_string(Base32, true), hash.to_string(Base32, true));
+printStorePath(path), info->narHash.to_string(Base32, true), hash.to_string(Base32, true));
 teeSink
 << exportMagic
@@ -69,17 +69,18 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs)
 if (magic != exportMagic)
 throw Error("Nix archive cannot be imported; wrong format");
-ValidPathInfo info(parseStorePath(readString(source)));
+auto path = parseStorePath(readString(source));
 //Activity act(*logger, lvlInfo, format("importing path '%s'") % info.path);
-info.references = WorkerProto<StorePathSet>::read(*this, source);
+auto references = WorkerProto<StorePathSet>::read(*this, source);
 auto deriver = readString(source);
+auto narHash = hashString(htSHA256, *saved.s);
+ValidPathInfo info { path, narHash };
 if (deriver != "")
 info.deriver = parseStorePath(deriver);
-info.narHash = hashString(htSHA256, *saved.s);
+info.references = references;
 info.narSize = saved.s->size();
 // Ignore optional legacy signature.


@@ -335,6 +335,9 @@ public:
 "setuid/setgid bits or with file capabilities."};
 #endif
+Setting<Strings> hashedMirrors{this, {}, "hashed-mirrors",
+"A list of servers used by builtins.fetchurl to fetch files by hash."};
 Setting<uint64_t> minFree{this, 0, "min-free",
 "Automatically run the garbage collector when free disk space drops below the specified amount."};


@@ -93,6 +93,9 @@ struct LegacySSHStore : public Store
 try {
 auto conn(connections->get());
+/* No longer support missing NAR hash */
+assert(GET_PROTOCOL_MINOR(conn->remoteVersion) >= 4);
 debug("querying remote host '%s' for info on '%s'", host, printStorePath(path));
 conn->to << cmdQueryPathInfos << PathSet{printStorePath(path)};
@@ -100,8 +103,10 @@ struct LegacySSHStore : public Store
 auto p = readString(conn->from);
 if (p.empty()) return callback(nullptr);
-auto info = std::make_shared<ValidPathInfo>(parseStorePath(p));
-assert(path == info->path);
+auto path2 = parseStorePath(p);
+assert(path == path2);
+/* Hash will be set below. FIXME construct ValidPathInfo at end. */
+auto info = std::make_shared<ValidPathInfo>(path, Hash::dummy);
 PathSet references;
 auto deriver = readString(conn->from);
@@ -111,12 +116,14 @@ struct LegacySSHStore : public Store
 readLongLong(conn->from); // download size
 info->narSize = readLongLong(conn->from);
-if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 4) {
+{
 auto s = readString(conn->from);
-info->narHash = s.empty() ? std::optional<Hash>{} : Hash::parseAnyPrefixed(s);
+if (s == "")
+throw Error("NAR hash is now mandatory");
+info->narHash = Hash::parseAnyPrefixed(s);
+}
 info->ca = parseContentAddressOpt(readString(conn->from));
 info->sigs = readStrings<StringSet>(conn->from);
-}
 auto s = readString(conn->from);
 assert(s == "");
@@ -138,7 +145,7 @@ struct LegacySSHStore : public Store
 << cmdAddToStoreNar
 << printStorePath(info.path)
 << (info.deriver ? printStorePath(*info.deriver) : "")
-<< info.narHash->to_string(Base16, false);
+<< info.narHash.to_string(Base16, false);
 WorkerProto<StorePathSet>::write(*this, conn->to, info.references);
 conn->to
 << info.registrationTime
@@ -202,6 +209,24 @@ struct LegacySSHStore : public Store
 const StorePathSet & references, RepairFlag repair) override
 { unsupported("addTextToStore"); }
+private:
+void putBuildSettings(Connection & conn)
+{
+conn.to
+<< settings.maxSilentTime
+<< settings.buildTimeout;
+if (GET_PROTOCOL_MINOR(conn.remoteVersion) >= 2)
+conn.to
+<< settings.maxLogSize;
+if (GET_PROTOCOL_MINOR(conn.remoteVersion) >= 3)
+conn.to
+<< settings.buildRepeat
+<< settings.enforceDeterminism;
+}
+public:
 BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv,
 BuildMode buildMode) override
 {
@@ -211,16 +236,8 @@ struct LegacySSHStore : public Store
 << cmdBuildDerivation
 << printStorePath(drvPath);
 writeDerivation(conn->to, *this, drv);
-conn->to
-<< settings.maxSilentTime
-<< settings.buildTimeout;
-if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 2)
-conn->to
-<< settings.maxLogSize;
-if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 3)
-conn->to
-<< settings.buildRepeat
-<< settings.enforceDeterminism;
+putBuildSettings(*conn);
 conn->to.flush();
@@ -234,6 +251,29 @@ struct LegacySSHStore : public Store
 return status;
 }
+void buildPaths(const std::vector<StorePathWithOutputs> & drvPaths, BuildMode buildMode) override
+{
+auto conn(connections->get());
+conn->to << cmdBuildPaths;
+Strings ss;
+for (auto & p : drvPaths)
+ss.push_back(p.to_string(*this));
+conn->to << ss;
+putBuildSettings(*conn);
+conn->to.flush();
+BuildResult result;
+result.status = (BuildResult::Status) readInt(conn->from);
+if (!result.success()) {
+conn->from >> result.errorMsg;
+throw Error(result.status, result.errorMsg);
+}
+}
 void ensurePath(const StorePath & path) override
 { unsupported("ensurePath"); }


@ -594,7 +594,7 @@ uint64_t LocalStore::addValidPath(State & state,
state.stmtRegisterValidPath.use() state.stmtRegisterValidPath.use()
(printStorePath(info.path)) (printStorePath(info.path))
(info.narHash->to_string(Base16, true)) (info.narHash.to_string(Base16, true))
(info.registrationTime == 0 ? time(0) : info.registrationTime) (info.registrationTime == 0 ? time(0) : info.registrationTime)
(info.deriver ? printStorePath(*info.deriver) : "", (bool) info.deriver) (info.deriver ? printStorePath(*info.deriver) : "", (bool) info.deriver)
(info.narSize, info.narSize != 0) (info.narSize, info.narSize != 0)
@ -618,11 +618,11 @@ uint64_t LocalStore::addValidPath(State & state,
registration above is undone. */ registration above is undone. */
if (checkOutputs) checkDerivationOutputs(info.path, drv); if (checkOutputs) checkDerivationOutputs(info.path, drv);
for (auto & i : drv.outputs) { for (auto & i : drv.outputsAndPaths(*this)) {
state.stmtAddDerivationOutput.use() state.stmtAddDerivationOutput.use()
(id) (id)
(i.first) (i.first)
(printStorePath(i.second.path(*this, drv.name))) (printStorePath(i.second.second))
.exec(); .exec();
} }
} }
@ -641,25 +641,28 @@ void LocalStore::queryPathInfoUncached(const StorePath & path,
Callback<std::shared_ptr<const ValidPathInfo>> callback) noexcept Callback<std::shared_ptr<const ValidPathInfo>> callback) noexcept
{ {
try { try {
auto info = std::make_shared<ValidPathInfo>(path);
callback(retrySQLite<std::shared_ptr<ValidPathInfo>>([&]() { callback(retrySQLite<std::shared_ptr<ValidPathInfo>>([&]() {
auto state(_state.lock()); auto state(_state.lock());
/* Get the path info. */ /* Get the path info. */
auto useQueryPathInfo(state->stmtQueryPathInfo.use()(printStorePath(info->path))); auto useQueryPathInfo(state->stmtQueryPathInfo.use()(printStorePath(path)));
if (!useQueryPathInfo.next()) if (!useQueryPathInfo.next())
return std::shared_ptr<ValidPathInfo>(); return std::shared_ptr<ValidPathInfo>();
info->id = useQueryPathInfo.getInt(0); auto id = useQueryPathInfo.getInt(0);
auto narHash = Hash::dummy;
try { try {
info->narHash = Hash::parseAnyPrefixed(useQueryPathInfo.getStr(1)); narHash = Hash::parseAnyPrefixed(useQueryPathInfo.getStr(1));
} catch (BadHash & e) { } catch (BadHash & e) {
throw Error("in valid-path entry for '%s': %s", printStorePath(path), e.what()); throw Error("invalid-path entry for '%s': %s", printStorePath(path), e.what());
} }
auto info = std::make_shared<ValidPathInfo>(path, narHash);
info->id = id;
info->registrationTime = useQueryPathInfo.getInt(2); info->registrationTime = useQueryPathInfo.getInt(2);
auto s = (const char *) sqlite3_column_text(state->stmtQueryPathInfo, 3); auto s = (const char *) sqlite3_column_text(state->stmtQueryPathInfo, 3);
@ -694,7 +697,7 @@ void LocalStore::updatePathInfo(State & state, const ValidPathInfo & info)
{ {
state.stmtUpdatePathInfo.use() state.stmtUpdatePathInfo.use()
(info.narSize, info.narSize != 0) (info.narSize, info.narSize != 0)
(info.narHash->to_string(Base16, true)) (info.narHash.to_string(Base16, true))
(info.ultimate ? 1 : 0, info.ultimate) (info.ultimate ? 1 : 0, info.ultimate)
(concatStringsSep(" ", info.sigs), !info.sigs.empty()) (concatStringsSep(" ", info.sigs), !info.sigs.empty())
(renderContentAddress(info.ca), (bool) info.ca) (renderContentAddress(info.ca), (bool) info.ca)
@ -924,7 +927,7 @@ void LocalStore::registerValidPaths(const ValidPathInfos & infos)
StorePathSet paths; StorePathSet paths;
for (auto & i : infos) { for (auto & i : infos) {
assert(i.narHash && i.narHash->type == htSHA256); assert(i.narHash.type == htSHA256);
if (isValidPath_(*state, i.path)) if (isValidPath_(*state, i.path))
updatePathInfo(*state, i); updatePathInfo(*state, i);
else else
@ -988,9 +991,6 @@ const PublicKeys & LocalStore::getPublicKeys()
void LocalStore::addToStore(const ValidPathInfo & info, Source & source, void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
RepairFlag repair, CheckSigsFlag checkSigs) RepairFlag repair, CheckSigsFlag checkSigs)
{ {
if (!info.narHash)
throw Error("cannot add path '%s' because it lacks a hash", printStorePath(info.path));
if (requireSigs && checkSigs && !info.checkSignatures(*this, getPublicKeys())) if (requireSigs && checkSigs && !info.checkSignatures(*this, getPublicKeys()))
throw Error("cannot add path '%s' because it lacks a valid signature", printStorePath(info.path)); throw Error("cannot add path '%s' because it lacks a valid signature", printStorePath(info.path));
@ -1025,11 +1025,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
else else
hashSink = std::make_unique<HashModuloSink>(htSHA256, std::string(info.path.hashPart())); hashSink = std::make_unique<HashModuloSink>(htSHA256, std::string(info.path.hashPart()));
LambdaSource wrapperSource([&](unsigned char * data, size_t len) -> size_t { TeeSource wrapperSource { source, *hashSink };
size_t n = source.read(data, len);
(*hashSink)(data, n);
return n;
});
restorePath(realPath, wrapperSource); restorePath(realPath, wrapperSource);
@ -1037,7 +1033,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
if (hashResult.first != info.narHash) if (hashResult.first != info.narHash)
throw Error("hash mismatch importing path '%s';\n wanted: %s\n got: %s", throw Error("hash mismatch importing path '%s';\n wanted: %s\n got: %s",
printStorePath(info.path), info.narHash->to_string(Base32, true), hashResult.first.to_string(Base32, true)); printStorePath(info.path), info.narHash.to_string(Base32, true), hashResult.first.to_string(Base32, true));
if (hashResult.second != info.narSize) if (hashResult.second != info.narSize)
throw Error("size mismatch importing path '%s';\n wanted: %s\n got: %s", throw Error("size mismatch importing path '%s';\n wanted: %s\n got: %s",
@ -1159,8 +1155,7 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, const string & name,
optimisePath(realPath); optimisePath(realPath);
ValidPathInfo info(dstPath); ValidPathInfo info { dstPath, narHash.first };
info.narHash = narHash.first;
info.narSize = narHash.second; info.narSize = narHash.second;
info.ca = FixedOutputHash { .method = method, .hash = hash }; info.ca = FixedOutputHash { .method = method, .hash = hash };
registerValidPath(info); registerValidPath(info);
@ -1203,8 +1198,7 @@ StorePath LocalStore::addTextToStore(const string & name, const string & s,
optimisePath(realPath); optimisePath(realPath);
ValidPathInfo info(dstPath); ValidPathInfo info { dstPath, narHash };
info.narHash = narHash;
info.narSize = sink.s->size(); info.narSize = sink.s->size();
info.references = references; info.references = references;
info.ca = TextHash { .hash = hash }; info.ca = TextHash { .hash = hash };
@ -1319,9 +1313,9 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
std::unique_ptr<AbstractHashSink> hashSink; std::unique_ptr<AbstractHashSink> hashSink;
if (!info->ca || !info->references.count(info->path)) if (!info->ca || !info->references.count(info->path))
hashSink = std::make_unique<HashSink>(info->narHash->type); hashSink = std::make_unique<HashSink>(info->narHash.type);
else else
hashSink = std::make_unique<HashModuloSink>(info->narHash->type, std::string(info->path.hashPart())); hashSink = std::make_unique<HashModuloSink>(info->narHash.type, std::string(info->path.hashPart()));
dumpPath(Store::toRealPath(i), *hashSink); dumpPath(Store::toRealPath(i), *hashSink);
auto current = hashSink->finish(); auto current = hashSink->finish();
@ -1330,7 +1324,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
logError({ logError({
.name = "Invalid hash - path modified", .name = "Invalid hash - path modified",
.hint = hintfmt("path '%s' was modified! expected hash '%s', got '%s'", .hint = hintfmt("path '%s' was modified! expected hash '%s', got '%s'",
printStorePath(i), info->narHash->to_string(Base32, true), current.first.to_string(Base32, true)) printStorePath(i), info->narHash.to_string(Base32, true), current.first.to_string(Base32, true))
}); });
if (repair) repairPath(i); else errors = true; if (repair) repairPath(i); else errors = true;
} else { } else {


@ -1,6 +1,7 @@
#include "machines.hh" #include "machines.hh"
#include "util.hh" #include "util.hh"
#include "globals.hh" #include "globals.hh"
#include "store-api.hh"
#include <algorithm> #include <algorithm>
@ -48,6 +49,29 @@ bool Machine::mandatoryMet(const std::set<string> & features) const {
}); });
} }
ref<Store> Machine::openStore() const {
Store::Params storeParams;
if (hasPrefix(storeUri, "ssh://")) {
storeParams["max-connections"] = "1";
storeParams["log-fd"] = "4";
if (sshKey != "")
storeParams["ssh-key"] = sshKey;
}
{
auto & fs = storeParams["system-features"];
auto append = [&](auto feats) {
for (auto & f : feats) {
if (fs.size() > 0) fs += ' ';
fs += f;
}
};
append(supportedFeatures);
append(mandatoryFeatures);
}
return nix::openStore(storeUri, storeParams);
}
void parseMachines(const std::string & s, Machines & machines) void parseMachines(const std::string & s, Machines & machines)
{ {
for (auto line : tokenizeString<std::vector<string>>(s, "\n;")) { for (auto line : tokenizeString<std::vector<string>>(s, "\n;")) {


@ -4,6 +4,8 @@
namespace nix { namespace nix {
class Store;
struct Machine { struct Machine {
const string storeUri; const string storeUri;
@ -28,6 +30,8 @@ struct Machine {
decltype(supportedFeatures) supportedFeatures, decltype(supportedFeatures) supportedFeatures,
decltype(mandatoryFeatures) mandatoryFeatures, decltype(mandatoryFeatures) mandatoryFeatures,
decltype(sshPublicHostKey) sshPublicHostKey); decltype(sshPublicHostKey) sshPublicHostKey);
ref<Store> openStore() const;
}; };
typedef std::vector<Machine> Machines; typedef std::vector<Machine> Machines;
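A brief sketch of how the new Machine::openStore() could be driven end to end; this is an illustration under assumptions, not part of the change (connectToBuilders and the builder spec string are hypothetical placeholders, and it assumes Store::connect() from store-api.hh):

#include "machines.hh"
#include "store-api.hh"

// Hypothetical helper: parse a builders spec and open a store connection to
// each machine, letting openStore() apply the ssh-key and system-features
// store parameters shown above.
static void connectToBuilders(const std::string & spec)
{
    nix::Machines machines;
    nix::parseMachines(spec, machines);
    for (auto & m : machines) {
        auto store = m.openStore();
        store->connect();
    }
}

// e.g. connectToBuilders("ssh://builder.example.org x86_64-linux - 4 1 kvm");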


@ -207,10 +207,10 @@ void Store::queryMissing(const std::vector<StorePathWithOutputs> & targets,
ParsedDerivation parsedDrv(StorePath(path.path), *drv); ParsedDerivation parsedDrv(StorePath(path.path), *drv);
PathSet invalid; PathSet invalid;
for (auto & j : drv->outputs) for (auto & j : drv->outputsAndPaths(*this))
if (wantOutput(j.first, path.outputs) if (wantOutput(j.first, path.outputs)
&& !isValidPath(j.second.path(*this, drv->name))) && !isValidPath(j.second.second))
invalid.insert(printStorePath(j.second.path(*this, drv->name))); invalid.insert(printStorePath(j.second.second));
if (invalid.empty()) return; if (invalid.empty()) return;
if (settings.useSubstitutes && parsedDrv.substitutesAllowed()) { if (settings.useSubstitutes && parsedDrv.substitutesAllowed()) {


@ -189,13 +189,14 @@ public:
return {oInvalid, 0}; return {oInvalid, 0};
auto namePart = queryNAR.getStr(1); auto namePart = queryNAR.getStr(1);
auto narInfo = make_ref<NarInfo>(StorePath(hashPart + "-" + namePart)); auto narInfo = make_ref<NarInfo>(
StorePath(hashPart + "-" + namePart),
Hash::parseAnyPrefixed(queryNAR.getStr(6)));
narInfo->url = queryNAR.getStr(2); narInfo->url = queryNAR.getStr(2);
narInfo->compression = queryNAR.getStr(3); narInfo->compression = queryNAR.getStr(3);
if (!queryNAR.isNull(4)) if (!queryNAR.isNull(4))
narInfo->fileHash = Hash::parseAnyPrefixed(queryNAR.getStr(4)); narInfo->fileHash = Hash::parseAnyPrefixed(queryNAR.getStr(4));
narInfo->fileSize = queryNAR.getInt(5); narInfo->fileSize = queryNAR.getInt(5);
narInfo->narHash = Hash::parseAnyPrefixed(queryNAR.getStr(6));
narInfo->narSize = queryNAR.getInt(7); narInfo->narSize = queryNAR.getInt(7);
for (auto & r : tokenizeString<Strings>(queryNAR.getStr(8), " ")) for (auto & r : tokenizeString<Strings>(queryNAR.getStr(8), " "))
narInfo->references.insert(StorePath(r)); narInfo->references.insert(StorePath(r));
@ -232,7 +233,7 @@ public:
(narInfo ? narInfo->compression : "", narInfo != 0) (narInfo ? narInfo->compression : "", narInfo != 0)
(narInfo && narInfo->fileHash ? narInfo->fileHash->to_string(Base32, true) : "", narInfo && narInfo->fileHash) (narInfo && narInfo->fileHash ? narInfo->fileHash->to_string(Base32, true) : "", narInfo && narInfo->fileHash)
(narInfo ? narInfo->fileSize : 0, narInfo != 0 && narInfo->fileSize) (narInfo ? narInfo->fileSize : 0, narInfo != 0 && narInfo->fileSize)
(info->narHash->to_string(Base32, true)) (info->narHash.to_string(Base32, true))
(info->narSize) (info->narSize)
(concatStringsSep(" ", info->shortRefs())) (concatStringsSep(" ", info->shortRefs()))
(info->deriver ? std::string(info->deriver->to_string()) : "", (bool) info->deriver) (info->deriver ? std::string(info->deriver->to_string()) : "", (bool) info->deriver)


@ -1,10 +1,11 @@
#include "globals.hh" #include "globals.hh"
#include "nar-info.hh" #include "nar-info.hh"
#include "store-api.hh"
namespace nix { namespace nix {
NarInfo::NarInfo(const Store & store, const std::string & s, const std::string & whence) NarInfo::NarInfo(const Store & store, const std::string & s, const std::string & whence)
: ValidPathInfo(StorePath(StorePath::dummy)) // FIXME: hack : ValidPathInfo(StorePath(StorePath::dummy), Hash(Hash::dummy)) // FIXME: hack
{ {
auto corrupt = [&]() { auto corrupt = [&]() {
return Error("NAR info file '%1%' is corrupt", whence); return Error("NAR info file '%1%' is corrupt", whence);
@ -19,6 +20,7 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &
}; };
bool havePath = false; bool havePath = false;
bool haveNarHash = false;
size_t pos = 0; size_t pos = 0;
while (pos < s.size()) { while (pos < s.size()) {
@ -46,8 +48,10 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &
else if (name == "FileSize") { else if (name == "FileSize") {
if (!string2Int(value, fileSize)) throw corrupt(); if (!string2Int(value, fileSize)) throw corrupt();
} }
else if (name == "NarHash") else if (name == "NarHash") {
narHash = parseHashField(value); narHash = parseHashField(value);
haveNarHash = true;
}
else if (name == "NarSize") { else if (name == "NarSize") {
if (!string2Int(value, narSize)) throw corrupt(); if (!string2Int(value, narSize)) throw corrupt();
} }
@ -76,7 +80,7 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &
if (compression == "") compression = "bzip2"; if (compression == "") compression = "bzip2";
if (!havePath || url.empty() || narSize == 0 || !narHash) throw corrupt(); if (!havePath || !haveNarHash || url.empty() || narSize == 0) throw corrupt();
} }
std::string NarInfo::to_string(const Store & store) const std::string NarInfo::to_string(const Store & store) const
@ -89,8 +93,8 @@ std::string NarInfo::to_string(const Store & store) const
assert(fileHash && fileHash->type == htSHA256); assert(fileHash && fileHash->type == htSHA256);
res += "FileHash: " + fileHash->to_string(Base32, true) + "\n"; res += "FileHash: " + fileHash->to_string(Base32, true) + "\n";
res += "FileSize: " + std::to_string(fileSize) + "\n"; res += "FileSize: " + std::to_string(fileSize) + "\n";
assert(narHash && narHash->type == htSHA256); assert(narHash.type == htSHA256);
res += "NarHash: " + narHash->to_string(Base32, true) + "\n"; res += "NarHash: " + narHash.to_string(Base32, true) + "\n";
res += "NarSize: " + std::to_string(narSize) + "\n"; res += "NarSize: " + std::to_string(narSize) + "\n";
res += "References: " + concatStringsSep(" ", shortRefs()) + "\n"; res += "References: " + concatStringsSep(" ", shortRefs()) + "\n";


@ -2,10 +2,12 @@
#include "types.hh" #include "types.hh"
#include "hash.hh" #include "hash.hh"
#include "store-api.hh" #include "path-info.hh"
namespace nix { namespace nix {
class Store;
struct NarInfo : ValidPathInfo struct NarInfo : ValidPathInfo
{ {
std::string url; std::string url;
@ -15,7 +17,7 @@ struct NarInfo : ValidPathInfo
std::string system; std::string system;
NarInfo() = delete; NarInfo() = delete;
NarInfo(StorePath && path) : ValidPathInfo(std::move(path)) { } NarInfo(StorePath && path, Hash narHash) : ValidPathInfo(std::move(path), narHash) { }
NarInfo(const ValidPathInfo & info) : ValidPathInfo(info) { } NarInfo(const ValidPathInfo & info) : ValidPathInfo(info) { }
NarInfo(const Store & store, const std::string & s, const std::string & whence); NarInfo(const Store & store, const std::string & s, const std::string & whence);


@ -94,7 +94,7 @@ StringSet ParsedDerivation::getRequiredSystemFeatures() const
return res; return res;
} }
bool ParsedDerivation::canBuildLocally() const bool ParsedDerivation::canBuildLocally(Store & localStore) const
{ {
if (drv.platform != settings.thisSystem.get() if (drv.platform != settings.thisSystem.get()
&& !settings.extraPlatforms.get().count(drv.platform) && !settings.extraPlatforms.get().count(drv.platform)
@ -102,14 +102,14 @@ bool ParsedDerivation::canBuildLocally() const
return false; return false;
for (auto & feature : getRequiredSystemFeatures()) for (auto & feature : getRequiredSystemFeatures())
if (!settings.systemFeatures.get().count(feature)) return false; if (!localStore.systemFeatures.get().count(feature)) return false;
return true; return true;
} }
bool ParsedDerivation::willBuildLocally() const bool ParsedDerivation::willBuildLocally(Store & localStore) const
{ {
return getBoolAttr("preferLocalBuild") && canBuildLocally(); return getBoolAttr("preferLocalBuild") && canBuildLocally(localStore);
} }
bool ParsedDerivation::substitutesAllowed() const bool ParsedDerivation::substitutesAllowed() const


@ -29,9 +29,9 @@ public:
StringSet getRequiredSystemFeatures() const; StringSet getRequiredSystemFeatures() const;
bool canBuildLocally() const; bool canBuildLocally(Store & localStore) const;
bool willBuildLocally() const; bool willBuildLocally(Store & localStore) const;
bool substitutesAllowed() const; bool substitutesAllowed() const;
}; };


@ -1,5 +1,6 @@
#pragma once #pragma once
#include "crypto.hh"
#include "path.hh" #include "path.hh"
#include "hash.hh" #include "hash.hh"
#include "content-address.hh" #include "content-address.hh"
@ -29,7 +30,7 @@ struct ValidPathInfo
StorePath path; StorePath path;
std::optional<StorePath> deriver; std::optional<StorePath> deriver;
// TODO document this // TODO document this
std::optional<Hash> narHash; Hash narHash;
StorePathSet references; StorePathSet references;
time_t registrationTime = 0; time_t registrationTime = 0;
uint64_t narSize = 0; // 0 = unknown uint64_t narSize = 0; // 0 = unknown
@ -100,8 +101,8 @@ struct ValidPathInfo
ValidPathInfo(const ValidPathInfo & other) = default; ValidPathInfo(const ValidPathInfo & other) = default;
ValidPathInfo(StorePath && path) : path(std::move(path)) { }; ValidPathInfo(StorePath && path, Hash narHash) : path(std::move(path)), narHash(narHash) { };
ValidPathInfo(const StorePath & path) : path(path) { }; ValidPathInfo(const StorePath & path, Hash narHash) : path(path), narHash(narHash) { };
virtual ~ValidPathInfo() { } virtual ~ValidPathInfo() { }
}; };


@ -416,10 +416,10 @@ void RemoteStore::queryPathInfoUncached(const StorePath & path,
bool valid; conn->from >> valid; bool valid; conn->from >> valid;
if (!valid) throw InvalidPath("path '%s' is not valid", printStorePath(path)); if (!valid) throw InvalidPath("path '%s' is not valid", printStorePath(path));
} }
info = std::make_shared<ValidPathInfo>(StorePath(path));
auto deriver = readString(conn->from); auto deriver = readString(conn->from);
auto narHash = Hash::parseAny(readString(conn->from), htSHA256);
info = std::make_shared<ValidPathInfo>(path, narHash);
if (deriver != "") info->deriver = parseStorePath(deriver); if (deriver != "") info->deriver = parseStorePath(deriver);
info->narHash = Hash::parseAny(readString(conn->from), htSHA256);
info->references = WorkerProto<StorePathSet>::read(*this, conn->from); info->references = WorkerProto<StorePathSet>::read(*this, conn->from);
conn->from >> info->registrationTime >> info->narSize; conn->from >> info->registrationTime >> info->narSize;
if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 16) { if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 16) {
@ -517,7 +517,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source,
conn->to << wopAddToStoreNar conn->to << wopAddToStoreNar
<< printStorePath(info.path) << printStorePath(info.path)
<< (info.deriver ? printStorePath(*info.deriver) : "") << (info.deriver ? printStorePath(*info.deriver) : "")
<< info.narHash->to_string(Base16, false); << info.narHash.to_string(Base16, false);
WorkerProto<StorePathSet>::write(*this, conn->to, info.references); WorkerProto<StorePathSet>::write(*this, conn->to, info.references);
conn->to << info.registrationTime << info.narSize conn->to << info.registrationTime << info.narSize
<< info.ultimate << info.sigs << renderContentAddress(info.ca) << info.ultimate << info.sigs << renderContentAddress(info.ca)


@ -320,8 +320,10 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
if (expectedCAHash && expectedCAHash != hash) if (expectedCAHash && expectedCAHash != hash)
throw Error("hash mismatch for '%s'", srcPath); throw Error("hash mismatch for '%s'", srcPath);
ValidPathInfo info(makeFixedOutputPath(method, hash, name)); ValidPathInfo info {
info.narHash = narHash; makeFixedOutputPath(method, hash, name),
narHash,
};
info.narSize = narSize; info.narSize = narSize;
info.ca = FixedOutputHash { .method = method, .hash = hash }; info.ca = FixedOutputHash { .method = method, .hash = hash };
@ -576,7 +578,7 @@ string Store::makeValidityRegistration(const StorePathSet & paths,
auto info = queryPathInfo(i); auto info = queryPathInfo(i);
if (showHash) { if (showHash) {
s += info->narHash->to_string(Base16, false) + "\n"; s += info->narHash.to_string(Base16, false) + "\n";
s += (format("%1%\n") % info->narSize).str(); s += (format("%1%\n") % info->narSize).str();
} }
@ -608,7 +610,7 @@ void Store::pathInfoToJSON(JSONPlaceholder & jsonOut, const StorePathSet & store
auto info = queryPathInfo(storePath); auto info = queryPathInfo(storePath);
jsonPath jsonPath
.attr("narHash", info->narHash->to_string(hashBase, true)) .attr("narHash", info->narHash.to_string(hashBase, true))
.attr("narSize", info->narSize); .attr("narSize", info->narSize);
{ {
@ -736,20 +738,6 @@ void copyStorePath(ref<Store> srcStore, ref<Store> dstStore,
info = info2; info = info2;
} }
if (!info->narHash) {
StringSink sink;
srcStore->narFromPath({storePath}, sink);
auto info2 = make_ref<ValidPathInfo>(*info);
info2->narHash = hashString(htSHA256, *sink.s);
if (!info->narSize) info2->narSize = sink.s->size();
if (info->ultimate) info2->ultimate = false;
info = info2;
StringSource source(*sink.s);
dstStore->addToStore(*info, source, repair, checkSigs);
return;
}
if (info->ultimate) { if (info->ultimate) {
auto info2 = make_ref<ValidPathInfo>(*info); auto info2 = make_ref<ValidPathInfo>(*info);
info2->ultimate = false; info2->ultimate = false;
@ -757,12 +745,12 @@ void copyStorePath(ref<Store> srcStore, ref<Store> dstStore,
} }
auto source = sinkToSource([&](Sink & sink) { auto source = sinkToSource([&](Sink & sink) {
LambdaSink wrapperSink([&](const unsigned char * data, size_t len) { LambdaSink progressSink([&](const unsigned char * data, size_t len) {
sink(data, len);
total += len; total += len;
act.progress(total, info->narSize); act.progress(total, info->narSize);
}); });
srcStore->narFromPath(storePath, wrapperSink); TeeSink tee { sink, progressSink };
srcStore->narFromPath(storePath, tee);
}, [&]() { }, [&]() {
throw EndOfFile("NAR for '%s' fetched from '%s' is incomplete", srcStore->printStorePath(storePath), srcStore->getUri()); throw EndOfFile("NAR for '%s' fetched from '%s' is incomplete", srcStore->printStorePath(storePath), srcStore->getUri());
}); });
@ -874,19 +862,22 @@ void copyClosure(ref<Store> srcStore, ref<Store> dstStore,
} }
std::optional<ValidPathInfo> decodeValidPathInfo(const Store & store, std::istream & str, bool hashGiven) std::optional<ValidPathInfo> decodeValidPathInfo(const Store & store, std::istream & str, std::optional<HashResult> hashGiven)
{ {
std::string path; std::string path;
getline(str, path); getline(str, path);
if (str.eof()) { return {}; } if (str.eof()) { return {}; }
ValidPathInfo info(store.parseStorePath(path)); if (!hashGiven) {
if (hashGiven) {
string s; string s;
getline(str, s); getline(str, s);
info.narHash = Hash::parseAny(s, htSHA256); auto narHash = Hash::parseAny(s, htSHA256);
getline(str, s); getline(str, s);
if (!string2Int(s, info.narSize)) throw Error("number expected"); uint64_t narSize;
if (!string2Int(s, narSize)) throw Error("number expected");
hashGiven = { narHash, narSize };
} }
ValidPathInfo info(store.parseStorePath(path), hashGiven->first);
info.narSize = hashGiven->second;
std::string deriver; std::string deriver;
getline(str, deriver); getline(str, deriver);
if (deriver != "") info.deriver = store.parseStorePath(deriver); if (deriver != "") info.deriver = store.parseStorePath(deriver);
@ -921,12 +912,12 @@ string showPaths(const PathSet & paths)
std::string ValidPathInfo::fingerprint(const Store & store) const std::string ValidPathInfo::fingerprint(const Store & store) const
{ {
if (narSize == 0 || !narHash) if (narSize == 0)
throw Error("cannot calculate fingerprint of path '%s' because its size/hash is not known", throw Error("cannot calculate fingerprint of path '%s' because its size is not known",
store.printStorePath(path)); store.printStorePath(path));
return return
"1;" + store.printStorePath(path) + ";" "1;" + store.printStorePath(path) + ";"
+ narHash->to_string(Base32, true) + ";" + narHash.to_string(Base32, true) + ";"
+ std::to_string(narSize) + ";" + std::to_string(narSize) + ";"
+ concatStringsSep(",", store.printStorePathSet(references)); + concatStringsSep(",", store.printStorePathSet(references));
} }
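To make the format above concrete, a hedged illustration of the string ValidPathInfo::fingerprint() returns; all values below are placeholders, not taken from the patch:

// Shape of the result, mirroring the concatenation above:
//   "1;" + <store path> + ";" + <NAR hash, e.g. "sha256:..." in Base32> + ";"
//        + <NAR size in bytes> + ";" + <references, comma-separated store paths>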


@ -4,7 +4,6 @@
#include "hash.hh" #include "hash.hh"
#include "content-address.hh" #include "content-address.hh"
#include "serialise.hh" #include "serialise.hh"
#include "crypto.hh"
#include "lru-cache.hh" #include "lru-cache.hh"
#include "sync.hh" #include "sync.hh"
#include "globals.hh" #include "globals.hh"
@ -164,6 +163,10 @@ public:
Setting<bool> wantMassQuery{this, false, "want-mass-query", "whether this substituter can be queried efficiently for path validity"}; Setting<bool> wantMassQuery{this, false, "want-mass-query", "whether this substituter can be queried efficiently for path validity"};
Setting<StringSet> systemFeatures{this, settings.systemFeatures,
"system-features",
"Optional features that the system this store builds on implements (like \"kvm\")."};
protected: protected:
struct PathInfoCacheValue { struct PathInfoCacheValue {
@ -767,7 +770,7 @@ string showPaths(const PathSet & paths);
std::optional<ValidPathInfo> decodeValidPathInfo( std::optional<ValidPathInfo> decodeValidPathInfo(
const Store & store, const Store & store,
std::istream & str, std::istream & str,
bool hashGiven = false); std::optional<HashResult> hashGiven = std::nullopt);
/* Split URI into protocol+hierarchy part and its parameter set. */ /* Split URI into protocol+hierarchy part and its parameter set. */
std::pair<std::string, Store::Params> splitUriAndParams(const std::string & uri); std::pair<std::string, Store::Params> splitUriAndParams(const std::string & uri);


@ -6,7 +6,7 @@ namespace nix {
#define WORKER_MAGIC_1 0x6e697863 #define WORKER_MAGIC_1 0x6e697863
#define WORKER_MAGIC_2 0x6478696f #define WORKER_MAGIC_2 0x6478696f
#define PROTOCOL_VERSION 0x117 #define PROTOCOL_VERSION 0x118
#define GET_PROTOCOL_MAJOR(x) ((x) & 0xff00) #define GET_PROTOCOL_MAJOR(x) ((x) & 0xff00)
#define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff) #define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff)
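A quick worked check of the version bump above, using only the macros shown here (illustrative arithmetic, not part of the patch):

// With PROTOCOL_VERSION = 0x118:
//   GET_PROTOCOL_MAJOR(0x118) == (0x118 & 0xff00) == 0x100
//   GET_PROTOCOL_MINOR(0x118) == (0x118 & 0x00ff) == 0x18 == 24
// so guards such as GET_PROTOCOL_MINOR(conn->daemonVersion) >= 16 keep passing.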


@ -366,11 +366,7 @@ void copyNAR(Source & source, Sink & sink)
ParseSink parseSink; /* null sink; just parse the NAR */ ParseSink parseSink; /* null sink; just parse the NAR */
LambdaSource wrapper([&](unsigned char * data, size_t len) { TeeSource wrapper { source, sink };
auto n = source.read(data, len);
sink(data, n);
return n;
});
parseDump(parseSink, wrapper); parseDump(parseSink, wrapper);
} }


@ -136,6 +136,8 @@ std::string Hash::to_string(Base base, bool includeType) const
return s; return s;
} }
Hash Hash::dummy(htSHA256);
Hash Hash::parseSRI(std::string_view original) { Hash Hash::parseSRI(std::string_view original) {
auto rest = original; auto rest = original;


@ -59,9 +59,6 @@ private:
Hash(std::string_view s, HashType type, bool isSRI); Hash(std::string_view s, HashType type, bool isSRI);
public: public:
/* Check whether a hash is set. */
operator bool () const { return (bool) type; }
/* Check whether two hash are equal. */ /* Check whether two hash are equal. */
bool operator == (const Hash & h2) const; bool operator == (const Hash & h2) const;
@ -105,6 +102,8 @@ public:
assert(type == htSHA1); assert(type == htSHA1);
return std::string(to_string(Base16, false), 0, 7); return std::string(to_string(Base16, false), 0, 7);
} }
static Hash dummy;
}; };
/* Helper that defaults empty hashes to the 0 hash. */ /* Helper that defaults empty hashes to the 0 hash. */


@ -225,6 +225,17 @@ struct SizedSource : Source
} }
}; };
/* A sink that just counts the number of bytes given to it */
struct LengthSink : Sink
{
uint64_t length = 0;
virtual void operator () (const unsigned char * _, size_t len)
{
length += len;
}
};
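A minimal usage sketch for the new LengthSink; narSizeOf is a hypothetical helper (not in the patch), and dumpPath is assumed from archive.hh:

#include "archive.hh"
#include "serialise.hh"

// Stream a path's NAR serialisation into a LengthSink to learn its size
// without buffering the archive in memory.
static uint64_t narSizeOf(const nix::Path & path)
{
    nix::LengthSink sink;
    nix::dumpPath(path, sink);
    return sink.length;
}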
/* Convert a function into a sink. */ /* Convert a function into a sink. */
struct LambdaSink : Sink struct LambdaSink : Sink
{ {


@ -239,7 +239,15 @@ static void daemonLoop(char * * argv)
// Handle the connection. // Handle the connection.
FdSource from(remote.get()); FdSource from(remote.get());
FdSink to(remote.get()); FdSink to(remote.get());
processConnection(openUncachedStore(), from, to, trusted, NotRecursive, user, peer.uid); processConnection(openUncachedStore(), from, to, trusted, NotRecursive, [&](Store & store) {
#if 0
/* Prevent users from doing something very dangerous. */
if (geteuid() == 0 &&
querySetting("build-users-group", "") == "")
throw Error("if you run 'nix-daemon' as root, then you MUST set 'build-users-group'!");
#endif
store.createUser(user, peer.uid);
});
exit(0); exit(0);
}, options); }, options);
@ -324,7 +332,10 @@ static int _main(int argc, char * * argv)
} else { } else {
FdSource from(STDIN_FILENO); FdSource from(STDIN_FILENO);
FdSink to(STDOUT_FILENO); FdSink to(STDOUT_FILENO);
processConnection(openUncachedStore(), from, to, Trusted, NotRecursive, "root", 0); /* Auth hook is empty because in this mode we blindly trust the
standard streams. Limiting access to those is explicitly
not `nix-daemon`'s responsibility. */
processConnection(openUncachedStore(), from, to, Trusted, NotRecursive, [&](Store & _){});
} }
} else { } else {
daemonLoop(argv); daemonLoop(argv);


@ -218,8 +218,8 @@ static StorePathSet maybeUseOutputs(const StorePath & storePath, bool useOutput,
if (useOutput && storePath.isDerivation()) { if (useOutput && storePath.isDerivation()) {
auto drv = store->derivationFromPath(storePath); auto drv = store->derivationFromPath(storePath);
StorePathSet outputs; StorePathSet outputs;
for (auto & i : drv.outputs) for (auto & i : drv.outputsAndPaths(*store))
outputs.insert(i.second.path(*store, drv.name)); outputs.insert(i.second.second);
return outputs; return outputs;
} }
else return {storePath}; else return {storePath};
@ -312,8 +312,8 @@ static void opQuery(Strings opFlags, Strings opArgs)
auto i2 = store->followLinksToStorePath(i); auto i2 = store->followLinksToStorePath(i);
if (forceRealise) realisePath({i2}); if (forceRealise) realisePath({i2});
Derivation drv = store->derivationFromPath(i2); Derivation drv = store->derivationFromPath(i2);
for (auto & j : drv.outputs) for (auto & j : drv.outputsAndPaths(*store))
cout << fmt("%1%\n", store->printStorePath(j.second.path(*store, drv.name))); cout << fmt("%1%\n", store->printStorePath(j.second.second));
} }
break; break;
} }
@ -372,8 +372,8 @@ static void opQuery(Strings opFlags, Strings opArgs)
for (auto & j : maybeUseOutputs(store->followLinksToStorePath(i), useOutput, forceRealise)) { for (auto & j : maybeUseOutputs(store->followLinksToStorePath(i), useOutput, forceRealise)) {
auto info = store->queryPathInfo(j); auto info = store->queryPathInfo(j);
if (query == qHash) { if (query == qHash) {
assert(info->narHash && info->narHash->type == htSHA256); assert(info->narHash.type == htSHA256);
cout << fmt("%s\n", info->narHash->to_string(Base32, true)); cout << fmt("%s\n", info->narHash.to_string(Base32, true));
} else if (query == qSize) } else if (query == qSize)
cout << fmt("%d\n", info->narSize); cout << fmt("%d\n", info->narSize);
} }
@ -495,7 +495,10 @@ static void registerValidity(bool reregister, bool hashGiven, bool canonicalise)
ValidPathInfos infos; ValidPathInfos infos;
while (1) { while (1) {
auto info = decodeValidPathInfo(*store, cin, hashGiven); // We use a dummy value because we'll set it below. FIXME be correct by
// construction and avoid dummy value.
auto hashResultOpt = !hashGiven ? std::optional<HashResult> { {Hash::dummy, -1} } : std::nullopt;
auto info = decodeValidPathInfo(*store, cin, hashResultOpt);
if (!info) break; if (!info) break;
if (!store->isValidPath(info->path) || reregister) { if (!store->isValidPath(info->path) || reregister) {
/* !!! races */ /* !!! races */
@ -723,7 +726,7 @@ static void opVerifyPath(Strings opFlags, Strings opArgs)
auto path = store->followLinksToStorePath(i); auto path = store->followLinksToStorePath(i);
printMsg(lvlTalkative, "checking path '%s'...", store->printStorePath(path)); printMsg(lvlTalkative, "checking path '%s'...", store->printStorePath(path));
auto info = store->queryPathInfo(path); auto info = store->queryPathInfo(path);
HashSink sink(info->narHash->type); HashSink sink(info->narHash.type);
store->narFromPath(path, sink); store->narFromPath(path, sink);
auto current = sink.finish(); auto current = sink.finish();
if (current.first != info->narHash) { if (current.first != info->narHash) {
@ -732,7 +735,7 @@ static void opVerifyPath(Strings opFlags, Strings opArgs)
.hint = hintfmt( .hint = hintfmt(
"path '%s' was modified! expected hash '%s', got '%s'", "path '%s' was modified! expected hash '%s', got '%s'",
store->printStorePath(path), store->printStorePath(path),
info->narHash->to_string(Base32, true), info->narHash.to_string(Base32, true),
current.first.to_string(Base32, true)) current.first.to_string(Base32, true))
}); });
status = 1; status = 1;
@ -862,7 +865,7 @@ static void opServe(Strings opFlags, Strings opArgs)
out << info->narSize // downloadSize out << info->narSize // downloadSize
<< info->narSize; << info->narSize;
if (GET_PROTOCOL_MINOR(clientVersion) >= 4) if (GET_PROTOCOL_MINOR(clientVersion) >= 4)
out << (info->narHash ? info->narHash->to_string(Base32, true) : "") out << info->narHash.to_string(Base32, true)
<< renderContentAddress(info->ca) << renderContentAddress(info->ca)
<< info->sigs; << info->sigs;
} catch (InvalidPath &) { } catch (InvalidPath &) {
@ -944,11 +947,13 @@ static void opServe(Strings opFlags, Strings opArgs)
if (!writeAllowed) throw Error("importing paths is not allowed"); if (!writeAllowed) throw Error("importing paths is not allowed");
auto path = readString(in); auto path = readString(in);
ValidPathInfo info(store->parseStorePath(path));
auto deriver = readString(in); auto deriver = readString(in);
ValidPathInfo info {
store->parseStorePath(path),
Hash::parseAny(readString(in), htSHA256),
};
if (deriver != "") if (deriver != "")
info.deriver = store->parseStorePath(deriver); info.deriver = store->parseStorePath(deriver);
info.narHash = Hash::parseAny(readString(in), htSHA256);
info.references = WorkerProto<StorePathSet>::read(*store, in); info.references = WorkerProto<StorePathSet>::read(*store, in);
in >> info.registrationTime >> info.narSize >> info.ultimate; in >> info.registrationTime >> info.narSize >> info.ultimate;
info.sigs = readStrings<StringSet>(in); info.sigs = readStrings<StringSet>(in);


@ -60,8 +60,10 @@ struct CmdAddToStore : MixDryRun, StoreCommand
hash = hsink.finish().first; hash = hsink.finish().first;
} }
ValidPathInfo info(store->makeFixedOutputPath(ingestionMethod, hash, *namePart)); ValidPathInfo info {
info.narHash = narHash; store->makeFixedOutputPath(ingestionMethod, hash, *namePart),
narHash,
};
info.narSize = sink.s->size(); info.narSize = sink.s->size();
info.ca = std::optional { FixedOutputHash { info.ca = std::optional { FixedOutputHash {
.method = ingestionMethod, .method = ingestionMethod,


@ -124,10 +124,7 @@ StorePath getDerivationEnvironment(ref<Store> store, const StorePath & drvPath)
/* Rehash and write the derivation. FIXME: would be nice to use /* Rehash and write the derivation. FIXME: would be nice to use
'buildDerivation', but that's privileged. */ 'buildDerivation', but that's privileged. */
auto drvName = std::string(drvPath.name()); drv.name += "-env";
assert(hasSuffix(drvName, ".drv"));
drvName.resize(drvName.size() - 4);
drvName += "-env";
for (auto & output : drv.outputs) for (auto & output : drv.outputs)
drv.env.erase(output.first); drv.env.erase(output.first);
drv.outputs = {{"out", DerivationOutput { .output = DerivationOutputInputAddressed { .path = StorePath::dummy }}}}; drv.outputs = {{"out", DerivationOutput { .output = DerivationOutputInputAddressed { .path = StorePath::dummy }}}};
@ -136,12 +133,12 @@ StorePath getDerivationEnvironment(ref<Store> store, const StorePath & drvPath)
drv.env["outputs"] = "out"; drv.env["outputs"] = "out";
drv.inputSrcs.insert(std::move(getEnvShPath)); drv.inputSrcs.insert(std::move(getEnvShPath));
Hash h = std::get<0>(hashDerivationModulo(*store, drv, true)); Hash h = std::get<0>(hashDerivationModulo(*store, drv, true));
auto shellOutPath = store->makeOutputPath("out", h, drvName); auto shellOutPath = store->makeOutputPath("out", h, drv.name);
drv.outputs.insert_or_assign("out", DerivationOutput { .output = DerivationOutputInputAddressed { drv.outputs.insert_or_assign("out", DerivationOutput { .output = DerivationOutputInputAddressed {
.path = shellOutPath .path = shellOutPath
} }); } });
drv.env["out"] = store->printStorePath(shellOutPath); drv.env["out"] = store->printStorePath(shellOutPath);
auto shellDrvPath2 = writeDerivation(store, drv, drvName); auto shellDrvPath2 = writeDerivation(store, drv);
/* Build the derivation. */ /* Build the derivation. */
store->buildPaths({{shellDrvPath2}}); store->buildPaths({{shellDrvPath2}});


@ -304,8 +304,8 @@ struct InstallableStorePath : Installable
if (storePath.isDerivation()) { if (storePath.isDerivation()) {
std::map<std::string, StorePath> outputs; std::map<std::string, StorePath> outputs;
auto drv = store->readDerivation(storePath); auto drv = store->readDerivation(storePath);
for (auto & [name, output] : drv.outputs) for (auto & i : drv.outputsAndPaths(*store))
outputs.emplace(name, output.path(*store, drv.name)); outputs.emplace(i.first, i.second.second);
return { return {
BuildableFromDrv { BuildableFromDrv {
.drvPath = storePath, .drvPath = storePath,


@ -77,14 +77,16 @@ struct CmdMakeContentAddressable : StorePathsCommand, MixJSON
auto narHash = hashModuloSink.finish().first; auto narHash = hashModuloSink.finish().first;
ValidPathInfo info(store->makeFixedOutputPath(FileIngestionMethod::Recursive, narHash, path.name(), references, hasSelfReference)); ValidPathInfo info {
store->makeFixedOutputPath(FileIngestionMethod::Recursive, narHash, path.name(), references, hasSelfReference),
narHash,
};
info.references = std::move(references); info.references = std::move(references);
if (hasSelfReference) info.references.insert(info.path); if (hasSelfReference) info.references.insert(info.path);
info.narHash = narHash;
info.narSize = sink.s->size(); info.narSize = sink.s->size();
info.ca = FixedOutputHash { info.ca = FixedOutputHash {
.method = FileIngestionMethod::Recursive, .method = FileIngestionMethod::Recursive,
.hash = *info.narHash, .hash = info.narHash,
}; };
if (!json) if (!json)


@ -129,11 +129,13 @@ struct ProfileManifest
auto narHash = hashString(htSHA256, *sink.s); auto narHash = hashString(htSHA256, *sink.s);
ValidPathInfo info(store->makeFixedOutputPath(FileIngestionMethod::Recursive, narHash, "profile", references)); ValidPathInfo info {
store->makeFixedOutputPath(FileIngestionMethod::Recursive, narHash, "profile", references),
narHash,
};
info.references = std::move(references); info.references = std::move(references);
info.narHash = narHash;
info.narSize = sink.s->size(); info.narSize = sink.s->size();
info.ca = FixedOutputHash { .method = FileIngestionMethod::Recursive, .hash = *info.narHash }; info.ca = FixedOutputHash { .method = FileIngestionMethod::Recursive, .hash = info.narHash };
auto source = StringSource { *sink.s }; auto source = StringSource { *sink.s };
store->addToStore(info, source); store->addToStore(info, source);


@ -490,8 +490,8 @@ bool NixRepl::processLine(string line)
if (runProgram(settings.nixBinDir + "/nix", Strings{"build", "--no-link", drvPath}) == 0) { if (runProgram(settings.nixBinDir + "/nix", Strings{"build", "--no-link", drvPath}) == 0) {
auto drv = readDerivation(*state->store, drvPath, Derivation::nameFromPath(state->store->parseStorePath(drvPath))); auto drv = readDerivation(*state->store, drvPath, Derivation::nameFromPath(state->store->parseStorePath(drvPath)));
std::cout << std::endl << "this derivation produced the following outputs:" << std::endl; std::cout << std::endl << "this derivation produced the following outputs:" << std::endl;
for (auto & i : drv.outputs) for (auto & i : drv.outputsAndPaths(*state->store))
std::cout << fmt(" %s -> %s\n", i.first, state->store->printStorePath(i.second.path(*state->store, drv.name))); std::cout << fmt(" %s -> %s\n", i.first, state->store->printStorePath(i.second.second));
} }
} else if (command == ":i") { } else if (command == ":i") {
runProgram(settings.nixBinDir + "/nix-env", Strings{"-i", drvPath}); runProgram(settings.nixBinDir + "/nix-env", Strings{"-i", drvPath});


@ -67,9 +67,9 @@ struct CmdShowDerivation : InstallablesCommand
{ {
auto outputsObj(drvObj.object("outputs")); auto outputsObj(drvObj.object("outputs"));
for (auto & output : drv.outputs) { for (auto & output : drv.outputsAndPaths(*store)) {
auto outputObj(outputsObj.object(output.first)); auto outputObj(outputsObj.object(output.first));
outputObj.attr("path", store->printStorePath(output.second.path(*store, drv.name))); outputObj.attr("path", store->printStorePath(output.second.second));
std::visit(overloaded { std::visit(overloaded {
[&](DerivationOutputInputAddressed doi) { [&](DerivationOutputInputAddressed doi) {
@ -81,7 +81,7 @@ struct CmdShowDerivation : InstallablesCommand
[&](DerivationOutputCAFloating dof) { [&](DerivationOutputCAFloating dof) {
outputObj.attr("hashAlgo", makeFileIngestionPrefix(dof.method) + printHashType(dof.hashType)); outputObj.attr("hashAlgo", makeFileIngestionPrefix(dof.method) + printHashType(dof.hashType));
}, },
}, output.second.output); }, output.second.first.output);
} }
} }


@ -91,15 +91,15 @@ struct CmdVerify : StorePathsCommand
std::unique_ptr<AbstractHashSink> hashSink; std::unique_ptr<AbstractHashSink> hashSink;
if (!info->ca) if (!info->ca)
hashSink = std::make_unique<HashSink>(info->narHash->type); hashSink = std::make_unique<HashSink>(info->narHash.type);
else else
hashSink = std::make_unique<HashModuloSink>(info->narHash->type, std::string(info->path.hashPart())); hashSink = std::make_unique<HashModuloSink>(info->narHash.type, std::string(info->path.hashPart()));
store->narFromPath(info->path, *hashSink); store->narFromPath(info->path, *hashSink);
auto hash = hashSink->finish(); auto hash = hashSink->finish();
if (hash.first != *info->narHash) { if (hash.first != info->narHash) {
corrupted++; corrupted++;
act2.result(resCorruptedPath, store->printStorePath(info->path)); act2.result(resCorruptedPath, store->printStorePath(info->path));
logError({ logError({
@ -107,7 +107,7 @@ struct CmdVerify : StorePathsCommand
.hint = hintfmt( .hint = hintfmt(
"path '%s' was modified! expected hash '%s', got '%s'", "path '%s' was modified! expected hash '%s', got '%s'",
store->printStorePath(info->path), store->printStorePath(info->path),
info->narHash->to_string(Base32, true), info->narHash.to_string(Base32, true),
hash.first.to_string(Base32, true)) hash.first.to_string(Base32, true))
}); });
} }


@ -219,7 +219,7 @@ outPath=$(nix-build --no-out-link -E '
nix copy --to file://$cacheDir?write-nar-listing=1 $outPath nix copy --to file://$cacheDir?write-nar-listing=1 $outPath
diff -u \ diff -u \
<(jq -S < $cacheDir/$(basename $outPath).ls) \ <(jq -S < $cacheDir/$(basename $outPath | cut -c1-32).ls) \
<(echo '{"version":1,"root":{"type":"directory","entries":{"bar":{"type":"regular","size":4,"narOffset":232},"link":{"type":"symlink","target":"xyzzy"}}}}' | jq -S) <(echo '{"version":1,"root":{"type":"directory","entries":{"bar":{"type":"regular","size":4,"narOffset":232},"link":{"type":"symlink","target":"xyzzy"}}}}' | jq -S)

tests/build-hook-ca.nix (new file)

@ -0,0 +1,45 @@
{ busybox }:
with import ./config.nix;
let
mkDerivation = args:
derivation ({
inherit system;
builder = busybox;
args = ["sh" "-e" args.builder or (builtins.toFile "builder-${args.name}.sh" "if [ -e .attrs.sh ]; then source .attrs.sh; fi; eval \"$buildCommand\"")];
outputHashMode = "recursive";
outputHashAlgo = "sha256";
} // removeAttrs args ["builder" "meta"])
// { meta = args.meta or {}; };
input1 = mkDerivation {
shell = busybox;
name = "build-remote-input-1";
buildCommand = "echo FOO > $out";
requiredSystemFeatures = ["foo"];
outputHash = "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=";
};
input2 = mkDerivation {
shell = busybox;
name = "build-remote-input-2";
buildCommand = "echo BAR > $out";
requiredSystemFeatures = ["bar"];
outputHash = "sha256-XArauVH91AVwP9hBBQNlkX9ccuPpSYx9o0zeIHb6e+Q=";
};
in
mkDerivation {
shell = busybox;
name = "build-remote";
buildCommand =
''
read x < ${input1}
read y < ${input2}
echo "$x $y" > $out
'';
outputHash = "sha256-3YGhlOfbGUm9hiPn2teXXTT8M1NEpDFvfXkxMaJRld0=";
}


@ -23,6 +23,17 @@ let
shell = busybox; shell = busybox;
name = "build-remote-input-2"; name = "build-remote-input-2";
buildCommand = "echo BAR > $out"; buildCommand = "echo BAR > $out";
requiredSystemFeatures = ["bar"];
};
input3 = mkDerivation {
shell = busybox;
name = "build-remote-input-3";
buildCommand = ''
read x < ${input2}
echo $x BAZ > $out
'';
requiredSystemFeatures = ["baz"];
}; };
in in
@ -33,7 +44,7 @@ in
buildCommand = buildCommand =
'' ''
read x < ${input1} read x < ${input1}
read y < ${input2} read y < ${input3}
echo $x$y > $out echo "$x $y" > $out
''; '';
} }


@ -0,0 +1,5 @@
source common.sh
file=build-hook-ca.nix
source build-remote.sh


@ -0,0 +1,5 @@
source common.sh
file=build-hook.nix
source build-remote.sh


@ -1,31 +1,47 @@
source common.sh
clearStore
if ! canUseSandbox; then exit; fi if ! canUseSandbox; then exit; fi
if ! [[ $busybox =~ busybox ]]; then exit; fi if ! [[ $busybox =~ busybox ]]; then exit; fi
chmod -R u+w $TEST_ROOT/machine0 || true
chmod -R u+w $TEST_ROOT/machine1 || true
chmod -R u+w $TEST_ROOT/machine2 || true
rm -rf $TEST_ROOT/machine0 $TEST_ROOT/machine1 $TEST_ROOT/machine2
rm -f $TEST_ROOT/result
unset NIX_STORE_DIR unset NIX_STORE_DIR
unset NIX_STATE_DIR unset NIX_STATE_DIR
function join_by { local d=$1; shift; echo -n "$1"; shift; printf "%s" "${@/#/$d}"; }
builders=(
# system-features will automatically be added to the outer URL, but not to the
# inner remote-store URL.
"ssh://localhost?remote-store=$TEST_ROOT/machine1?system-features=foo - - 1 1 foo"
"$TEST_ROOT/machine2 - - 1 1 bar"
"ssh-ng://localhost?remote-store=$TEST_ROOT/machine3?system-features=baz - - 1 1 baz"
)
# Note: ssh://localhost bypasses ssh, directly invoking nix-store as a # Note: ssh://localhost bypasses ssh, directly invoking nix-store as a
# child process. This allows us to test LegacySSHStore::buildDerivation(). # child process. This allows us to test LegacySSHStore::buildDerivation().
nix build -L -v -f build-hook.nix -o $TEST_ROOT/result --max-jobs 0 \ # ssh-ng://... likewise allows us to test RemoteStore::buildDerivation().
nix build -L -v -f $file -o $TEST_ROOT/result --max-jobs 0 \
--arg busybox $busybox \ --arg busybox $busybox \
--store $TEST_ROOT/machine0 \ --store $TEST_ROOT/machine0 \
--builders "ssh://localhost?remote-store=$TEST_ROOT/machine1; $TEST_ROOT/machine2 - - 1 1 foo" \ --builders "$(join_by '; ' "${builders[@]}")"
--system-features foo
outPath=$(readlink -f $TEST_ROOT/result) outPath=$(readlink -f $TEST_ROOT/result)
cat $TEST_ROOT/machine0/$outPath | grep FOOBAR grep 'FOO BAR BAZ' $TEST_ROOT/machine0/$outPath
# Ensure that input1 was built on store2 due to the required feature. set -o pipefail
(! nix path-info --store $TEST_ROOT/machine1 --all | grep builder-build-remote-input-1.sh)
nix path-info --store $TEST_ROOT/machine2 --all | grep builder-build-remote-input-1.sh # Ensure that input1 was built on store1 due to the required feature.
nix path-info --store $TEST_ROOT/machine1 --all \
| grep builder-build-remote-input-1.sh \
| grep -v builder-build-remote-input-2.sh \
| grep -v builder-build-remote-input-3.sh
# Ensure that input2 was built on store2 due to the required feature.
nix path-info --store $TEST_ROOT/machine2 --all \
| grep -v builder-build-remote-input-1.sh \
| grep builder-build-remote-input-2.sh \
| grep -v builder-build-remote-input-3.sh
# Ensure that input3 was built on store3 due to the required feature.
nix path-info --store $TEST_ROOT/machine3 --all \
| grep -v builder-build-remote-input-1.sh \
| grep -v builder-build-remote-input-2.sh \
| grep builder-build-remote-input-3.sh


@ -1,5 +1,5 @@
nix_tests = \ nix_tests = \
init.sh hash.sh lang.sh add.sh simple.sh dependencies.sh \ hash.sh lang.sh add.sh simple.sh dependencies.sh \
config.sh \ config.sh \
gc.sh \ gc.sh \
gc-concurrent.sh \ gc-concurrent.sh \
@ -14,7 +14,7 @@ nix_tests = \
placeholders.sh nix-shell.sh \ placeholders.sh nix-shell.sh \
linux-sandbox.sh \ linux-sandbox.sh \
build-dry.sh \ build-dry.sh \
build-remote.sh \ build-remote-input-addressed.sh \
nar-access.sh \ nar-access.sh \
structured-attrs.sh \ structured-attrs.sh \
fetchGit.sh \ fetchGit.sh \
@ -34,6 +34,7 @@ nix_tests = \
recursive.sh \ recursive.sh \
flakes.sh flakes.sh
# parallel.sh # parallel.sh
# build-remote-content-addressed-fixed.sh \
install-tests += $(foreach x, $(nix_tests), tests/$(x)) install-tests += $(foreach x, $(nix_tests), tests/$(x))


@ -2,6 +2,9 @@ source common.sh
clearStore clearStore
# Ensure "fake ssh" remote store works just as legacy fake ssh would.
nix --store ssh-ng://localhost?remote-store=$TEST_ROOT/other-store doctor
startDaemon startDaemon
storeCleared=1 NIX_REMOTE_=$NIX_REMOTE $SHELL ./user-envs.sh storeCleared=1 NIX_REMOTE_=$NIX_REMOTE $SHELL ./user-envs.sh