Merge remote-tracking branch 'upstream/master' into derivation-header-include-order

John Ericson 2020-08-18 14:36:44 +00:00
commit 950ddfdb82
82 changed files with 1603 additions and 829 deletions

View file

@@ -370,6 +370,33 @@ false</literal>.</para>
 </varlistentry>
+<varlistentry xml:id="conf-hashed-mirrors"><term><literal>hashed-mirrors</literal></term>
+<listitem><para>A list of web servers used by
+<function>builtins.fetchurl</function> to obtain files by hash.
+Given a hash type <replaceable>ht</replaceable> and a base-16 hash
+<replaceable>h</replaceable>, Nix will try to download the file
+from
+<literal>hashed-mirror/<replaceable>ht</replaceable>/<replaceable>h</replaceable></literal>.
+This allows files to be downloaded even if they have disappeared
+from their original URI. For example, given the hashed mirror
+<literal>http://tarballs.example.com/</literal>, when building the
+derivation
+<programlisting>
+builtins.fetchurl {
+  url = "https://example.org/foo-1.2.3.tar.xz";
+  sha256 = "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae";
+}
+</programlisting>
+Nix will attempt to download this file from
+<literal>http://tarballs.example.com/sha256/2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae</literal>
+first. If it is not available there, it will try the original URI.</para></listitem>
+</varlistentry>
 <varlistentry xml:id="conf-http-connections"><term><literal>http-connections</literal></term>
 <listitem><para>The maximum number of parallel TCP connections
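As an editorial aside, a minimal sketch of how the documented setting could be enabled in nix.conf, reusing the placeholder mirror URL from the example above (this snippet is illustrative and not part of the commit):

  # nix.conf sketch: space-separated list of mirrors consulted by builtins.fetchurl
  hashed-mirrors = http://tarballs.example.com/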

View file

@@ -80,7 +80,7 @@ SV * queryReferences(char * path)
 SV * queryPathHash(char * path)
 PPCODE:
 try {
-auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash->to_string(Base32, true);
+auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash.to_string(Base32, true);
 XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
 } catch (Error & e) {
 croak("%s", e.what());
@@ -106,7 +106,7 @@ SV * queryPathInfo(char * path, int base32)
 XPUSHs(&PL_sv_undef);
 else
 XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(*info->deriver).c_str(), 0)));
-auto s = info->narHash->to_string(base32 ? Base32 : Base16, true);
+auto s = info->narHash.to_string(base32 ? Base32 : Base16, true);
 XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
 mXPUSHi(info->registrationTime);
 mXPUSHi(info->narSize);
@@ -224,7 +224,7 @@ SV * hashString(char * algo, int base32, char * s)
 SV * convertHash(char * algo, char * s, int toBase32)
 PPCODE:
 try {
-Hash h(s, parseHashType(algo));
+auto h = Hash::parseAny(s, parseHashType(algo));
 string s = h.to_string(toBase32 ? Base32 : Base16, false);
 XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
 } catch (Error & e) {
@@ -285,7 +285,7 @@ SV * addToStore(char * srcPath, int recursive, char * algo)
 SV * makeFixedOutputPath(int recursive, char * algo, char * hash, char * name)
 PPCODE:
 try {
-Hash h(hash, parseHashType(algo));
+auto h = Hash::parseAny(hash, parseHashType(algo));
 auto method = recursive ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat;
 auto path = store()->makeFixedOutputPath(method, h, name);
 XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(path).c_str(), 0)));
@@ -303,10 +303,10 @@ SV * derivationFromPath(char * drvPath)
 hash = newHV();
 HV * outputs = newHV();
-for (auto & i : drv.outputs)
+for (auto & i : drv.outputsAndPaths(*store()))
 hv_store(
 outputs, i.first.c_str(), i.first.size(),
-newSVpv(store()->printStorePath(i.second.path(*store(), drv.name)).c_str(), 0),
+newSVpv(store()->printStorePath(i.second.second).c_str(), 0),
 0);
 hv_stores(hash, "outputs", newRV((SV *) outputs));

View file

@@ -38,9 +38,9 @@ static AutoCloseFD openSlotLock(const Machine & m, uint64_t slot)
 return openLockFile(fmt("%s/%s-%d", currentLoad, escapeUri(m.storeUri), slot), true);
 }
-static bool allSupportedLocally(const std::set<std::string>& requiredFeatures) {
+static bool allSupportedLocally(Store & store, const std::set<std::string>& requiredFeatures) {
 for (auto & feature : requiredFeatures)
-if (!settings.systemFeatures.get().count(feature)) return false;
+if (!store.systemFeatures.get().count(feature)) return false;
 return true;
 }
@@ -106,7 +106,7 @@ static int _main(int argc, char * * argv)
 auto canBuildLocally = amWilling
 && ( neededSystem == settings.thisSystem
 || settings.extraPlatforms.get().count(neededSystem) > 0)
-&& allSupportedLocally(requiredFeatures);
+&& allSupportedLocally(*store, requiredFeatures);
 /* Error ignored here, will be caught later */
 mkdir(currentLoad.c_str(), 0777);
@@ -170,7 +170,45 @@ static int _main(int argc, char * * argv)
 if (rightType && !canBuildLocally)
 std::cerr << "# postpone\n";
 else
+{
+// build the hint template.
+string hintstring = "derivation: %s\nrequired (system, features): (%s, %s)";
+hintstring += "\n%s available machines:";
+hintstring += "\n(systems, maxjobs, supportedFeatures, mandatoryFeatures)";
+for (unsigned int i = 0; i < machines.size(); ++i) {
+hintstring += "\n(%s, %s, %s, %s)";
+}
+// add the template values.
+string drvstr;
+if (drvPath.has_value())
+drvstr = drvPath->to_string();
+else
+drvstr = "<unknown>";
+auto hint = hintformat(hintstring);
+hint
+% drvstr
+% neededSystem
+% concatStringsSep<StringSet>(", ", requiredFeatures)
+% machines.size();
+for (auto & m : machines) {
+hint % concatStringsSep<vector<string>>(", ", m.systemTypes)
+% m.maxJobs
+% concatStringsSep<StringSet>(", ", m.supportedFeatures)
+% concatStringsSep<StringSet>(", ", m.mandatoryFeatures);
+}
+logErrorInfo(lvlInfo, {
+.name = "Remote build",
+.description = "Failed to find a machine for remote build!",
+.hint = hint
+});
 std::cerr << "# decline\n";
+}
 break;
 }
@@ -186,15 +224,7 @@ static int _main(int argc, char * * argv)
 Activity act(*logger, lvlTalkative, actUnknown, fmt("connecting to '%s'", bestMachine->storeUri));
-Store::Params storeParams;
-if (hasPrefix(bestMachine->storeUri, "ssh://")) {
-storeParams["max-connections"] = "1";
-storeParams["log-fd"] = "4";
-if (bestMachine->sshKey != "")
-storeParams["ssh-key"] = bestMachine->sshKey;
-}
-sshStore = openStore(bestMachine->storeUri, storeParams);
+sshStore = bestMachine->openStore();
 sshStore->connect();
 storeUri = bestMachine->storeUri;

View file

@@ -405,7 +405,7 @@ Value & AttrCursor::forceValue()
 return v;
 }
-std::shared_ptr<AttrCursor> AttrCursor::maybeGetAttr(Symbol name)
+std::shared_ptr<AttrCursor> AttrCursor::maybeGetAttr(Symbol name, bool forceErrors)
 {
 if (root->db) {
 if (!cachedValue)
@@ -422,9 +422,12 @@ std::shared_ptr<AttrCursor> AttrCursor::maybeGetAttr(Symbol name)
 if (attr) {
 if (std::get_if<missing_t>(&attr->second))
 return nullptr;
-else if (std::get_if<failed_t>(&attr->second))
-throw EvalError("cached failure of attribute '%s'", getAttrPathStr(name));
-else
+else if (std::get_if<failed_t>(&attr->second)) {
+if (forceErrors)
+debug("reevaluating failed cached attribute '%s'");
+else
+throw CachedEvalError("cached failure of attribute '%s'", getAttrPathStr(name));
+} else
 return std::make_shared<AttrCursor>(root,
 std::make_pair(shared_from_this(), name), nullptr, std::move(attr));
 }
@@ -469,9 +472,9 @@ std::shared_ptr<AttrCursor> AttrCursor::maybeGetAttr(std::string_view name)
 return maybeGetAttr(root->state.symbols.create(name));
 }
-std::shared_ptr<AttrCursor> AttrCursor::getAttr(Symbol name)
+std::shared_ptr<AttrCursor> AttrCursor::getAttr(Symbol name, bool forceErrors)
 {
-auto p = maybeGetAttr(name);
+auto p = maybeGetAttr(name, forceErrors);
 if (!p)
 throw Error("attribute '%s' does not exist", getAttrPathStr(name));
 return p;
@@ -600,7 +603,7 @@ bool AttrCursor::isDerivation()
 StorePath AttrCursor::forceDerivation()
 {
-auto aDrvPath = getAttr(root->state.sDrvPath);
+auto aDrvPath = getAttr(root->state.sDrvPath, true);
 auto drvPath = root->state.store->parseStorePath(aDrvPath->getString());
 if (!root->state.store->isValidPath(drvPath) && !settings.readOnlyMode) {
 /* The eval cache contains 'drvPath', but the actual path has

View file

@@ -9,6 +9,8 @@
 namespace nix::eval_cache {
+MakeError(CachedEvalError, EvalError);
 class AttrDb;
 class AttrCursor;
@@ -92,11 +94,11 @@ public:
 std::string getAttrPathStr(Symbol name) const;
-std::shared_ptr<AttrCursor> maybeGetAttr(Symbol name);
+std::shared_ptr<AttrCursor> maybeGetAttr(Symbol name, bool forceErrors = false);
 std::shared_ptr<AttrCursor> maybeGetAttr(std::string_view name);
-std::shared_ptr<AttrCursor> getAttr(Symbol name);
+std::shared_ptr<AttrCursor> getAttr(Symbol name, bool forceErrors = false);
 std::shared_ptr<AttrCursor> getAttr(std::string_view name);

View file

@@ -345,6 +345,7 @@ EvalState::EvalState(const Strings & _searchPath, ref<Store> store)
 , sStructuredAttrs(symbols.create("__structuredAttrs"))
 , sBuilder(symbols.create("builder"))
 , sArgs(symbols.create("args"))
+, sContentAddressed(symbols.create("__contentAddressed"))
 , sOutputHash(symbols.create("outputHash"))
 , sOutputHashAlgo(symbols.create("outputHashAlgo"))
 , sOutputHashMode(symbols.create("outputHashMode"))
@@ -1259,7 +1260,7 @@ void EvalState::callFunction(Value & fun, Value & arg, Value & v, const Pos & po
 addErrorTrace(e, lambda.pos, "while evaluating %s",
 (lambda.name.set()
 ? "'" + (string) lambda.name + "'"
-: "anonymous lambdaction"));
+: "anonymous lambda"));
 addErrorTrace(e, pos, "from call site%s", "");
 throw;
 }

View file

@@ -74,6 +74,7 @@ public:
 sSystem, sOverrides, sOutputs, sOutputName, sIgnoreNulls,
 sFile, sLine, sColumn, sFunctor, sToString,
 sRight, sWrong, sStructuredAttrs, sBuilder, sArgs,
+sContentAddressed,
 sOutputHash, sOutputHashAlgo, sOutputHashMode,
 sRecurseForDerivations,
 sDescription, sSelf, sEpsilon;
@@ -374,6 +375,9 @@ struct EvalSettings : Config
 Setting<bool> traceFunctionCalls{this, false, "trace-function-calls",
 "Emit log messages for each function entry and exit at the 'vomit' log level (-vvvv)."};
+Setting<bool> useEvalCache{this, true, "eval-cache",
+"Whether to use the flake evaluation cache."};
 };
 extern EvalSettings evalSettings;

View file

@@ -113,9 +113,9 @@ static void prim_scopedImport(EvalState & state, const Pos & pos, Value * * args
 state.mkList(*outputsVal, drv.outputs.size());
 unsigned int outputs_index = 0;
-for (const auto & o : drv.outputs) {
+for (const auto & o : drv.outputsAndPaths(*state.store)) {
 v2 = state.allocAttr(w, state.symbols.create(o.first));
-mkString(*v2, state.store->printStorePath(o.second.path(*state.store, drv.name)), {"!" + o.first + "!" + path});
+mkString(*v2, state.store->printStorePath(o.second.second), {"!" + o.first + "!" + path});
 outputsVal->listElems()[outputs_index] = state.allocValue();
 mkString(*(outputsVal->listElems()[outputs_index++]), o.first);
 }
@@ -583,6 +583,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
 PathSet context;
+bool contentAddressed = false;
 std::optional<std::string> outputHash;
 std::string outputHashAlgo;
 auto ingestionMethod = FileIngestionMethod::Flat;
@@ -639,9 +640,14 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
 if (i->value->type == tNull) continue;
 }
+if (i->name == state.sContentAddressed) {
+settings.requireExperimentalFeature("ca-derivations");
+contentAddressed = state.forceBool(*i->value, pos);
+}
 /* The `args' attribute is special: it supplies the
 command-line arguments to the builder. */
-if (i->name == state.sArgs) {
+else if (i->name == state.sArgs) {
 state.forceList(*i->value, pos);
 for (unsigned int n = 0; n < i->value->listSize(); ++n) {
 string s = state.coerceToString(posDrvName, *i->value->listElems()[n], context, true);
@@ -761,7 +767,10 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
 });
 if (outputHash) {
-/* Handle fixed-output derivations. */
+/* Handle fixed-output derivations.
+Ignore `__contentAddressed` because fixed output derivations are
+already content addressed. */
 if (outputs.size() != 1 || *(outputs.begin()) != "out")
 throw Error({
 .hint = hintfmt("multiple outputs are not supported in fixed-output derivations"),
@@ -772,9 +781,9 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
 Hash h = newHashAllowEmpty(*outputHash, ht);
 auto outPath = state.store->makeFixedOutputPath(ingestionMethod, h, drvName);
-if (!jsonObject) drv.env["out"] = state.store->printStorePath(outPath);
+drv.env["out"] = state.store->printStorePath(outPath);
 drv.outputs.insert_or_assign("out", DerivationOutput {
-.output = DerivationOutputFixed {
+.output = DerivationOutputCAFixed {
 .hash = FixedOutputHash {
 .method = ingestionMethod,
 .hash = std::move(h),
@@ -783,6 +792,19 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
 });
 }
+else if (contentAddressed) {
+HashType ht = parseHashType(outputHashAlgo);
+for (auto & i : outputs) {
+drv.env[i] = hashPlaceholder(i);
+drv.outputs.insert_or_assign(i, DerivationOutput {
+.output = DerivationOutputCAFloating {
+.method = ingestionMethod,
+.hashType = std::move(ht),
+},
+});
+}
+}
 else {
 /* Compute a hash over the "masked" store derivation, which is
 the final one except that in the list of outputs, the
@@ -791,7 +813,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
 that changes in the set of output names do get reflected in
 the hash. */
 for (auto & i : outputs) {
-if (!jsonObject) drv.env[i] = "";
+drv.env[i] = "";
 drv.outputs.insert_or_assign(i,
 DerivationOutput {
 .output = DerivationOutputInputAddressed {
@@ -800,11 +822,13 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
 });
 }
-Hash h = hashDerivationModulo(*state.store, Derivation(drv), true);
+// Regular, non-CA derivation should always return a single hash and not
+// hash per output.
+Hash h = std::get<0>(hashDerivationModulo(*state.store, Derivation(drv), true));
 for (auto & i : outputs) {
 auto outPath = state.store->makeOutputPath(i, h, drvName);
-if (!jsonObject) drv.env[i] = state.store->printStorePath(outPath);
+drv.env[i] = state.store->printStorePath(outPath);
 drv.outputs.insert_or_assign(i,
 DerivationOutput {
 .output = DerivationOutputInputAddressed {
@@ -815,7 +839,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
 }
 /* Write the resulting term into the Nix store directory. */
-auto drvPath = writeDerivation(state.store, drv, drvName, state.repair);
+auto drvPath = writeDerivation(state.store, drv, state.repair);
 auto drvPathS = state.store->printStorePath(drvPath);
 printMsg(lvlChatty, "instantiated '%1%' -> '%2%'", drvName, drvPathS);
@@ -828,9 +852,9 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
 state.mkAttrs(v, 1 + drv.outputs.size());
 mkString(*state.allocAttr(v, state.sDrvPath), drvPathS, {"=" + drvPathS});
-for (auto & i : drv.outputs) {
+for (auto & i : drv.outputsAndPaths(*state.store)) {
 mkString(*state.allocAttr(v, state.symbols.create(i.first)),
-state.store->printStorePath(i.second.path(*state.store, drv.name)), {"!" + i.first + "!" + drvPathS});
+state.store->printStorePath(i.second.second), {"!" + i.first + "!" + drvPathS});
 }
 v.attrs->sort();
 }
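Since the new `__contentAddressed` handling in prim_derivationStrict above is easiest to see from the language side, here is a hypothetical Nix expression that would exercise it. This is a sketch only: it assumes the `ca-derivations` experimental feature is enabled in nix.conf, and the name, builder, and command are illustrative rather than taken from this commit.

  # Sketch: a floating content-addressed derivation using the new __contentAddressed attribute.
  # Requires: experimental-features = ca-derivations
  derivation {
    name = "hello-ca";
    system = builtins.currentSystem;
    builder = "/bin/sh";
    args = [ "-c" "echo hello > $out" ];
    __contentAddressed = true;
    outputHashMode = "recursive";   # consumed as the ingestion method
    outputHashAlgo = "sha256";      # consumed by parseHashType(outputHashAlgo)
  }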

View file

@@ -31,7 +31,7 @@ static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * ar
 // be both a revision or a branch/tag name.
 auto value = state.forceStringNoCtx(*attr.value, *attr.pos);
 if (std::regex_match(value, revRegex))
-rev = Hash(value, htSHA1);
+rev = Hash::parseAny(value, htSHA1);
 else
 ref = value;
 }

View file

@@ -212,7 +212,7 @@ static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
 : hashFile(htSHA256, path);
 if (hash != *expectedHash)
 throw Error((unsigned int) 102, "hash mismatch in file downloaded from '%s':\n wanted: %s\n got: %s",
-*url, expectedHash->to_string(Base32, true), hash->to_string(Base32, true));
+*url, expectedHash->to_string(Base32, true), hash.to_string(Base32, true));
 }
 if (state.allowedPaths)

View file

@@ -130,12 +130,12 @@ std::pair<Tree, Input> Input::fetch(ref<Store> store) const
 tree.actualPath = store->toRealPath(tree.storePath);
 auto narHash = store->queryPathInfo(tree.storePath)->narHash;
-input.attrs.insert_or_assign("narHash", narHash->to_string(SRI, true));
+input.attrs.insert_or_assign("narHash", narHash.to_string(SRI, true));
 if (auto prevNarHash = getNarHash()) {
 if (narHash != *prevNarHash)
 throw Error((unsigned int) 102, "NAR hash mismatch in input '%s' (%s), expected '%s', got '%s'",
-to_string(), tree.actualPath, prevNarHash->to_string(SRI, true), narHash->to_string(SRI, true));
+to_string(), tree.actualPath, prevNarHash->to_string(SRI, true), narHash.to_string(SRI, true));
 }
 if (auto prevLastModified = getLastModified()) {
@@ -200,9 +200,12 @@ std::string Input::getType() const
 std::optional<Hash> Input::getNarHash() const
 {
-if (auto s = maybeGetStrAttr(attrs, "narHash"))
-// FIXME: require SRI hash.
-return newHashAllowEmpty(*s, htSHA256);
+if (auto s = maybeGetStrAttr(attrs, "narHash")) {
+auto hash = s->empty() ? Hash(htSHA256) : Hash::parseSRI(*s);
+if (hash.type != htSHA256)
+throw UsageError("narHash must use SHA-256");
+return hash;
+}
 return {};
 }
@@ -216,7 +219,7 @@ std::optional<std::string> Input::getRef() const
 std::optional<Hash> Input::getRev() const
 {
 if (auto s = maybeGetStrAttr(attrs, "rev"))
-return Hash(*s, htSHA1);
+return Hash::parseAny(*s, htSHA1);
 return {};
 }
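The tightened Input::getNarHash above now accepts only SRI-style SHA-256 hashes. For illustration only, a fetchTree-style input carrying such a narHash might look like the sketch below; every attribute value here is a dummy placeholder (the rev and hash are not real), not something taken from this commit.

  # Sketch: attrs for builtins.fetchTree after this change; rev and narHash are dummies.
  builtins.fetchTree {
    type = "github";
    owner = "NixOS";
    repo = "nixpkgs";
    rev = "0000000000000000000000000000000000000000";
    narHash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=";
  }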

View file

@@ -121,7 +121,7 @@ struct GitInputScheme : InputScheme
 args.push_back(*ref);
 }
-if (input.getRev()) throw Error("cloning a specific revision is not implemented");
+if (input.getRev()) throw UnimplementedError("cloning a specific revision is not implemented");
 args.push_back(destDir);
@@ -269,7 +269,7 @@ struct GitInputScheme : InputScheme
 // modified dirty file?
 input.attrs.insert_or_assign(
 "lastModified",
-haveCommits ? std::stoull(runProgram("git", true, { "-C", actualUrl, "log", "-1", "--format=%ct", "HEAD" })) : 0);
+haveCommits ? std::stoull(runProgram("git", true, { "-C", actualUrl, "log", "-1", "--format=%ct", "--no-show-signature", "HEAD" })) : 0);
 return {
 Tree(store->printStorePath(storePath), std::move(storePath)),
@@ -293,14 +293,14 @@ struct GitInputScheme : InputScheme
 if (!input.getRev())
 input.attrs.insert_or_assign("rev",
-Hash(chomp(runProgram("git", true, { "-C", actualUrl, "rev-parse", *input.getRef() })), htSHA1).gitRev());
+Hash::parseAny(chomp(runProgram("git", true, { "-C", actualUrl, "rev-parse", *input.getRef() })), htSHA1).gitRev());
 repoDir = actualUrl;
 } else {
 if (auto res = getCache()->lookup(store, mutableAttrs)) {
-auto rev2 = Hash(getStrAttr(res->first, "rev"), htSHA1);
+auto rev2 = Hash::parseAny(getStrAttr(res->first, "rev"), htSHA1);
 if (!input.getRev() || input.getRev() == rev2) {
 input.attrs.insert_or_assign("rev", rev2.gitRev());
 return makeResult(res->first, std::move(res->second));
@@ -370,7 +370,7 @@ struct GitInputScheme : InputScheme
 }
 if (!input.getRev())
-input.attrs.insert_or_assign("rev", Hash(chomp(readFile(localRefFile)), htSHA1).gitRev());
+input.attrs.insert_or_assign("rev", Hash::parseAny(chomp(readFile(localRefFile)), htSHA1).gitRev());
 }
 bool isShallow = chomp(runProgram("git", true, { "-C", repoDir, "rev-parse", "--is-shallow-repository" })) == "true";
@@ -421,7 +421,7 @@ struct GitInputScheme : InputScheme
 auto storePath = store->addToStore(name, tmpDir, FileIngestionMethod::Recursive, htSHA256, filter);
-auto lastModified = std::stoull(runProgram("git", true, { "-C", repoDir, "log", "-1", "--format=%ct", input.getRev()->gitRev() }));
+auto lastModified = std::stoull(runProgram("git", true, { "-C", repoDir, "log", "-1", "--format=%ct", "--no-show-signature", input.getRev()->gitRev() }));
 Attrs infoAttrs({
 {"rev", input.getRev()->gitRev()},

View file

@@ -29,7 +29,7 @@ struct GitArchiveInputScheme : InputScheme
 if (path.size() == 2) {
 } else if (path.size() == 3) {
 if (std::regex_match(path[2], revRegex))
-rev = Hash(path[2], htSHA1);
+rev = Hash::parseAny(path[2], htSHA1);
 else if (std::regex_match(path[2], refRegex))
 ref = path[2];
 else
@@ -41,7 +41,7 @@ struct GitArchiveInputScheme : InputScheme
 if (name == "rev") {
 if (rev)
 throw BadURL("URL '%s' contains multiple commit hashes", url.url);
-rev = Hash(value, htSHA1);
+rev = Hash::parseAny(value, htSHA1);
 }
 else if (name == "ref") {
 if (!std::regex_match(value, refRegex))
@@ -191,7 +191,7 @@ struct GitHubInputScheme : GitArchiveInputScheme
 readFile(
 store->toRealPath(
 downloadFile(store, url, "source", false).storePath)));
-auto rev = Hash(std::string { json["sha"] }, htSHA1);
+auto rev = Hash::parseAny(std::string { json["sha"] }, htSHA1);
 debug("HEAD revision for '%s' is %s", url, rev.gitRev());
 return rev;
 }
@@ -235,7 +235,7 @@ struct GitLabInputScheme : GitArchiveInputScheme
 readFile(
 store->toRealPath(
 downloadFile(store, url, "source", false).storePath)));
-auto rev = Hash(std::string(json[0]["id"]), htSHA1);
+auto rev = Hash::parseAny(std::string(json[0]["id"]), htSHA1);
 debug("HEAD revision for '%s' is %s", url, rev.gitRev());
 return rev;
 }

View file

@@ -18,7 +18,7 @@ struct IndirectInputScheme : InputScheme
 if (path.size() == 1) {
 } else if (path.size() == 2) {
 if (std::regex_match(path[1], revRegex))
-rev = Hash(path[1], htSHA1);
+rev = Hash::parseAny(path[1], htSHA1);
 else if (std::regex_match(path[1], refRegex))
 ref = path[1];
 else
@@ -29,7 +29,7 @@ struct IndirectInputScheme : InputScheme
 ref = path[1];
 if (!std::regex_match(path[2], revRegex))
 throw BadURL("in flake URL '%s', '%s' is not a commit hash", url.url, path[2]);
-rev = Hash(path[2], htSHA1);
+rev = Hash::parseAny(path[2], htSHA1);
 } else
 throw BadURL("GitHub URL '%s' is invalid", url.url);

View file

@@ -209,7 +209,7 @@ struct MercurialInputScheme : InputScheme
 });
 if (auto res = getCache()->lookup(store, mutableAttrs)) {
-auto rev2 = Hash(getStrAttr(res->first, "rev"), htSHA1);
+auto rev2 = Hash::parseAny(getStrAttr(res->first, "rev"), htSHA1);
 if (!input.getRev() || input.getRev() == rev2) {
 input.attrs.insert_or_assign("rev", rev2.gitRev());
 return makeResult(res->first, std::move(res->second));
@@ -252,7 +252,7 @@ struct MercurialInputScheme : InputScheme
 runProgram("hg", true, { "log", "-R", cacheDir, "-r", revOrRef, "--template", "{node} {rev} {branch}" }));
 assert(tokens.size() == 3);
-input.attrs.insert_or_assign("rev", Hash(tokens[0], htSHA1).gitRev());
+input.attrs.insert_or_assign("rev", Hash::parseAny(tokens[0], htSHA1).gitRev());
 auto revCount = std::stoull(tokens[1]);
 input.attrs.insert_or_assign("ref", tokens[2]);

View file

@@ -67,8 +67,10 @@ DownloadFileResult downloadFile(
 StringSink sink;
 dumpString(*res.data, sink);
 auto hash = hashString(htSHA256, *res.data);
-ValidPathInfo info(store->makeFixedOutputPath(FileIngestionMethod::Flat, hash, name));
-info.narHash = hashString(htSHA256, *sink.s);
+ValidPathInfo info {
+store->makeFixedOutputPath(FileIngestionMethod::Flat, hash, name),
+hashString(htSHA256, *sink.s),
+};
 info.narSize = sink.s->size();
 info.ca = FixedOutputHash {
 .method = FileIngestionMethod::Flat,

View file

@@ -362,7 +362,7 @@ public:
 auto width = getWindowSize().second;
 if (width <= 0) width = std::numeric_limits<decltype(width)>::max();
-writeToStderr("\r" + filterANSIEscapes(line, false, width) + "\e[K");
+writeToStderr("\r" + filterANSIEscapes(line, false, width) + ANSI_NORMAL + "\e[K");
 }
 std::string getStatus(State & state)

View file

@@ -143,7 +143,7 @@ struct FileSource : FdSource
 void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource,
 RepairFlag repair, CheckSigsFlag checkSigs)
 {
-assert(info.narHash && info.narSize);
+assert(info.narSize);
 if (!repair && isValidPath(info.path)) {
 // FIXME: copyNAR -> null sink
@@ -153,6 +153,8 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource
 auto [fdTemp, fnTemp] = createTempFile();
+AutoDelete autoDelete(fnTemp);
 auto now1 = std::chrono::steady_clock::now();
 /* Read the NAR simultaneously into a CompressionSink+FileSink (to
@@ -167,6 +169,7 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource
 TeeSource teeSource(narSource, *compressionSink);
 narAccessor = makeNarAccessor(teeSource);
 compressionSink->finish();
+fileSink.flush();
 }
 auto now2 = std::chrono::steady_clock::now();
@@ -216,7 +219,7 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource
 }
 }
-upsertFile(std::string(info.path.to_string()) + ".ls", jsonOut.str(), "application/json");
+upsertFile(std::string(info.path.hashPart()) + ".ls", jsonOut.str(), "application/json");
 }
 /* Optionally maintain an index of DWARF debug info files
@@ -280,7 +283,7 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource
 if (repair || !fileExists(narInfo->url)) {
 stats.narWrite++;
 upsertFile(narInfo->url,
-std::make_shared<std::fstream>(fnTemp, std::ios_base::in),
+std::make_shared<std::fstream>(fnTemp, std::ios_base::in | std::ios_base::binary),
 "application/x-nix-nar");
 } else
 stats.narWriteAverted++;
@@ -309,14 +312,10 @@ void BinaryCacheStore::narFromPath(const StorePath & storePath, Sink & sink)
 {
 auto info = queryPathInfo(storePath).cast<const NarInfo>();
-uint64_t narSize = 0;
-LambdaSink wrapperSink([&](const unsigned char * data, size_t len) {
-sink(data, len);
-narSize += len;
-});
-auto decompressor = makeDecompressionSink(info->compression, wrapperSink);
+LengthSink narSize;
+TeeSink tee { sink, narSize };
+auto decompressor = makeDecompressionSink(info->compression, tee);
 try {
 getFile(info->url, *decompressor);
@@ -328,7 +327,7 @@ void BinaryCacheStore::narFromPath(const StorePath & storePath, Sink & sink)
 stats.narRead++;
 //stats.narReadCompressedBytes += nar->size(); // FIXME
-stats.narReadBytes += narSize;
+stats.narReadBytes += narSize.length;
 }
 void BinaryCacheStore::queryPathInfoUncached(const StorePath & storePath,
@@ -382,7 +381,10 @@ StorePath BinaryCacheStore::addToStore(const string & name, const Path & srcPath
 h = hashString(hashAlgo, s);
 }
-ValidPathInfo info(makeFixedOutputPath(method, *h, name));
+ValidPathInfo info {
+makeFixedOutputPath(method, *h, name),
+Hash::dummy, // Will be fixed in addToStore, which recomputes nar hash
+};
 auto source = StringSource { *sink.s };
 addToStore(info, source, repair, CheckSigs);
@@ -393,7 +395,10 @@ StorePath BinaryCacheStore::addToStore(const string & name, const Path & srcPath
 StorePath BinaryCacheStore::addTextToStore(const string & name, const string & s,
 const StorePathSet & references, RepairFlag repair)
 {
-ValidPathInfo info(computeStorePathForText(name, s, references));
+ValidPathInfo info {
+computeStorePathForText(name, s, references),
+Hash::dummy, // Will be fixed in addToStore, which recomputes nar hash
+};
 info.references = references;
 if (repair || !isValidPath(info.path)) {

View file

@@ -806,8 +806,8 @@ private:
 /* RAII object to delete the chroot directory. */
 std::shared_ptr<AutoDelete> autoDelChroot;
-/* Whether this is a fixed-output derivation. */
-bool fixedOutput;
+/* The sort of derivation we are building. */
+DerivationType derivationType;
 /* Whether to run the build in a private network namespace. */
 bool privateNetwork = false;
@@ -1181,8 +1181,8 @@ void DerivationGoal::haveDerivation()
 retrySubstitution = false;
-for (auto & i : drv->outputs)
-worker.store.addTempRoot(i.second.path(worker.store, drv->name));
+for (auto & i : drv->outputsAndPaths(worker.store))
+worker.store.addTempRoot(i.second.second);
 /* Check what outputs paths are not already valid. */
 auto invalidOutputs = checkPathValidity(false, buildMode == bmRepair);
@@ -1195,9 +1195,9 @@ void DerivationGoal::haveDerivation()
 parsedDrv = std::make_unique<ParsedDerivation>(drvPath, *drv);
-if (parsedDrv->contentAddressed()) {
+if (drv->type() == DerivationType::CAFloating) {
 settings.requireExperimentalFeature("ca-derivations");
-throw Error("ca-derivations isn't implemented yet");
+throw UnimplementedError("ca-derivations isn't implemented yet");
 }
@@ -1288,14 +1288,14 @@ void DerivationGoal::repairClosure()
 /* Get the output closure. */
 StorePathSet outputClosure;
-for (auto & i : drv->outputs) {
+for (auto & i : drv->outputsAndPaths(worker.store)) {
 if (!wantOutput(i.first, wantedOutputs)) continue;
-worker.store.computeFSClosure(i.second.path(worker.store, drv->name), outputClosure);
+worker.store.computeFSClosure(i.second.second, outputClosure);
 }
 /* Filter out our own outputs (which we have already checked). */
-for (auto & i : drv->outputs)
-outputClosure.erase(i.second.path(worker.store, drv->name));
+for (auto & i : drv->outputsAndPaths(worker.store))
+outputClosure.erase(i.second.second);
 /* Get all dependencies of this derivation so that we know which
 derivation is responsible for which path in the output
@@ -1306,8 +1306,8 @@ void DerivationGoal::repairClosure()
 for (auto & i : inputClosure)
 if (i.isDerivation()) {
 Derivation drv = worker.store.derivationFromPath(i);
-for (auto & j : drv.outputs)
-outputsToDrv.insert_or_assign(j.second.path(worker.store, drv.name), i);
+for (auto & j : drv.outputsAndPaths(worker.store))
+outputsToDrv.insert_or_assign(j.second.second, i);
 }
 /* Check each path (slow!). */
@@ -1392,12 +1392,12 @@ void DerivationGoal::inputsRealised()
 debug("added input paths %s", worker.store.showPaths(inputPaths));
-/* Is this a fixed-output derivation? */
-fixedOutput = drv->isFixedOutput();
+/* What type of derivation are we building? */
+derivationType = drv->type();
 /* Don't repeat fixed-output derivations since they're already
 verified by their output hash.*/
-nrRounds = fixedOutput ? 1 : settings.buildRepeat + 1;
+nrRounds = derivationIsFixed(derivationType) ? 1 : settings.buildRepeat + 1;
 /* Okay, try to build. Note that here we don't wait for a build
 slot to become available, since we don't need one if there is a
@@ -1466,16 +1466,16 @@ void DerivationGoal::tryToBuild()
 /* If any of the outputs already exist but are not valid, delete
 them. */
-for (auto & i : drv->outputs) {
-if (worker.store.isValidPath(i.second.path(worker.store, drv->name))) continue;
-debug("removing invalid path '%s'", worker.store.printStorePath(i.second.path(worker.store, drv->name)));
-deletePath(worker.store.Store::toRealPath(i.second.path(worker.store, drv->name)));
+for (auto & i : drv->outputsAndPaths(worker.store)) {
+if (worker.store.isValidPath(i.second.second)) continue;
+debug("removing invalid path '%s'", worker.store.printStorePath(i.second.second));
+deletePath(worker.store.Store::toRealPath(i.second.second));
 }
 /* Don't do a remote build if the derivation has the attribute
 `preferLocalBuild' set. Also, check and repair modes are only
 supported for local builds. */
-bool buildLocally = buildMode != bmNormal || parsedDrv->willBuildLocally();
+bool buildLocally = buildMode != bmNormal || parsedDrv->willBuildLocally(worker.store);
 /* Is the build hook willing to accept this job? */
 if (!buildLocally) {
@@ -1783,7 +1783,7 @@ void DerivationGoal::buildDone()
 st =
 dynamic_cast<NotDeterministic*>(&e) ? BuildResult::NotDeterministic :
 statusOk(status) ? BuildResult::OutputRejected :
-fixedOutput || diskFull ? BuildResult::TransientFailure :
+derivationIsImpure(derivationType) || diskFull ? BuildResult::TransientFailure :
 BuildResult::PermanentFailure;
 }
@@ -1919,8 +1919,8 @@ StorePathSet DerivationGoal::exportReferences(const StorePathSet & storePaths)
 for (auto & j : paths2) {
 if (j.isDerivation()) {
 Derivation drv = worker.store.derivationFromPath(j);
-for (auto & k : drv.outputs)
-worker.store.computeFSClosure(k.second.path(worker.store, drv.name), paths);
+for (auto & k : drv.outputsAndPaths(worker.store))
+worker.store.computeFSClosure(k.second.second, paths);
 }
 }
@@ -1964,13 +1964,13 @@ void linkOrCopy(const Path & from, const Path & to)
 void DerivationGoal::startBuilder()
 {
 /* Right platform? */
-if (!parsedDrv->canBuildLocally())
+if (!parsedDrv->canBuildLocally(worker.store))
 throw Error("a '%s' with features {%s} is required to build '%s', but I am a '%s' with features {%s}",
 drv->platform,
 concatStringsSep(", ", parsedDrv->getRequiredSystemFeatures()),
 worker.store.printStorePath(drvPath),
 settings.thisSystem,
-concatStringsSep<StringSet>(", ", settings.systemFeatures));
+concatStringsSep<StringSet>(", ", worker.store.systemFeatures));
 if (drv->isBuiltin())
 preloadNSS();
@@ -1996,7 +1996,7 @@ void DerivationGoal::startBuilder()
 else if (settings.sandboxMode == smDisabled)
 useChroot = false;
 else if (settings.sandboxMode == smRelaxed)
-useChroot = !fixedOutput && !noChroot;
+useChroot = !(derivationIsImpure(derivationType)) && !noChroot;
 }
 if (worker.store.storeDir != worker.store.realStoreDir) {
@@ -2014,8 +2014,8 @@ void DerivationGoal::startBuilder()
 chownToBuilder(tmpDir);
 /* Substitute output placeholders with the actual output paths. */
-for (auto & output : drv->outputs)
-inputRewrites[hashPlaceholder(output.first)] = worker.store.printStorePath(output.second.path(worker.store, drv->name));
+for (auto & output : drv->outputsAndPaths(worker.store))
+inputRewrites[hashPlaceholder(output.first)] = worker.store.printStorePath(output.second.second);
 /* Construct the environment passed to the builder. */
 initEnv();
@@ -2165,7 +2165,7 @@ void DerivationGoal::startBuilder()
 "nogroup:x:65534:\n") % sandboxGid).str());
 /* Create /etc/hosts with localhost entry. */
-if (!fixedOutput)
+if (!(derivationIsImpure(derivationType)))
 writeFile(chrootRootDir + "/etc/hosts", "127.0.0.1 localhost\n::1 localhost\n");
 /* Make the closure of the inputs available in the chroot,
@@ -2199,8 +2199,8 @@ void DerivationGoal::startBuilder()
 rebuilding a path that is in settings.dirsInChroot
 (typically the dependencies of /bin/sh). Throw them
 out. */
-for (auto & i : drv->outputs)
-dirsInChroot.erase(worker.store.printStorePath(i.second.path(worker.store, drv->name)));
+for (auto & i : drv->outputsAndPaths(worker.store))
+dirsInChroot.erase(worker.store.printStorePath(i.second.second));
 #elif __APPLE__
 /* We don't really have any parent prep work to do (yet?)
@@ -2373,7 +2373,7 @@ void DerivationGoal::startBuilder()
 us.
 */
-if (!fixedOutput)
+if (!(derivationIsImpure(derivationType)))
 privateNetwork = true;
 userNamespaceSync.create();
@@ -2574,7 +2574,7 @@ void DerivationGoal::initEnv()
 derivation, tell the builder, so that for instance `fetchurl'
 can skip checking the output. On older Nixes, this environment
 variable won't be set, so `fetchurl' will do the check. */
-if (fixedOutput) env["NIX_OUTPUT_CHECKED"] = "1";
+if (derivationIsFixed(derivationType)) env["NIX_OUTPUT_CHECKED"] = "1";
 /* *Only* if this is a fixed-output derivation, propagate the
 values of the environment variables specified in the
@@ -2585,7 +2585,7 @@ void DerivationGoal::initEnv()
 to the builder is generally impure, but the output of
 fixed-output derivations is by definition pure (since we
 already know the cryptographic hash of the output). */
-if (fixedOutput) {
+if (derivationIsImpure(derivationType)) {
 for (auto & i : parsedDrv->getStringsAttr("impureEnvVars").value_or(Strings()))
 env[i] = getEnv(i).value_or("");
 }
@@ -2612,8 +2612,8 @@ void DerivationGoal::writeStructuredAttrs()
 /* Add an "outputs" object containing the output paths. */
 nlohmann::json outputs;
-for (auto & i : drv->outputs)
-outputs[i.first] = rewriteStrings(worker.store.printStorePath(i.second.path(worker.store, drv->name)), inputRewrites);
+for (auto & i : drv->outputsAndPaths(worker.store))
+outputs[i.first] = rewriteStrings(worker.store.printStorePath(i.second.second), inputRewrites);
 json["outputs"] = outputs;
 /* Handle exportReferencesGraph. */
@@ -2815,9 +2815,9 @@ struct RestrictedStore : public LocalFSStore
 if (!goal.isAllowed(path.path))
 throw InvalidPath("cannot build unknown path '%s' in recursive Nix", printStorePath(path.path));
 auto drv = derivationFromPath(path.path);
-for (auto & output : drv.outputs)
+for (auto & output : drv.outputsAndPaths(*this))
 if (wantOutput(output.first, path.outputs))
-newPaths.insert(output.second.path(*this, drv.name));
+newPaths.insert(output.second.second);
 } else if (!goal.isAllowed(path.path))
 throw InvalidPath("cannot build unknown path '%s' in recursive Nix", printStorePath(path.path));
 }
@@ -2920,7 +2920,8 @@ void DerivationGoal::startDaemon()
 FdSink to(remote.get());
 try {
 daemon::processConnection(store, from, to,
-daemon::NotTrusted, daemon::Recursive, "nobody", 65535);
+daemon::NotTrusted, daemon::Recursive,
+[&](Store & store) { store.createUser("nobody", 65535); });
 debug("terminated daemon connection");
 } catch (SysError &) {
 ignoreException();
@@ -3179,7 +3180,7 @@ void DerivationGoal::runChild()
 createDirs(chrootRootDir + "/dev/shm");
 createDirs(chrootRootDir + "/dev/pts");
 ss.push_back("/dev/full");
-if (settings.systemFeatures.get().count("kvm") && pathExists("/dev/kvm"))
+if (worker.store.systemFeatures.get().count("kvm") && pathExists("/dev/kvm"))
 ss.push_back("/dev/kvm");
 ss.push_back("/dev/null");
 ss.push_back("/dev/random");
@@ -3195,7 +3196,7 @@ void DerivationGoal::runChild()
 /* Fixed-output derivations typically need to access the
 network, so give them access to /etc/resolv.conf and so
 on. */
-if (fixedOutput) {
+if (derivationIsImpure(derivationType)) {
 ss.push_back("/etc/resolv.conf");
 // Only use nss functions to resolve hosts and
@@ -3436,7 +3437,7 @@ void DerivationGoal::runChild()
 sandboxProfile += "(import \"sandbox-defaults.sb\")\n";
-if (fixedOutput)
+if (derivationIsImpure(derivationType))
 sandboxProfile += "(import \"sandbox-network.sb\")\n";
 /* Our rwx outputs */
@@ -3616,8 +3617,8 @@ void DerivationGoal::registerOutputs()
 to do anything here. */
 if (hook) {
 bool allValid = true;
-for (auto & i : drv->outputs)
-if (!worker.store.isValidPath(i.second.path(worker.store, drv->name))) allValid = false;
+for (auto & i : drv->outputsAndPaths(worker.store))
+if (!worker.store.isValidPath(i.second.second)) allValid = false;
 if (allValid) return;
 }
@@ -3638,23 +3639,23 @@ void DerivationGoal::registerOutputs()
 Nix calls. */
 StorePathSet referenceablePaths;
 for (auto & p : inputPaths) referenceablePaths.insert(p);
-for (auto & i : drv->outputs) referenceablePaths.insert(i.second.path(worker.store, drv->name));
+for (auto & i : drv->outputsAndPaths(worker.store)) referenceablePaths.insert(i.second.second);
 for (auto & p : addedPaths) referenceablePaths.insert(p);
 /* Check whether the output paths were created, and grep each
 output path to determine what other paths it references. Also make all
 output paths read-only. */
-for (auto & i : drv->outputs) {
-auto path = worker.store.printStorePath(i.second.path(worker.store, drv->name));
-if (!missingPaths.count(i.second.path(worker.store, drv->name))) continue;
+for (auto & i : drv->outputsAndPaths(worker.store)) {
+auto path = worker.store.printStorePath(i.second.second);
+if (!missingPaths.count(i.second.second)) continue;
 Path actualPath = path;
 if (needsHashRewrite()) {
-auto r = redirectedOutputs.find(i.second.path(worker.store, drv->name));
+auto r = redirectedOutputs.find(i.second.second);
 if (r != redirectedOutputs.end()) {
 auto redirected = worker.store.Store::toRealPath(r->second);
 if (buildMode == bmRepair
-&& redirectedBadOutputs.count(i.second.path(worker.store, drv->name))
+&& redirectedBadOutputs.count(i.second.second)
 && pathExists(redirected))
 replaceValidPath(path, redirected);
 if (buildMode == bmCheck)
@ -3721,9 +3722,22 @@ void DerivationGoal::registerOutputs()
hash). */ hash). */
std::optional<ContentAddress> ca; std::optional<ContentAddress> ca;
if (fixedOutput) { if (! std::holds_alternative<DerivationOutputInputAddressed>(i.second.first.output)) {
DerivationOutputCAFloating outputHash;
FixedOutputHash outputHash = std::get<DerivationOutputFixed>(i.second.output).hash; std::visit(overloaded {
[&](DerivationOutputInputAddressed doi) {
assert(false); // Enclosing `if` handles this case in other branch
},
[&](DerivationOutputCAFixed dof) {
outputHash = DerivationOutputCAFloating {
.method = dof.hash.method,
.hashType = dof.hash.hash.type,
};
},
[&](DerivationOutputCAFloating dof) {
outputHash = dof;
},
}, i.second.first.output);
if (outputHash.method == FileIngestionMethod::Flat) { if (outputHash.method == FileIngestionMethod::Flat) {
/* The output path should be a regular file without execute permission. */ /* The output path should be a regular file without execute permission. */
@ -3737,12 +3751,17 @@ void DerivationGoal::registerOutputs()
/* Check the hash. In hash mode, move the path produced by /* Check the hash. In hash mode, move the path produced by
the derivation to its content-addressed location. */ the derivation to its content-addressed location. */
Hash h2 = outputHash.method == FileIngestionMethod::Recursive Hash h2 = outputHash.method == FileIngestionMethod::Recursive
? hashPath(outputHash.hash.type, actualPath).first ? hashPath(outputHash.hashType, actualPath).first
: hashFile(outputHash.hash.type, actualPath); : hashFile(outputHash.hashType, actualPath);
auto dest = worker.store.makeFixedOutputPath(outputHash.method, h2, i.second.path(worker.store, drv->name).name()); auto dest = worker.store.makeFixedOutputPath(outputHash.method, h2, i.second.second.name());
if (outputHash.hash != h2) { // true if either floating CA, or incorrect fixed hash.
bool needsMove = true;
if (auto p = std::get_if<DerivationOutputCAFixed>(& i.second.first.output)) {
Hash & h = p->hash.hash;
if (h != h2) {
/* Throw an error after registering the path as /* Throw an error after registering the path as
valid. */ valid. */
@ -3750,9 +3769,15 @@ void DerivationGoal::registerOutputs()
delayedException = std::make_exception_ptr( delayedException = std::make_exception_ptr(
BuildError("hash mismatch in fixed-output derivation '%s':\n wanted: %s\n got: %s", BuildError("hash mismatch in fixed-output derivation '%s':\n wanted: %s\n got: %s",
worker.store.printStorePath(dest), worker.store.printStorePath(dest),
outputHash.hash.to_string(SRI, true), h.to_string(SRI, true),
h2.to_string(SRI, true))); h2.to_string(SRI, true)));
} else {
// matched the fixed hash, so no move needed.
needsMove = false;
}
}
if (needsMove) {
Path actualDest = worker.store.Store::toRealPath(dest); Path actualDest = worker.store.Store::toRealPath(dest);
if (worker.store.isValidPath(dest)) if (worker.store.isValidPath(dest))
@ -3840,8 +3865,10 @@ void DerivationGoal::registerOutputs()
worker.markContentsGood(worker.store.parseStorePath(path)); worker.markContentsGood(worker.store.parseStorePath(path));
} }
ValidPathInfo info(worker.store.parseStorePath(path)); ValidPathInfo info {
info.narHash = hash.first; worker.store.parseStorePath(path),
hash.first,
};
info.narSize = hash.second; info.narSize = hash.second;
info.references = std::move(references); info.references = std::move(references);
info.deriver = drvPath; info.deriver = drvPath;
@ -3897,8 +3924,8 @@ void DerivationGoal::registerOutputs()
/* If this is the first round of several, then move the output out of the way. */ /* If this is the first round of several, then move the output out of the way. */
if (nrRounds > 1 && curRound == 1 && curRound < nrRounds && keepPreviousRound) { if (nrRounds > 1 && curRound == 1 && curRound < nrRounds && keepPreviousRound) {
for (auto & i : drv->outputs) { for (auto & i : drv->outputsAndPaths(worker.store)) {
auto path = worker.store.printStorePath(i.second.path(worker.store, drv->name)); auto path = worker.store.printStorePath(i.second.second);
Path prev = path + checkSuffix; Path prev = path + checkSuffix;
deletePath(prev); deletePath(prev);
Path dst = path + checkSuffix; Path dst = path + checkSuffix;
@ -3915,8 +3942,8 @@ void DerivationGoal::registerOutputs()
/* Remove the .check directories if we're done. FIXME: keep them /* Remove the .check directories if we're done. FIXME: keep them
       if the result was not deterministic? */ if the result was not deterministic? */
if (curRound == nrRounds) { if (curRound == nrRounds) {
for (auto & i : drv->outputs) { for (auto & i : drv->outputsAndPaths(worker.store)) {
Path prev = worker.store.printStorePath(i.second.path(worker.store, drv->name)) + checkSuffix; Path prev = worker.store.printStorePath(i.second.second) + checkSuffix;
deletePath(prev); deletePath(prev);
} }
} }
@ -4214,12 +4241,12 @@ void DerivationGoal::flushLine()
StorePathSet DerivationGoal::checkPathValidity(bool returnValid, bool checkHash) StorePathSet DerivationGoal::checkPathValidity(bool returnValid, bool checkHash)
{ {
StorePathSet result; StorePathSet result;
for (auto & i : drv->outputs) { for (auto & i : drv->outputsAndPaths(worker.store)) {
if (!wantOutput(i.first, wantedOutputs)) continue; if (!wantOutput(i.first, wantedOutputs)) continue;
bool good = bool good =
worker.store.isValidPath(i.second.path(worker.store, drv->name)) && worker.store.isValidPath(i.second.second) &&
(!checkHash || worker.pathContentsGood(i.second.path(worker.store, drv->name))); (!checkHash || worker.pathContentsGood(i.second.second));
if (good == returnValid) result.insert(i.second.path(worker.store, drv->name)); if (good == returnValid) result.insert(i.second.second);
} }
return result; return result;
} }
@ -4852,8 +4879,17 @@ void Worker::run(const Goals & _topGoals)
waitForInput(); waitForInput();
else { else {
if (awake.empty() && 0 == settings.maxBuildJobs) if (awake.empty() && 0 == settings.maxBuildJobs)
{
if (getMachines().empty())
throw Error("unable to start any build; either increase '--max-jobs' " throw Error("unable to start any build; either increase '--max-jobs' "
"or enable remote builds"); "or enable remote builds."
"\nhttps://nixos.org/nix/manual/#chap-distributed-builds");
else
throw Error("unable to start any build; remote machines may not have "
"all required system features."
"\nhttps://nixos.org/nix/manual/#chap-distributed-builds");
}
assert(!awake.empty()); assert(!awake.empty());
} }
} }
@ -5037,7 +5073,7 @@ bool Worker::pathContentsGood(const StorePath & path)
if (!pathExists(store.printStorePath(path))) if (!pathExists(store.printStorePath(path)))
res = false; res = false;
else { else {
HashResult current = hashPath(info->narHash->type, store.printStorePath(path)); HashResult current = hashPath(info->narHash.type, store.printStorePath(path));
Hash nullHash(htSHA256); Hash nullHash(htSHA256);
res = info->narHash == nullHash || info->narHash == current.first; res = info->narHash == nullHash || info->narHash == current.first;
} }


@ -58,6 +58,20 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData)
} }
}; };
/* Try the hashed mirrors first. */
if (getAttr("outputHashMode") == "flat")
for (auto hashedMirror : settings.hashedMirrors.get())
try {
if (!hasSuffix(hashedMirror, "/")) hashedMirror += '/';
std::optional<HashType> ht = parseHashTypeOpt(getAttr("outputHashAlgo"));
Hash h = newHashAllowEmpty(getAttr("outputHash"), ht);
fetch(hashedMirror + printHashType(h.type) + "/" + h.to_string(Base16, false));
return;
} catch (Error & e) {
debug(e.what());
}
/* Otherwise try the specified URL. */
fetch(mainUrl); fetch(mainUrl);
} }
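
As a rough illustration of the URL shape tried above: the mirror URL is just mirror + hash type + "/" + the base-16 hash, with the original URL as fallback. A minimal, self-contained sketch (the mirror host and digest below are placeholders, not real values):

    #include <iostream>
    #include <string>

    // Minimal sketch: the URL tried before falling back to the original one is
    // <mirror>/<hash type>/<base-16 hash>, exactly as composed in the loop above.
    std::string hashedMirrorUrl(std::string mirror,
                                const std::string & hashType,    // e.g. "sha256"
                                const std::string & hashBase16)  // base-16 digest (placeholder below)
    {
        if (mirror.empty() || mirror.back() != '/') mirror += '/';
        return mirror + hashType + "/" + hashBase16;
    }

    int main()
    {
        // Both arguments are placeholders, not real data.
        std::cout << hashedMirrorUrl("https://mirror.example", "sha256", "<base16-digest>") << "\n";
    }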


@ -1,4 +1,6 @@
#include "args.hh"
#include "content-address.hh" #include "content-address.hh"
#include "split.hh"
namespace nix { namespace nix {
@ -24,10 +26,6 @@ std::string makeFixedOutputCA(FileIngestionMethod method, const Hash & hash)
+ hash.to_string(Base32, true); + hash.to_string(Base32, true);
} }
// FIXME Put this somewhere?
template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;
std::string renderContentAddress(ContentAddress ca) { std::string renderContentAddress(ContentAddress ca) {
return std::visit(overloaded { return std::visit(overloaded {
[](TextHash th) { [](TextHash th) {
@ -40,38 +38,46 @@ std::string renderContentAddress(ContentAddress ca) {
} }
ContentAddress parseContentAddress(std::string_view rawCa) { ContentAddress parseContentAddress(std::string_view rawCa) {
auto prefixSeparator = rawCa.find(':'); auto rest = rawCa;
if (prefixSeparator != string::npos) {
auto prefix = string(rawCa, 0, prefixSeparator); std::string_view prefix;
{
auto optPrefix = splitPrefixTo(rest, ':');
if (!optPrefix)
throw UsageError("not a content address because it is not in the form '<prefix>:<rest>': %s", rawCa);
prefix = *optPrefix;
}
auto parseHashType_ = [&](){
auto hashTypeRaw = splitPrefixTo(rest, ':');
if (!hashTypeRaw)
throw UsageError("content address hash must be in form '<algo>:<hash>', but found: %s", rawCa);
HashType hashType = parseHashType(*hashTypeRaw);
return std::move(hashType);
};
// Switch on prefix
if (prefix == "text") { if (prefix == "text") {
        auto hashTypeAndHash = rawCa.substr(prefixSeparator+1, string::npos); // No parsing of the method, "text" only supports flat.
Hash hash = Hash(string(hashTypeAndHash)); HashType hashType = parseHashType_();
if (hash.type != htSHA256) { if (hashType != htSHA256)
throw Error("parseContentAddress: the text hash should have type SHA256"); throw Error("text content address hash should use %s, but instead uses %s",
} printHashType(htSHA256), printHashType(hashType));
return TextHash { hash }; return TextHash {
.hash = Hash::parseNonSRIUnprefixed(rest, std::move(hashType)),
};
} else if (prefix == "fixed") { } else if (prefix == "fixed") {
// This has to be an inverse of makeFixedOutputCA // Parse method
auto methodAndHash = rawCa.substr(prefixSeparator+1, string::npos); auto method = FileIngestionMethod::Flat;
if (methodAndHash.substr(0,2) == "r:") { if (splitPrefix(rest, "r:"))
std::string_view hashRaw = methodAndHash.substr(2,string::npos); method = FileIngestionMethod::Recursive;
HashType hashType = parseHashType_();
return FixedOutputHash { return FixedOutputHash {
.method = FileIngestionMethod::Recursive, .method = method,
.hash = Hash(string(hashRaw)), .hash = Hash::parseNonSRIUnprefixed(rest, std::move(hashType)),
}; };
} else { } else
            std::string_view hashRaw = methodAndHash; throw UsageError("content address prefix '%s' is unrecognized. Recognized prefixes are 'text' or 'fixed'", prefix);
return FixedOutputHash {
.method = FileIngestionMethod::Flat,
.hash = Hash(string(hashRaw)),
};
}
} else {
throw Error("parseContentAddress: format not recognized; has to be text or fixed");
}
} else {
throw Error("Not a content address because it lacks an appropriate prefix");
}
}; };
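
So the parser accepts two shapes: "text:<algo>:<hash>" (where <algo> must be sha256) and "fixed:[r:]<algo>:<hash>", with the optional "r:" selecting recursive ingestion. A small std-only sketch of the same prefix-splitting, purely for illustration (it imitates splitPrefixTo from split.hh rather than using it, and the hash value is a placeholder):

    #include <iostream>
    #include <optional>
    #include <string_view>

    // Std-only imitation of splitPrefixTo from split.hh, for illustration only.
    static std::optional<std::string_view> splitPrefixTo(std::string_view & s, char sep)
    {
        auto pos = s.find(sep);
        if (pos == std::string_view::npos) return std::nullopt;
        auto prefix = s.substr(0, pos);
        s.remove_prefix(pos + 1);
        return prefix;
    }

    int main()
    {
        std::string_view rest = "fixed:r:sha256:<base32-hash>";   // placeholder hash
        auto prefix = splitPrefixTo(rest, ':');                   // "text" or "fixed"
        bool recursive = rest.substr(0, 2) == "r:";               // optional ingestion-method marker
        if (recursive) rest.remove_prefix(2);
        auto algo = splitPrefixTo(rest, ':');                     // e.g. "sha256"
        std::cout << *prefix << ' ' << (recursive ? "recursive" : "flat")
                  << ' ' << *algo << ' ' << rest << "\n";         // rest is the bare hash
    }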
std::optional<ContentAddress> parseContentAddressOpt(std::string_view rawCaOpt) { std::optional<ContentAddress> parseContentAddressOpt(std::string_view rawCaOpt) {


@ -289,7 +289,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
logger->startWork(); logger->startWork();
auto hash = store->queryPathInfo(path)->narHash; auto hash = store->queryPathInfo(path)->narHash;
logger->stopWork(); logger->stopWork();
to << hash->to_string(Base16, false); to << hash.to_string(Base16, false);
break; break;
} }
@ -454,8 +454,46 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
readDerivation(from, *store, drv, Derivation::nameFromPath(drvPath)); readDerivation(from, *store, drv, Derivation::nameFromPath(drvPath));
BuildMode buildMode = (BuildMode) readInt(from); BuildMode buildMode = (BuildMode) readInt(from);
logger->startWork(); logger->startWork();
if (!trusted)
throw Error("you are not privileged to build derivations"); /* Content-addressed derivations are trustless because their output paths
are verified by their content alone, so any derivation is free to
try to produce such a path.
Input-addressed derivation output paths, however, are calculated
from the derivation closure that produced them---even knowing the
root derivation is not enough. That the output data actually came
from those derivations is fundamentally unverifiable, but the daemon
trusts itself on that matter. The question instead is whether the
submitted plan has rights to the output paths it wants to fill, and
at least the derivation closure proves that.
       It would have been nice if the input-addressing algorithm merely depended
on the build time closure, rather than depending on the derivation
closure. That would mean input-addressed paths used at build time
would just be trusted and not need their own evidence. This is in
fact fine as the same guarantees would hold *inductively*: either
the remote builder has those paths and already trusts them, or it
needs to build them too and thus their evidence must be provided in
turn. The advantage of this variant algorithm is that the evidence
for input-addressed paths which the remote builder already has
doesn't need to be sent again.
That said, now that we have floating CA derivations, it is better
that people just migrate to those which also solve this problem, and
others. It's the same migration difficulty with strictly more
benefit.
Lastly, do note that when we parse fixed-output content-addressed
derivations, we throw out the precomputed output paths and just
store the hashes, so there aren't two competing sources of truth an
attacker could exploit. */
if (drv.type() == DerivationType::InputAddressed && !trusted)
throw Error("you are not privileged to build input-addressed derivations");
/* Make sure that the non-input-addressed derivations that got this far
are in fact content-addressed if we don't trust them. */
assert(derivationIsCA(drv.type()) || trusted);
auto res = store->buildDerivation(drvPath, drv, buildMode); auto res = store->buildDerivation(drvPath, drv, buildMode);
logger->stopWork(); logger->stopWork();
to << res.status << res.errorMsg; to << res.status << res.errorMsg;
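
In short, the new policy is: content-addressed derivations may be built on behalf of any client, input-addressed ones only for trusted clients. A standalone restatement of that check (hypothetical helper, not daemon code):

    #include <stdexcept>

    enum struct DerivationType { InputAddressed, CAFixed, CAFloating };

    // Hypothetical restatement of the daemon's check above: CA derivations verify
    // their own outputs, so untrusted clients may submit them; input-addressed
    // derivations are only accepted from trusted clients.
    void checkBuildAllowed(DerivationType type, bool trustedClient)
    {
        bool contentAddressed =
            type == DerivationType::CAFixed || type == DerivationType::CAFloating;
        if (!contentAddressed && !trustedClient)
            throw std::runtime_error("you are not privileged to build input-addressed derivations");
    }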
@ -638,7 +676,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
if (GET_PROTOCOL_MINOR(clientVersion) >= 17) if (GET_PROTOCOL_MINOR(clientVersion) >= 17)
to << 1; to << 1;
to << (info->deriver ? store->printStorePath(*info->deriver) : "") to << (info->deriver ? store->printStorePath(*info->deriver) : "")
<< info->narHash->to_string(Base16, false); << info->narHash.to_string(Base16, false);
writeStorePaths(*store, to, info->references); writeStorePaths(*store, to, info->references);
to << info->registrationTime << info->narSize; to << info->registrationTime << info->narSize;
if (GET_PROTOCOL_MINOR(clientVersion) >= 16) { if (GET_PROTOCOL_MINOR(clientVersion) >= 16) {
@ -688,17 +726,18 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
auto path = store->parseStorePath(readString(from)); auto path = store->parseStorePath(readString(from));
logger->startWork(); logger->startWork();
logger->stopWork(); logger->stopWork();
dumpPath(store->printStorePath(path), to); dumpPath(store->toRealPath(path), to);
break; break;
} }
case wopAddToStoreNar: { case wopAddToStoreNar: {
bool repair, dontCheckSigs; bool repair, dontCheckSigs;
ValidPathInfo info(store->parseStorePath(readString(from))); auto path = store->parseStorePath(readString(from));
auto deriver = readString(from); auto deriver = readString(from);
auto narHash = Hash::parseAny(readString(from), htSHA256);
ValidPathInfo info { path, narHash };
if (deriver != "") if (deriver != "")
info.deriver = store->parseStorePath(deriver); info.deriver = store->parseStorePath(deriver);
info.narHash = Hash(readString(from), htSHA256);
info.references = readStorePaths<StorePathSet>(*store, from); info.references = readStorePaths<StorePathSet>(*store, from);
from >> info.registrationTime >> info.narSize >> info.ultimate; from >> info.registrationTime >> info.narSize >> info.ultimate;
info.sigs = readStrings<StringSet>(from); info.sigs = readStrings<StringSet>(from);
@ -817,8 +856,7 @@ void processConnection(
FdSink & to, FdSink & to,
TrustedFlag trusted, TrustedFlag trusted,
RecursiveFlag recursive, RecursiveFlag recursive,
const std::string & userName, std::function<void(Store &)> authHook)
uid_t userId)
{ {
auto monitor = !recursive ? std::make_unique<MonitorFdHup>(from.fd) : nullptr; auto monitor = !recursive ? std::make_unique<MonitorFdHup>(from.fd) : nullptr;
@ -859,15 +897,7 @@ void processConnection(
/* If we can't accept clientVersion, then throw an error /* If we can't accept clientVersion, then throw an error
*here* (not above). */ *here* (not above). */
authHook(*store);
#if 0
/* Prevent users from doing something very dangerous. */
if (geteuid() == 0 &&
querySetting("build-users-group", "") == "")
throw Error("if you run 'nix-daemon' as root, then you MUST set 'build-users-group'!");
#endif
store->createUser(userName, userId);
tunnelLogger->stopWork(); tunnelLogger->stopWork();
to.flush(); to.flush();


@ -12,7 +12,10 @@ void processConnection(
FdSink & to, FdSink & to,
TrustedFlag trusted, TrustedFlag trusted,
RecursiveFlag recursive, RecursiveFlag recursive,
const std::string & userName, /* Arbitrary hook to check authorization / initialize user data / whatever
uid_t userId); after the protocol has been negotiated. The idea is that this function
   and everything it calls need not know about authorization or user setup;
   the `nix-daemon` handles that instead. */
std::function<void(Store &)> authHook);
} }
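
A hypothetical call-site sketch for the new parameter (names such as userName and userId are illustrative, and the fragment is assumed to sit inside the accepting code): the daemon front end packages its authorization and per-user setup into the hook, and processConnection runs it once the protocol handshake has succeeded.

    #include <functional>

    // Hypothetical: the daemon front end builds the hook and passes it as the
    // last argument of processConnection(); userName and userId come from
    // however the listener identified the peer.
    std::function<void(nix::Store &)> authHook = [&](nix::Store & store) {
        store.createUser(userName, userId);   // the setup that used to live in processConnection
    };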


@ -7,23 +7,54 @@
namespace nix { namespace nix {
// FIXME Put this somewhere? std::optional<StorePath> DerivationOutput::pathOpt(const Store & store, std::string_view drvName) const
template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;
StorePath DerivationOutput::path(const Store & store, std::string_view drvName) const
{ {
return std::visit(overloaded { return std::visit(overloaded {
[](DerivationOutputInputAddressed doi) { [](DerivationOutputInputAddressed doi) -> std::optional<StorePath> {
return doi.path; return { doi.path };
},
[&](DerivationOutputCAFixed dof) -> std::optional<StorePath> {
return {
store.makeFixedOutputPath(dof.hash.method, dof.hash.hash, drvName)
};
},
[](DerivationOutputCAFloating dof) -> std::optional<StorePath> {
return std::nullopt;
}, },
[&](DerivationOutputFixed dof) {
return store.makeFixedOutputPath(dof.hash.method, dof.hash.hash, drvName);
}
}, output); }, output);
} }
bool derivationIsCA(DerivationType dt) {
switch (dt) {
case DerivationType::InputAddressed: return false;
case DerivationType::CAFixed: return true;
case DerivationType::CAFloating: return true;
};
    // An enum can hold values outside its declared cases, but adding a `default:`
    // would disable exhaustiveness warnings, so we assert unreachability instead.
assert(false);
}
bool derivationIsFixed(DerivationType dt) {
switch (dt) {
case DerivationType::InputAddressed: return false;
case DerivationType::CAFixed: return true;
case DerivationType::CAFloating: return false;
};
assert(false);
}
bool derivationIsImpure(DerivationType dt) {
switch (dt) {
case DerivationType::InputAddressed: return false;
case DerivationType::CAFixed: return true;
case DerivationType::CAFloating: return false;
};
assert(false);
}
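
For reference, the three predicates line up with the three DerivationType values as in the following self-contained restatement (re-implemented with a local enum purely to show the truth table):

    #include <cassert>

    enum struct DerivationType { InputAddressed, CAFixed, CAFloating };

    // Same truth table as derivationIsCA / derivationIsFixed / derivationIsImpure above.
    constexpr bool isCA(DerivationType t)     { return t != DerivationType::InputAddressed; }
    constexpr bool isFixed(DerivationType t)  { return t == DerivationType::CAFixed; }
    constexpr bool isImpure(DerivationType t) { return t == DerivationType::CAFixed; }

    int main()
    {
        // InputAddressed: not CA, not fixed, pure (sandboxable).
        assert(!isCA(DerivationType::InputAddressed) && !isImpure(DerivationType::InputAddressed));
        // CAFixed: CA, fixed, impure (may fetch its fixed output from the network).
        assert(isCA(DerivationType::CAFixed) && isFixed(DerivationType::CAFixed) && isImpure(DerivationType::CAFixed));
        // CAFloating: CA, not fixed, pure.
        assert(isCA(DerivationType::CAFloating) && !isFixed(DerivationType::CAFloating) && !isImpure(DerivationType::CAFloating));
    }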
bool BasicDerivation::isBuiltin() const bool BasicDerivation::isBuiltin() const
{ {
return string(builder, 0, 8) == "builtin:"; return string(builder, 0, 8) == "builtin:";
@ -31,7 +62,7 @@ bool BasicDerivation::isBuiltin() const
StorePath writeDerivation(ref<Store> store, StorePath writeDerivation(ref<Store> store,
const Derivation & drv, std::string_view name, RepairFlag repair) const Derivation & drv, RepairFlag repair)
{ {
auto references = drv.inputSrcs; auto references = drv.inputSrcs;
for (auto & i : drv.inputDrvs) for (auto & i : drv.inputDrvs)
@ -39,7 +70,7 @@ StorePath writeDerivation(ref<Store> store,
/* Note that the outputs of a derivation are *not* references /* Note that the outputs of a derivation are *not* references
(that can be missing (of course) and should not necessarily be (that can be missing (of course) and should not necessarily be
held during a garbage collection). */ held during a garbage collection). */
auto suffix = std::string(name) + drvExtension; auto suffix = std::string(drv.name) + drvExtension;
auto contents = drv.unparse(*store, false); auto contents = drv.unparse(*store, false);
return settings.readOnlyMode return settings.readOnlyMode
? store->computeStorePathForText(suffix, contents, references) ? store->computeStorePathForText(suffix, contents, references)
@ -108,29 +139,33 @@ static StringSet parseStrings(std::istream & str, bool arePaths)
} }
static DerivationOutput parseDerivationOutput(const Store & store, std::istringstream & str) static DerivationOutput parseDerivationOutput(const Store & store,
StorePath path, std::string_view hashAlgo, std::string_view hash)
{ {
expect(str, ","); auto path = store.parseStorePath(parsePath(str));
expect(str, ","); auto hashAlgo = parseString(str);
expect(str, ","); const auto hash = parseString(str);
expect(str, ")");
if (hashAlgo != "") { if (hashAlgo != "") {
auto method = FileIngestionMethod::Flat; auto method = FileIngestionMethod::Flat;
if (string(hashAlgo, 0, 2) == "r:") { if (string(hashAlgo, 0, 2) == "r:") {
method = FileIngestionMethod::Recursive; method = FileIngestionMethod::Recursive;
hashAlgo = string(hashAlgo, 2); hashAlgo = hashAlgo.substr(2);
} }
const HashType hashType = parseHashType(hashAlgo); const HashType hashType = parseHashType(hashAlgo);
return DerivationOutput { return hash != ""
.output = DerivationOutputFixed { ? DerivationOutput {
.output = DerivationOutputCAFixed {
.hash = FixedOutputHash { .hash = FixedOutputHash {
.method = std::move(method), .method = std::move(method),
.hash = Hash(hash, hashType), .hash = Hash::parseNonSRIUnprefixed(hash, hashType),
}, },
} }
}; }
: (settings.requireExperimentalFeature("ca-derivations"),
DerivationOutput {
.output = DerivationOutputCAFloating {
.method = std::move(method),
.hashType = std::move(hashType),
},
});
} else } else
return DerivationOutput { return DerivationOutput {
.output = DerivationOutputInputAddressed { .output = DerivationOutputInputAddressed {
@ -139,6 +174,16 @@ static DerivationOutput parseDerivationOutput(const Store & store, std::istrings
}; };
} }
static DerivationOutput parseDerivationOutput(const Store & store, std::istringstream & str)
{
expect(str, ","); auto path = store.parseStorePath(parsePath(str));
expect(str, ","); const auto hashAlgo = parseString(str);
expect(str, ","); const auto hash = parseString(str);
expect(str, ")");
return parseDerivationOutput(store, std::move(path), hashAlgo, hash);
}
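
Both parseDerivationOutput overloads thus dispatch on the (hashAlgo, hash) pair read from the ATerm or the wire. A standalone sketch of that dispatch, for illustration only:

    #include <string>

    enum struct OutputKind { InputAddressed, CAFixed, CAFloating };

    // Standalone restatement of the dispatch above: the (hashAlgo, hash) fields of
    // a serialized output decide which variant gets built.
    OutputKind classifyOutput(const std::string & hashAlgo, const std::string & hash)
    {
        if (hashAlgo.empty()) return OutputKind::InputAddressed;   // path is taken as given
        if (!hash.empty())    return OutputKind::CAFixed;          // "r:" prefix on hashAlgo = recursive ingestion
        return OutputKind::CAFloating;                             // gated behind "ca-derivations"
    }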
Derivation parseDerivation(const Store & store, std::string && s, std::string_view name) Derivation parseDerivation(const Store & store, std::string && s, std::string_view name)
{ {
@ -250,13 +295,20 @@ string Derivation::unparse(const Store & store, bool maskOutputs,
if (first) first = false; else s += ','; if (first) first = false; else s += ',';
s += '('; printUnquotedString(s, i.first); s += '('; printUnquotedString(s, i.first);
s += ','; printUnquotedString(s, maskOutputs ? "" : store.printStorePath(i.second.path(store, name))); s += ','; printUnquotedString(s, maskOutputs ? "" : store.printStorePath(i.second.path(store, name)));
if (auto hash = std::get_if<DerivationOutputFixed>(&i.second.output)) { std::visit(overloaded {
s += ','; printUnquotedString(s, hash->hash.printMethodAlgo()); [&](DerivationOutputInputAddressed doi) {
s += ','; printUnquotedString(s, hash->hash.hash.to_string(Base16, false));
} else {
s += ','; printUnquotedString(s, ""); s += ','; printUnquotedString(s, "");
s += ','; printUnquotedString(s, ""); s += ','; printUnquotedString(s, "");
} },
[&](DerivationOutputCAFixed dof) {
s += ','; printUnquotedString(s, dof.hash.printMethodAlgo());
s += ','; printUnquotedString(s, dof.hash.hash.to_string(Base16, false));
},
[&](DerivationOutputCAFloating dof) {
s += ','; printUnquotedString(s, makeFileIngestionPrefix(dof.method) + printHashType(dof.hashType));
s += ','; printUnquotedString(s, "");
},
}, i.second.output);
s += ')'; s += ')';
} }
@ -308,60 +360,134 @@ bool isDerivation(const string & fileName)
} }
bool BasicDerivation::isFixedOutput() const DerivationType BasicDerivation::type() const
{ {
return outputs.size() == 1 && std::set<std::string_view> inputAddressedOutputs, fixedCAOutputs, floatingCAOutputs;
outputs.begin()->first == "out" && std::optional<HashType> floatingHashType;
std::holds_alternative<DerivationOutputFixed>(outputs.begin()->second.output); for (auto & i : outputs) {
std::visit(overloaded {
[&](DerivationOutputInputAddressed _) {
inputAddressedOutputs.insert(i.first);
},
[&](DerivationOutputCAFixed _) {
fixedCAOutputs.insert(i.first);
},
[&](DerivationOutputCAFloating dof) {
floatingCAOutputs.insert(i.first);
if (!floatingHashType) {
floatingHashType = dof.hashType;
} else {
if (*floatingHashType != dof.hashType)
throw Error("All floating outputs must use the same hash type");
}
},
}, i.second.output);
}
if (inputAddressedOutputs.empty() && fixedCAOutputs.empty() && floatingCAOutputs.empty()) {
throw Error("Must have at least one output");
} else if (! inputAddressedOutputs.empty() && fixedCAOutputs.empty() && floatingCAOutputs.empty()) {
return DerivationType::InputAddressed;
} else if (inputAddressedOutputs.empty() && ! fixedCAOutputs.empty() && floatingCAOutputs.empty()) {
if (fixedCAOutputs.size() > 1)
// FIXME: Experimental feature?
throw Error("Only one fixed output is allowed for now");
if (*fixedCAOutputs.begin() != "out")
throw Error("Single fixed output must be named \"out\"");
return DerivationType::CAFixed;
} else if (inputAddressedOutputs.empty() && fixedCAOutputs.empty() && ! floatingCAOutputs.empty()) {
return DerivationType::CAFloating;
} else {
throw Error("Can't mix derivation output types");
}
} }
DrvHashes drvHashes; DrvHashes drvHashes;
/* pathDerivationModulo and hashDerivationModulo are mutually recursive
*/
/* Returns the hash of a derivation modulo fixed-output /* Look up the derivation by value and memoize the
subderivations. A fixed-output derivation is a derivation with one `hashDerivationModulo` call.
output (`out') for which an expected hash and hash algorithm are */
specified (using the `outputHash' and `outputHashAlgo' static const DrvHashModulo & pathDerivationModulo(Store & store, const StorePath & drvPath)
attributes). We don't want changes to such derivations to {
propagate upwards through the dependency graph, changing output auto h = drvHashes.find(drvPath);
paths everywhere. if (h == drvHashes.end()) {
assert(store.isValidPath(drvPath));
// Cache it
h = drvHashes.insert_or_assign(
drvPath,
hashDerivationModulo(
store,
store.readDerivation(drvPath),
false)).first;
}
return h->second;
}
For instance, if we change the url in a call to the `fetchurl' /* See the header for interface details. These are the implementation details.
function, we do not want to rebuild everything depending on it
(after all, (the hash of) the file being downloaded is unchanged).
So the *output paths* should not change. On the other hand, the
*derivation paths* should change to reflect the new dependency
graph.
That's what this function does: it returns a hash which is just the For fixed-output derivations, each hash in the map is not the
hash of the derivation ATerm, except that any input derivation corresponding output's content hash, but a hash of that hash along
paths have been replaced by the result of a recursive call to this with other constant data. The key point is that the value is a pure
function, and that for fixed-output derivations we return a hash of function of the output's contents, and there are no preimage attacks
its output path. */ either spoofing an output's contents for a derivation, or
Hash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOutputs) spoofing a derivation for an output's contents.
For regular derivations, it looks up each subderivation from its hash
and recurs. If the subderivation is also regular, it simply
substitutes the derivation path with its hash. If the subderivation
is fixed-output, however, it takes each output hash and pretends it
is a derivation hash producing a single "out" output. This is so we
don't leak the provenance of fixed outputs, reducing pointless cache
misses as the build itself won't know this.
*/
DrvHashModulo hashDerivationModulo(Store & store, const Derivation & drv, bool maskOutputs)
{ {
/* Return a fixed hash for fixed-output derivations. */ /* Return a fixed hash for fixed-output derivations. */
if (drv.isFixedOutput()) { switch (drv.type()) {
DerivationOutputs::const_iterator i = drv.outputs.begin(); case DerivationType::CAFloating:
auto hash = std::get<DerivationOutputFixed>(i->second.output); throw Error("Regular input-addressed derivations are not yet allowed to depend on CA derivations");
return hashString(htSHA256, "fixed:out:" case DerivationType::CAFixed: {
+ hash.hash.printMethodAlgo() + ":" std::map<std::string, Hash> outputHashes;
+ hash.hash.hash.to_string(Base16, false) + ":" for (const auto & i : drv.outputsAndPaths(store)) {
+ store.printStorePath(i->second.path(store, drv.name))); auto & dof = std::get<DerivationOutputCAFixed>(i.second.first.output);
auto hash = hashString(htSHA256, "fixed:out:"
+ dof.hash.printMethodAlgo() + ":"
+ dof.hash.hash.to_string(Base16, false) + ":"
+ store.printStorePath(i.second.second));
outputHashes.insert_or_assign(i.first, std::move(hash));
}
return outputHashes;
}
case DerivationType::InputAddressed:
break;
} }
/* For other derivations, replace the inputs paths with recursive /* For other derivations, replace the inputs paths with recursive
calls to this function. */ calls to this function. */
std::map<std::string, StringSet> inputs2; std::map<std::string, StringSet> inputs2;
for (auto & i : drv.inputDrvs) { for (auto & i : drv.inputDrvs) {
auto h = drvHashes.find(i.first); const auto & res = pathDerivationModulo(store, i.first);
if (h == drvHashes.end()) { std::visit(overloaded {
assert(store.isValidPath(i.first)); // Regular non-CA derivation, replace derivation
h = drvHashes.insert_or_assign(i.first, hashDerivationModulo(store, [&](Hash drvHash) {
store.readDerivation(i.first), false)).first; inputs2.insert_or_assign(drvHash.to_string(Base16, false), i.second);
},
// CA derivation's output hashes
[&](CaOutputHashes outputHashes) {
std::set<std::string> justOut = { "out" };
for (auto & output : i.second) {
/* Put each one in with a single "out" output.. */
const auto h = outputHashes.at(output);
inputs2.insert_or_assign(
h.to_string(Base16, false),
justOut);
} }
inputs2.insert_or_assign(h->second.to_string(Base16, false), i.second); },
}, res);
} }
return hashString(htSHA256, drv.unparse(store, maskOutputs, &inputs2)); return hashString(htSHA256, drv.unparse(store, maskOutputs, &inputs2));
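
For the CAFixed branch above, each per-output hash is simply SHA-256 over a short colon-separated preimage. A sketch of that preimage (illustrative helper, not the real signature):

    #include <string>

    // Sketch of the preimage hashed per output in the CAFixed branch above;
    // hashString(htSHA256, ...) is then applied to the returned string.
    std::string fixedOutputPreimage(const std::string & methodAlgo,   // e.g. "r:sha256"
                                    const std::string & hashBase16,   // the fixed output hash, base-16
                                    const std::string & outputPath)   // printed store path of the output
    {
        return "fixed:out:" + methodAlgo + ":" + hashBase16 + ":" + outputPath;
    }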
@ -385,38 +511,18 @@ bool wantOutput(const string & output, const std::set<string> & wanted)
StorePathSet BasicDerivation::outputPaths(const Store & store) const StorePathSet BasicDerivation::outputPaths(const Store & store) const
{ {
StorePathSet paths; StorePathSet paths;
for (auto & i : outputs) for (auto & i : outputsAndPaths(store))
paths.insert(i.second.path(store, name)); paths.insert(i.second.second);
return paths; return paths;
} }
static DerivationOutput readDerivationOutput(Source & in, const Store & store) static DerivationOutput readDerivationOutput(Source & in, const Store & store)
{ {
auto path = store.parseStorePath(readString(in)); auto path = store.parseStorePath(readString(in));
auto hashAlgo = readString(in); const auto hashAlgo = readString(in);
auto hash = readString(in); const auto hash = readString(in);
if (hashAlgo != "") { return parseDerivationOutput(store, std::move(path), hashAlgo, hash);
auto method = FileIngestionMethod::Flat;
if (string(hashAlgo, 0, 2) == "r:") {
method = FileIngestionMethod::Recursive;
hashAlgo = string(hashAlgo, 2);
}
auto hashType = parseHashType(hashAlgo);
return DerivationOutput {
.output = DerivationOutputFixed {
.hash = FixedOutputHash {
.method = std::move(method),
.hash = Hash(hash, hashType),
},
}
};
} else
return DerivationOutput {
.output = DerivationOutputInputAddressed {
.path = std::move(path),
}
};
} }
StringSet BasicDerivation::outputNames() const StringSet BasicDerivation::outputNames() const
@ -427,6 +533,27 @@ StringSet BasicDerivation::outputNames() const
return names; return names;
} }
DerivationOutputsAndPaths BasicDerivation::outputsAndPaths(const Store & store) const {
DerivationOutputsAndPaths outsAndPaths;
for (auto output : outputs)
outsAndPaths.insert(std::make_pair(
output.first,
std::make_pair(output.second, output.second.path(store, name))
)
);
return outsAndPaths;
}
DerivationOutputsAndOptPaths BasicDerivation::outputsAndOptPaths(const Store & store) const {
DerivationOutputsAndOptPaths outsAndOptPaths;
for (auto output : outputs)
outsAndOptPaths.insert(std::make_pair(
output.first,
std::make_pair(output.second, output.second.pathOpt(store, output.first))
)
);
return outsAndOptPaths;
}
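
Call sites then read each entry as i.second.first (the DerivationOutput) and i.second.second (its StorePath). A usage sketch, assuming a BasicDerivation drv and a Store store in scope:

    // Usage sketch for the combined map returned by outputsAndPaths.
    for (auto & [outputName, outAndPath] : drv.outputsAndPaths(store)) {
        const DerivationOutput & output = outAndPath.first;   // the output description
        const StorePath & path = outAndPath.second;           // where it will be written
        (void) output;
        // e.g. store.printStorePath(path), as the call sites above do via i.second.second
    }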
std::string_view BasicDerivation::nameFromPath(const StorePath & drvPath) { std::string_view BasicDerivation::nameFromPath(const StorePath & drvPath) {
auto nameWithSuffix = drvPath.name(); auto nameWithSuffix = drvPath.name();
@ -467,15 +594,22 @@ Source & readDerivation(Source & in, const Store & store, BasicDerivation & drv,
void writeDerivation(Sink & out, const Store & store, const BasicDerivation & drv) void writeDerivation(Sink & out, const Store & store, const BasicDerivation & drv)
{ {
out << drv.outputs.size(); out << drv.outputs.size();
for (auto & i : drv.outputs) { for (auto & i : drv.outputsAndPaths(store)) {
out << i.first out << i.first
<< store.printStorePath(i.second.path(store, drv.name)); << store.printStorePath(i.second.second);
if (auto hash = std::get_if<DerivationOutputFixed>(&i.second.output)) { std::visit(overloaded {
out << hash->hash.printMethodAlgo() [&](DerivationOutputInputAddressed doi) {
<< hash->hash.hash.to_string(Base16, false);
} else {
out << "" << ""; out << "" << "";
} },
[&](DerivationOutputCAFixed dof) {
out << dof.hash.printMethodAlgo()
<< dof.hash.hash.to_string(Base16, false);
},
[&](DerivationOutputCAFloating dof) {
out << (makeFileIngestionPrefix(dof.method) + printHashType(dof.hashType))
<< "";
},
}, i.second.first.output);
} }
writeStorePaths(store, out, drv.inputSrcs); writeStorePaths(store, out, drv.inputSrcs);
out << drv.platform << drv.builder << drv.args; out << drv.platform << drv.builder << drv.args;


@ -6,6 +6,7 @@
#include "content-address.hh" #include "content-address.hh"
#include <map> #include <map>
#include <variant>
namespace nix { namespace nix {
@ -13,30 +14,87 @@ namespace nix {
/* Abstract syntax of derivations. */ /* Abstract syntax of derivations. */
/* The traditional non-fixed-output derivation type. */
struct DerivationOutputInputAddressed struct DerivationOutputInputAddressed
{ {
/* Will need to become `std::optional<StorePath>` once input-addressed
       derivations are allowed to depend on content-addressed derivations */
StorePath path; StorePath path;
}; };
struct DerivationOutputFixed /* Fixed-output derivations, whose output paths are content addressed
according to that fixed output. */
struct DerivationOutputCAFixed
{ {
FixedOutputHash hash; /* hash used for expected hash computation */ FixedOutputHash hash; /* hash used for expected hash computation */
}; };
/* Floating-output derivations, whose output paths are content addressed, but
not fixed, and so are dynamically calculated from whatever the output ends
up being. */
struct DerivationOutputCAFloating
{
/* information used for expected hash computation */
FileIngestionMethod method;
HashType hashType;
};
struct DerivationOutput struct DerivationOutput
{ {
std::variant<DerivationOutputInputAddressed, DerivationOutputFixed> output; std::variant<
StorePath path(const Store & store, std::string_view drvName) const; DerivationOutputInputAddressed,
DerivationOutputCAFixed,
DerivationOutputCAFloating
> output;
std::optional<HashType> hashAlgoOpt(const Store & store) const;
/* Note, when you use this function you should make sure that you're passing
the right derivation name. When in doubt, you should use the safer
interface provided by BasicDerivation::outputsAndPaths */
std::optional<StorePath> pathOpt(const Store & store, std::string_view drvName) const;
/* DEPRECATED: Remove after CA drvs are fully implemented */
StorePath path(const Store & store, std::string_view drvName) const {
auto p = pathOpt(store, drvName);
if (!p) throw UnimplementedError("floating content-addressed derivations are not yet implemented");
return *p;
}
}; };
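
A sketch of constructing each of the three variants (field values such as somePath and someHash are placeholders; the types are those declared above):

    // Placeholders: somePath is a StorePath, someHash a Hash.
    DerivationOutput inputAddressed {
        .output = DerivationOutputInputAddressed { .path = somePath },
    };

    DerivationOutput caFixed {
        .output = DerivationOutputCAFixed {
            .hash = FixedOutputHash {
                .method = FileIngestionMethod::Recursive,
                .hash = someHash,
            },
        },
    };

    DerivationOutput caFloating {
        .output = DerivationOutputCAFloating {
            .method = FileIngestionMethod::Recursive,
            .hashType = htSHA256,
        },
    };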
typedef std::map<string, DerivationOutput> DerivationOutputs; typedef std::map<string, DerivationOutput> DerivationOutputs;
/* These are analogues to the previous DerivationOutputs data type, but they
   also contain, for each output, the (optional) store path in which it would
be written. To calculate values of these types, see the corresponding
functions in BasicDerivation */
typedef std::map<string, std::pair<DerivationOutput, StorePath>>
DerivationOutputsAndPaths;
typedef std::map<string, std::pair<DerivationOutput, std::optional<StorePath>>>
DerivationOutputsAndOptPaths;
/* For inputs that are sub-derivations, we specify exactly which /* For inputs that are sub-derivations, we specify exactly which
output IDs we are interested in. */ output IDs we are interested in. */
typedef std::map<StorePath, StringSet> DerivationInputs; typedef std::map<StorePath, StringSet> DerivationInputs;
typedef std::map<string, string> StringPairs; typedef std::map<string, string> StringPairs;
enum struct DerivationType : uint8_t {
InputAddressed,
CAFixed,
CAFloating,
};
/* Do the outputs of the derivation have paths calculated from their content,
or from the derivation itself? */
bool derivationIsCA(DerivationType);
/* Is the content of the outputs fixed a-priori via a hash? Never true for
non-CA derivations. */
bool derivationIsFixed(DerivationType);
/* Is the derivation impure and needs to access non-deterministic resources, or
pure and can be sandboxed? Note that whether or not we actually sandbox the
derivation is controlled separately. Never true for non-CA derivations. */
bool derivationIsImpure(DerivationType);
struct BasicDerivation struct BasicDerivation
{ {
DerivationOutputs outputs; /* keyed on symbolic IDs */ DerivationOutputs outputs; /* keyed on symbolic IDs */
@ -53,7 +111,7 @@ struct BasicDerivation
bool isBuiltin() const; bool isBuiltin() const;
/* Return true iff this is a fixed-output derivation. */ /* Return true iff this is a fixed-output derivation. */
bool isFixedOutput() const; DerivationType type() const;
/* Return the output paths of a derivation. */ /* Return the output paths of a derivation. */
StorePathSet outputPaths(const Store & store) const; StorePathSet outputPaths(const Store & store) const;
@ -61,6 +119,13 @@ struct BasicDerivation
/* Return the output names of a derivation. */ /* Return the output names of a derivation. */
StringSet outputNames() const; StringSet outputNames() const;
    /* Calculates the maps that contain all the DerivationOutputs, but
augmented with knowledge of the Store paths they would be written into.
The first one of these functions will be removed when the CA work is
completed */
DerivationOutputsAndPaths outputsAndPaths(const Store & store) const;
DerivationOutputsAndOptPaths outputsAndOptPaths(const Store & store) const;
static std::string_view nameFromPath(const StorePath & storePath); static std::string_view nameFromPath(const StorePath & storePath);
}; };
@ -82,7 +147,7 @@ enum RepairFlag : bool { NoRepair = false, Repair = true };
/* Write a derivation to the Nix store, and return its path. */ /* Write a derivation to the Nix store, and return its path. */
StorePath writeDerivation(ref<Store> store, StorePath writeDerivation(ref<Store> store,
const Derivation & drv, std::string_view name, RepairFlag repair = NoRepair); const Derivation & drv, RepairFlag repair = NoRepair);
/* Read a derivation from a file. */ /* Read a derivation from a file. */
Derivation parseDerivation(const Store & store, std::string && s, std::string_view name); Derivation parseDerivation(const Store & store, std::string && s, std::string_view name);
@ -90,10 +155,42 @@ Derivation parseDerivation(const Store & store, std::string && s, std::string_vi
// FIXME: remove // FIXME: remove
bool isDerivation(const string & fileName); bool isDerivation(const string & fileName);
Hash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOutputs); // known CA drv's output hashes, currently just for fixed-output derivations
// whose output hashes are always known since they are fixed up-front.
typedef std::map<std::string, Hash> CaOutputHashes;
typedef std::variant<
Hash, // regular DRV normalized hash
CaOutputHashes
> DrvHashModulo;
/* Returns hashes with the details of fixed-output subderivations
expunged.
A fixed-output derivation is a derivation whose outputs have a
specified content hash and hash algorithm. (Currently they must have
exactly one output (`out'), which is specified using the `outputHash'
and `outputHashAlgo' attributes, but the algorithm doesn't assume
this.) We don't want changes to such derivations to propagate upwards
through the dependency graph, changing output paths everywhere.
For instance, if we change the url in a call to the `fetchurl'
function, we do not want to rebuild everything depending on it---after
all, (the hash of) the file being downloaded is unchanged. So the
*output paths* should not change. On the other hand, the *derivation
paths* should change to reflect the new dependency graph.
For fixed-output derivations, this returns a map from the name of
each output to its hash, unique up to the output's contents.
For regular derivations, it returns a single hash of the derivation
ATerm, after subderivations have been likewise expunged from that
derivation.
*/
DrvHashModulo hashDerivationModulo(Store & store, const Derivation & drv, bool maskOutputs);
/* Memoisation of hashDerivationModulo(). */ /* Memoisation of hashDerivationModulo(). */
typedef std::map<StorePath, Hash> DrvHashes; typedef std::map<StorePath, DrvHashModulo> DrvHashes;
extern DrvHashes drvHashes; // FIXME: global, not thread-safe extern DrvHashes drvHashes; // FIXME: global, not thread-safe
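
Callers of hashDerivationModulo now have to handle both alternatives of DrvHashModulo. A minimal sketch, assuming the overloaded visitor helper and Hash API used elsewhere in this patch are in scope:

    // Sketch: fold a DrvHashModulo into printable text, handling both variants.
    std::string renderDrvHashModulo(const DrvHashModulo & h)
    {
        return std::visit(overloaded {
            [](const Hash & drvHash) {
                return drvHash.to_string(Base16, false);           // regular derivation
            },
            [](const CaOutputHashes & outputHashes) {
                std::string s;
                for (auto & [name, hash] : outputHashes)           // fixed-output: one hash per output
                    s += name + ":" + hash.to_string(Base16, false) + "\n";
                return s;
            },
        }, h);
    }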


@ -38,9 +38,9 @@ void Store::exportPath(const StorePath & path, Sink & sink)
filesystem corruption from spreading to other machines. filesystem corruption from spreading to other machines.
Don't complain if the stored hash is zero (unknown). */ Don't complain if the stored hash is zero (unknown). */
Hash hash = hashSink.currentHash().first; Hash hash = hashSink.currentHash().first;
if (hash != info->narHash && info->narHash != Hash(info->narHash->type)) if (hash != info->narHash && info->narHash != Hash(info->narHash.type))
throw Error("hash of path '%s' has changed from '%s' to '%s'!", throw Error("hash of path '%s' has changed from '%s' to '%s'!",
printStorePath(path), info->narHash->to_string(Base32, true), hash.to_string(Base32, true)); printStorePath(path), info->narHash.to_string(Base32, true), hash.to_string(Base32, true));
teeSink teeSink
<< exportMagic << exportMagic
@ -69,17 +69,18 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs)
if (magic != exportMagic) if (magic != exportMagic)
throw Error("Nix archive cannot be imported; wrong format"); throw Error("Nix archive cannot be imported; wrong format");
ValidPathInfo info(parseStorePath(readString(source))); auto path = parseStorePath(readString(source));
//Activity act(*logger, lvlInfo, format("importing path '%s'") % info.path); //Activity act(*logger, lvlInfo, format("importing path '%s'") % info.path);
info.references = readStorePaths<StorePathSet>(*this, source); auto references = readStorePaths<StorePathSet>(*this, source);
auto deriver = readString(source); auto deriver = readString(source);
auto narHash = hashString(htSHA256, *saved.s);
ValidPathInfo info { path, narHash };
if (deriver != "") if (deriver != "")
info.deriver = parseStorePath(deriver); info.deriver = parseStorePath(deriver);
info.references = references;
info.narHash = hashString(htSHA256, *saved.s);
info.narSize = saved.s->size(); info.narSize = saved.s->size();
// Ignore optional legacy signature. // Ignore optional legacy signature.


@ -335,6 +335,9 @@ public:
"setuid/setgid bits or with file capabilities."}; "setuid/setgid bits or with file capabilities."};
#endif #endif
Setting<Strings> hashedMirrors{this, {}, "hashed-mirrors",
"A list of servers used by builtins.fetchurl to fetch files by hash."};
Setting<uint64_t> minFree{this, 0, "min-free", Setting<uint64_t> minFree{this, 0, "min-free",
"Automatically run the garbage collector when free disk space drops below the specified amount."}; "Automatically run the garbage collector when free disk space drops below the specified amount."};


@ -93,6 +93,9 @@ struct LegacySSHStore : public Store
try { try {
auto conn(connections->get()); auto conn(connections->get());
/* No longer support missing NAR hash */
assert(GET_PROTOCOL_MINOR(conn->remoteVersion) >= 4);
debug("querying remote host '%s' for info on '%s'", host, printStorePath(path)); debug("querying remote host '%s' for info on '%s'", host, printStorePath(path));
conn->to << cmdQueryPathInfos << PathSet{printStorePath(path)}; conn->to << cmdQueryPathInfos << PathSet{printStorePath(path)};
@ -100,8 +103,10 @@ struct LegacySSHStore : public Store
auto p = readString(conn->from); auto p = readString(conn->from);
if (p.empty()) return callback(nullptr); if (p.empty()) return callback(nullptr);
auto info = std::make_shared<ValidPathInfo>(parseStorePath(p)); auto path2 = parseStorePath(p);
assert(path == info->path); assert(path == path2);
/* Hash will be set below. FIXME construct ValidPathInfo at end. */
auto info = std::make_shared<ValidPathInfo>(path, Hash::dummy);
PathSet references; PathSet references;
auto deriver = readString(conn->from); auto deriver = readString(conn->from);
@ -111,12 +116,14 @@ struct LegacySSHStore : public Store
readLongLong(conn->from); // download size readLongLong(conn->from); // download size
info->narSize = readLongLong(conn->from); info->narSize = readLongLong(conn->from);
if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 4) { {
auto s = readString(conn->from); auto s = readString(conn->from);
info->narHash = s.empty() ? std::optional<Hash>{} : Hash{s}; if (s == "")
throw Error("NAR hash is now mandatory");
info->narHash = Hash::parseAnyPrefixed(s);
}
info->ca = parseContentAddressOpt(readString(conn->from)); info->ca = parseContentAddressOpt(readString(conn->from));
info->sigs = readStrings<StringSet>(conn->from); info->sigs = readStrings<StringSet>(conn->from);
}
auto s = readString(conn->from); auto s = readString(conn->from);
assert(s == ""); assert(s == "");
@ -138,7 +145,7 @@ struct LegacySSHStore : public Store
<< cmdAddToStoreNar << cmdAddToStoreNar
<< printStorePath(info.path) << printStorePath(info.path)
<< (info.deriver ? printStorePath(*info.deriver) : "") << (info.deriver ? printStorePath(*info.deriver) : "")
<< info.narHash->to_string(Base16, false); << info.narHash.to_string(Base16, false);
writeStorePaths(*this, conn->to, info.references); writeStorePaths(*this, conn->to, info.references);
conn->to conn->to
<< info.registrationTime << info.registrationTime
@ -202,6 +209,24 @@ struct LegacySSHStore : public Store
const StorePathSet & references, RepairFlag repair) override const StorePathSet & references, RepairFlag repair) override
{ unsupported("addTextToStore"); } { unsupported("addTextToStore"); }
private:
void putBuildSettings(Connection & conn)
{
conn.to
<< settings.maxSilentTime
<< settings.buildTimeout;
if (GET_PROTOCOL_MINOR(conn.remoteVersion) >= 2)
conn.to
<< settings.maxLogSize;
if (GET_PROTOCOL_MINOR(conn.remoteVersion) >= 3)
conn.to
<< settings.buildRepeat
<< settings.enforceDeterminism;
}
public:
BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv, BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv,
BuildMode buildMode) override BuildMode buildMode) override
{ {
@ -211,16 +236,8 @@ struct LegacySSHStore : public Store
<< cmdBuildDerivation << cmdBuildDerivation
<< printStorePath(drvPath); << printStorePath(drvPath);
writeDerivation(conn->to, *this, drv); writeDerivation(conn->to, *this, drv);
conn->to
<< settings.maxSilentTime putBuildSettings(*conn);
<< settings.buildTimeout;
if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 2)
conn->to
<< settings.maxLogSize;
if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 3)
conn->to
<< settings.buildRepeat
<< settings.enforceDeterminism;
conn->to.flush(); conn->to.flush();
@ -234,6 +251,29 @@ struct LegacySSHStore : public Store
return status; return status;
} }
void buildPaths(const std::vector<StorePathWithOutputs> & drvPaths, BuildMode buildMode) override
{
auto conn(connections->get());
conn->to << cmdBuildPaths;
Strings ss;
for (auto & p : drvPaths)
ss.push_back(p.to_string(*this));
conn->to << ss;
putBuildSettings(*conn);
conn->to.flush();
BuildResult result;
result.status = (BuildResult::Status) readInt(conn->from);
if (!result.success()) {
conn->from >> result.errorMsg;
throw Error(result.status, result.errorMsg);
}
}
void ensurePath(const StorePath & path) override void ensurePath(const StorePath & path) override
{ unsupported("ensurePath"); } { unsupported("ensurePath"); }

View file

@ -544,11 +544,8 @@ void LocalStore::checkDerivationOutputs(const StorePath & drvPath, const Derivat
std::string drvName(drvPath.name()); std::string drvName(drvPath.name());
drvName = string(drvName, 0, drvName.size() - drvExtension.size()); drvName = string(drvName, 0, drvName.size() - drvExtension.size());
auto check = [&](const StorePath & expected, const StorePath & actual, const std::string & varName) auto envHasRightPath = [&](const StorePath & actual, const std::string & varName)
{ {
if (actual != expected)
throw Error("derivation '%s' has incorrect output '%s', should be '%s'",
printStorePath(drvPath), printStorePath(actual), printStorePath(expected));
auto j = drv.env.find(varName); auto j = drv.env.find(varName);
if (j == drv.env.end() || parseStorePath(j->second) != actual) if (j == drv.env.end() || parseStorePath(j->second) != actual)
throw Error("derivation '%s' has incorrect environment variable '%s', should be '%s'", throw Error("derivation '%s' has incorrect environment variable '%s', should be '%s'",
@ -556,16 +553,34 @@ void LocalStore::checkDerivationOutputs(const StorePath & drvPath, const Derivat
}; };
    if (drv.isFixedOutput()) { // Don't need the answer, but do this anyway to assert it is a proper
DerivationOutputs::const_iterator out = drv.outputs.find("out"); // combination. The code below is more general and naturally allows
if (out == drv.outputs.end()) // combinations that are currently prohibited.
throw Error("derivation '%s' does not have an output named 'out'", printStorePath(drvPath)); drv.type();
}
else { std::optional<Hash> h;
Hash h = hashDerivationModulo(*this, drv, true); for (auto & i : drv.outputs) {
for (auto & i : drv.outputs) std::visit(overloaded {
check(makeOutputPath(i.first, h, drvName), i.second.path(*this, drv.name), i.first); [&](DerivationOutputInputAddressed doia) {
if (!h) {
                // somewhat expensive, so we compute it lazily
auto temp = hashDerivationModulo(*this, drv, true);
h = std::get<Hash>(temp);
}
StorePath recomputed = makeOutputPath(i.first, *h, drvName);
if (doia.path != recomputed)
throw Error("derivation '%s' has incorrect output '%s', should be '%s'",
printStorePath(drvPath), printStorePath(doia.path), printStorePath(recomputed));
envHasRightPath(doia.path, i.first);
},
[&](DerivationOutputCAFixed dof) {
StorePath path = makeFixedOutputPath(dof.hash.method, dof.hash.hash, drvName);
envHasRightPath(path, i.first);
},
[&](DerivationOutputCAFloating _) {
throw UnimplementedError("floating CA output derivations are not yet implemented");
},
}, i.second.output);
} }
} }
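
Schematically, the check above recomputes what each output path should be and compares it with what the derivation declares (placeholders such as outputName, drvName, expectedHash and dof stand for the values in scope inside the loop):

    // Input-addressed rule: the declared path must equal the path derived from
    // the derivation hash modulo fixed-output subderivations.
    StorePath expectedInputAddressed = makeOutputPath(outputName, expectedHash, drvName);

    // Fixed-CA rule: the matching environment variable must point at the path
    // derived from the fixed output hash alone.
    StorePath expectedFixed = makeFixedOutputPath(dof.hash.method, dof.hash.hash, drvName);

    // Floating-CA outputs are rejected with UnimplementedError for now.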
@ -579,7 +594,7 @@ uint64_t LocalStore::addValidPath(State & state,
state.stmtRegisterValidPath.use() state.stmtRegisterValidPath.use()
(printStorePath(info.path)) (printStorePath(info.path))
(info.narHash->to_string(Base16, true)) (info.narHash.to_string(Base16, true))
(info.registrationTime == 0 ? time(0) : info.registrationTime) (info.registrationTime == 0 ? time(0) : info.registrationTime)
(info.deriver ? printStorePath(*info.deriver) : "", (bool) info.deriver) (info.deriver ? printStorePath(*info.deriver) : "", (bool) info.deriver)
(info.narSize, info.narSize != 0) (info.narSize, info.narSize != 0)
@ -603,11 +618,11 @@ uint64_t LocalStore::addValidPath(State & state,
registration above is undone. */ registration above is undone. */
if (checkOutputs) checkDerivationOutputs(info.path, drv); if (checkOutputs) checkDerivationOutputs(info.path, drv);
for (auto & i : drv.outputs) { for (auto & i : drv.outputsAndPaths(*this)) {
state.stmtAddDerivationOutput.use() state.stmtAddDerivationOutput.use()
(id) (id)
(i.first) (i.first)
(printStorePath(i.second.path(*this, drv.name))) (printStorePath(i.second.second))
.exec(); .exec();
} }
} }
@ -626,25 +641,28 @@ void LocalStore::queryPathInfoUncached(const StorePath & path,
Callback<std::shared_ptr<const ValidPathInfo>> callback) noexcept Callback<std::shared_ptr<const ValidPathInfo>> callback) noexcept
{ {
try { try {
auto info = std::make_shared<ValidPathInfo>(path);
callback(retrySQLite<std::shared_ptr<ValidPathInfo>>([&]() { callback(retrySQLite<std::shared_ptr<ValidPathInfo>>([&]() {
auto state(_state.lock()); auto state(_state.lock());
/* Get the path info. */ /* Get the path info. */
auto useQueryPathInfo(state->stmtQueryPathInfo.use()(printStorePath(info->path))); auto useQueryPathInfo(state->stmtQueryPathInfo.use()(printStorePath(path)));
if (!useQueryPathInfo.next()) if (!useQueryPathInfo.next())
return std::shared_ptr<ValidPathInfo>(); return std::shared_ptr<ValidPathInfo>();
info->id = useQueryPathInfo.getInt(0); auto id = useQueryPathInfo.getInt(0);
auto narHash = Hash::dummy;
try { try {
info->narHash = Hash(useQueryPathInfo.getStr(1)); narHash = Hash::parseAnyPrefixed(useQueryPathInfo.getStr(1));
} catch (BadHash & e) { } catch (BadHash & e) {
throw Error("invalid-path entry for '%s': %s", printStorePath(path), e.what()); throw Error("invalid-path entry for '%s': %s", printStorePath(path), e.what());
} }
auto info = std::make_shared<ValidPathInfo>(path, narHash);
info->id = id;
info->registrationTime = useQueryPathInfo.getInt(2); info->registrationTime = useQueryPathInfo.getInt(2);
auto s = (const char *) sqlite3_column_text(state->stmtQueryPathInfo, 3); auto s = (const char *) sqlite3_column_text(state->stmtQueryPathInfo, 3);
@ -679,7 +697,7 @@ void LocalStore::updatePathInfo(State & state, const ValidPathInfo & info)
{ {
state.stmtUpdatePathInfo.use() state.stmtUpdatePathInfo.use()
(info.narSize, info.narSize != 0) (info.narSize, info.narSize != 0)
(info.narHash->to_string(Base16, true)) (info.narHash.to_string(Base16, true))
(info.ultimate ? 1 : 0, info.ultimate) (info.ultimate ? 1 : 0, info.ultimate)
(concatStringsSep(" ", info.sigs), !info.sigs.empty()) (concatStringsSep(" ", info.sigs), !info.sigs.empty())
(renderContentAddress(info.ca), (bool) info.ca) (renderContentAddress(info.ca), (bool) info.ca)
@ -905,7 +923,7 @@ void LocalStore::registerValidPaths(const ValidPathInfos & infos)
StorePathSet paths; StorePathSet paths;
for (auto & i : infos) { for (auto & i : infos) {
assert(i.narHash && i.narHash->type == htSHA256); assert(i.narHash.type == htSHA256);
if (isValidPath_(*state, i.path)) if (isValidPath_(*state, i.path))
updatePathInfo(*state, i); updatePathInfo(*state, i);
else else
@ -969,9 +987,6 @@ const PublicKeys & LocalStore::getPublicKeys()
void LocalStore::addToStore(const ValidPathInfo & info, Source & source, void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
RepairFlag repair, CheckSigsFlag checkSigs) RepairFlag repair, CheckSigsFlag checkSigs)
{ {
if (!info.narHash)
throw Error("cannot add path '%s' because it lacks a hash", printStorePath(info.path));
if (requireSigs && checkSigs && !info.checkSignatures(*this, getPublicKeys())) if (requireSigs && checkSigs && !info.checkSignatures(*this, getPublicKeys()))
throw Error("cannot add path '%s' because it lacks a valid signature", printStorePath(info.path)); throw Error("cannot add path '%s' because it lacks a valid signature", printStorePath(info.path));
@ -1006,11 +1021,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
else else
hashSink = std::make_unique<HashModuloSink>(htSHA256, std::string(info.path.hashPart())); hashSink = std::make_unique<HashModuloSink>(htSHA256, std::string(info.path.hashPart()));
LambdaSource wrapperSource([&](unsigned char * data, size_t len) -> size_t { TeeSource wrapperSource { source, *hashSink };
size_t n = source.read(data, len);
(*hashSink)(data, n);
return n;
});
restorePath(realPath, wrapperSource); restorePath(realPath, wrapperSource);
@ -1018,7 +1029,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
if (hashResult.first != info.narHash) if (hashResult.first != info.narHash)
throw Error("hash mismatch importing path '%s';\n wanted: %s\n got: %s", throw Error("hash mismatch importing path '%s';\n wanted: %s\n got: %s",
printStorePath(info.path), info.narHash->to_string(Base32, true), hashResult.first.to_string(Base32, true)); printStorePath(info.path), info.narHash.to_string(Base32, true), hashResult.first.to_string(Base32, true));
if (hashResult.second != info.narSize) if (hashResult.second != info.narSize)
throw Error("size mismatch importing path '%s';\n wanted: %s\n got: %s", throw Error("size mismatch importing path '%s';\n wanted: %s\n got: %s",
@ -1038,20 +1049,6 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
} }
StorePath LocalStore::addToStore(const string & name, const Path & _srcPath,
FileIngestionMethod method, HashType hashAlgo, PathFilter & filter, RepairFlag repair)
{
Path srcPath(absPath(_srcPath));
auto source = sinkToSource([&](Sink & sink) {
if (method == FileIngestionMethod::Recursive)
dumpPath(srcPath, sink, filter);
else
readFile(srcPath, sink);
});
return addToStoreFromDump(*source, name, method, hashAlgo, repair);
}
StorePath LocalStore::addToStoreFromDump(Source & source0, const string & name, StorePath LocalStore::addToStoreFromDump(Source & source0, const string & name,
FileIngestionMethod method, HashType hashAlgo, RepairFlag repair) FileIngestionMethod method, HashType hashAlgo, RepairFlag repair)
{ {
@ -1154,8 +1151,7 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, const string & name,
optimisePath(realPath); optimisePath(realPath);
ValidPathInfo info(dstPath); ValidPathInfo info { dstPath, narHash.first };
info.narHash = narHash.first;
info.narSize = narHash.second; info.narSize = narHash.second;
info.ca = FixedOutputHash { .method = method, .hash = hash }; info.ca = FixedOutputHash { .method = method, .hash = hash };
registerValidPath(info); registerValidPath(info);
@ -1198,8 +1194,7 @@ StorePath LocalStore::addTextToStore(const string & name, const string & s,
optimisePath(realPath); optimisePath(realPath);
ValidPathInfo info(dstPath); ValidPathInfo info { dstPath, narHash };
info.narHash = narHash;
info.narSize = sink.s->size(); info.narSize = sink.s->size();
info.references = references; info.references = references;
info.ca = TextHash { .hash = hash }; info.ca = TextHash { .hash = hash };
@ -1314,9 +1309,9 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
std::unique_ptr<AbstractHashSink> hashSink; std::unique_ptr<AbstractHashSink> hashSink;
if (!info->ca || !info->references.count(info->path)) if (!info->ca || !info->references.count(info->path))
hashSink = std::make_unique<HashSink>(info->narHash->type); hashSink = std::make_unique<HashSink>(info->narHash.type);
else else
hashSink = std::make_unique<HashModuloSink>(info->narHash->type, std::string(info->path.hashPart())); hashSink = std::make_unique<HashModuloSink>(info->narHash.type, std::string(info->path.hashPart()));
dumpPath(Store::toRealPath(i), *hashSink); dumpPath(Store::toRealPath(i), *hashSink);
auto current = hashSink->finish(); auto current = hashSink->finish();
@ -1325,7 +1320,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
logError({ logError({
.name = "Invalid hash - path modified", .name = "Invalid hash - path modified",
.hint = hintfmt("path '%s' was modified! expected hash '%s', got '%s'", .hint = hintfmt("path '%s' was modified! expected hash '%s', got '%s'",
printStorePath(i), info->narHash->to_string(Base32, true), current.first.to_string(Base32, true)) printStorePath(i), info->narHash.to_string(Base32, true), current.first.to_string(Base32, true))
}); });
if (repair) repairPath(i); else errors = true; if (repair) repairPath(i); else errors = true;
} else { } else {

View file

@ -142,16 +142,8 @@ public:
void addToStore(const ValidPathInfo & info, Source & source, void addToStore(const ValidPathInfo & info, Source & source,
RepairFlag repair, CheckSigsFlag checkSigs) override; RepairFlag repair, CheckSigsFlag checkSigs) override;
StorePath addToStore(const string & name, const Path & srcPath,
FileIngestionMethod method, HashType hashAlgo,
PathFilter & filter, RepairFlag repair) override;
/* Like addToStore(), but the contents of the path are contained
in `dump', which is either a NAR serialisation (if recursive ==
true) or simply the contents of a regular file (if recursive ==
false). */
StorePath addToStoreFromDump(Source & dump, const string & name, StorePath addToStoreFromDump(Source & dump, const string & name,
FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair) override; FileIngestionMethod method, HashType hashAlgo, RepairFlag repair) override;
StorePath addTextToStore(const string & name, const string & s, StorePath addTextToStore(const string & name, const string & s,
const StorePathSet & references, RepairFlag repair) override; const StorePathSet & references, RepairFlag repair) override;

View file

@ -1,6 +1,7 @@
#include "machines.hh" #include "machines.hh"
#include "util.hh" #include "util.hh"
#include "globals.hh" #include "globals.hh"
#include "store-api.hh"
#include <algorithm> #include <algorithm>
@ -48,6 +49,29 @@ bool Machine::mandatoryMet(const std::set<string> & features) const {
}); });
} }
ref<Store> Machine::openStore() const {
Store::Params storeParams;
if (hasPrefix(storeUri, "ssh://")) {
storeParams["max-connections"] = "1";
storeParams["log-fd"] = "4";
if (sshKey != "")
storeParams["ssh-key"] = sshKey;
}
{
auto & fs = storeParams["system-features"];
auto append = [&](auto feats) {
for (auto & f : feats) {
if (fs.size() > 0) fs += ' ';
fs += f;
}
};
append(supportedFeatures);
append(mandatoryFeatures);
}
return nix::openStore(storeUri, storeParams);
}
void parseMachines(const std::string & s, Machines & machines) void parseMachines(const std::string & s, Machines & machines)
{ {
for (auto line : tokenizeString<std::vector<string>>(s, "\n;")) { for (auto line : tokenizeString<std::vector<string>>(s, "\n;")) {

View file

@ -4,6 +4,8 @@
namespace nix { namespace nix {
class Store;
struct Machine { struct Machine {
const string storeUri; const string storeUri;
@ -28,6 +30,8 @@ struct Machine {
decltype(supportedFeatures) supportedFeatures, decltype(supportedFeatures) supportedFeatures,
decltype(mandatoryFeatures) mandatoryFeatures, decltype(mandatoryFeatures) mandatoryFeatures,
decltype(sshPublicHostKey) sshPublicHostKey); decltype(sshPublicHostKey) sshPublicHostKey);
ref<Store> openStore() const;
}; };
typedef std::vector<Machine> Machines; typedef std::vector<Machine> Machines;

View file

@ -4,6 +4,7 @@
#include "local-store.hh" #include "local-store.hh"
#include "store-api.hh" #include "store-api.hh"
#include "thread-pool.hh" #include "thread-pool.hh"
#include "topo-sort.hh"
namespace nix { namespace nix {
@ -112,7 +113,7 @@ std::optional<ContentAddress> getDerivationCA(const BasicDerivation & drv)
{ {
auto out = drv.outputs.find("out"); auto out = drv.outputs.find("out");
if (out != drv.outputs.end()) { if (out != drv.outputs.end()) {
if (auto v = std::get_if<DerivationOutputFixed>(&out->second.output)) if (auto v = std::get_if<DerivationOutputCAFixed>(&out->second.output))
return v->hash; return v->hash;
} }
return std::nullopt; return std::nullopt;
@ -206,10 +207,10 @@ void Store::queryMissing(const std::vector<StorePathWithOutputs> & targets,
ParsedDerivation parsedDrv(StorePath(path.path), *drv); ParsedDerivation parsedDrv(StorePath(path.path), *drv);
PathSet invalid; PathSet invalid;
for (auto & j : drv->outputs) for (auto & j : drv->outputsAndPaths(*this))
if (wantOutput(j.first, path.outputs) if (wantOutput(j.first, path.outputs)
&& !isValidPath(j.second.path(*this, drv->name))) && !isValidPath(j.second.second))
invalid.insert(printStorePath(j.second.path(*this, drv->name))); invalid.insert(printStorePath(j.second.second));
if (invalid.empty()) return; if (invalid.empty()) return;
if (settings.useSubstitutes && parsedDrv.substitutesAllowed()) { if (settings.useSubstitutes && parsedDrv.substitutesAllowed()) {
@ -256,41 +257,21 @@ void Store::queryMissing(const std::vector<StorePathWithOutputs> & targets,
StorePaths Store::topoSortPaths(const StorePathSet & paths) StorePaths Store::topoSortPaths(const StorePathSet & paths)
{ {
StorePaths sorted; return topoSort(paths,
StorePathSet visited, parents; {[&](const StorePath & path) {
std::function<void(const StorePath & path, const StorePath * parent)> dfsVisit;
dfsVisit = [&](const StorePath & path, const StorePath * parent) {
if (parents.count(path))
throw BuildError("cycle detected in the references of '%s' from '%s'",
printStorePath(path), printStorePath(*parent));
if (!visited.insert(path).second) return;
parents.insert(path);
StorePathSet references; StorePathSet references;
try { try {
references = queryPathInfo(path)->references; references = queryPathInfo(path)->references;
} catch (InvalidPath &) { } catch (InvalidPath &) {
} }
return references;
for (auto & i : references) }},
/* Don't traverse into paths that don't exist. That can {[&](const StorePath & path, const StorePath & parent) {
happen due to substitutes for non-existent paths. */ return BuildError(
if (i != path && paths.count(i)) "cycle detected in the references of '%s' from '%s'",
dfsVisit(i, &path); printStorePath(path),
printStorePath(parent));
sorted.push_back(path); }});
parents.erase(path);
};
for (auto & i : paths)
dfsVisit(i, nullptr);
std::reverse(sorted.begin(), sorted.end());
return sorted;
} }

View file

@ -189,13 +189,14 @@ public:
return {oInvalid, 0}; return {oInvalid, 0};
auto namePart = queryNAR.getStr(1); auto namePart = queryNAR.getStr(1);
auto narInfo = make_ref<NarInfo>(StorePath(hashPart + "-" + namePart)); auto narInfo = make_ref<NarInfo>(
StorePath(hashPart + "-" + namePart),
Hash::parseAnyPrefixed(queryNAR.getStr(6)));
narInfo->url = queryNAR.getStr(2); narInfo->url = queryNAR.getStr(2);
narInfo->compression = queryNAR.getStr(3); narInfo->compression = queryNAR.getStr(3);
if (!queryNAR.isNull(4)) if (!queryNAR.isNull(4))
narInfo->fileHash = Hash(queryNAR.getStr(4)); narInfo->fileHash = Hash::parseAnyPrefixed(queryNAR.getStr(4));
narInfo->fileSize = queryNAR.getInt(5); narInfo->fileSize = queryNAR.getInt(5);
narInfo->narHash = Hash(queryNAR.getStr(6));
narInfo->narSize = queryNAR.getInt(7); narInfo->narSize = queryNAR.getInt(7);
for (auto & r : tokenizeString<Strings>(queryNAR.getStr(8), " ")) for (auto & r : tokenizeString<Strings>(queryNAR.getStr(8), " "))
narInfo->references.insert(StorePath(r)); narInfo->references.insert(StorePath(r));
@ -232,7 +233,7 @@ public:
(narInfo ? narInfo->compression : "", narInfo != 0) (narInfo ? narInfo->compression : "", narInfo != 0)
(narInfo && narInfo->fileHash ? narInfo->fileHash->to_string(Base32, true) : "", narInfo && narInfo->fileHash) (narInfo && narInfo->fileHash ? narInfo->fileHash->to_string(Base32, true) : "", narInfo && narInfo->fileHash)
(narInfo ? narInfo->fileSize : 0, narInfo != 0 && narInfo->fileSize) (narInfo ? narInfo->fileSize : 0, narInfo != 0 && narInfo->fileSize)
(info->narHash->to_string(Base32, true)) (info->narHash.to_string(Base32, true))
(info->narSize) (info->narSize)
(concatStringsSep(" ", info->shortRefs())) (concatStringsSep(" ", info->shortRefs()))
(info->deriver ? std::string(info->deriver->to_string()) : "", (bool) info->deriver) (info->deriver ? std::string(info->deriver->to_string()) : "", (bool) info->deriver)

View file

@ -1,10 +1,11 @@
#include "globals.hh" #include "globals.hh"
#include "nar-info.hh" #include "nar-info.hh"
#include "store-api.hh"
namespace nix { namespace nix {
NarInfo::NarInfo(const Store & store, const std::string & s, const std::string & whence) NarInfo::NarInfo(const Store & store, const std::string & s, const std::string & whence)
: ValidPathInfo(StorePath(StorePath::dummy)) // FIXME: hack : ValidPathInfo(StorePath(StorePath::dummy), Hash(Hash::dummy)) // FIXME: hack
{ {
auto corrupt = [&]() { auto corrupt = [&]() {
return Error("NAR info file '%1%' is corrupt", whence); return Error("NAR info file '%1%' is corrupt", whence);
@ -12,13 +13,14 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &
auto parseHashField = [&](const string & s) { auto parseHashField = [&](const string & s) {
try { try {
return Hash(s); return Hash::parseAnyPrefixed(s);
} catch (BadHash &) { } catch (BadHash &) {
throw corrupt(); throw corrupt();
} }
}; };
bool havePath = false; bool havePath = false;
bool haveNarHash = false;
size_t pos = 0; size_t pos = 0;
while (pos < s.size()) { while (pos < s.size()) {
@ -46,8 +48,10 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &
else if (name == "FileSize") { else if (name == "FileSize") {
if (!string2Int(value, fileSize)) throw corrupt(); if (!string2Int(value, fileSize)) throw corrupt();
} }
else if (name == "NarHash") else if (name == "NarHash") {
narHash = parseHashField(value); narHash = parseHashField(value);
haveNarHash = true;
}
else if (name == "NarSize") { else if (name == "NarSize") {
if (!string2Int(value, narSize)) throw corrupt(); if (!string2Int(value, narSize)) throw corrupt();
} }
@ -76,7 +80,7 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &
if (compression == "") compression = "bzip2"; if (compression == "") compression = "bzip2";
if (!havePath || url.empty() || narSize == 0 || !narHash) throw corrupt(); if (!havePath || !haveNarHash || url.empty() || narSize == 0) throw corrupt();
} }
std::string NarInfo::to_string(const Store & store) const std::string NarInfo::to_string(const Store & store) const
@ -89,8 +93,8 @@ std::string NarInfo::to_string(const Store & store) const
assert(fileHash && fileHash->type == htSHA256); assert(fileHash && fileHash->type == htSHA256);
res += "FileHash: " + fileHash->to_string(Base32, true) + "\n"; res += "FileHash: " + fileHash->to_string(Base32, true) + "\n";
res += "FileSize: " + std::to_string(fileSize) + "\n"; res += "FileSize: " + std::to_string(fileSize) + "\n";
assert(narHash && narHash->type == htSHA256); assert(narHash.type == htSHA256);
res += "NarHash: " + narHash->to_string(Base32, true) + "\n"; res += "NarHash: " + narHash.to_string(Base32, true) + "\n";
res += "NarSize: " + std::to_string(narSize) + "\n"; res += "NarSize: " + std::to_string(narSize) + "\n";
res += "References: " + concatStringsSep(" ", shortRefs()) + "\n"; res += "References: " + concatStringsSep(" ", shortRefs()) + "\n";

View file

@ -2,10 +2,12 @@
#include "types.hh" #include "types.hh"
#include "hash.hh" #include "hash.hh"
#include "store-api.hh" #include "path-info.hh"
namespace nix { namespace nix {
class Store;
struct NarInfo : ValidPathInfo struct NarInfo : ValidPathInfo
{ {
std::string url; std::string url;
@ -15,7 +17,7 @@ struct NarInfo : ValidPathInfo
std::string system; std::string system;
NarInfo() = delete; NarInfo() = delete;
NarInfo(StorePath && path) : ValidPathInfo(std::move(path)) { } NarInfo(StorePath && path, Hash narHash) : ValidPathInfo(std::move(path), narHash) { }
NarInfo(const ValidPathInfo & info) : ValidPathInfo(info) { } NarInfo(const ValidPathInfo & info) : ValidPathInfo(info) { }
NarInfo(const Store & store, const std::string & s, const std::string & whence); NarInfo(const Store & store, const std::string & s, const std::string & whence);

View file

@ -94,7 +94,7 @@ StringSet ParsedDerivation::getRequiredSystemFeatures() const
return res; return res;
} }
bool ParsedDerivation::canBuildLocally() const bool ParsedDerivation::canBuildLocally(Store & localStore) const
{ {
if (drv.platform != settings.thisSystem.get() if (drv.platform != settings.thisSystem.get()
&& !settings.extraPlatforms.get().count(drv.platform) && !settings.extraPlatforms.get().count(drv.platform)
@ -102,14 +102,14 @@ bool ParsedDerivation::canBuildLocally() const
return false; return false;
for (auto & feature : getRequiredSystemFeatures()) for (auto & feature : getRequiredSystemFeatures())
if (!settings.systemFeatures.get().count(feature)) return false; if (!localStore.systemFeatures.get().count(feature)) return false;
return true; return true;
} }
bool ParsedDerivation::willBuildLocally() const bool ParsedDerivation::willBuildLocally(Store & localStore) const
{ {
return getBoolAttr("preferLocalBuild") && canBuildLocally(); return getBoolAttr("preferLocalBuild") && canBuildLocally(localStore);
} }
bool ParsedDerivation::substitutesAllowed() const bool ParsedDerivation::substitutesAllowed() const
@ -117,9 +117,4 @@ bool ParsedDerivation::substitutesAllowed() const
return getBoolAttr("allowSubstitutes", true); return getBoolAttr("allowSubstitutes", true);
} }
bool ParsedDerivation::contentAddressed() const
{
return getBoolAttr("__contentAddressed", false);
}
} }

View file

@ -29,13 +29,11 @@ public:
StringSet getRequiredSystemFeatures() const; StringSet getRequiredSystemFeatures() const;
bool canBuildLocally() const; bool canBuildLocally(Store & localStore) const;
bool willBuildLocally() const; bool willBuildLocally(Store & localStore) const;
bool substitutesAllowed() const; bool substitutesAllowed() const;
bool contentAddressed() const;
}; };
} }

112
src/libstore/path-info.hh Normal file
View file

@ -0,0 +1,112 @@
#pragma once
#include "crypto.hh"
#include "path.hh"
#include "hash.hh"
#include "content-address.hh"
#include <string>
#include <optional>
namespace nix {
class Store;
struct SubstitutablePathInfo
{
std::optional<StorePath> deriver;
StorePathSet references;
uint64_t downloadSize; /* 0 = unknown or inapplicable */
uint64_t narSize; /* 0 = unknown */
};
typedef std::map<StorePath, SubstitutablePathInfo> SubstitutablePathInfos;
struct ValidPathInfo
{
StorePath path;
std::optional<StorePath> deriver;
// TODO document this
Hash narHash;
StorePathSet references;
time_t registrationTime = 0;
uint64_t narSize = 0; // 0 = unknown
uint64_t id; // internal use only
/* Whether the path is ultimately trusted, that is, it's a
derivation output that was built locally. */
bool ultimate = false;
StringSet sigs; // note: not necessarily verified
/* If non-empty, an assertion that the path is content-addressed,
i.e., that the store path is computed from a cryptographic hash
of the contents of the path, plus some other bits of data like
the "name" part of the path. Such a path doesn't need
signatures, since we don't have to trust anybody's claim that
the path is the output of a particular derivation. (In the
extensional store model, we have to trust that the *contents*
of an output path of a derivation were actually produced by
that derivation. In the intensional model, we have to trust
that a particular output path was produced by a derivation; the
path then implies the contents.)
Ideally, the content-addressability assertion would just be a Boolean,
and the store path would be computed from the name component, narHash
and references. However, we support many types of content addresses.
*/
std::optional<ContentAddress> ca;
bool operator == (const ValidPathInfo & i) const
{
return
path == i.path
&& narHash == i.narHash
&& references == i.references;
}
/* Return a fingerprint of the store path to be used in binary
cache signatures. It contains the store path, the base-32
SHA-256 hash of the NAR serialisation of the path, the size of
the NAR, and the sorted references. The size field is strictly
speaking superfluous, but might prevent endless/excessive data
attacks. */
std::string fingerprint(const Store & store) const;
void sign(const Store & store, const SecretKey & secretKey);
/* Return true iff the path is verifiably content-addressed. */
bool isContentAddressed(const Store & store) const;
/* Functions to view references + hasSelfReference as one set, mainly for
compatibility's sake. */
StorePathSet referencesPossiblyToSelf() const;
void insertReferencePossiblyToSelf(StorePath && ref);
void setReferencesPossiblyToSelf(StorePathSet && refs);
static const size_t maxSigs = std::numeric_limits<size_t>::max();
/* Return the number of signatures on this .narinfo that were
produced by one of the specified keys, or maxSigs if the path
is content-addressed. */
size_t checkSignatures(const Store & store, const PublicKeys & publicKeys) const;
/* Verify a single signature. */
bool checkSignature(const Store & store, const PublicKeys & publicKeys, const std::string & sig) const;
Strings shortRefs() const;
ValidPathInfo(const ValidPathInfo & other) = default;
ValidPathInfo(StorePath && path, Hash narHash) : path(std::move(path)), narHash(narHash) { };
ValidPathInfo(const StorePath & path, Hash narHash) : path(path), narHash(narHash) { };
virtual ~ValidPathInfo() { }
};
typedef list<ValidPathInfo> ValidPathInfos;
}
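For illustration, a minimal sketch of what the constructor change above means for callers: narHash is now a required field, so a ValidPathInfo can no longer be created first and have its hash filled in later. The store, storePath and sink names here are assumptions, not code from this commit.

    StringSink sink;
    store->narFromPath(storePath, sink);                              // serialise the path as a NAR
    ValidPathInfo info { storePath, hashString(htSHA256, *sink.s) };  // hash is mandatory up front
    info.narSize = sink.s->size();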

View file

@ -419,10 +419,10 @@ void RemoteStore::queryPathInfoUncached(const StorePath & path,
bool valid; conn->from >> valid; bool valid; conn->from >> valid;
if (!valid) throw InvalidPath("path '%s' is not valid", printStorePath(path)); if (!valid) throw InvalidPath("path '%s' is not valid", printStorePath(path));
} }
info = std::make_shared<ValidPathInfo>(StorePath(path));
auto deriver = readString(conn->from); auto deriver = readString(conn->from);
auto narHash = Hash::parseAny(readString(conn->from), htSHA256);
info = std::make_shared<ValidPathInfo>(path, narHash);
if (deriver != "") info->deriver = parseStorePath(deriver); if (deriver != "") info->deriver = parseStorePath(deriver);
info->narHash = Hash(readString(conn->from), htSHA256);
info->references = readStorePaths<StorePathSet>(*this, conn->from); info->references = readStorePaths<StorePathSet>(*this, conn->from);
conn->from >> info->registrationTime >> info->narSize; conn->from >> info->registrationTime >> info->narSize;
if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 16) { if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 16) {
@ -521,7 +521,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source,
conn->to << wopAddToStoreNar conn->to << wopAddToStoreNar
<< printStorePath(info.path) << printStorePath(info.path)
<< (info.deriver ? printStorePath(*info.deriver) : "") << (info.deriver ? printStorePath(*info.deriver) : "")
<< info.narHash->to_string(Base16, false); << info.narHash.to_string(Base16, false);
writeStorePaths(*this, conn->to, info.references); writeStorePaths(*this, conn->to, info.references);
conn->to << info.registrationTime << info.narSize conn->to << info.registrationTime << info.narSize
<< info.ultimate << info.sigs << renderContentAddress(info.ca) << info.ultimate << info.sigs << renderContentAddress(info.ca)

View file

@ -266,6 +266,10 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore
const std::string & mimeType, const std::string & mimeType,
const std::string & contentEncoding) const std::string & contentEncoding)
{ {
istream->seekg(0, istream->end);
auto size = istream->tellg();
istream->seekg(0, istream->beg);
auto maxThreads = std::thread::hardware_concurrency(); auto maxThreads = std::thread::hardware_concurrency();
static std::shared_ptr<Aws::Utils::Threading::PooledThreadExecutor> static std::shared_ptr<Aws::Utils::Threading::PooledThreadExecutor>
@ -343,10 +347,11 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore
std::chrono::duration_cast<std::chrono::milliseconds>(now2 - now1) std::chrono::duration_cast<std::chrono::milliseconds>(now2 - now1)
.count(); .count();
printInfo("uploaded 's3://%s/%s' in %d ms", printInfo("uploaded 's3://%s/%s' (%d bytes) in %d ms",
bucketName, path, duration); bucketName, path, size, duration);
stats.putTimeMs += duration; stats.putTimeMs += duration;
stats.putBytes += std::max(size, (decltype(size)) 0);
stats.put++; stats.put++;
} }

View file

@ -19,6 +19,7 @@ public:
struct Stats struct Stats
{ {
std::atomic<uint64_t> put{0}; std::atomic<uint64_t> put{0};
std::atomic<uint64_t> putBytes{0};
std::atomic<uint64_t> putTimeMs{0}; std::atomic<uint64_t> putTimeMs{0};
std::atomic<uint64_t> get{0}; std::atomic<uint64_t> get{0};
std::atomic<uint64_t> getBytes{0}; std::atomic<uint64_t> getBytes{0};

View file

@ -193,10 +193,6 @@ StorePath Store::makeFixedOutputPath(
} }
} }
// FIXME Put this somewhere?
template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;
StorePath Store::makeFixedOutputPathFromCA(std::string_view name, ContentAddress ca, StorePath Store::makeFixedOutputPathFromCA(std::string_view name, ContentAddress ca,
const StorePathSet & references, bool hasSelfReference) const const StorePathSet & references, bool hasSelfReference) const
{ {
@ -239,6 +235,20 @@ StorePath Store::computeStorePathForText(const string & name, const string & s,
} }
StorePath Store::addToStore(const string & name, const Path & _srcPath,
FileIngestionMethod method, HashType hashAlgo, PathFilter & filter, RepairFlag repair)
{
Path srcPath(absPath(_srcPath));
auto source = sinkToSource([&](Sink & sink) {
if (method == FileIngestionMethod::Recursive)
dumpPath(srcPath, sink, filter);
else
readFile(srcPath, sink);
});
return addToStoreFromDump(*source, name, method, hashAlgo, repair);
}
/* /*
The aim of this function is to compute in one pass the correct ValidPathInfo for The aim of this function is to compute in one pass the correct ValidPathInfo for
the files that we are trying to add to the store. To accomplish that in one the files that we are trying to add to the store. To accomplish that in one
@ -310,8 +320,10 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
if (expectedCAHash && expectedCAHash != hash) if (expectedCAHash && expectedCAHash != hash)
throw Error("hash mismatch for '%s'", srcPath); throw Error("hash mismatch for '%s'", srcPath);
ValidPathInfo info(makeFixedOutputPath(method, hash, name)); ValidPathInfo info {
info.narHash = narHash; makeFixedOutputPath(method, hash, name),
narHash,
};
info.narSize = narSize; info.narSize = narSize;
info.ca = FixedOutputHash { .method = method, .hash = hash }; info.ca = FixedOutputHash { .method = method, .hash = hash };
@ -555,7 +567,7 @@ string Store::makeValidityRegistration(const StorePathSet & paths,
auto info = queryPathInfo(i); auto info = queryPathInfo(i);
if (showHash) { if (showHash) {
s += info->narHash->to_string(Base16, false) + "\n"; s += info->narHash.to_string(Base16, false) + "\n";
s += (format("%1%\n") % info->narSize).str(); s += (format("%1%\n") % info->narSize).str();
} }
@ -587,7 +599,7 @@ void Store::pathInfoToJSON(JSONPlaceholder & jsonOut, const StorePathSet & store
auto info = queryPathInfo(storePath); auto info = queryPathInfo(storePath);
jsonPath jsonPath
.attr("narHash", info->narHash->to_string(hashBase, true)) .attr("narHash", info->narHash.to_string(hashBase, true))
.attr("narSize", info->narSize); .attr("narSize", info->narSize);
{ {
@ -715,20 +727,6 @@ void copyStorePath(ref<Store> srcStore, ref<Store> dstStore,
info = info2; info = info2;
} }
if (!info->narHash) {
StringSink sink;
srcStore->narFromPath({storePath}, sink);
auto info2 = make_ref<ValidPathInfo>(*info);
info2->narHash = hashString(htSHA256, *sink.s);
if (!info->narSize) info2->narSize = sink.s->size();
if (info->ultimate) info2->ultimate = false;
info = info2;
StringSource source(*sink.s);
dstStore->addToStore(*info, source, repair, checkSigs);
return;
}
if (info->ultimate) { if (info->ultimate) {
auto info2 = make_ref<ValidPathInfo>(*info); auto info2 = make_ref<ValidPathInfo>(*info);
info2->ultimate = false; info2->ultimate = false;
@ -736,12 +734,12 @@ void copyStorePath(ref<Store> srcStore, ref<Store> dstStore,
} }
auto source = sinkToSource([&](Sink & sink) { auto source = sinkToSource([&](Sink & sink) {
LambdaSink wrapperSink([&](const unsigned char * data, size_t len) { LambdaSink progressSink([&](const unsigned char * data, size_t len) {
sink(data, len);
total += len; total += len;
act.progress(total, info->narSize); act.progress(total, info->narSize);
}); });
srcStore->narFromPath(storePath, wrapperSink); TeeSink tee { sink, progressSink };
srcStore->narFromPath(storePath, tee);
}, [&]() { }, [&]() {
throw EndOfFile("NAR for '%s' fetched from '%s' is incomplete", srcStore->printStorePath(storePath), srcStore->getUri()); throw EndOfFile("NAR for '%s' fetched from '%s' is incomplete", srcStore->printStorePath(storePath), srcStore->getUri());
}); });
@ -853,19 +851,22 @@ void copyClosure(ref<Store> srcStore, ref<Store> dstStore,
} }
std::optional<ValidPathInfo> decodeValidPathInfo(const Store & store, std::istream & str, bool hashGiven) std::optional<ValidPathInfo> decodeValidPathInfo(const Store & store, std::istream & str, std::optional<HashResult> hashGiven)
{ {
std::string path; std::string path;
getline(str, path); getline(str, path);
if (str.eof()) { return {}; } if (str.eof()) { return {}; }
ValidPathInfo info(store.parseStorePath(path)); if (!hashGiven) {
if (hashGiven) {
string s; string s;
getline(str, s); getline(str, s);
info.narHash = Hash(s, htSHA256); auto narHash = Hash::parseAny(s, htSHA256);
getline(str, s); getline(str, s);
if (!string2Int(s, info.narSize)) throw Error("number expected"); uint64_t narSize;
if (!string2Int(s, narSize)) throw Error("number expected");
hashGiven = { narHash, narSize };
} }
ValidPathInfo info(store.parseStorePath(path), hashGiven->first);
info.narSize = hashGiven->second;
std::string deriver; std::string deriver;
getline(str, deriver); getline(str, deriver);
if (deriver != "") info.deriver = store.parseStorePath(deriver); if (deriver != "") info.deriver = store.parseStorePath(deriver);
@ -900,12 +901,12 @@ string showPaths(const PathSet & paths)
std::string ValidPathInfo::fingerprint(const Store & store) const std::string ValidPathInfo::fingerprint(const Store & store) const
{ {
if (narSize == 0 || !narHash) if (narSize == 0)
throw Error("cannot calculate fingerprint of path '%s' because its size/hash is not known", throw Error("cannot calculate fingerprint of path '%s' because its size is not known",
store.printStorePath(path)); store.printStorePath(path));
return return
"1;" + store.printStorePath(path) + ";" "1;" + store.printStorePath(path) + ";"
+ narHash->to_string(Base32, true) + ";" + narHash.to_string(Base32, true) + ";"
+ std::to_string(narSize) + ";" + std::to_string(narSize) + ";"
+ concatStringsSep(",", store.printStorePathSet(references)); + concatStringsSep(",", store.printStorePathSet(references));
} }

View file

@ -4,12 +4,12 @@
#include "hash.hh" #include "hash.hh"
#include "content-address.hh" #include "content-address.hh"
#include "serialise.hh" #include "serialise.hh"
#include "crypto.hh"
#include "lru-cache.hh" #include "lru-cache.hh"
#include "sync.hh" #include "sync.hh"
#include "globals.hh" #include "globals.hh"
#include "config.hh" #include "config.hh"
#include "derivations.hh" #include "derivations.hh"
#include "path-info.hh"
#include <atomic> #include <atomic>
#include <limits> #include <limits>
@ -101,95 +101,6 @@ struct GCResults
}; };
struct SubstitutablePathInfo
{
std::optional<StorePath> deriver;
StorePathSet references;
uint64_t downloadSize; /* 0 = unknown or inapplicable */
uint64_t narSize; /* 0 = unknown */
};
typedef std::map<StorePath, SubstitutablePathInfo> SubstitutablePathInfos;
struct ValidPathInfo
{
StorePath path;
std::optional<StorePath> deriver;
// TODO document this
std::optional<Hash> narHash;
StorePathSet references;
time_t registrationTime = 0;
uint64_t narSize = 0; // 0 = unknown
uint64_t id; // internal use only
/* Whether the path is ultimately trusted, that is, it's a
derivation output that was built locally. */
bool ultimate = false;
StringSet sigs; // note: not necessarily verified
/* If non-empty, an assertion that the path is content-addressed,
i.e., that the store path is computed from a cryptographic hash
of the contents of the path, plus some other bits of data like
the "name" part of the path. Such a path doesn't need
signatures, since we don't have to trust anybody's claim that
the path is the output of a particular derivation. (In the
extensional store model, we have to trust that the *contents*
of an output path of a derivation were actually produced by
that derivation. In the intensional model, we have to trust
that a particular output path was produced by a derivation; the
path then implies the contents.)
Ideally, the content-addressability assertion would just be a Boolean,
and the store path would be computed from the name component, narHash
and references. However, we support many types of content addresses.
*/
std::optional<ContentAddress> ca;
bool operator == (const ValidPathInfo & i) const
{
return
path == i.path
&& narHash == i.narHash
&& references == i.references;
}
/* Return a fingerprint of the store path to be used in binary
cache signatures. It contains the store path, the base-32
SHA-256 hash of the NAR serialisation of the path, the size of
the NAR, and the sorted references. The size field is strictly
speaking superfluous, but might prevent endless/excessive data
attacks. */
std::string fingerprint(const Store & store) const;
void sign(const Store & store, const SecretKey & secretKey);
/* Return true iff the path is verifiably content-addressed. */
bool isContentAddressed(const Store & store) const;
static const size_t maxSigs = std::numeric_limits<size_t>::max();
/* Return the number of signatures on this .narinfo that were
produced by one of the specified keys, or maxSigs if the path
is content-addressed. */
size_t checkSignatures(const Store & store, const PublicKeys & publicKeys) const;
/* Verify a single signature. */
bool checkSignature(const Store & store, const PublicKeys & publicKeys, const std::string & sig) const;
Strings shortRefs() const;
ValidPathInfo(const ValidPathInfo & other) = default;
ValidPathInfo(StorePath && path) : path(std::move(path)) { };
ValidPathInfo(const StorePath & path) : path(path) { };
virtual ~ValidPathInfo() { }
};
typedef list<ValidPathInfo> ValidPathInfos;
enum BuildMode { bmNormal, bmRepair, bmCheck }; enum BuildMode { bmNormal, bmRepair, bmCheck };
@ -252,6 +163,10 @@ public:
Setting<bool> wantMassQuery{this, false, "want-mass-query", "whether this substituter can be queried efficiently for path validity"}; Setting<bool> wantMassQuery{this, false, "want-mass-query", "whether this substituter can be queried efficiently for path validity"};
Setting<StringSet> systemFeatures{this, settings.systemFeatures,
"system-features",
"Optional features that the system this store builds on implements (like \"kvm\")."};
protected: protected:
struct PathInfoCacheValue { struct PathInfoCacheValue {
@ -456,7 +371,7 @@ public:
libutil/archive.hh). */ libutil/archive.hh). */
virtual StorePath addToStore(const string & name, const Path & srcPath, virtual StorePath addToStore(const string & name, const Path & srcPath,
FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256,
PathFilter & filter = defaultPathFilter, RepairFlag repair = NoRepair) = 0; PathFilter & filter = defaultPathFilter, RepairFlag repair = NoRepair);
/* Copy the contents of a path to the store and register the /* Copy the contents of a path to the store and register the
validity the resulting path, using a constant amount of validity the resulting path, using a constant amount of
@ -465,6 +380,10 @@ public:
FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256,
std::optional<Hash> expectedCAHash = {}); std::optional<Hash> expectedCAHash = {});
/* Like addToStore(), but the contents of the path are contained
in `dump', which is either a NAR serialisation (if recursive ==
true) or simply the contents of a regular file (if recursive ==
false). */
// FIXME: remove? // FIXME: remove?
virtual StorePath addToStoreFromDump(Source & dump, const string & name, virtual StorePath addToStoreFromDump(Source & dump, const string & name,
FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair) FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair)
@ -845,7 +764,7 @@ string showPaths(const PathSet & paths);
std::optional<ValidPathInfo> decodeValidPathInfo( std::optional<ValidPathInfo> decodeValidPathInfo(
const Store & store, const Store & store,
std::istream & str, std::istream & str,
bool hashGiven = false); std::optional<HashResult> hashGiven = std::nullopt);
/* Split URI into protocol+hierarchy part and its parameter set. */ /* Split URI into protocol+hierarchy part and its parameter set. */
std::pair<std::string, Store::Params> splitUriAndParams(const std::string & uri); std::pair<std::string, Store::Params> splitUriAndParams(const std::string & uri);

View file

@ -6,7 +6,7 @@ namespace nix {
#define WORKER_MAGIC_1 0x6e697863 #define WORKER_MAGIC_1 0x6e697863
#define WORKER_MAGIC_2 0x6478696f #define WORKER_MAGIC_2 0x6478696f
#define PROTOCOL_VERSION 0x117 #define PROTOCOL_VERSION 0x118
#define GET_PROTOCOL_MAJOR(x) ((x) & 0xff00) #define GET_PROTOCOL_MAJOR(x) ((x) & 0xff00)
#define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff) #define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff)

View file

@ -366,11 +366,7 @@ void copyNAR(Source & source, Sink & sink)
ParseSink parseSink; /* null sink; just parse the NAR */ ParseSink parseSink; /* null sink; just parse the NAR */
LambdaSource wrapper([&](unsigned char * data, size_t len) { TeeSource wrapper { source, sink };
auto n = source.read(data, len);
sink(data, n);
return n;
});
parseDump(parseSink, wrapper); parseDump(parseSink, wrapper);
} }

View file

@ -192,6 +192,7 @@ public:
MakeError(Error, BaseError); MakeError(Error, BaseError);
MakeError(UsageError, Error); MakeError(UsageError, Error);
MakeError(UnimplementedError, Error);
class SysError : public Error class SysError : public Error
{ {

View file

@ -7,6 +7,7 @@
#include "args.hh" #include "args.hh"
#include "hash.hh" #include "hash.hh"
#include "archive.hh" #include "archive.hh"
#include "split.hh"
#include "util.hh" #include "util.hh"
#include <sys/types.h> #include <sys/types.h>
@ -15,6 +16,7 @@
namespace nix { namespace nix {
static size_t regularHashSize(HashType type) { static size_t regularHashSize(HashType type) {
switch (type) { switch (type) {
case htMD5: return md5HashSize; case htMD5: return md5HashSize;
@ -25,10 +27,11 @@ static size_t regularHashSize(HashType type) {
abort(); abort();
} }
std::set<std::string> hashTypes = { "md5", "sha1", "sha256", "sha512" }; std::set<std::string> hashTypes = { "md5", "sha1", "sha256", "sha512" };
void Hash::init() Hash::Hash(HashType type) : type(type)
{ {
hashSize = regularHashSize(type); hashSize = regularHashSize(type);
assert(hashSize <= maxHashSize); assert(hashSize <= maxHashSize);
@ -133,57 +136,91 @@ std::string Hash::to_string(Base base, bool includeType) const
return s; return s;
} }
Hash::Hash(std::string_view s, HashType type) : Hash(s, std::optional { type }) { } Hash Hash::dummy(htSHA256);
Hash::Hash(std::string_view s) : Hash(s, std::optional<HashType>{}) { }
Hash::Hash(std::string_view original, std::optional<HashType> optType) Hash Hash::parseSRI(std::string_view original) {
{
auto rest = original; auto rest = original;
size_t pos = 0; // Parse the hash type before the separator, if there was one.
auto hashRaw = splitPrefixTo(rest, '-');
if (!hashRaw)
throw BadHash("hash '%s' is not SRI", original);
HashType parsedType = parseHashType(*hashRaw);
return Hash(rest, parsedType, true);
}
// Mutates the string to eliminate the prefixes when found
static std::pair<std::optional<HashType>, bool> getParsedTypeAndSRI(std::string_view & rest) {
bool isSRI = false; bool isSRI = false;
// Parse the hash type before the separator, if there was one. // Parse the hash type before the separator, if there was one.
std::optional<HashType> optParsedType; std::optional<HashType> optParsedType;
{ {
auto sep = rest.find(':'); auto hashRaw = splitPrefixTo(rest, ':');
if (sep == std::string_view::npos) {
sep = rest.find('-'); if (!hashRaw) {
if (sep != std::string_view::npos) hashRaw = splitPrefixTo(rest, '-');
if (hashRaw)
isSRI = true; isSRI = true;
} }
if (sep != std::string_view::npos) { if (hashRaw)
auto hashRaw = rest.substr(0, sep); optParsedType = parseHashType(*hashRaw);
optParsedType = parseHashType(hashRaw);
rest = rest.substr(sep + 1);
} }
return {optParsedType, isSRI};
} }
Hash Hash::parseAnyPrefixed(std::string_view original)
{
auto rest = original;
auto [optParsedType, isSRI] = getParsedTypeAndSRI(rest);
// Either the string or user must provide the type, if they both do they // Either the string or user must provide the type, if they both do they
// must agree. // must agree.
if (!optParsedType && !optType) { if (!optParsedType)
throw BadHash("hash '%s' does not include a type, nor is the type otherwise known from context.", rest); throw BadHash("hash '%s' does not include a type", rest);
} else {
this->type = optParsedType ? *optParsedType : *optType; return Hash(rest, *optParsedType, isSRI);
if (optParsedType && optType && *optParsedType != *optType)
throw BadHash("hash '%s' should have type '%s'", original, printHashType(*optType));
} }
init(); Hash Hash::parseAny(std::string_view original, std::optional<HashType> optType)
{
auto rest = original;
auto [optParsedType, isSRI] = getParsedTypeAndSRI(rest);
// Either the string or user must provide the type, if they both do they
// must agree.
if (!optParsedType && !optType)
throw BadHash("hash '%s' does not include a type, nor is the type otherwise known from context.", rest);
else if (optParsedType && optType && *optParsedType != *optType)
throw BadHash("hash '%s' should have type '%s'", original, printHashType(*optType));
HashType hashType = optParsedType ? *optParsedType : *optType;
return Hash(rest, hashType, isSRI);
}
Hash Hash::parseNonSRIUnprefixed(std::string_view s, HashType type)
{
return Hash(s, type, false);
}
Hash::Hash(std::string_view rest, HashType type, bool isSRI)
: Hash(type)
{
if (!isSRI && rest.size() == base16Len()) { if (!isSRI && rest.size() == base16Len()) {
auto parseHexDigit = [&](char c) { auto parseHexDigit = [&](char c) {
if (c >= '0' && c <= '9') return c - '0'; if (c >= '0' && c <= '9') return c - '0';
if (c >= 'A' && c <= 'F') return c - 'A' + 10; if (c >= 'A' && c <= 'F') return c - 'A' + 10;
if (c >= 'a' && c <= 'f') return c - 'a' + 10; if (c >= 'a' && c <= 'f') return c - 'a' + 10;
throw BadHash("invalid base-16 hash '%s'", original); throw BadHash("invalid base-16 hash '%s'", rest);
}; };
for (unsigned int i = 0; i < hashSize; i++) { for (unsigned int i = 0; i < hashSize; i++) {
hash[i] = hash[i] =
parseHexDigit(rest[pos + i * 2]) << 4 parseHexDigit(rest[i * 2]) << 4
| parseHexDigit(rest[pos + i * 2 + 1]); | parseHexDigit(rest[i * 2 + 1]);
} }
} }
@ -195,7 +232,7 @@ Hash::Hash(std::string_view original, std::optional<HashType> optType)
for (digit = 0; digit < base32Chars.size(); ++digit) /* !!! slow */ for (digit = 0; digit < base32Chars.size(); ++digit) /* !!! slow */
if (base32Chars[digit] == c) break; if (base32Chars[digit] == c) break;
if (digit >= 32) if (digit >= 32)
throw BadHash("invalid base-32 hash '%s'", original); throw BadHash("invalid base-32 hash '%s'", rest);
unsigned int b = n * 5; unsigned int b = n * 5;
unsigned int i = b / 8; unsigned int i = b / 8;
unsigned int j = b % 8; unsigned int j = b % 8;
@ -205,7 +242,7 @@ Hash::Hash(std::string_view original, std::optional<HashType> optType)
hash[i + 1] |= digit >> (8 - j); hash[i + 1] |= digit >> (8 - j);
} else { } else {
if (digit >> (8 - j)) if (digit >> (8 - j))
throw BadHash("invalid base-32 hash '%s'", original); throw BadHash("invalid base-32 hash '%s'", rest);
} }
} }
} }
@ -213,7 +250,7 @@ Hash::Hash(std::string_view original, std::optional<HashType> optType)
else if (isSRI || rest.size() == base64Len()) { else if (isSRI || rest.size() == base64Len()) {
auto d = base64Decode(rest); auto d = base64Decode(rest);
if (d.size() != hashSize) if (d.size() != hashSize)
throw BadHash("invalid %s hash '%s'", isSRI ? "SRI" : "base-64", original); throw BadHash("invalid %s hash '%s'", isSRI ? "SRI" : "base-64", rest);
assert(hashSize); assert(hashSize);
memcpy(hash, d.data(), hashSize); memcpy(hash, d.data(), hashSize);
} }
@ -231,7 +268,7 @@ Hash newHashAllowEmpty(std::string hashStr, std::optional<HashType> ht)
warn("found empty hash, assuming '%s'", h.to_string(SRI, true)); warn("found empty hash, assuming '%s'", h.to_string(SRI, true));
return h; return h;
} else } else
return Hash(hashStr, ht); return Hash::parseAny(hashStr, ht);
} }

View file

@ -34,24 +34,31 @@ struct Hash
HashType type; HashType type;
/* Create a zero-filled hash object. */ /* Create a zero-filled hash object. */
Hash(HashType type) : type(type) { init(); }; Hash(HashType type);
/* Initialize the hash from a string representation, in the format /* Parse the hash from a string representation in the format
"[<type>:]<base16|base32|base64>" or "<type>-<base64>" (a "[<type>:]<base16|base32|base64>" or "<type>-<base64>" (a
Subresource Integrity hash expression). If the 'type' argument Subresource Integrity hash expression). If the 'type' argument
is not present, then the hash type must be specified in the is not present, then the hash type must be specified in the
string. */ string. */
Hash(std::string_view s, std::optional<HashType> type); static Hash parseAny(std::string_view s, std::optional<HashType> type);
// type must be provided
Hash(std::string_view s, HashType type);
// hash type must be part of string
Hash(std::string_view s);
void init(); /* Parse a hash from a string representation like the above, except the
type prefix is mandatory if there is no separate argument. */
static Hash parseAnyPrefixed(std::string_view s);
/* Check whether a hash is set. */ /* Parse a plain hash that must not have any prefix indicating the type.
operator bool () const { return (bool) type; } The type is passed in to disambiguate. */
static Hash parseNonSRIUnprefixed(std::string_view s, HashType type);
static Hash parseSRI(std::string_view original);
private:
/* The type must be provided, the string view must not include <type>
prefix. `isSRI` helps disambiguate the various base-* encodings. */
Hash(std::string_view s, HashType type, bool isSRI);
public:
/* Check whether two hash are equal. */ /* Check whether two hash are equal. */
bool operator == (const Hash & h2) const; bool operator == (const Hash & h2) const;
@ -95,6 +102,8 @@ struct Hash
assert(type == htSHA1); assert(type == htSHA1);
return std::string(to_string(Base16, false), 0, 7); return std::string(to_string(Base16, false), 0, 7);
} }
static Hash dummy;
}; };
/* Helper that defaults empty hashes to the 0 hash. */ /* Helper that defaults empty hashes to the 0 hash. */
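For clarity, a short sketch of how the named parsers above fit together, as a round trip through hashString (assumed to be the existing helper from this header); this is not code from this commit:

    auto h  = hashString(htSHA256, "hello");
    auto h1 = Hash::parseSRI(h.to_string(SRI, true));             // "sha256-<base64>" form
    auto h2 = Hash::parseAnyPrefixed(h.to_string(Base32, true));  // "sha256:<base32>" form
    auto h3 = Hash::parseNonSRIUnprefixed(h.to_string(Base16, false), htSHA256);
    assert(h == h1 && h == h2 && h == h3);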

View file

@ -225,6 +225,17 @@ struct SizedSource : Source
} }
}; };
/* A sink that just counts the number of bytes given to it */
struct LengthSink : Sink
{
uint64_t length = 0;
virtual void operator () (const unsigned char * _, size_t len)
{
length += len;
}
};
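A sketch of the intended use: measuring the size of a NAR stream without buffering it (dumpPath is the existing NAR serialiser; srcPath is an assumed input path):

    LengthSink narSize;
    dumpPath(srcPath, narSize);          // stream the serialisation through the counting sink
    // narSize.length now holds the number of bytes that were written.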
/* Convert a function into a sink. */ /* Convert a function into a sink. */
struct LambdaSink : Sink struct LambdaSink : Sink
{ {

33
src/libutil/split.hh Normal file
View file

@ -0,0 +1,33 @@
#pragma once
#include <optional>
#include <string_view>
#include "util.hh"
namespace nix {
// If `separator` is found, we return the portion of the string before the
// separator, and modify the string argument to contain only the part after the
// separator. Otherwise, we return `std::nullopt`, and we leave the argument
// string alone.
static inline std::optional<std::string_view> splitPrefixTo(std::string_view & string, char separator) {
auto sepInstance = string.find(separator);
if (sepInstance != std::string_view::npos) {
auto prefix = string.substr(0, sepInstance);
string.remove_prefix(sepInstance+1);
return prefix;
}
return std::nullopt;
}
static inline bool splitPrefix(std::string_view & string, std::string_view prefix) {
bool res = hasPrefix(string, prefix);
if (res)
string.remove_prefix(prefix.length());
return res;
}
}
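A quick sketch of the mutate-on-success contract described in the comment above (the input string is made up):

    std::string_view rest = "sha256:ab12";
    auto prefix = splitPrefixTo(rest, ':');   // prefix == "sha256", rest == "ab12"
    auto none   = splitPrefixTo(rest, ':');   // no separator left: std::nullopt, rest unchanged
    bool hadMd5 = splitPrefix(rest, "md5:");  // false here; would strip the prefix if it matched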

40
src/libutil/topo-sort.hh Normal file
View file

@ -0,0 +1,40 @@
#include "error.hh"
namespace nix {
template<typename T>
std::vector<T> topoSort(std::set<T> items,
std::function<std::set<T>(const T &)> getChildren,
std::function<Error(const T &, const T &)> makeCycleError)
{
std::vector<T> sorted;
std::set<T> visited, parents;
std::function<void(const T & path, const T * parent)> dfsVisit;
dfsVisit = [&](const T & path, const T * parent) {
if (parents.count(path)) throw makeCycleError(path, *parent);
if (!visited.insert(path).second) return;
parents.insert(path);
std::set<T> references = getChildren(path);
for (auto & i : references)
/* Don't traverse into items that don't exist in our starting set. */
if (i != path && items.count(i))
dfsVisit(i, &path);
sorted.push_back(path);
parents.erase(path);
};
for (auto & i : items)
dfsVisit(i, nullptr);
std::reverse(sorted.begin(), sorted.end());
return sorted;
}
}
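Because both callbacks are std::function parameters, call sites wrap their lambdas in braces, as Store::topoSortPaths above does. A minimal sketch with a toy integer graph (the graph itself is an assumption):

    std::map<int, std::set<int>> deps = { {1, {2, 3}}, {2, {3}}, {3, {}} };
    auto sorted = topoSort<int>(
        {1, 2, 3},
        {[&](const int & n) { return deps[n]; }},
        {[&](const int & n, const int & parent) {
            return Error("cycle detected at %d, reached from %d", n, parent);
        }});
    // sorted == {1, 2, 3}: every item comes before the items it refers to.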

View file

@ -494,6 +494,7 @@ std::pair<AutoCloseFD, Path> createTempFile(const Path & prefix)
{ {
Path tmpl(getEnv("TMPDIR").value_or("/tmp") + "/" + prefix + ".XXXXXX"); Path tmpl(getEnv("TMPDIR").value_or("/tmp") + "/" + prefix + ".XXXXXX");
// Strictly speaking, this is UB, but who cares... // Strictly speaking, this is UB, but who cares...
// FIXME: use O_TMPFILE.
AutoCloseFD fd(mkstemp((char *) tmpl.c_str())); AutoCloseFD fd(mkstemp((char *) tmpl.c_str()));
if (!fd) if (!fd)
throw SysError("creating temporary file '%s'", tmpl); throw SysError("creating temporary file '%s'", tmpl);
@ -1449,7 +1450,7 @@ string base64Decode(std::string_view s)
char digit = decode[(unsigned char) c]; char digit = decode[(unsigned char) c];
if (digit == -1) if (digit == -1)
throw Error("invalid character in Base64 string"); throw Error("invalid character in Base64 string: '%c'", c);
bits += 6; bits += 6;
d = d << 6 | digit; d = d << 6 | digit;

View file

@ -601,4 +601,9 @@ constexpr auto enumerate(T && iterable)
} }
// C++17 std::visit boilerplate
template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;
} }
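The two-line helper above is the standard C++17 idiom for visiting a std::variant with a set of lambdas; the Buildable handling later in this commit uses it. A generic sketch (the variant here is made up):

    std::variant<int, std::string> v = std::string("out");
    std::visit(overloaded {
        [](int n)                 { /* the integer alternative */ },
        [](const std::string & s) { /* the string alternative */ },
    }, v);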

View file

@ -239,7 +239,15 @@ static void daemonLoop(char * * argv)
// Handle the connection. // Handle the connection.
FdSource from(remote.get()); FdSource from(remote.get());
FdSink to(remote.get()); FdSink to(remote.get());
processConnection(openUncachedStore(), from, to, trusted, NotRecursive, user, peer.uid); processConnection(openUncachedStore(), from, to, trusted, NotRecursive, [&](Store & store) {
#if 0
/* Prevent users from doing something very dangerous. */
if (geteuid() == 0 &&
querySetting("build-users-group", "") == "")
throw Error("if you run 'nix-daemon' as root, then you MUST set 'build-users-group'!");
#endif
store.createUser(user, peer.uid);
});
exit(0); exit(0);
}, options); }, options);
@ -324,7 +332,10 @@ static int _main(int argc, char * * argv)
} else { } else {
FdSource from(STDIN_FILENO); FdSource from(STDIN_FILENO);
FdSink to(STDOUT_FILENO); FdSink to(STDOUT_FILENO);
processConnection(openUncachedStore(), from, to, Trusted, NotRecursive, "root", 0); /* Auth hook is empty because in this mode we blindly trust the
standard streams. Limiting access to those is explicitly
not `nix-daemon`'s responsibility. */
processConnection(openUncachedStore(), from, to, Trusted, NotRecursive, [&](Store & _){});
} }
} else { } else {
daemonLoop(argv); daemonLoop(argv);

View file

@ -157,7 +157,7 @@ static int _main(int argc, char * * argv)
Hash hash(ht); Hash hash(ht);
std::optional<StorePath> storePath; std::optional<StorePath> storePath;
if (args.size() == 2) { if (args.size() == 2) {
expectedHash = Hash(args[1], ht); expectedHash = Hash::parseAny(args[1], ht);
const auto recursive = unpack ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat; const auto recursive = unpack ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat;
storePath = store->makeFixedOutputPath(recursive, *expectedHash, name); storePath = store->makeFixedOutputPath(recursive, *expectedHash, name);
if (store->isValidPath(*storePath)) if (store->isValidPath(*storePath))

View file

@ -208,7 +208,7 @@ static void opPrintFixedPath(Strings opFlags, Strings opArgs)
string hash = *i++; string hash = *i++;
string name = *i++; string name = *i++;
cout << fmt("%s\n", store->printStorePath(store->makeFixedOutputPath(recursive, Hash(hash, hashAlgo), name))); cout << fmt("%s\n", store->printStorePath(store->makeFixedOutputPath(recursive, Hash::parseAny(hash, hashAlgo), name)));
} }
@ -218,8 +218,8 @@ static StorePathSet maybeUseOutputs(const StorePath & storePath, bool useOutput,
if (useOutput && storePath.isDerivation()) { if (useOutput && storePath.isDerivation()) {
auto drv = store->derivationFromPath(storePath); auto drv = store->derivationFromPath(storePath);
StorePathSet outputs; StorePathSet outputs;
for (auto & i : drv.outputs) for (auto & i : drv.outputsAndPaths(*store))
outputs.insert(i.second.path(*store, drv.name)); outputs.insert(i.second.second);
return outputs; return outputs;
} }
else return {storePath}; else return {storePath};
@ -312,8 +312,8 @@ static void opQuery(Strings opFlags, Strings opArgs)
auto i2 = store->followLinksToStorePath(i); auto i2 = store->followLinksToStorePath(i);
if (forceRealise) realisePath({i2}); if (forceRealise) realisePath({i2});
Derivation drv = store->derivationFromPath(i2); Derivation drv = store->derivationFromPath(i2);
for (auto & j : drv.outputs) for (auto & j : drv.outputsAndPaths(*store))
cout << fmt("%1%\n", store->printStorePath(j.second.path(*store, drv.name))); cout << fmt("%1%\n", store->printStorePath(j.second.second));
} }
break; break;
} }
@ -372,8 +372,8 @@ static void opQuery(Strings opFlags, Strings opArgs)
for (auto & j : maybeUseOutputs(store->followLinksToStorePath(i), useOutput, forceRealise)) { for (auto & j : maybeUseOutputs(store->followLinksToStorePath(i), useOutput, forceRealise)) {
auto info = store->queryPathInfo(j); auto info = store->queryPathInfo(j);
if (query == qHash) { if (query == qHash) {
assert(info->narHash && info->narHash->type == htSHA256); assert(info->narHash.type == htSHA256);
cout << fmt("%s\n", info->narHash->to_string(Base32, true)); cout << fmt("%s\n", info->narHash.to_string(Base32, true));
} else if (query == qSize) } else if (query == qSize)
cout << fmt("%d\n", info->narSize); cout << fmt("%d\n", info->narSize);
} }
@ -495,7 +495,10 @@ static void registerValidity(bool reregister, bool hashGiven, bool canonicalise)
ValidPathInfos infos; ValidPathInfos infos;
while (1) { while (1) {
auto info = decodeValidPathInfo(*store, cin, hashGiven); // We use a dummy value because we'll set it below. FIXME be correct by
// construction and avoid dummy value.
auto hashResultOpt = !hashGiven ? std::optional<HashResult> { {Hash::dummy, -1} } : std::nullopt;
auto info = decodeValidPathInfo(*store, cin, hashResultOpt);
if (!info) break; if (!info) break;
if (!store->isValidPath(info->path) || reregister) { if (!store->isValidPath(info->path) || reregister) {
/* !!! races */ /* !!! races */
@ -723,7 +726,7 @@ static void opVerifyPath(Strings opFlags, Strings opArgs)
auto path = store->followLinksToStorePath(i); auto path = store->followLinksToStorePath(i);
printMsg(lvlTalkative, "checking path '%s'...", store->printStorePath(path)); printMsg(lvlTalkative, "checking path '%s'...", store->printStorePath(path));
auto info = store->queryPathInfo(path); auto info = store->queryPathInfo(path);
HashSink sink(info->narHash->type); HashSink sink(info->narHash.type);
store->narFromPath(path, sink); store->narFromPath(path, sink);
auto current = sink.finish(); auto current = sink.finish();
if (current.first != info->narHash) { if (current.first != info->narHash) {
@ -732,7 +735,7 @@ static void opVerifyPath(Strings opFlags, Strings opArgs)
.hint = hintfmt( .hint = hintfmt(
"path '%s' was modified! expected hash '%s', got '%s'", "path '%s' was modified! expected hash '%s', got '%s'",
store->printStorePath(path), store->printStorePath(path),
info->narHash->to_string(Base32, true), info->narHash.to_string(Base32, true),
current.first.to_string(Base32, true)) current.first.to_string(Base32, true))
}); });
status = 1; status = 1;
@ -862,7 +865,7 @@ static void opServe(Strings opFlags, Strings opArgs)
out << info->narSize // downloadSize out << info->narSize // downloadSize
<< info->narSize; << info->narSize;
if (GET_PROTOCOL_MINOR(clientVersion) >= 4) if (GET_PROTOCOL_MINOR(clientVersion) >= 4)
out << (info->narHash ? info->narHash->to_string(Base32, true) : "") out << info->narHash.to_string(Base32, true)
<< renderContentAddress(info->ca) << renderContentAddress(info->ca)
<< info->sigs; << info->sigs;
} catch (InvalidPath &) { } catch (InvalidPath &) {
@ -944,11 +947,13 @@ static void opServe(Strings opFlags, Strings opArgs)
if (!writeAllowed) throw Error("importing paths is not allowed"); if (!writeAllowed) throw Error("importing paths is not allowed");
auto path = readString(in); auto path = readString(in);
ValidPathInfo info(store->parseStorePath(path));
auto deriver = readString(in); auto deriver = readString(in);
ValidPathInfo info {
store->parseStorePath(path),
Hash::parseAny(readString(in), htSHA256),
};
if (deriver != "") if (deriver != "")
info.deriver = store->parseStorePath(deriver); info.deriver = store->parseStorePath(deriver);
info.narHash = Hash(readString(in), htSHA256);
info.references = readStorePaths<StorePathSet>(*store, in); info.references = readStorePaths<StorePathSet>(*store, in);
in >> info.registrationTime >> info.narSize >> info.ultimate; in >> info.registrationTime >> info.narSize >> info.ultimate;
info.sigs = readStrings<StringSet>(in); info.sigs = readStrings<StringSet>(in);

View file

@ -60,8 +60,10 @@ struct CmdAddToStore : MixDryRun, StoreCommand
hash = hsink.finish().first; hash = hsink.finish().first;
} }
ValidPathInfo info(store->makeFixedOutputPath(ingestionMethod, hash, *namePart)); ValidPathInfo info {
info.narHash = narHash; store->makeFixedOutputPath(ingestionMethod, hash, *namePart),
narHash,
};
info.narSize = sink.s->size(); info.narSize = sink.s->size();
info.ca = std::optional { FixedOutputHash { info.ca = std::optional { FixedOutputHash {
.method = ingestionMethod, .method = ingestionMethod,

View file

@ -8,7 +8,7 @@ namespace nix {
App Installable::toApp(EvalState & state) App Installable::toApp(EvalState & state)
{ {
auto [cursor, attrPath] = getCursor(state, true); auto [cursor, attrPath] = getCursor(state);
auto type = cursor->getAttr("type")->getString(); auto type = cursor->getAttr("type")->getString();
View file
@ -64,17 +64,24 @@ struct CmdBuild : InstallablesCommand, MixDryRun, MixProfile
if (dryRun) return; if (dryRun) return;
if (outLink != "") { if (outLink != "")
for (size_t i = 0; i < buildables.size(); ++i) { if (auto store2 = store.dynamic_pointer_cast<LocalFSStore>())
for (auto & output : buildables[i].outputs) for (size_t i = 0; i < buildables.size(); ++i)
if (auto store2 = store.dynamic_pointer_cast<LocalFSStore>()) { std::visit(overloaded {
[&](BuildableOpaque bo) {
std::string symlink = outLink;
if (i) symlink += fmt("-%d", i);
store2->addPermRoot(bo.path, absPath(symlink), true);
},
[&](BuildableFromDrv bfd) {
for (auto & output : bfd.outputs) {
std::string symlink = outLink; std::string symlink = outLink;
if (i) symlink += fmt("-%d", i); if (i) symlink += fmt("-%d", i);
if (output.first != "out") symlink += fmt("-%s", output.first); if (output.first != "out") symlink += fmt("-%s", output.first);
store2->addPermRoot(output.second, absPath(symlink), true); store2->addPermRoot(output.second, absPath(symlink), true);
} }
} },
} }, buildables[i]);
updateProfile(buildables); updateProfile(buildables);
} }
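Several of the hunks in this commit dispatch over the new Buildable variant with std::visit and an `overloaded` helper (Nix keeps one in libutil; it is the usual C++17 aggregate-of-lambdas idiom). A minimal standalone sketch, with a toy variant in place of the real Buildable types:

#include <iostream>
#include <string>
#include <variant>

// The usual C++17 visitor helper: inherit from every lambda and pull in its
// call operator; the deduction guide lets overloaded { ... } be written
// without spelling out the lambda types.
template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;

// Toy stand-ins for BuildableOpaque / BuildableFromDrv.
struct Opaque  { std::string path; };
struct FromDrv { std::string drvPath; };
using Toy = std::variant<Opaque, FromDrv>;

int main()
{
    Toy items[] = { Opaque{"/nix/store/...-hello"}, FromDrv{"/nix/store/...-hello.drv"} };
    for (auto & b : items)
        std::visit(overloaded {
            [](const Opaque & o)  { std::cout << "opaque path: " << o.path    << "\n"; },
            [](const FromDrv & d) { std::cout << "derivation:  " << d.drvPath << "\n"; },
        }, b);
}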
View file
@ -128,20 +128,25 @@ void MixProfile::updateProfile(const Buildables & buildables)
{ {
if (!profile) return; if (!profile) return;
std::optional<StorePath> result; std::vector<StorePath> result;
for (auto & buildable : buildables) { for (auto & buildable : buildables) {
for (auto & output : buildable.outputs) { std::visit(overloaded {
if (result) [&](BuildableOpaque bo) {
throw Error("'--profile' requires that the arguments produce a single store path, but there are multiple"); result.push_back(bo.path);
result = output.second; },
[&](BuildableFromDrv bfd) {
for (auto & output : bfd.outputs) {
result.push_back(output.second);
} }
},
}, buildable);
} }
if (!result) if (result.size() != 1)
throw Error("'--profile' requires that the arguments produce a single store path, but there are none"); throw Error("'--profile' requires that the arguments produce a single store path, but there are %d", result.size());
updateProfile(*result); updateProfile(result[0]);
} }
MixDefaultProfile::MixDefaultProfile() MixDefaultProfile::MixDefaultProfile()
View file
@ -124,10 +124,7 @@ StorePath getDerivationEnvironment(ref<Store> store, const StorePath & drvPath)
/* Rehash and write the derivation. FIXME: would be nice to use /* Rehash and write the derivation. FIXME: would be nice to use
'buildDerivation', but that's privileged. */ 'buildDerivation', but that's privileged. */
auto drvName = std::string(drvPath.name()); drv.name += "-env";
assert(hasSuffix(drvName, ".drv"));
drvName.resize(drvName.size() - 4);
drvName += "-env";
for (auto & output : drv.outputs) for (auto & output : drv.outputs)
drv.env.erase(output.first); drv.env.erase(output.first);
drv.outputs = {{"out", DerivationOutput { .output = DerivationOutputInputAddressed { .path = StorePath::dummy }}}}; drv.outputs = {{"out", DerivationOutput { .output = DerivationOutputInputAddressed { .path = StorePath::dummy }}}};
@ -135,13 +132,13 @@ StorePath getDerivationEnvironment(ref<Store> store, const StorePath & drvPath)
drv.env["_outputs_saved"] = drv.env["outputs"]; drv.env["_outputs_saved"] = drv.env["outputs"];
drv.env["outputs"] = "out"; drv.env["outputs"] = "out";
drv.inputSrcs.insert(std::move(getEnvShPath)); drv.inputSrcs.insert(std::move(getEnvShPath));
Hash h = hashDerivationModulo(*store, drv, true); Hash h = std::get<0>(hashDerivationModulo(*store, drv, true));
auto shellOutPath = store->makeOutputPath("out", h, drvName); auto shellOutPath = store->makeOutputPath("out", h, drv.name);
drv.outputs.insert_or_assign("out", DerivationOutput { .output = DerivationOutputInputAddressed { drv.outputs.insert_or_assign("out", DerivationOutput { .output = DerivationOutputInputAddressed {
.path = shellOutPath .path = shellOutPath
} }); } });
drv.env["out"] = store->printStorePath(shellOutPath); drv.env["out"] = store->printStorePath(shellOutPath);
auto shellDrvPath2 = writeDerivation(store, drv, drvName); auto shellDrvPath2 = writeDerivation(store, drv);
/* Build the derivation. */ /* Build the derivation. */
store->buildPaths({{shellDrvPath2}}); store->buildPaths({{shellDrvPath2}});
View file
@ -572,7 +572,7 @@ struct CmdFlakeInitCommon : virtual Args, EvalCommand
Strings{templateName == "" ? "defaultTemplate" : templateName}, Strings{templateName == "" ? "defaultTemplate" : templateName},
Strings(attrsPathPrefixes), lockFlags); Strings(attrsPathPrefixes), lockFlags);
auto [cursor, attrPath] = installable.getCursor(*evalState, true); auto [cursor, attrPath] = installable.getCursor(*evalState);
auto templateDir = cursor->getAttr("path")->getString(); auto templateDir = cursor->getAttr("path")->getString();
@ -782,7 +782,6 @@ struct CmdFlakeArchive : FlakeCommand, MixJSON, MixDryRun
struct CmdFlakeShow : FlakeCommand struct CmdFlakeShow : FlakeCommand
{ {
bool showLegacy = false; bool showLegacy = false;
bool useEvalCache = true;
CmdFlakeShow() CmdFlakeShow()
{ {
@ -791,12 +790,6 @@ struct CmdFlakeShow : FlakeCommand
.description = "show the contents of the 'legacyPackages' output", .description = "show the contents of the 'legacyPackages' output",
.handler = {&showLegacy, true} .handler = {&showLegacy, true}
}); });
addFlag({
.longName = "no-eval-cache",
.description = "do not use the flake evaluation cache",
.handler = {[&]() { useEvalCache = false; }}
});
} }
std::string description() override std::string description() override
@ -934,7 +927,7 @@ struct CmdFlakeShow : FlakeCommand
} }
}; };
auto cache = openEvalCache(*state, flake, useEvalCache); auto cache = openEvalCache(*state, flake);
visit(*cache->getRoot(), {}, fmt(ANSI_BOLD "%s" ANSI_NORMAL, flake->flake.lockedRef), ""); visit(*cache->getRoot(), {}, fmt(ANSI_BOLD "%s" ANSI_NORMAL, flake->flake.lockedRef), "");
} }
View file
@ -107,7 +107,7 @@ struct CmdToBase : Command
void run() override void run() override
{ {
for (auto s : args) for (auto s : args)
logger->stdout(Hash(s, ht).to_string(base, base == SRI)); logger->stdout(Hash::parseAny(s, ht).to_string(base, base == SRI));
} }
}; };
View file
@ -183,8 +183,7 @@ void completeFlakeRefWithFragment(
auto flakeRef = parseFlakeRef(flakeRefS, absPath(".")); auto flakeRef = parseFlakeRef(flakeRefS, absPath("."));
auto evalCache = openEvalCache(*evalState, auto evalCache = openEvalCache(*evalState,
std::make_shared<flake::LockedFlake>(lockFlake(*evalState, flakeRef, lockFlags)), std::make_shared<flake::LockedFlake>(lockFlake(*evalState, flakeRef, lockFlags)));
true);
auto root = evalCache->getRoot(); auto root = evalCache->getRoot();
@ -273,7 +272,7 @@ Buildable Installable::toBuildable()
} }
std::vector<std::pair<std::shared_ptr<eval_cache::AttrCursor>, std::string>> std::vector<std::pair<std::shared_ptr<eval_cache::AttrCursor>, std::string>>
Installable::getCursors(EvalState & state, bool useEvalCache) Installable::getCursors(EvalState & state)
{ {
auto evalCache = auto evalCache =
std::make_shared<nix::eval_cache::EvalCache>(std::nullopt, state, std::make_shared<nix::eval_cache::EvalCache>(std::nullopt, state,
@ -282,9 +281,9 @@ Installable::getCursors(EvalState & state, bool useEvalCache)
} }
std::pair<std::shared_ptr<eval_cache::AttrCursor>, std::string> std::pair<std::shared_ptr<eval_cache::AttrCursor>, std::string>
Installable::getCursor(EvalState & state, bool useEvalCache) Installable::getCursor(EvalState & state)
{ {
auto cursors = getCursors(state, useEvalCache); auto cursors = getCursors(state);
if (cursors.empty()) if (cursors.empty())
throw Error("cannot find flake attribute '%s'", what()); throw Error("cannot find flake attribute '%s'", what());
return cursors[0]; return cursors[0];
@ -305,19 +304,18 @@ struct InstallableStorePath : Installable
if (storePath.isDerivation()) { if (storePath.isDerivation()) {
std::map<std::string, StorePath> outputs; std::map<std::string, StorePath> outputs;
auto drv = store->readDerivation(storePath); auto drv = store->readDerivation(storePath);
for (auto & [name, output] : drv.outputs) for (auto & i : drv.outputsAndPaths(*store))
outputs.emplace(name, output.path(*store, drv.name)); outputs.emplace(i.first, i.second.second);
return { return {
Buildable { BuildableFromDrv {
.drvPath = storePath, .drvPath = storePath,
.outputs = std::move(outputs) .outputs = std::move(outputs)
} }
}; };
} else { } else {
return { return {
Buildable { BuildableOpaque {
.drvPath = {}, .path = storePath,
.outputs = {{"out", storePath}}
} }
}; };
} }
@ -333,32 +331,19 @@ Buildables InstallableValue::toBuildables()
{ {
Buildables res; Buildables res;
StorePathSet drvPaths; std::map<StorePath, OutputPathMap> drvsToOutputs;
// Group by derivation, helps with .all in particular
for (auto & drv : toDerivations()) { for (auto & drv : toDerivations()) {
Buildable b{.drvPath = drv.drvPath};
drvPaths.insert(drv.drvPath);
auto outputName = drv.outputName; auto outputName = drv.outputName;
if (outputName == "") if (outputName == "")
throw Error("derivation '%s' lacks an 'outputName' attribute", state->store->printStorePath(*b.drvPath)); throw Error("derivation '%s' lacks an 'outputName' attribute", state->store->printStorePath(drv.drvPath));
drvsToOutputs[drv.drvPath].insert_or_assign(outputName, drv.outPath);
b.outputs.emplace(outputName, drv.outPath);
res.push_back(std::move(b));
} }
// Hack to recognize .all: if all drvs have the same drvPath, for (auto & i : drvsToOutputs)
// merge the buildables. res.push_back(BuildableFromDrv { i.first, i.second });
if (drvPaths.size() == 1) {
Buildable b{.drvPath = *drvPaths.begin()};
for (auto & b2 : res)
for (auto & output : b2.outputs)
b.outputs.insert_or_assign(output.first, output.second);
Buildables bs;
bs.push_back(std::move(b));
return bs;
} else
return res; return res;
} }
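The new toBuildables above drops the `.all` special case by first grouping every requested output under its derivation path and then emitting one BuildableFromDrv per derivation. A small sketch of that grouping step, with plain strings standing in for store paths:

#include <iostream>
#include <map>
#include <string>
#include <vector>

// Toy analogue of InstallableValue::DerivationInfo.
struct DerivationInfo {
    std::string drvPath;
    std::string outputName;
    std::string outPath;
};

int main()
{
    // e.g. what 'foo.all' might expand to: several outputs of one derivation.
    std::vector<DerivationInfo> drvs = {
        {"/nix/store/...-foo.drv", "out", "/nix/store/...-foo"},
        {"/nix/store/...-foo.drv", "dev", "/nix/store/...-foo-dev"},
    };

    // Group by derivation path: one entry per derivation, carrying all its outputs.
    std::map<std::string, std::map<std::string, std::string>> drvsToOutputs;
    for (auto & d : drvs)
        drvsToOutputs[d.drvPath].insert_or_assign(d.outputName, d.outPath);

    for (auto & [drv, outputs] : drvsToOutputs) {
        std::cout << drv << ":\n";
        for (auto & [name, path] : outputs)
            std::cout << "  " << name << " -> " << path << "\n";
    }
}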
@ -434,12 +419,11 @@ Value * InstallableFlake::getFlakeOutputs(EvalState & state, const flake::Locked
ref<eval_cache::EvalCache> openEvalCache( ref<eval_cache::EvalCache> openEvalCache(
EvalState & state, EvalState & state,
std::shared_ptr<flake::LockedFlake> lockedFlake, std::shared_ptr<flake::LockedFlake> lockedFlake)
bool useEvalCache)
{ {
auto fingerprint = lockedFlake->getFingerprint(); auto fingerprint = lockedFlake->getFingerprint();
return make_ref<nix::eval_cache::EvalCache>( return make_ref<nix::eval_cache::EvalCache>(
useEvalCache && evalSettings.pureEval evalSettings.useEvalCache && evalSettings.pureEval
? std::optional { std::cref(fingerprint) } ? std::optional { std::cref(fingerprint) }
: std::nullopt, : std::nullopt,
state, state,
@ -474,10 +458,9 @@ static std::string showAttrPaths(const std::vector<std::string> & paths)
std::tuple<std::string, FlakeRef, InstallableValue::DerivationInfo> InstallableFlake::toDerivation() std::tuple<std::string, FlakeRef, InstallableValue::DerivationInfo> InstallableFlake::toDerivation()
{ {
auto lockedFlake = getLockedFlake(); auto lockedFlake = getLockedFlake();
auto cache = openEvalCache(*state, lockedFlake, true); auto cache = openEvalCache(*state, lockedFlake);
auto root = cache->getRoot(); auto root = cache->getRoot();
for (auto & attrPath : getActualAttrPaths()) { for (auto & attrPath : getActualAttrPaths()) {
@ -531,11 +514,10 @@ std::pair<Value *, Pos> InstallableFlake::toValue(EvalState & state)
} }
std::vector<std::pair<std::shared_ptr<eval_cache::AttrCursor>, std::string>> std::vector<std::pair<std::shared_ptr<eval_cache::AttrCursor>, std::string>>
InstallableFlake::getCursors(EvalState & state, bool useEvalCache) InstallableFlake::getCursors(EvalState & state)
{ {
auto evalCache = openEvalCache(state, auto evalCache = openEvalCache(state,
std::make_shared<flake::LockedFlake>(lockFlake(state, flakeRef, lockFlags)), std::make_shared<flake::LockedFlake>(lockFlake(state, flakeRef, lockFlags)));
useEvalCache);
auto root = evalCache->getRoot(); auto root = evalCache->getRoot();
@ -656,14 +638,17 @@ Buildables build(ref<Store> store, Realise mode,
for (auto & i : installables) { for (auto & i : installables) {
for (auto & b : i->toBuildables()) { for (auto & b : i->toBuildables()) {
if (b.drvPath) { std::visit(overloaded {
[&](BuildableOpaque bo) {
pathsToBuild.push_back({bo.path});
},
[&](BuildableFromDrv bfd) {
StringSet outputNames; StringSet outputNames;
for (auto & output : b.outputs) for (auto & output : bfd.outputs)
outputNames.insert(output.first); outputNames.insert(output.first);
pathsToBuild.push_back({*b.drvPath, outputNames}); pathsToBuild.push_back({bfd.drvPath, outputNames});
} else },
for (auto & output : b.outputs) }, b);
pathsToBuild.push_back({output.second});
buildables.push_back(std::move(b)); buildables.push_back(std::move(b));
} }
} }
@ -684,16 +669,23 @@ StorePathSet toStorePaths(ref<Store> store,
if (operateOn == OperateOn::Output) { if (operateOn == OperateOn::Output) {
for (auto & b : build(store, mode, installables)) for (auto & b : build(store, mode, installables))
for (auto & output : b.outputs) std::visit(overloaded {
[&](BuildableOpaque bo) {
outPaths.insert(bo.path);
},
[&](BuildableFromDrv bfd) {
for (auto & output : bfd.outputs)
outPaths.insert(output.second); outPaths.insert(output.second);
},
}, b);
} else { } else {
if (mode == Realise::Nothing) if (mode == Realise::Nothing)
settings.readOnlyMode = true; settings.readOnlyMode = true;
for (auto & i : installables) for (auto & i : installables)
for (auto & b : i->toBuildables()) for (auto & b : i->toBuildables())
if (b.drvPath) if (auto bfd = std::get_if<BuildableFromDrv>(&b))
outPaths.insert(*b.drvPath); outPaths.insert(bfd->drvPath);
} }
return outPaths; return outPaths;
@ -717,20 +709,21 @@ StorePathSet toDerivations(ref<Store> store,
StorePathSet drvPaths; StorePathSet drvPaths;
for (auto & i : installables) for (auto & i : installables)
for (auto & b : i->toBuildables()) { for (auto & b : i->toBuildables())
if (!b.drvPath) { std::visit(overloaded {
[&](BuildableOpaque bo) {
if (!useDeriver) if (!useDeriver)
throw Error("argument '%s' did not evaluate to a derivation", i->what()); throw Error("argument '%s' did not evaluate to a derivation", i->what());
for (auto & output : b.outputs) { auto derivers = store->queryValidDerivers(bo.path);
auto derivers = store->queryValidDerivers(output.second);
if (derivers.empty()) if (derivers.empty())
throw Error("'%s' does not have a known deriver", i->what()); throw Error("'%s' does not have a known deriver", i->what());
// FIXME: use all derivers? // FIXME: use all derivers?
drvPaths.insert(*derivers.begin()); drvPaths.insert(*derivers.begin());
} },
} else [&](BuildableFromDrv bfd) {
drvPaths.insert(*b.drvPath); drvPaths.insert(bfd.drvPath);
} },
}, b);
return drvPaths; return drvPaths;
} }
View file
@ -14,12 +14,20 @@ struct SourceExprCommand;
namespace eval_cache { class EvalCache; class AttrCursor; } namespace eval_cache { class EvalCache; class AttrCursor; }
struct Buildable struct BuildableOpaque {
{ StorePath path;
std::optional<StorePath> drvPath; };
struct BuildableFromDrv {
StorePath drvPath;
std::map<std::string, StorePath> outputs; std::map<std::string, StorePath> outputs;
}; };
typedef std::variant<
BuildableOpaque,
BuildableFromDrv
> Buildable;
typedef std::vector<Buildable> Buildables; typedef std::vector<Buildable> Buildables;
struct App struct App
@ -54,10 +62,10 @@ struct Installable
} }
virtual std::vector<std::pair<std::shared_ptr<eval_cache::AttrCursor>, std::string>> virtual std::vector<std::pair<std::shared_ptr<eval_cache::AttrCursor>, std::string>>
getCursors(EvalState & state, bool useEvalCache); getCursors(EvalState & state);
std::pair<std::shared_ptr<eval_cache::AttrCursor>, std::string> std::pair<std::shared_ptr<eval_cache::AttrCursor>, std::string>
getCursor(EvalState & state, bool useEvalCache); getCursor(EvalState & state);
virtual FlakeRef nixpkgsFlakeRef() const virtual FlakeRef nixpkgsFlakeRef() const
{ {
@ -110,7 +118,7 @@ struct InstallableFlake : InstallableValue
std::pair<Value *, Pos> toValue(EvalState & state) override; std::pair<Value *, Pos> toValue(EvalState & state) override;
std::vector<std::pair<std::shared_ptr<eval_cache::AttrCursor>, std::string>> std::vector<std::pair<std::shared_ptr<eval_cache::AttrCursor>, std::string>>
getCursors(EvalState & state, bool useEvalCache) override; getCursors(EvalState & state) override;
std::shared_ptr<flake::LockedFlake> getLockedFlake() const; std::shared_ptr<flake::LockedFlake> getLockedFlake() const;
@ -119,7 +127,6 @@ struct InstallableFlake : InstallableValue
ref<eval_cache::EvalCache> openEvalCache( ref<eval_cache::EvalCache> openEvalCache(
EvalState & state, EvalState & state,
std::shared_ptr<flake::LockedFlake> lockedFlake, std::shared_ptr<flake::LockedFlake> lockedFlake);
bool useEvalCache);
} }
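With Buildable now a std::variant, code that previously tested `if (b.drvPath)` either visits both alternatives or, where only one case matters, uses std::get_if, as toStorePaths does above. A compact sketch of the get_if style, again with toy types standing in for the Nix ones:

#include <iostream>
#include <map>
#include <string>
#include <variant>

struct BuildableOpaqueToy  { std::string path; };
struct BuildableFromDrvToy {
    std::string drvPath;
    std::map<std::string, std::string> outputs;
};
using BuildableToy = std::variant<BuildableOpaqueToy, BuildableFromDrvToy>;

int main()
{
    BuildableToy b = BuildableFromDrvToy{"/nix/store/...-foo.drv", {{"out", "/nix/store/...-foo"}}};

    // Only derivation-backed buildables carry a drvPath; opaque ones are skipped.
    if (auto bfd = std::get_if<BuildableFromDrvToy>(&b))
        std::cout << "deriver: " << bfd->drvPath << "\n";
    else
        std::cout << "opaque buildable, no deriver\n";
}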
View file
@ -45,11 +45,14 @@ struct CmdLog : InstallableCommand
RunPager pager; RunPager pager;
for (auto & sub : subs) { for (auto & sub : subs) {
auto log = b.drvPath ? sub->getBuildLog(*b.drvPath) : nullptr; auto log = std::visit(overloaded {
for (auto & output : b.outputs) { [&](BuildableOpaque bo) {
if (log) break; return sub->getBuildLog(bo.path);
log = sub->getBuildLog(output.second); },
} [&](BuildableFromDrv bfd) {
return sub->getBuildLog(bfd.drvPath);
},
}, b);
if (!log) continue; if (!log) continue;
stopProgressBar(); stopProgressBar();
printInfo("got build log for '%s' from '%s'", installable->what(), sub->getUri()); printInfo("got build log for '%s' from '%s'", installable->what(), sub->getUri());
View file
@ -77,14 +77,16 @@ struct CmdMakeContentAddressable : StorePathsCommand, MixJSON
auto narHash = hashModuloSink.finish().first; auto narHash = hashModuloSink.finish().first;
ValidPathInfo info(store->makeFixedOutputPath(FileIngestionMethod::Recursive, narHash, path.name(), references, hasSelfReference)); ValidPathInfo info {
store->makeFixedOutputPath(FileIngestionMethod::Recursive, narHash, path.name(), references, hasSelfReference),
narHash,
};
info.references = std::move(references); info.references = std::move(references);
if (hasSelfReference) info.references.insert(info.path); if (hasSelfReference) info.references.insert(info.path);
info.narHash = narHash;
info.narSize = sink.s->size(); info.narSize = sink.s->size();
info.ca = FixedOutputHash { info.ca = FixedOutputHash {
.method = FileIngestionMethod::Recursive, .method = FileIngestionMethod::Recursive,
.hash = *info.narHash, .hash = info.narHash,
}; };
if (!json) if (!json)
View file
@ -129,11 +129,13 @@ struct ProfileManifest
auto narHash = hashString(htSHA256, *sink.s); auto narHash = hashString(htSHA256, *sink.s);
ValidPathInfo info(store->makeFixedOutputPath(FileIngestionMethod::Recursive, narHash, "profile", references)); ValidPathInfo info {
store->makeFixedOutputPath(FileIngestionMethod::Recursive, narHash, "profile", references),
narHash,
};
info.references = std::move(references); info.references = std::move(references);
info.narHash = narHash;
info.narSize = sink.s->size(); info.narSize = sink.s->size();
info.ca = FixedOutputHash { .method = FileIngestionMethod::Recursive, .hash = *info.narHash }; info.ca = FixedOutputHash { .method = FileIngestionMethod::Recursive, .hash = info.narHash };
auto source = StringSource { *sink.s }; auto source = StringSource { *sink.s };
store->addToStore(info, source); store->addToStore(info, source);
View file
@ -33,12 +33,17 @@ extern "C" {
#include "command.hh" #include "command.hh"
#include "finally.hh" #include "finally.hh"
#if HAVE_BOEHMGC
#define GC_INCLUDE_NEW #define GC_INCLUDE_NEW
#include <gc/gc_cpp.h> #include <gc/gc_cpp.h>
#endif
namespace nix { namespace nix {
struct NixRepl : gc struct NixRepl
#if HAVE_BOEHMGC
: gc
#endif
{ {
string curDir; string curDir;
std::unique_ptr<EvalState> state; std::unique_ptr<EvalState> state;
@ -489,8 +494,8 @@ bool NixRepl::processLine(string line)
if (runProgram(settings.nixBinDir + "/nix", Strings{"build", "--no-link", drvPathRaw}) == 0) { if (runProgram(settings.nixBinDir + "/nix", Strings{"build", "--no-link", drvPathRaw}) == 0) {
auto drv = state->store->readDerivation(drvPath); auto drv = state->store->readDerivation(drvPath);
std::cout << std::endl << "this derivation produced the following outputs:" << std::endl; std::cout << std::endl << "this derivation produced the following outputs:" << std::endl;
for (auto & i : drv.outputs) for (auto & i : drv.outputsAndPaths(*state->store))
std::cout << fmt(" %s -> %s\n", i.first, state->store->printStorePath(i.second.path(*state->store, drv.name))); std::cout << fmt(" %s -> %s\n", i.first, state->store->printStorePath(i.second.second));
} }
} else if (command == ":i") { } else if (command == ":i") {
runProgram(settings.nixBinDir + "/nix-env", Strings{"-i", drvPathRaw}); runProgram(settings.nixBinDir + "/nix-env", Strings{"-i", drvPathRaw});
View file
@ -177,7 +177,7 @@ struct CmdSearch : InstallableCommand, MixJSON
} }
}; };
for (auto & [cursor, prefix] : installable->getCursors(*state, true)) for (auto & [cursor, prefix] : installable->getCursors(*state))
visit(*cursor, parseAttrPath(*state, prefix)); visit(*cursor, parseAttrPath(*state, prefix));
if (!json && !results) if (!json && !results)
View file
@ -67,13 +67,21 @@ struct CmdShowDerivation : InstallablesCommand
{ {
auto outputsObj(drvObj.object("outputs")); auto outputsObj(drvObj.object("outputs"));
for (auto & output : drv.outputs) { for (auto & output : drv.outputsAndPaths(*store)) {
auto outputObj(outputsObj.object(output.first)); auto outputObj(outputsObj.object(output.first));
outputObj.attr("path", store->printStorePath(output.second.path(*store, drv.name))); outputObj.attr("path", store->printStorePath(output.second.second));
if (auto hash = std::get_if<DerivationOutputFixed>(&output.second.output)) {
outputObj.attr("hashAlgo", hash->hash.printMethodAlgo()); std::visit(overloaded {
outputObj.attr("hash", hash->hash.hash.to_string(Base16, false)); [&](DerivationOutputInputAddressed doi) {
} },
[&](DerivationOutputCAFixed dof) {
outputObj.attr("hashAlgo", dof.hash.printMethodAlgo());
outputObj.attr("hash", dof.hash.hash.to_string(Base16, false));
},
[&](DerivationOutputCAFloating dof) {
outputObj.attr("hashAlgo", makeFileIngestionPrefix(dof.method) + printHashType(dof.hashType));
},
}, output.second.first.output);
} }
} }
View file
@ -91,15 +91,15 @@ struct CmdVerify : StorePathsCommand
std::unique_ptr<AbstractHashSink> hashSink; std::unique_ptr<AbstractHashSink> hashSink;
if (!info->ca) if (!info->ca)
hashSink = std::make_unique<HashSink>(info->narHash->type); hashSink = std::make_unique<HashSink>(info->narHash.type);
else else
hashSink = std::make_unique<HashModuloSink>(info->narHash->type, std::string(info->path.hashPart())); hashSink = std::make_unique<HashModuloSink>(info->narHash.type, std::string(info->path.hashPart()));
store->narFromPath(info->path, *hashSink); store->narFromPath(info->path, *hashSink);
auto hash = hashSink->finish(); auto hash = hashSink->finish();
if (hash.first != *info->narHash) { if (hash.first != info->narHash) {
corrupted++; corrupted++;
act2.result(resCorruptedPath, store->printStorePath(info->path)); act2.result(resCorruptedPath, store->printStorePath(info->path));
logError({ logError({
@ -107,7 +107,7 @@ struct CmdVerify : StorePathsCommand
.hint = hintfmt( .hint = hintfmt(
"path '%s' was modified! expected hash '%s', got '%s'", "path '%s' was modified! expected hash '%s', got '%s'",
store->printStorePath(info->path), store->printStorePath(info->path),
info->narHash->to_string(Base32, true), info->narHash.to_string(Base32, true),
hash.first.to_string(Base32, true)) hash.first.to_string(Base32, true))
}); });
View file

@ -218,7 +218,9 @@ outPath=$(nix-build --no-out-link -E '
nix copy --to file://$cacheDir?write-nar-listing=1 $outPath nix copy --to file://$cacheDir?write-nar-listing=1 $outPath
[[ $(cat $cacheDir/$(basename $outPath).ls) = '{"version":1,"root":{"type":"directory","entries":{"bar":{"type":"regular","size":4,"narOffset":232},"link":{"type":"symlink","target":"xyzzy"}}}}' ]] diff -u \
<(jq -S < $cacheDir/$(basename $outPath | cut -c1-32).ls) \
<(echo '{"version":1,"root":{"type":"directory","entries":{"bar":{"type":"regular","size":4,"narOffset":232},"link":{"type":"symlink","target":"xyzzy"}}}}' | jq -S)
# Test debug info index generation. # Test debug info index generation.
@ -234,4 +236,6 @@ outPath=$(nix-build --no-out-link -E '
nix copy --to "file://$cacheDir?index-debug-info=1&compression=none" $outPath nix copy --to "file://$cacheDir?index-debug-info=1&compression=none" $outPath
[[ $(cat $cacheDir/debuginfo/02623eda209c26a59b1a8638ff7752f6b945c26b.debug) = '{"archive":"../nar/100vxs724qr46phz8m24iswmg9p3785hsyagz0kchf6q6gf06sw6.nar","member":"lib/debug/.build-id/02/623eda209c26a59b1a8638ff7752f6b945c26b.debug"}' ]] diff -u \
<(cat $cacheDir/debuginfo/02623eda209c26a59b1a8638ff7752f6b945c26b.debug | jq -S) \
<(echo '{"archive":"../nar/100vxs724qr46phz8m24iswmg9p3785hsyagz0kchf6q6gf06sw6.nar","member":"lib/debug/.build-id/02/623eda209c26a59b1a8638ff7752f6b945c26b.debug"}' | jq -S)
tests/build-hook-ca.nix Normal file
View file
@ -0,0 +1,45 @@
{ busybox }:
with import ./config.nix;
let
mkDerivation = args:
derivation ({
inherit system;
builder = busybox;
args = ["sh" "-e" args.builder or (builtins.toFile "builder-${args.name}.sh" "if [ -e .attrs.sh ]; then source .attrs.sh; fi; eval \"$buildCommand\"")];
outputHashMode = "recursive";
outputHashAlgo = "sha256";
} // removeAttrs args ["builder" "meta"])
// { meta = args.meta or {}; };
input1 = mkDerivation {
shell = busybox;
name = "build-remote-input-1";
buildCommand = "echo FOO > $out";
requiredSystemFeatures = ["foo"];
outputHash = "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=";
};
input2 = mkDerivation {
shell = busybox;
name = "build-remote-input-2";
buildCommand = "echo BAR > $out";
requiredSystemFeatures = ["bar"];
outputHash = "sha256-XArauVH91AVwP9hBBQNlkX9ccuPpSYx9o0zeIHb6e+Q=";
};
in
mkDerivation {
shell = busybox;
name = "build-remote";
buildCommand =
''
read x < ${input1}
read y < ${input2}
echo "$x $y" > $out
'';
outputHash = "sha256-3YGhlOfbGUm9hiPn2teXXTT8M1NEpDFvfXkxMaJRld0=";
}
View file
@ -23,6 +23,17 @@ let
shell = busybox; shell = busybox;
name = "build-remote-input-2"; name = "build-remote-input-2";
buildCommand = "echo BAR > $out"; buildCommand = "echo BAR > $out";
requiredSystemFeatures = ["bar"];
};
input3 = mkDerivation {
shell = busybox;
name = "build-remote-input-3";
buildCommand = ''
read x < ${input2}
echo $x BAZ > $out
'';
requiredSystemFeatures = ["baz"];
}; };
in in
@ -33,7 +44,7 @@ in
buildCommand = buildCommand =
'' ''
read x < ${input1} read x < ${input1}
read y < ${input2} read y < ${input3}
echo $x$y > $out echo "$x $y" > $out
''; '';
} }
View file
@ -0,0 +1,5 @@
source common.sh
file=build-hook-ca.nix
source build-remote.sh

View file
source common.sh
file=build-hook.nix
source build-remote.sh
View file
@ -1,31 +1,47 @@
source common.sh
clearStore
if ! canUseSandbox; then exit; fi if ! canUseSandbox; then exit; fi
if ! [[ $busybox =~ busybox ]]; then exit; fi if ! [[ $busybox =~ busybox ]]; then exit; fi
chmod -R u+w $TEST_ROOT/machine0 || true
chmod -R u+w $TEST_ROOT/machine1 || true
chmod -R u+w $TEST_ROOT/machine2 || true
rm -rf $TEST_ROOT/machine0 $TEST_ROOT/machine1 $TEST_ROOT/machine2
rm -f $TEST_ROOT/result
unset NIX_STORE_DIR unset NIX_STORE_DIR
unset NIX_STATE_DIR unset NIX_STATE_DIR
function join_by { local d=$1; shift; echo -n "$1"; shift; printf "%s" "${@/#/$d}"; }
builders=(
# system-features will automatically be added to the outer URL, but not inner
# remote-store URL.
"ssh://localhost?remote-store=$TEST_ROOT/machine1?system-features=foo - - 1 1 foo"
"$TEST_ROOT/machine2 - - 1 1 bar"
"ssh-ng://localhost?remote-store=$TEST_ROOT/machine3?system-features=baz - - 1 1 baz"
)
# Note: ssh://localhost bypasses ssh, directly invoking nix-store as a # Note: ssh://localhost bypasses ssh, directly invoking nix-store as a
# child process. This allows us to test LegacySSHStore::buildDerivation(). # child process. This allows us to test LegacySSHStore::buildDerivation().
nix build -L -v -f build-hook.nix -o $TEST_ROOT/result --max-jobs 0 \ # ssh-ng://... likewise allows us to test RemoteStore::buildDerivation().
nix build -L -v -f $file -o $TEST_ROOT/result --max-jobs 0 \
--arg busybox $busybox \ --arg busybox $busybox \
--store $TEST_ROOT/machine0 \ --store $TEST_ROOT/machine0 \
--builders "ssh://localhost?remote-store=$TEST_ROOT/machine1; $TEST_ROOT/machine2 - - 1 1 foo" \ --builders "$(join_by '; ' "${builders[@]}")"
--system-features foo
outPath=$(readlink -f $TEST_ROOT/result) outPath=$(readlink -f $TEST_ROOT/result)
cat $TEST_ROOT/machine0/$outPath | grep FOOBAR grep 'FOO BAR BAZ' $TEST_ROOT/machine0/$outPath
# Ensure that input1 was built on store2 due to the required feature. set -o pipefail
(! nix path-info --store $TEST_ROOT/machine1 --all | grep builder-build-remote-input-1.sh)
nix path-info --store $TEST_ROOT/machine2 --all | grep builder-build-remote-input-1.sh # Ensure that input1 was built on store1 due to the required feature.
nix path-info --store $TEST_ROOT/machine1 --all \
| grep builder-build-remote-input-1.sh \
| grep -v builder-build-remote-input-2.sh \
| grep -v builder-build-remote-input-3.sh
# Ensure that input2 was built on store2 due to the required feature.
nix path-info --store $TEST_ROOT/machine2 --all \
| grep -v builder-build-remote-input-1.sh \
| grep builder-build-remote-input-2.sh \
| grep -v builder-build-remote-input-3.sh
# Ensure that input3 was built on store3 due to the required feature.
nix path-info --store $TEST_ROOT/machine3 --all \
| grep -v builder-build-remote-input-1.sh \
| grep -v builder-build-remote-input-2.sh \
| grep builder-build-remote-input-3.sh
View file
@ -1,5 +1,5 @@
nix_tests = \ nix_tests = \
init.sh hash.sh lang.sh add.sh simple.sh dependencies.sh \ hash.sh lang.sh add.sh simple.sh dependencies.sh \
config.sh \ config.sh \
gc.sh \ gc.sh \
gc-concurrent.sh \ gc-concurrent.sh \
@ -14,7 +14,7 @@ nix_tests = \
placeholders.sh nix-shell.sh \ placeholders.sh nix-shell.sh \
linux-sandbox.sh \ linux-sandbox.sh \
build-dry.sh \ build-dry.sh \
build-remote.sh \ build-remote-input-addressed.sh \
nar-access.sh \ nar-access.sh \
structured-attrs.sh \ structured-attrs.sh \
fetchGit.sh \ fetchGit.sh \
@ -34,6 +34,7 @@ nix_tests = \
recursive.sh \ recursive.sh \
flakes.sh flakes.sh
# parallel.sh # parallel.sh
# build-remote-content-addressed-fixed.sh \
install-tests += $(foreach x, $(nix_tests), tests/$(x)) install-tests += $(foreach x, $(nix_tests), tests/$(x))
View file
@ -26,12 +26,24 @@ nix cat-store $storePath/foo/baz > baz.cat-nar
diff -u baz.cat-nar $storePath/foo/baz diff -u baz.cat-nar $storePath/foo/baz
# Test --json. # Test --json.
[[ $(nix ls-nar --json $narFile /) = '{"type":"directory","entries":{"foo":{},"foo-x":{},"qux":{},"zyx":{}}}' ]] diff -u \
[[ $(nix ls-nar --json -R $narFile /foo) = '{"type":"directory","entries":{"bar":{"type":"regular","size":0,"narOffset":368},"baz":{"type":"regular","size":0,"narOffset":552},"data":{"type":"regular","size":58,"narOffset":736}}}' ]] <(nix ls-nar --json $narFile / | jq -S) \
[[ $(nix ls-nar --json -R $narFile /foo/bar) = '{"type":"regular","size":0,"narOffset":368}' ]] <(echo '{"type":"directory","entries":{"foo":{},"foo-x":{},"qux":{},"zyx":{}}}' | jq -S)
[[ $(nix ls-store --json $storePath) = '{"type":"directory","entries":{"foo":{},"foo-x":{},"qux":{},"zyx":{}}}' ]] diff -u \
[[ $(nix ls-store --json -R $storePath/foo) = '{"type":"directory","entries":{"bar":{"type":"regular","size":0},"baz":{"type":"regular","size":0},"data":{"type":"regular","size":58}}}' ]] <(nix ls-nar --json -R $narFile /foo | jq -S) \
[[ $(nix ls-store --json -R $storePath/foo/bar) = '{"type":"regular","size":0}' ]] <(echo '{"type":"directory","entries":{"bar":{"type":"regular","size":0,"narOffset":368},"baz":{"type":"regular","size":0,"narOffset":552},"data":{"type":"regular","size":58,"narOffset":736}}}' | jq -S)
diff -u \
<(nix ls-nar --json -R $narFile /foo/bar | jq -S) \
<(echo '{"type":"regular","size":0,"narOffset":368}' | jq -S)
diff -u \
<(nix ls-store --json $storePath | jq -S) \
<(echo '{"type":"directory","entries":{"foo":{},"foo-x":{},"qux":{},"zyx":{}}}' | jq -S)
diff -u \
<(nix ls-store --json -R $storePath/foo | jq -S) \
<(echo '{"type":"directory","entries":{"bar":{"type":"regular","size":0},"baz":{"type":"regular","size":0},"data":{"type":"regular","size":58}}}' | jq -S)
diff -u \
<(nix ls-store --json -R $storePath/foo/bar| jq -S) \
<(echo '{"type":"regular","size":0}' | jq -S)
# Test missing files. # Test missing files.
nix ls-store --json -R $storePath/xyzzy 2>&1 | grep 'does not exist in NAR' nix ls-store --json -R $storePath/xyzzy 2>&1 | grep 'does not exist in NAR'
View file
@ -2,6 +2,9 @@ source common.sh
clearStore clearStore
# Ensure "fake ssh" remote store works just as legacy fake ssh would.
nix --store ssh-ng://localhost?remote-store=$TEST_ROOT/other-store doctor
startDaemon startDaemon
storeCleared=1 NIX_REMOTE_=$NIX_REMOTE $SHELL ./user-envs.sh storeCleared=1 NIX_REMOTE_=$NIX_REMOTE $SHELL ./user-envs.sh