Merge remote-tracking branch 'upstream/master' into trustless-remote-builder-simple
commit 9dd28a65c8
@@ -370,6 +370,33 @@ false</literal>.</para>
 
 </varlistentry>
 
+<varlistentry xml:id="conf-hashed-mirrors"><term><literal>hashed-mirrors</literal></term>
+
+<listitem><para>A list of web servers used by
+<function>builtins.fetchurl</function> to obtain files by hash.
+Given a hash type <replaceable>ht</replaceable> and a base-16 hash
+<replaceable>h</replaceable>, Nix will try to download the file
+from
+<literal>hashed-mirror/<replaceable>ht</replaceable>/<replaceable>h</replaceable></literal>.
+This allows files to be downloaded even if they have disappeared
+from their original URI. For example, given the hashed mirror
+<literal>http://tarballs.example.com/</literal>, when building the
+derivation
+
+<programlisting>
+builtins.fetchurl {
+url = "https://example.org/foo-1.2.3.tar.xz";
+sha256 = "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae";
+}
+</programlisting>
+
+Nix will attempt to download this file from
+<literal>http://tarballs.example.com/sha256/2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae</literal>
+first. If it is not available there, if will try the original URI.</para></listitem>
+
+</varlistentry>
+
+
 <varlistentry xml:id="conf-http-connections"><term><literal>http-connections</literal></term>
 
 <listitem><para>The maximum number of parallel TCP connections
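[Illustration, not part of the diff: the documentation added above describes the "hashed-mirrors" setting that this commit re-introduces in globals.hh further down. Assuming the usual name = value syntax of nix.conf, enabling such a mirror would look like:

    hashed-mirrors = http://tarballs.example.com/

With that in place, builtins.fetchurl first tries http://tarballs.example.com/<ht>/<h> (hash type and base-16 hash) and only then falls back to the original URL, exactly as the paragraph above describes.]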
@@ -80,7 +80,7 @@ SV * queryReferences(char * path)
 SV * queryPathHash(char * path)
 PPCODE:
 try {
-auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash->to_string(Base32, true);
+auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash.to_string(Base32, true);
 XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
 } catch (Error & e) {
 croak("%s", e.what());
@@ -106,7 +106,7 @@ SV * queryPathInfo(char * path, int base32)
 XPUSHs(&PL_sv_undef);
 else
 XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(*info->deriver).c_str(), 0)));
-auto s = info->narHash->to_string(base32 ? Base32 : Base16, true);
+auto s = info->narHash.to_string(base32 ? Base32 : Base16, true);
 XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
 mXPUSHi(info->registrationTime);
 mXPUSHi(info->narSize);
@@ -303,10 +303,10 @@ SV * derivationFromPath(char * drvPath)
 hash = newHV();
 
 HV * outputs = newHV();
-for (auto & i : drv.outputs)
+for (auto & i : drv.outputsAndPaths(*store()))
 hv_store(
 outputs, i.first.c_str(), i.first.size(),
-newSVpv(store()->printStorePath(i.second.path(*store(), drv.name)).c_str(), 0),
+newSVpv(store()->printStorePath(i.second.second).c_str(), 0),
 0);
 hv_stores(hash, "outputs", newRV((SV *) outputs));
 
@@ -113,9 +113,9 @@ static void prim_scopedImport(EvalState & state, const Pos & pos, Value * * args
 state.mkList(*outputsVal, drv.outputs.size());
 unsigned int outputs_index = 0;
 
-for (const auto & o : drv.outputs) {
+for (const auto & o : drv.outputsAndPaths(*state.store)) {
 v2 = state.allocAttr(w, state.symbols.create(o.first));
-mkString(*v2, state.store->printStorePath(o.second.path(*state.store, drv.name)), {"!" + o.first + "!" + path});
+mkString(*v2, state.store->printStorePath(o.second.second), {"!" + o.first + "!" + path});
 outputsVal->listElems()[outputs_index] = state.allocValue();
 mkString(*(outputsVal->listElems()[outputs_index++]), o.first);
 }
@@ -852,9 +852,9 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
 
 state.mkAttrs(v, 1 + drv.outputs.size());
 mkString(*state.allocAttr(v, state.sDrvPath), drvPathS, {"=" + drvPathS});
-for (auto & i : drv.outputs) {
+for (auto & i : drv.outputsAndPaths(*state.store)) {
 mkString(*state.allocAttr(v, state.symbols.create(i.first)),
-state.store->printStorePath(i.second.path(*state.store, drv.name)), {"!" + i.first + "!" + drvPathS});
+state.store->printStorePath(i.second.second), {"!" + i.first + "!" + drvPathS});
 }
 v.attrs->sort();
 }
@@ -212,7 +212,7 @@ static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
 : hashFile(htSHA256, path);
 if (hash != *expectedHash)
 throw Error((unsigned int) 102, "hash mismatch in file downloaded from '%s':\n wanted: %s\n got: %s",
-*url, expectedHash->to_string(Base32, true), hash->to_string(Base32, true));
+*url, expectedHash->to_string(Base32, true), hash.to_string(Base32, true));
 }
 
 if (state.allowedPaths)
@@ -130,12 +130,12 @@ std::pair<Tree, Input> Input::fetch(ref<Store> store) const
 tree.actualPath = store->toRealPath(tree.storePath);
 
 auto narHash = store->queryPathInfo(tree.storePath)->narHash;
-input.attrs.insert_or_assign("narHash", narHash->to_string(SRI, true));
+input.attrs.insert_or_assign("narHash", narHash.to_string(SRI, true));
 
 if (auto prevNarHash = getNarHash()) {
 if (narHash != *prevNarHash)
 throw Error((unsigned int) 102, "NAR hash mismatch in input '%s' (%s), expected '%s', got '%s'",
-to_string(), tree.actualPath, prevNarHash->to_string(SRI, true), narHash->to_string(SRI, true));
+to_string(), tree.actualPath, prevNarHash->to_string(SRI, true), narHash.to_string(SRI, true));
 }
 
 if (auto prevLastModified = getLastModified()) {
@@ -67,8 +67,10 @@ DownloadFileResult downloadFile(
 StringSink sink;
 dumpString(*res.data, sink);
 auto hash = hashString(htSHA256, *res.data);
-ValidPathInfo info(store->makeFixedOutputPath(FileIngestionMethod::Flat, hash, name));
-info.narHash = hashString(htSHA256, *sink.s);
+ValidPathInfo info {
+store->makeFixedOutputPath(FileIngestionMethod::Flat, hash, name),
+hashString(htSHA256, *sink.s),
+};
 info.narSize = sink.s->size();
 info.ca = FixedOutputHash {
 .method = FileIngestionMethod::Flat,
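[A note on the recurring pattern, mine rather than the commit's: ValidPathInfo is switched from a one-argument constructor plus an optional narHash field to a brace construction in which the NAR hash is mandatory. A minimal sketch of the new shape, reusing the calls from the hunk above and assuming the constructor signature implied by the diff:

    // Store path and NAR hash are now both required up front; the remaining
    // metadata fields are still assigned afterwards, as before.
    ValidPathInfo info {
        store->makeFixedOutputPath(FileIngestionMethod::Flat, hash, name),
        hashString(htSHA256, *sink.s),   // NAR hash, no longer optional
    };
    info.narSize = sink.s->size();

Later hunks (binary-cache-store, build.cc, local-store, the daemon) apply the same change wherever a ValidPathInfo used to be created without its hash.]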
@@ -143,7 +143,7 @@ struct FileSource : FdSource
 void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource,
 RepairFlag repair, CheckSigsFlag checkSigs)
 {
-assert(info.narHash && info.narSize);
+assert(info.narSize);
 
 if (!repair && isValidPath(info.path)) {
 // FIXME: copyNAR -> null sink
@@ -381,7 +381,10 @@ StorePath BinaryCacheStore::addToStore(const string & name, const Path & srcPath
 h = hashString(hashAlgo, s);
 }
 
-ValidPathInfo info(makeFixedOutputPath(method, *h, name));
+ValidPathInfo info {
+makeFixedOutputPath(method, *h, name),
+Hash::dummy, // Will be fixed in addToStore, which recomputes nar hash
+};
 
 auto source = StringSource { *sink.s };
 addToStore(info, source, repair, CheckSigs);
@@ -392,7 +395,10 @@ StorePath BinaryCacheStore::addTextToStore(const string & name, const string & s,
 StorePath BinaryCacheStore::addTextToStore(const string & name, const string & s,
 const StorePathSet & references, RepairFlag repair)
 {
-ValidPathInfo info(computeStorePathForText(name, s, references));
+ValidPathInfo info {
+computeStorePathForText(name, s, references),
+Hash::dummy, // Will be fixed in addToStore, which recomputes nar hash
+};
 info.references = references;
 
 if (repair || !isValidPath(info.path)) {
@@ -1181,8 +1181,8 @@ void DerivationGoal::haveDerivation()
 
 retrySubstitution = false;
 
-for (auto & i : drv->outputs)
+for (auto & i : drv->outputsAndPaths(worker.store))
-worker.store.addTempRoot(i.second.path(worker.store, drv->name));
+worker.store.addTempRoot(i.second.second);
 
 /* Check what outputs paths are not already valid. */
 auto invalidOutputs = checkPathValidity(false, buildMode == bmRepair);
@@ -1288,14 +1288,14 @@ void DerivationGoal::repairClosure()
 
 /* Get the output closure. */
 StorePathSet outputClosure;
-for (auto & i : drv->outputs) {
+for (auto & i : drv->outputsAndPaths(worker.store)) {
 if (!wantOutput(i.first, wantedOutputs)) continue;
-worker.store.computeFSClosure(i.second.path(worker.store, drv->name), outputClosure);
+worker.store.computeFSClosure(i.second.second, outputClosure);
 }
 
 /* Filter out our own outputs (which we have already checked). */
-for (auto & i : drv->outputs)
+for (auto & i : drv->outputsAndPaths(worker.store))
-outputClosure.erase(i.second.path(worker.store, drv->name));
+outputClosure.erase(i.second.second);
 
 /* Get all dependencies of this derivation so that we know which
 derivation is responsible for which path in the output
@@ -1306,8 +1306,8 @@ void DerivationGoal::repairClosure()
 for (auto & i : inputClosure)
 if (i.isDerivation()) {
 Derivation drv = worker.store.derivationFromPath(i);
-for (auto & j : drv.outputs)
+for (auto & j : drv.outputsAndPaths(worker.store))
-outputsToDrv.insert_or_assign(j.second.path(worker.store, drv.name), i);
+outputsToDrv.insert_or_assign(j.second.second, i);
 }
 
 /* Check each path (slow!). */
@@ -1466,10 +1466,10 @@ void DerivationGoal::tryToBuild()
 
 /* If any of the outputs already exist but are not valid, delete
 them. */
-for (auto & i : drv->outputs) {
+for (auto & i : drv->outputsAndPaths(worker.store)) {
-if (worker.store.isValidPath(i.second.path(worker.store, drv->name))) continue;
+if (worker.store.isValidPath(i.second.second)) continue;
-debug("removing invalid path '%s'", worker.store.printStorePath(i.second.path(worker.store, drv->name)));
+debug("removing invalid path '%s'", worker.store.printStorePath(i.second.second));
-deletePath(worker.store.Store::toRealPath(i.second.path(worker.store, drv->name)));
+deletePath(worker.store.Store::toRealPath(i.second.second));
 }
 
 /* Don't do a remote build if the derivation has the attribute
@@ -1919,8 +1919,8 @@ StorePathSet DerivationGoal::exportReferences(const StorePathSet & storePaths)
 for (auto & j : paths2) {
 if (j.isDerivation()) {
 Derivation drv = worker.store.derivationFromPath(j);
-for (auto & k : drv.outputs)
+for (auto & k : drv.outputsAndPaths(worker.store))
-worker.store.computeFSClosure(k.second.path(worker.store, drv.name), paths);
+worker.store.computeFSClosure(k.second.second, paths);
 }
 }
 
@@ -2014,8 +2014,8 @@ void DerivationGoal::startBuilder()
 chownToBuilder(tmpDir);
 
 /* Substitute output placeholders with the actual output paths. */
-for (auto & output : drv->outputs)
+for (auto & output : drv->outputsAndPaths(worker.store))
-inputRewrites[hashPlaceholder(output.first)] = worker.store.printStorePath(output.second.path(worker.store, drv->name));
+inputRewrites[hashPlaceholder(output.first)] = worker.store.printStorePath(output.second.second);
 
 /* Construct the environment passed to the builder. */
 initEnv();
@@ -2199,8 +2199,8 @@ void DerivationGoal::startBuilder()
 rebuilding a path that is in settings.dirsInChroot
 (typically the dependencies of /bin/sh). Throw them
 out. */
-for (auto & i : drv->outputs)
+for (auto & i : drv->outputsAndPaths(worker.store))
-dirsInChroot.erase(worker.store.printStorePath(i.second.path(worker.store, drv->name)));
+dirsInChroot.erase(worker.store.printStorePath(i.second.second));
 
 #elif __APPLE__
 /* We don't really have any parent prep work to do (yet?)
@@ -2612,8 +2612,8 @@ void DerivationGoal::writeStructuredAttrs()
 
 /* Add an "outputs" object containing the output paths. */
 nlohmann::json outputs;
-for (auto & i : drv->outputs)
+for (auto & i : drv->outputsAndPaths(worker.store))
-outputs[i.first] = rewriteStrings(worker.store.printStorePath(i.second.path(worker.store, drv->name)), inputRewrites);
+outputs[i.first] = rewriteStrings(worker.store.printStorePath(i.second.second), inputRewrites);
 json["outputs"] = outputs;
 
 /* Handle exportReferencesGraph. */
@@ -2815,9 +2815,9 @@ struct RestrictedStore : public LocalFSStore
 if (!goal.isAllowed(path.path))
 throw InvalidPath("cannot build unknown path '%s' in recursive Nix", printStorePath(path.path));
 auto drv = derivationFromPath(path.path);
-for (auto & output : drv.outputs)
+for (auto & output : drv.outputsAndPaths(*this))
 if (wantOutput(output.first, path.outputs))
-newPaths.insert(output.second.path(*this, drv.name));
+newPaths.insert(output.second.second);
 } else if (!goal.isAllowed(path.path))
 throw InvalidPath("cannot build unknown path '%s' in recursive Nix", printStorePath(path.path));
 }
@@ -3617,8 +3617,8 @@ void DerivationGoal::registerOutputs()
 to do anything here. */
 if (hook) {
 bool allValid = true;
-for (auto & i : drv->outputs)
+for (auto & i : drv->outputsAndPaths(worker.store))
-if (!worker.store.isValidPath(i.second.path(worker.store, drv->name))) allValid = false;
+if (!worker.store.isValidPath(i.second.second)) allValid = false;
 if (allValid) return;
 }
 
@@ -3639,23 +3639,23 @@ void DerivationGoal::registerOutputs()
 Nix calls. */
 StorePathSet referenceablePaths;
 for (auto & p : inputPaths) referenceablePaths.insert(p);
-for (auto & i : drv->outputs) referenceablePaths.insert(i.second.path(worker.store, drv->name));
+for (auto & i : drv->outputsAndPaths(worker.store)) referenceablePaths.insert(i.second.second);
 for (auto & p : addedPaths) referenceablePaths.insert(p);
 
 /* Check whether the output paths were created, and grep each
 output path to determine what other paths it references. Also make all
 output paths read-only. */
-for (auto & i : drv->outputs) {
+for (auto & i : drv->outputsAndPaths(worker.store)) {
-auto path = worker.store.printStorePath(i.second.path(worker.store, drv->name));
+auto path = worker.store.printStorePath(i.second.second);
-if (!missingPaths.count(i.second.path(worker.store, drv->name))) continue;
+if (!missingPaths.count(i.second.second)) continue;
 
 Path actualPath = path;
 if (needsHashRewrite()) {
-auto r = redirectedOutputs.find(i.second.path(worker.store, drv->name));
+auto r = redirectedOutputs.find(i.second.second);
 if (r != redirectedOutputs.end()) {
 auto redirected = worker.store.Store::toRealPath(r->second);
 if (buildMode == bmRepair
-&& redirectedBadOutputs.count(i.second.path(worker.store, drv->name))
+&& redirectedBadOutputs.count(i.second.second)
 && pathExists(redirected))
 replaceValidPath(path, redirected);
 if (buildMode == bmCheck)
@@ -3722,7 +3722,7 @@ void DerivationGoal::registerOutputs()
 hash). */
 std::optional<ContentAddress> ca;
 
-if (! std::holds_alternative<DerivationOutputInputAddressed>(i.second.output)) {
+if (! std::holds_alternative<DerivationOutputInputAddressed>(i.second.first.output)) {
 DerivationOutputCAFloating outputHash;
 std::visit(overloaded {
 [&](DerivationOutputInputAddressed doi) {
@@ -3737,7 +3737,7 @@ void DerivationGoal::registerOutputs()
 [&](DerivationOutputCAFloating dof) {
 outputHash = dof;
 },
-}, i.second.output);
+}, i.second.first.output);
 
 if (outputHash.method == FileIngestionMethod::Flat) {
 /* The output path should be a regular file without execute permission. */
@@ -3754,12 +3754,12 @@ void DerivationGoal::registerOutputs()
 ? hashPath(outputHash.hashType, actualPath).first
 : hashFile(outputHash.hashType, actualPath);
 
-auto dest = worker.store.makeFixedOutputPath(outputHash.method, h2, i.second.path(worker.store, drv->name).name());
+auto dest = worker.store.makeFixedOutputPath(outputHash.method, h2, i.second.second.name());
 
 // true if either floating CA, or incorrect fixed hash.
 bool needsMove = true;
 
-if (auto p = std::get_if<DerivationOutputCAFixed>(& i.second.output)) {
+if (auto p = std::get_if<DerivationOutputCAFixed>(& i.second.first.output)) {
 Hash & h = p->hash.hash;
 if (h != h2) {
 
@@ -3865,8 +3865,10 @@ void DerivationGoal::registerOutputs()
 worker.markContentsGood(worker.store.parseStorePath(path));
 }
 
-ValidPathInfo info(worker.store.parseStorePath(path));
-info.narHash = hash.first;
+ValidPathInfo info {
+worker.store.parseStorePath(path),
+hash.first,
+};
 info.narSize = hash.second;
 info.references = std::move(references);
 info.deriver = drvPath;
@@ -3922,8 +3924,8 @@ void DerivationGoal::registerOutputs()
 
 /* If this is the first round of several, then move the output out of the way. */
 if (nrRounds > 1 && curRound == 1 && curRound < nrRounds && keepPreviousRound) {
-for (auto & i : drv->outputs) {
+for (auto & i : drv->outputsAndPaths(worker.store)) {
-auto path = worker.store.printStorePath(i.second.path(worker.store, drv->name));
+auto path = worker.store.printStorePath(i.second.second);
 Path prev = path + checkSuffix;
 deletePath(prev);
 Path dst = path + checkSuffix;
@@ -3940,8 +3942,8 @@ void DerivationGoal::registerOutputs()
 /* Remove the .check directories if we're done. FIXME: keep them
 if the result was not determistic? */
 if (curRound == nrRounds) {
-for (auto & i : drv->outputs) {
+for (auto & i : drv->outputsAndPaths(worker.store)) {
-Path prev = worker.store.printStorePath(i.second.path(worker.store, drv->name)) + checkSuffix;
+Path prev = worker.store.printStorePath(i.second.second) + checkSuffix;
 deletePath(prev);
 }
 }
@@ -4239,12 +4241,12 @@ void DerivationGoal::flushLine()
 StorePathSet DerivationGoal::checkPathValidity(bool returnValid, bool checkHash)
 {
 StorePathSet result;
-for (auto & i : drv->outputs) {
+for (auto & i : drv->outputsAndPaths(worker.store)) {
 if (!wantOutput(i.first, wantedOutputs)) continue;
 bool good =
-worker.store.isValidPath(i.second.path(worker.store, drv->name)) &&
+worker.store.isValidPath(i.second.second) &&
-(!checkHash || worker.pathContentsGood(i.second.path(worker.store, drv->name)));
+(!checkHash || worker.pathContentsGood(i.second.second));
-if (good == returnValid) result.insert(i.second.path(worker.store, drv->name));
+if (good == returnValid) result.insert(i.second.second);
 }
 return result;
 }
@@ -5071,7 +5073,7 @@ bool Worker::pathContentsGood(const StorePath & path)
 if (!pathExists(store.printStorePath(path)))
 res = false;
 else {
-HashResult current = hashPath(info->narHash->type, store.printStorePath(path));
+HashResult current = hashPath(info->narHash.type, store.printStorePath(path));
 Hash nullHash(htSHA256);
 res = info->narHash == nullHash || info->narHash == current.first;
 }
@@ -58,6 +58,20 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData)
 }
 };
 
+/* Try the hashed mirrors first. */
+if (getAttr("outputHashMode") == "flat")
+for (auto hashedMirror : settings.hashedMirrors.get())
+try {
+if (!hasSuffix(hashedMirror, "/")) hashedMirror += '/';
+std::optional<HashType> ht = parseHashTypeOpt(getAttr("outputHashAlgo"));
+Hash h = newHashAllowEmpty(getAttr("outputHash"), ht);
+fetch(hashedMirror + printHashType(h.type) + "/" + h.to_string(Base16, false));
+return;
+} catch (Error & e) {
+debug(e.what());
+}
+
+/* Otherwise try the specified URL. */
 fetch(mainUrl);
 }
 
@@ -289,7 +289,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
 logger->startWork();
 auto hash = store->queryPathInfo(path)->narHash;
 logger->stopWork();
-to << hash->to_string(Base16, false);
+to << hash.to_string(Base16, false);
 break;
 }
 
@@ -676,7 +676,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
 if (GET_PROTOCOL_MINOR(clientVersion) >= 17)
 to << 1;
 to << (info->deriver ? store->printStorePath(*info->deriver) : "")
-<< info->narHash->to_string(Base16, false);
+<< info->narHash.to_string(Base16, false);
 writeStorePaths(*store, to, info->references);
 to << info->registrationTime << info->narSize;
 if (GET_PROTOCOL_MINOR(clientVersion) >= 16) {
@@ -732,11 +732,12 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
 
 case wopAddToStoreNar: {
 bool repair, dontCheckSigs;
-ValidPathInfo info(store->parseStorePath(readString(from)));
+auto path = store->parseStorePath(readString(from));
 auto deriver = readString(from);
+auto narHash = Hash::parseAny(readString(from), htSHA256);
+ValidPathInfo info { path, narHash };
 if (deriver != "")
 info.deriver = store->parseStorePath(deriver);
-info.narHash = Hash::parseAny(readString(from), htSHA256);
 info.references = readStorePaths<StorePathSet>(*store, from);
 from >> info.registrationTime >> info.narSize >> info.ultimate;
 info.sigs = readStrings<StringSet>(from);
@@ -480,12 +480,12 @@ DrvHashModulo hashDerivationModulo(Store & store, const Derivation & drv, bool m
 throw Error("Regular input-addressed derivations are not yet allowed to depend on CA derivations");
 case DerivationType::CAFixed: {
 std::map<std::string, Hash> outputHashes;
-for (const auto & i : drv.outputs) {
+for (const auto & i : drv.outputsAndPaths(store)) {
-auto & dof = std::get<DerivationOutputCAFixed>(i.second.output);
+auto & dof = std::get<DerivationOutputCAFixed>(i.second.first.output);
 auto hash = hashString(htSHA256, "fixed:out:"
 + dof.hash.printMethodAlgo() + ":"
 + dof.hash.hash.to_string(Base16, false) + ":"
-+ store.printStorePath(i.second.path(store, drv.name)));
++ store.printStorePath(i.second.second));
 outputHashes.insert_or_assign(i.first, std::move(hash));
 }
 return outputHashes;
@@ -539,8 +539,8 @@ bool wantOutput(const string & output, const std::set<string> & wanted)
 StorePathSet BasicDerivation::outputPaths(const Store & store) const
 {
 StorePathSet paths;
-for (auto & i : outputs)
+for (auto & i : outputsAndPaths(store))
-paths.insert(i.second.path(store, name));
+paths.insert(i.second.second);
 return paths;
 }
 
@@ -561,6 +561,27 @@ StringSet BasicDerivation::outputNames() const
 return names;
 }
 
+DerivationOutputsAndPaths BasicDerivation::outputsAndPaths(const Store & store) const {
+DerivationOutputsAndPaths outsAndPaths;
+for (auto output : outputs)
+outsAndPaths.insert(std::make_pair(
+output.first,
+std::make_pair(output.second, output.second.path(store, name))
+)
+);
+return outsAndPaths;
+}
+
+DerivationOutputsAndOptPaths BasicDerivation::outputsAndOptPaths(const Store & store) const {
+DerivationOutputsAndOptPaths outsAndOptPaths;
+for (auto output : outputs)
+outsAndOptPaths.insert(std::make_pair(
+output.first,
+std::make_pair(output.second, output.second.pathOpt(store, output.first))
+)
+);
+return outsAndOptPaths;
+}
 
 std::string_view BasicDerivation::nameFromPath(const StorePath & drvPath) {
 auto nameWithSuffix = drvPath.name();
@@ -601,9 +622,9 @@ Source & readDerivation(Source & in, const Store & store, BasicDerivation & drv,
 void writeDerivation(Sink & out, const Store & store, const BasicDerivation & drv)
 {
 out << drv.outputs.size();
-for (auto & i : drv.outputs) {
+for (auto & i : drv.outputsAndPaths(store)) {
 out << i.first
-<< store.printStorePath(i.second.path(store, drv.name));
+<< store.printStorePath(i.second.second);
 std::visit(overloaded {
 [&](DerivationOutputInputAddressed doi) {
 out << "" << "";
@@ -616,7 +637,7 @@ void writeDerivation(Sink & out, const Store & store, const BasicDerivation & dr
 out << (makeFileIngestionPrefix(dof.method) + printHashType(dof.hashType))
 << "";
 },
-}, i.second.output);
+}, i.second.first.output);
 }
 writeStorePaths(store, out, drv.inputSrcs);
 out << drv.platform << drv.builder << drv.args;
@@ -47,6 +47,9 @@ struct DerivationOutput
 DerivationOutputCAFloating
 > output;
 std::optional<HashType> hashAlgoOpt(const Store & store) const;
+/* Note, when you use this function you should make sure that you're passing
+the right derivation name. When in doubt, you should use the safer
+interface provided by BasicDerivation::outputsAndPaths */
 std::optional<StorePath> pathOpt(const Store & store, std::string_view drvName) const;
 /* DEPRECATED: Remove after CA drvs are fully implemented */
 StorePath path(const Store & store, std::string_view drvName) const {
@@ -58,6 +61,15 @@ struct DerivationOutput
 
 typedef std::map<string, DerivationOutput> DerivationOutputs;
 
+/* These are analogues to the previous DerivationOutputs data type, but they
+also contains, for each output, the (optional) store path in which it would
+be written. To calculate values of these types, see the corresponding
+functions in BasicDerivation */
+typedef std::map<string, std::pair<DerivationOutput, StorePath>>
+DerivationOutputsAndPaths;
+typedef std::map<string, std::pair<DerivationOutput, std::optional<StorePath>>>
+DerivationOutputsAndOptPaths;
+
 /* For inputs that are sub-derivations, we specify exactly which
 output IDs we are interested in. */
 typedef std::map<StorePath, StringSet> DerivationInputs;
@@ -107,6 +119,13 @@ struct BasicDerivation
 /* Return the output names of a derivation. */
 StringSet outputNames() const;
 
+/* Calculates the maps that contains all the DerivationOutputs, but
+augmented with knowledge of the Store paths they would be written into.
+The first one of these functions will be removed when the CA work is
+completed */
+DerivationOutputsAndPaths outputsAndPaths(const Store & store) const;
+DerivationOutputsAndOptPaths outputsAndOptPaths(const Store & store) const;
+
 static std::string_view nameFromPath(const StorePath & storePath);
 };
 
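[A hypothetical usage sketch, mine rather than part of the commit, built only on the declarations above plus <iostream>; each map entry pairs an output name with (output definition, store path the output would be written to):

    #include <iostream>

    void printOutputPaths(const Store & store, const BasicDerivation & drv)
    {
        for (auto & [outputName, outAndPath] : drv.outputsAndPaths(store)) {
            const DerivationOutput & def = outAndPath.first;   // the output definition (unused here)
            const StorePath & outPath = outAndPath.second;     // the computed store path
            (void) def;
            std::cout << outputName << " -> " << store.printStorePath(outPath) << "\n";
        }
    }

This is exactly the i.second.second pattern that replaces i.second.path(store, drv.name) throughout the rest of the diff.]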
@@ -38,9 +38,9 @@ void Store::exportPath(const StorePath & path, Sink & sink)
 filesystem corruption from spreading to other machines.
 Don't complain if the stored hash is zero (unknown). */
 Hash hash = hashSink.currentHash().first;
-if (hash != info->narHash && info->narHash != Hash(info->narHash->type))
+if (hash != info->narHash && info->narHash != Hash(info->narHash.type))
 throw Error("hash of path '%s' has changed from '%s' to '%s'!",
-printStorePath(path), info->narHash->to_string(Base32, true), hash.to_string(Base32, true));
+printStorePath(path), info->narHash.to_string(Base32, true), hash.to_string(Base32, true));
 
 teeSink
 << exportMagic
@@ -69,17 +69,18 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs)
 if (magic != exportMagic)
 throw Error("Nix archive cannot be imported; wrong format");
 
-ValidPathInfo info(parseStorePath(readString(source)));
+auto path = parseStorePath(readString(source));
 
 //Activity act(*logger, lvlInfo, format("importing path '%s'") % info.path);
 
-info.references = readStorePaths<StorePathSet>(*this, source);
+auto references = readStorePaths<StorePathSet>(*this, source);
 
 auto deriver = readString(source);
+auto narHash = hashString(htSHA256, *saved.s);
+
+ValidPathInfo info { path, narHash };
 if (deriver != "")
 info.deriver = parseStorePath(deriver);
+info.references = references;
-info.narHash = hashString(htSHA256, *saved.s);
 info.narSize = saved.s->size();
 
 // Ignore optional legacy signature.
@@ -335,6 +335,9 @@
 "setuid/setgid bits or with file capabilities."};
 #endif
 
+Setting<Strings> hashedMirrors{this, {}, "hashed-mirrors",
+"A list of servers used by builtins.fetchurl to fetch files by hash."};
+
 Setting<uint64_t> minFree{this, 0, "min-free",
 "Automatically run the garbage collector when free disk space drops below the specified amount."};
 
@@ -93,6 +93,9 @@ struct LegacySSHStore : public Store
 try {
 auto conn(connections->get());
 
+/* No longer support missing NAR hash */
+assert(GET_PROTOCOL_MINOR(conn->remoteVersion) >= 4);
+
 debug("querying remote host '%s' for info on '%s'", host, printStorePath(path));
 
 conn->to << cmdQueryPathInfos << PathSet{printStorePath(path)};
@@ -100,8 +103,10 @@ struct LegacySSHStore : public Store
 
 auto p = readString(conn->from);
 if (p.empty()) return callback(nullptr);
-auto info = std::make_shared<ValidPathInfo>(parseStorePath(p));
+auto path2 = parseStorePath(p);
-assert(path == info->path);
+assert(path == path2);
+/* Hash will be set below. FIXME construct ValidPathInfo at end. */
+auto info = std::make_shared<ValidPathInfo>(path, Hash::dummy);
 
 PathSet references;
 auto deriver = readString(conn->from);
@@ -111,12 +116,14 @@ struct LegacySSHStore : public Store
 readLongLong(conn->from); // download size
 info->narSize = readLongLong(conn->from);
 
-if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 4) {
+{
 auto s = readString(conn->from);
-info->narHash = s.empty() ? std::optional<Hash>{} : Hash::parseAnyPrefixed(s);
+if (s == "")
+throw Error("NAR hash is now mandatory");
+info->narHash = Hash::parseAnyPrefixed(s);
+}
 info->ca = parseContentAddressOpt(readString(conn->from));
 info->sigs = readStrings<StringSet>(conn->from);
-}
 
 auto s = readString(conn->from);
 assert(s == "");
@@ -138,7 +145,7 @@ struct LegacySSHStore : public Store
 << cmdAddToStoreNar
 << printStorePath(info.path)
 << (info.deriver ? printStorePath(*info.deriver) : "")
-<< info.narHash->to_string(Base16, false);
+<< info.narHash.to_string(Base16, false);
 writeStorePaths(*this, conn->to, info.references);
 conn->to
 << info.registrationTime
@@ -594,7 +594,7 @@ uint64_t LocalStore::addValidPath(State & state,
 
 state.stmtRegisterValidPath.use()
 (printStorePath(info.path))
-(info.narHash->to_string(Base16, true))
+(info.narHash.to_string(Base16, true))
 (info.registrationTime == 0 ? time(0) : info.registrationTime)
 (info.deriver ? printStorePath(*info.deriver) : "", (bool) info.deriver)
 (info.narSize, info.narSize != 0)
@@ -618,11 +618,11 @@ uint64_t LocalStore::addValidPath(State & state,
 registration above is undone. */
 if (checkOutputs) checkDerivationOutputs(info.path, drv);
 
-for (auto & i : drv.outputs) {
+for (auto & i : drv.outputsAndPaths(*this)) {
 state.stmtAddDerivationOutput.use()
 (id)
 (i.first)
-(printStorePath(i.second.path(*this, drv.name)))
+(printStorePath(i.second.second))
 .exec();
 }
 }
@@ -641,25 +641,28 @@ void LocalStore::queryPathInfoUncached(const StorePath & path,
 Callback<std::shared_ptr<const ValidPathInfo>> callback) noexcept
 {
 try {
-auto info = std::make_shared<ValidPathInfo>(path);
-
 callback(retrySQLite<std::shared_ptr<ValidPathInfo>>([&]() {
 auto state(_state.lock());
 
 /* Get the path info. */
-auto useQueryPathInfo(state->stmtQueryPathInfo.use()(printStorePath(info->path)));
+auto useQueryPathInfo(state->stmtQueryPathInfo.use()(printStorePath(path)));
 
 if (!useQueryPathInfo.next())
 return std::shared_ptr<ValidPathInfo>();
 
-info->id = useQueryPathInfo.getInt(0);
+auto id = useQueryPathInfo.getInt(0);
 
+auto narHash = Hash::dummy;
 try {
-info->narHash = Hash::parseAnyPrefixed(useQueryPathInfo.getStr(1));
+narHash = Hash::parseAnyPrefixed(useQueryPathInfo.getStr(1));
 } catch (BadHash & e) {
-throw Error("in valid-path entry for '%s': %s", printStorePath(path), e.what());
+throw Error("invalid-path entry for '%s': %s", printStorePath(path), e.what());
 }
 
+auto info = std::make_shared<ValidPathInfo>(path, narHash);
+
+info->id = id;
+
 info->registrationTime = useQueryPathInfo.getInt(2);
 
 auto s = (const char *) sqlite3_column_text(state->stmtQueryPathInfo, 3);
@@ -694,7 +697,7 @@ void LocalStore::updatePathInfo(State & state, const ValidPathInfo & info)
 {
 state.stmtUpdatePathInfo.use()
 (info.narSize, info.narSize != 0)
-(info.narHash->to_string(Base16, true))
+(info.narHash.to_string(Base16, true))
 (info.ultimate ? 1 : 0, info.ultimate)
 (concatStringsSep(" ", info.sigs), !info.sigs.empty())
 (renderContentAddress(info.ca), (bool) info.ca)
@@ -920,7 +923,7 @@ void LocalStore::registerValidPaths(const ValidPathInfos & infos)
 StorePathSet paths;
 
 for (auto & i : infos) {
-assert(i.narHash && i.narHash->type == htSHA256);
+assert(i.narHash.type == htSHA256);
 if (isValidPath_(*state, i.path))
 updatePathInfo(*state, i);
 else
@@ -984,9 +987,6 @@ const PublicKeys & LocalStore::getPublicKeys()
 void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
 RepairFlag repair, CheckSigsFlag checkSigs)
 {
-if (!info.narHash)
-throw Error("cannot add path '%s' because it lacks a hash", printStorePath(info.path));
-
 if (requireSigs && checkSigs && !info.checkSignatures(*this, getPublicKeys()))
 throw Error("cannot add path '%s' because it lacks a valid signature", printStorePath(info.path));
 
@@ -1029,7 +1029,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
 
 if (hashResult.first != info.narHash)
 throw Error("hash mismatch importing path '%s';\n wanted: %s\n got: %s",
-printStorePath(info.path), info.narHash->to_string(Base32, true), hashResult.first.to_string(Base32, true));
+printStorePath(info.path), info.narHash.to_string(Base32, true), hashResult.first.to_string(Base32, true));
 
 if (hashResult.second != info.narSize)
 throw Error("size mismatch importing path '%s';\n wanted: %s\n got: %s",
@@ -1151,8 +1151,7 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, const string & name,
 
 optimisePath(realPath);
 
-ValidPathInfo info(dstPath);
+ValidPathInfo info { dstPath, narHash.first };
-info.narHash = narHash.first;
 info.narSize = narHash.second;
 info.ca = FixedOutputHash { .method = method, .hash = hash };
 registerValidPath(info);
@@ -1195,8 +1194,7 @@ StorePath LocalStore::addTextToStore(const string & name, const string & s,
 
 optimisePath(realPath);
 
-ValidPathInfo info(dstPath);
+ValidPathInfo info { dstPath, narHash };
-info.narHash = narHash;
 info.narSize = sink.s->size();
 info.references = references;
 info.ca = TextHash { .hash = hash };
@@ -1311,9 +1309,9 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
 
 std::unique_ptr<AbstractHashSink> hashSink;
 if (!info->ca || !info->references.count(info->path))
-hashSink = std::make_unique<HashSink>(info->narHash->type);
+hashSink = std::make_unique<HashSink>(info->narHash.type);
 else
-hashSink = std::make_unique<HashModuloSink>(info->narHash->type, std::string(info->path.hashPart()));
+hashSink = std::make_unique<HashModuloSink>(info->narHash.type, std::string(info->path.hashPart()));
 
 dumpPath(Store::toRealPath(i), *hashSink);
 auto current = hashSink->finish();
@@ -1322,7 +1320,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
 logError({
 .name = "Invalid hash - path modified",
 .hint = hintfmt("path '%s' was modified! expected hash '%s', got '%s'",
-printStorePath(i), info->narHash->to_string(Base32, true), current.first.to_string(Base32, true))
+printStorePath(i), info->narHash.to_string(Base32, true), current.first.to_string(Base32, true))
 });
 if (repair) repairPath(i); else errors = true;
 } else {
@@ -207,10 +207,10 @@ void Store::queryMissing(const std::vector<StorePathWithOutputs> & targets,
 ParsedDerivation parsedDrv(StorePath(path.path), *drv);
 
 PathSet invalid;
-for (auto & j : drv->outputs)
+for (auto & j : drv->outputsAndPaths(*this))
 if (wantOutput(j.first, path.outputs)
-&& !isValidPath(j.second.path(*this, drv->name)))
+&& !isValidPath(j.second.second))
-invalid.insert(printStorePath(j.second.path(*this, drv->name)));
+invalid.insert(printStorePath(j.second.second));
 if (invalid.empty()) return;
 
 if (settings.useSubstitutes && parsedDrv.substitutesAllowed()) {
@@ -189,13 +189,14 @@
 return {oInvalid, 0};
 
 auto namePart = queryNAR.getStr(1);
-auto narInfo = make_ref<NarInfo>(StorePath(hashPart + "-" + namePart));
+auto narInfo = make_ref<NarInfo>(
+StorePath(hashPart + "-" + namePart),
+Hash::parseAnyPrefixed(queryNAR.getStr(6)));
 narInfo->url = queryNAR.getStr(2);
 narInfo->compression = queryNAR.getStr(3);
 if (!queryNAR.isNull(4))
 narInfo->fileHash = Hash::parseAnyPrefixed(queryNAR.getStr(4));
 narInfo->fileSize = queryNAR.getInt(5);
-narInfo->narHash = Hash::parseAnyPrefixed(queryNAR.getStr(6));
 narInfo->narSize = queryNAR.getInt(7);
 for (auto & r : tokenizeString<Strings>(queryNAR.getStr(8), " "))
 narInfo->references.insert(StorePath(r));
@@ -232,7 +233,7 @@
 (narInfo ? narInfo->compression : "", narInfo != 0)
 (narInfo && narInfo->fileHash ? narInfo->fileHash->to_string(Base32, true) : "", narInfo && narInfo->fileHash)
 (narInfo ? narInfo->fileSize : 0, narInfo != 0 && narInfo->fileSize)
-(info->narHash->to_string(Base32, true))
+(info->narHash.to_string(Base32, true))
 (info->narSize)
 (concatStringsSep(" ", info->shortRefs()))
 (info->deriver ? std::string(info->deriver->to_string()) : "", (bool) info->deriver)
@@ -1,10 +1,11 @@
    #include "globals.hh"
    #include "nar-info.hh"
+   #include "store-api.hh"

    namespace nix {

    NarInfo::NarInfo(const Store & store, const std::string & s, const std::string & whence)
-   : ValidPathInfo(StorePath(StorePath::dummy)) // FIXME: hack
+   : ValidPathInfo(StorePath(StorePath::dummy), Hash(Hash::dummy)) // FIXME: hack
    {
    auto corrupt = [&]() {
    return Error("NAR info file '%1%' is corrupt", whence);

@@ -19,6 +20,7 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &
    };

    bool havePath = false;
+   bool haveNarHash = false;

    size_t pos = 0;
    while (pos < s.size()) {

@@ -46,8 +48,10 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &
    else if (name == "FileSize") {
    if (!string2Int(value, fileSize)) throw corrupt();
    }
-   else if (name == "NarHash")
+   else if (name == "NarHash") {
    narHash = parseHashField(value);
+   haveNarHash = true;
+   }
    else if (name == "NarSize") {
    if (!string2Int(value, narSize)) throw corrupt();
    }

@@ -76,7 +80,7 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &
    if (compression == "") compression = "bzip2";

-   if (!havePath || url.empty() || narSize == 0 || !narHash) throw corrupt();
+   if (!havePath || !haveNarHash || url.empty() || narSize == 0) throw corrupt();
    }

    std::string NarInfo::to_string(const Store & store) const

@@ -89,8 +93,8 @@ std::string NarInfo::to_string(const Store & store) const
    assert(fileHash && fileHash->type == htSHA256);
    res += "FileHash: " + fileHash->to_string(Base32, true) + "\n";
    res += "FileSize: " + std::to_string(fileSize) + "\n";
-   assert(narHash && narHash->type == htSHA256);
-   res += "NarHash: " + narHash->to_string(Base32, true) + "\n";
+   assert(narHash.type == htSHA256);
+   res += "NarHash: " + narHash.to_string(Base32, true) + "\n";
    res += "NarSize: " + std::to_string(narSize) + "\n";

    res += "References: " + concatStringsSep(" ", shortRefs()) + "\n";
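With the explicit haveNarHash flag above, a .narinfo that lacks a NarHash field is now rejected as corrupt at parse time rather than producing a NarInfo whose hash is unset. A rough sketch of that behaviour; the narinfo text and file name below are invented for illustration, not taken from this change:

    #include "nar-info.hh"
    #include <string>

    using namespace nix;

    void parseExample(const Store & store)
    {
        // Fabricated minimal narinfo body; note the missing NarHash field.
        std::string text =
            "StorePath: /nix/store/0cjlkaxmzpgvgpxgnr7bs6a4vvcfnlkl-example\n"
            "URL: nar/example.nar.xz\n"
            "Compression: xz\n"
            "NarSize: 1234\n";

        // After this change the final validity check sees haveNarHash == false
        // and throws the "NAR info file ... is corrupt" error.
        NarInfo info(store, text, "example.narinfo");
    }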
@@ -2,10 +2,12 @@

    #include "types.hh"
    #include "hash.hh"
-   #include "store-api.hh"
+   #include "path-info.hh"

    namespace nix {

+   class Store;

    struct NarInfo : ValidPathInfo
    {
    std::string url;

@@ -15,7 +17,7 @@ struct NarInfo : ValidPathInfo
    std::string system;

    NarInfo() = delete;
-   NarInfo(StorePath && path) : ValidPathInfo(std::move(path)) { }
+   NarInfo(StorePath && path, Hash narHash) : ValidPathInfo(std::move(path), narHash) { }
    NarInfo(const ValidPathInfo & info) : ValidPathInfo(info) { }
    NarInfo(const Store & store, const std::string & s, const std::string & whence);
@@ -1,5 +1,6 @@
    #pragma once

+   #include "crypto.hh"
    #include "path.hh"
    #include "hash.hh"
    #include "content-address.hh"

@@ -29,7 +30,7 @@ struct ValidPathInfo
    StorePath path;
    std::optional<StorePath> deriver;
    // TODO document this
-   std::optional<Hash> narHash;
+   Hash narHash;
    StorePathSet references;
    time_t registrationTime = 0;
    uint64_t narSize = 0; // 0 = unknown

@@ -100,8 +101,8 @@ struct ValidPathInfo

    ValidPathInfo(const ValidPathInfo & other) = default;

-   ValidPathInfo(StorePath && path) : path(std::move(path)) { };
-   ValidPathInfo(const StorePath & path) : path(path) { };
+   ValidPathInfo(StorePath && path, Hash narHash) : path(std::move(path)), narHash(narHash) { };
+   ValidPathInfo(const StorePath & path, Hash narHash) : path(path), narHash(narHash) { };

    virtual ~ValidPathInfo() { }
    };
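Since narHash is no longer std::optional<Hash>, every ValidPathInfo must be given its NAR hash at construction time; there is no "unset" state to fill in later. A minimal sketch of the pattern the later hunks in this branch adopt; the helper name and hash string are illustrative, not part of the change:

    #include "path-info.hh"
    #include "hash.hh"
    #include <string>

    using namespace nix;

    // Hypothetical helper: builds a ValidPathInfo the way this branch now requires.
    ValidPathInfo makeInfoSketch(StorePath path, const std::string & narSha256)
    {
        ValidPathInfo info {
            std::move(path),
            Hash::parseAny(narSha256, htSHA256),  // the hash is mandatory up front
        };
        info.narSize = 0;  // size remains optional metadata, 0 = unknown
        return info;
    }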
@@ -419,10 +419,10 @@ void RemoteStore::queryPathInfoUncached(const StorePath & path,
    bool valid; conn->from >> valid;
    if (!valid) throw InvalidPath("path '%s' is not valid", printStorePath(path));
    }
-   info = std::make_shared<ValidPathInfo>(StorePath(path));
    auto deriver = readString(conn->from);
+   auto narHash = Hash::parseAny(readString(conn->from), htSHA256);
+   info = std::make_shared<ValidPathInfo>(path, narHash);
    if (deriver != "") info->deriver = parseStorePath(deriver);
-   info->narHash = Hash::parseAny(readString(conn->from), htSHA256);
    info->references = readStorePaths<StorePathSet>(*this, conn->from);
    conn->from >> info->registrationTime >> info->narSize;
    if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 16) {

@@ -521,7 +521,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source,
    conn->to << wopAddToStoreNar
    << printStorePath(info.path)
    << (info.deriver ? printStorePath(*info.deriver) : "")
-   << info.narHash->to_string(Base16, false);
+   << info.narHash.to_string(Base16, false);
    writeStorePaths(*this, conn->to, info.references);
    conn->to << info.registrationTime << info.narSize
    << info.ultimate << info.sigs << renderContentAddress(info.ca)
@@ -320,8 +320,10 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
    if (expectedCAHash && expectedCAHash != hash)
    throw Error("hash mismatch for '%s'", srcPath);

-   ValidPathInfo info(makeFixedOutputPath(method, hash, name));
-   info.narHash = narHash;
+   ValidPathInfo info {
+   makeFixedOutputPath(method, hash, name),
+   narHash,
+   };
    info.narSize = narSize;
    info.ca = FixedOutputHash { .method = method, .hash = hash };

@@ -565,7 +567,7 @@ string Store::makeValidityRegistration(const StorePathSet & paths,
    auto info = queryPathInfo(i);

    if (showHash) {
-   s += info->narHash->to_string(Base16, false) + "\n";
+   s += info->narHash.to_string(Base16, false) + "\n";
    s += (format("%1%\n") % info->narSize).str();
    }

@@ -597,7 +599,7 @@ void Store::pathInfoToJSON(JSONPlaceholder & jsonOut, const StorePathSet & store
    auto info = queryPathInfo(storePath);

    jsonPath
-   .attr("narHash", info->narHash->to_string(hashBase, true))
+   .attr("narHash", info->narHash.to_string(hashBase, true))
    .attr("narSize", info->narSize);

    {

@@ -725,20 +727,6 @@ void copyStorePath(ref<Store> srcStore, ref<Store> dstStore,
    info = info2;
    }

-   if (!info->narHash) {
-   StringSink sink;
-   srcStore->narFromPath({storePath}, sink);
-   auto info2 = make_ref<ValidPathInfo>(*info);
-   info2->narHash = hashString(htSHA256, *sink.s);
-   if (!info->narSize) info2->narSize = sink.s->size();
-   if (info->ultimate) info2->ultimate = false;
-   info = info2;
-
-   StringSource source(*sink.s);
-   dstStore->addToStore(*info, source, repair, checkSigs);
-   return;
-   }
-
    if (info->ultimate) {
    auto info2 = make_ref<ValidPathInfo>(*info);
    info2->ultimate = false;

@@ -879,19 +867,22 @@ void copyClosure(ref<Store> srcStore, ref<Store> dstStore,
    }


-   std::optional<ValidPathInfo> decodeValidPathInfo(const Store & store, std::istream & str, bool hashGiven)
+   std::optional<ValidPathInfo> decodeValidPathInfo(const Store & store, std::istream & str, std::optional<HashResult> hashGiven)
    {
    std::string path;
    getline(str, path);
    if (str.eof()) { return {}; }
-   ValidPathInfo info(store.parseStorePath(path));
-   if (hashGiven) {
+   if (!hashGiven) {
    string s;
    getline(str, s);
-   info.narHash = Hash::parseAny(s, htSHA256);
+   auto narHash = Hash::parseAny(s, htSHA256);
    getline(str, s);
-   if (!string2Int(s, info.narSize)) throw Error("number expected");
+   uint64_t narSize;
+   if (!string2Int(s, narSize)) throw Error("number expected");
+   hashGiven = { narHash, narSize };
    }
+   ValidPathInfo info(store.parseStorePath(path), hashGiven->first);
+   info.narSize = hashGiven->second;
    std::string deriver;
    getline(str, deriver);
    if (deriver != "") info.deriver = store.parseStorePath(deriver);

@@ -926,12 +917,12 @@ string showPaths(const PathSet & paths)

    std::string ValidPathInfo::fingerprint(const Store & store) const
    {
-   if (narSize == 0 || !narHash)
-   throw Error("cannot calculate fingerprint of path '%s' because its size/hash is not known",
+   if (narSize == 0)
+   throw Error("cannot calculate fingerprint of path '%s' because its size is not known",
    store.printStorePath(path));
    return
    "1;" + store.printStorePath(path) + ";"
-   + narHash->to_string(Base32, true) + ";"
+   + narHash.to_string(Base32, true) + ";"
    + std::to_string(narSize) + ";"
    + concatStringsSep(",", store.printStorePathSet(references));
    }
@@ -4,7 +4,6 @@
    #include "hash.hh"
    #include "content-address.hh"
    #include "serialise.hh"
-   #include "crypto.hh"
    #include "lru-cache.hh"
    #include "sync.hh"
    #include "globals.hh"

@@ -767,7 +766,7 @@ string showPaths(const PathSet & paths);
    std::optional<ValidPathInfo> decodeValidPathInfo(
    const Store & store,
    std::istream & str,
-   bool hashGiven = false);
+   std::optional<HashResult> hashGiven = std::nullopt);

    /* Split URI into protocol+hierarchy part and its parameter set. */
    std::pair<std::string, Store::Params> splitUriAndParams(const std::string & uri);
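decodeValidPathInfo now distinguishes two cases through its std::optional<HashResult> parameter: with std::nullopt (the default) the NAR hash and size are expected as extra lines in the input stream and are read from it; with a HashResult supplied, those lines are not read and the given values are used directly (this is what the Hash::dummy workaround in registerValidity below relies on). A hedged sketch of both call styles; the stream contents and hash value here are assumptions for illustration:

    #include "store-api.hh"
    #include <istream>

    using namespace nix;

    void decodeExamples(const Store & store, std::istream & registration)
    {
        // Case 1: the registration stream itself carries the hash and size lines.
        auto withHashInStream = decodeValidPathInfo(store, registration);

        // Case 2: the caller already knows hash and size, so the stream must not
        // contain them; pass a HashResult (a pair of Hash and NAR size) instead.
        HashResult known {
            Hash::parseAny("e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", htSHA256),
            1234,
        };
        auto withHashGiven = decodeValidPathInfo(store, registration, known);

        (void) withHashInStream;
        (void) withHashGiven;
    }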
@@ -136,6 +136,8 @@ std::string Hash::to_string(Base base, bool includeType) const
    return s;
    }

+   Hash Hash::dummy(htSHA256);

    Hash Hash::parseSRI(std::string_view original) {
    auto rest = original;
@@ -59,9 +59,6 @@ private:
    Hash(std::string_view s, HashType type, bool isSRI);

    public:
-   /* Check whether a hash is set. */
-   operator bool () const { return (bool) type; }
-
    /* Check whether two hash are equal. */
    bool operator == (const Hash & h2) const;

@@ -105,6 +102,8 @@ public:
    assert(type == htSHA1);
    return std::string(to_string(Base16, false), 0, 7);
    }

+   static Hash dummy;
    };

    /* Helper that defaults empty hashes to the 0 hash. */
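Because a Hash is now always a concrete value, the old "is this hash set?" operator bool is gone, and Hash::dummy is added as an explicit placeholder for the few call sites that still need one (see the registerValidity hunk further down). A small sketch of the difference; the names are illustrative only:

    #include "hash.hh"

    using namespace nix;

    void dummySketch()
    {
        // Before this change one could write `if (someHash) ...` thanks to
        // operator bool; that no longer compiles.

        // After: use the explicit placeholder and compare against it.
        Hash placeholder = Hash::dummy;               // a fixed placeholder SHA-256 hash
        bool stillPlaceholder = (placeholder == Hash::dummy);
        (void) stillPlaceholder;
    }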
@@ -218,8 +218,8 @@ static StorePathSet maybeUseOutputs(const StorePath & storePath, bool useOutput,
    if (useOutput && storePath.isDerivation()) {
    auto drv = store->derivationFromPath(storePath);
    StorePathSet outputs;
-   for (auto & i : drv.outputs)
-   outputs.insert(i.second.path(*store, drv.name));
+   for (auto & i : drv.outputsAndPaths(*store))
+   outputs.insert(i.second.second);
    return outputs;
    }
    else return {storePath};
|
||||||
auto i2 = store->followLinksToStorePath(i);
|
auto i2 = store->followLinksToStorePath(i);
|
||||||
if (forceRealise) realisePath({i2});
|
if (forceRealise) realisePath({i2});
|
||||||
Derivation drv = store->derivationFromPath(i2);
|
Derivation drv = store->derivationFromPath(i2);
|
||||||
for (auto & j : drv.outputs)
|
for (auto & j : drv.outputsAndPaths(*store))
|
||||||
cout << fmt("%1%\n", store->printStorePath(j.second.path(*store, drv.name)));
|
cout << fmt("%1%\n", store->printStorePath(j.second.second));
|
||||||
}
|
}
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
|
@ -372,8 +372,8 @@ static void opQuery(Strings opFlags, Strings opArgs)
|
||||||
for (auto & j : maybeUseOutputs(store->followLinksToStorePath(i), useOutput, forceRealise)) {
|
for (auto & j : maybeUseOutputs(store->followLinksToStorePath(i), useOutput, forceRealise)) {
|
||||||
auto info = store->queryPathInfo(j);
|
auto info = store->queryPathInfo(j);
|
||||||
if (query == qHash) {
|
if (query == qHash) {
|
||||||
assert(info->narHash && info->narHash->type == htSHA256);
|
assert(info->narHash.type == htSHA256);
|
||||||
cout << fmt("%s\n", info->narHash->to_string(Base32, true));
|
cout << fmt("%s\n", info->narHash.to_string(Base32, true));
|
||||||
} else if (query == qSize)
|
} else if (query == qSize)
|
||||||
cout << fmt("%d\n", info->narSize);
|
cout << fmt("%d\n", info->narSize);
|
||||||
}
|
}
|
||||||
|
@ -495,7 +495,10 @@ static void registerValidity(bool reregister, bool hashGiven, bool canonicalise)
|
||||||
ValidPathInfos infos;
|
ValidPathInfos infos;
|
||||||
|
|
||||||
while (1) {
|
while (1) {
|
||||||
auto info = decodeValidPathInfo(*store, cin, hashGiven);
|
// We use a dummy value because we'll set it below. FIXME be correct by
|
||||||
|
// construction and avoid dummy value.
|
||||||
|
auto hashResultOpt = !hashGiven ? std::optional<HashResult> { {Hash::dummy, -1} } : std::nullopt;
|
||||||
|
auto info = decodeValidPathInfo(*store, cin, hashResultOpt);
|
||||||
if (!info) break;
|
if (!info) break;
|
||||||
if (!store->isValidPath(info->path) || reregister) {
|
if (!store->isValidPath(info->path) || reregister) {
|
||||||
/* !!! races */
|
/* !!! races */
|
||||||
|
@@ -723,7 +726,7 @@ static void opVerifyPath(Strings opFlags, Strings opArgs)
    auto path = store->followLinksToStorePath(i);
    printMsg(lvlTalkative, "checking path '%s'...", store->printStorePath(path));
    auto info = store->queryPathInfo(path);
-   HashSink sink(info->narHash->type);
+   HashSink sink(info->narHash.type);
    store->narFromPath(path, sink);
    auto current = sink.finish();
    if (current.first != info->narHash) {

@@ -732,7 +735,7 @@ static void opVerifyPath(Strings opFlags, Strings opArgs)
    .hint = hintfmt(
    "path '%s' was modified! expected hash '%s', got '%s'",
    store->printStorePath(path),
-   info->narHash->to_string(Base32, true),
+   info->narHash.to_string(Base32, true),
    current.first.to_string(Base32, true))
    });
    status = 1;

@@ -862,7 +865,7 @@ static void opServe(Strings opFlags, Strings opArgs)
    out << info->narSize // downloadSize
    << info->narSize;
    if (GET_PROTOCOL_MINOR(clientVersion) >= 4)
-   out << (info->narHash ? info->narHash->to_string(Base32, true) : "")
+   out << info->narHash.to_string(Base32, true)
    << renderContentAddress(info->ca)
    << info->sigs;
    } catch (InvalidPath &) {

@@ -944,11 +947,13 @@ static void opServe(Strings opFlags, Strings opArgs)
    if (!writeAllowed) throw Error("importing paths is not allowed");

    auto path = readString(in);
-   ValidPathInfo info(store->parseStorePath(path));
    auto deriver = readString(in);
+   ValidPathInfo info {
+   store->parseStorePath(path),
+   Hash::parseAny(readString(in), htSHA256),
+   };
    if (deriver != "")
    info.deriver = store->parseStorePath(deriver);
-   info.narHash = Hash::parseAny(readString(in), htSHA256);
    info.references = readStorePaths<StorePathSet>(*store, in);
    in >> info.registrationTime >> info.narSize >> info.ultimate;
    info.sigs = readStrings<StringSet>(in);
@@ -60,8 +60,10 @@ struct CmdAddToStore : MixDryRun, StoreCommand
    hash = hsink.finish().first;
    }

-   ValidPathInfo info(store->makeFixedOutputPath(ingestionMethod, hash, *namePart));
-   info.narHash = narHash;
+   ValidPathInfo info {
+   store->makeFixedOutputPath(ingestionMethod, hash, *namePart),
+   narHash,
+   };
    info.narSize = sink.s->size();
    info.ca = std::optional { FixedOutputHash {
    .method = ingestionMethod,
@@ -304,8 +304,8 @@ struct InstallableStorePath : Installable
    if (storePath.isDerivation()) {
    std::map<std::string, StorePath> outputs;
    auto drv = store->readDerivation(storePath);
-   for (auto & [name, output] : drv.outputs)
-   outputs.emplace(name, output.path(*store, drv.name));
+   for (auto & i : drv.outputsAndPaths(*store))
+   outputs.emplace(i.first, i.second.second);
    return {
    BuildableFromDrv {
    .drvPath = storePath,
@@ -77,14 +77,16 @@ struct CmdMakeContentAddressable : StorePathsCommand, MixJSON

    auto narHash = hashModuloSink.finish().first;

-   ValidPathInfo info(store->makeFixedOutputPath(FileIngestionMethod::Recursive, narHash, path.name(), references, hasSelfReference));
+   ValidPathInfo info {
+   store->makeFixedOutputPath(FileIngestionMethod::Recursive, narHash, path.name(), references, hasSelfReference),
+   narHash,
+   };
    info.references = std::move(references);
    if (hasSelfReference) info.references.insert(info.path);
-   info.narHash = narHash;
    info.narSize = sink.s->size();
    info.ca = FixedOutputHash {
    .method = FileIngestionMethod::Recursive,
-   .hash = *info.narHash,
+   .hash = info.narHash,
    };

    if (!json)
@@ -129,11 +129,13 @@ struct ProfileManifest

    auto narHash = hashString(htSHA256, *sink.s);

-   ValidPathInfo info(store->makeFixedOutputPath(FileIngestionMethod::Recursive, narHash, "profile", references));
+   ValidPathInfo info {
+   store->makeFixedOutputPath(FileIngestionMethod::Recursive, narHash, "profile", references),
+   narHash,
+   };
    info.references = std::move(references);
-   info.narHash = narHash;
    info.narSize = sink.s->size();
-   info.ca = FixedOutputHash { .method = FileIngestionMethod::Recursive, .hash = *info.narHash };
+   info.ca = FixedOutputHash { .method = FileIngestionMethod::Recursive, .hash = info.narHash };

    auto source = StringSource { *sink.s };
    store->addToStore(info, source);
@@ -490,8 +490,8 @@ bool NixRepl::processLine(string line)
    if (runProgram(settings.nixBinDir + "/nix", Strings{"build", "--no-link", drvPath}) == 0) {
    auto drv = readDerivation(*state->store, drvPath, Derivation::nameFromPath(state->store->parseStorePath(drvPath)));
    std::cout << std::endl << "this derivation produced the following outputs:" << std::endl;
-   for (auto & i : drv.outputs)
-   std::cout << fmt(" %s -> %s\n", i.first, state->store->printStorePath(i.second.path(*state->store, drv.name)));
+   for (auto & i : drv.outputsAndPaths(*state->store))
+   std::cout << fmt(" %s -> %s\n", i.first, state->store->printStorePath(i.second.second));
    }
    } else if (command == ":i") {
    runProgram(settings.nixBinDir + "/nix-env", Strings{"-i", drvPath});
@@ -67,9 +67,9 @@ struct CmdShowDerivation : InstallablesCommand

    {
    auto outputsObj(drvObj.object("outputs"));
-   for (auto & output : drv.outputs) {
+   for (auto & output : drv.outputsAndPaths(*store)) {
    auto outputObj(outputsObj.object(output.first));
-   outputObj.attr("path", store->printStorePath(output.second.path(*store, drv.name)));
+   outputObj.attr("path", store->printStorePath(output.second.second));

    std::visit(overloaded {
    [&](DerivationOutputInputAddressed doi) {

@@ -81,7 +81,7 @@ struct CmdShowDerivation : InstallablesCommand
    [&](DerivationOutputCAFloating dof) {
    outputObj.attr("hashAlgo", makeFileIngestionPrefix(dof.method) + printHashType(dof.hashType));
    },
-   }, output.second.output);
+   }, output.second.first.output);
    }
    }
@@ -91,15 +91,15 @@ struct CmdVerify : StorePathsCommand

    std::unique_ptr<AbstractHashSink> hashSink;
    if (!info->ca)
-   hashSink = std::make_unique<HashSink>(info->narHash->type);
+   hashSink = std::make_unique<HashSink>(info->narHash.type);
    else
-   hashSink = std::make_unique<HashModuloSink>(info->narHash->type, std::string(info->path.hashPart()));
+   hashSink = std::make_unique<HashModuloSink>(info->narHash.type, std::string(info->path.hashPart()));

    store->narFromPath(info->path, *hashSink);

    auto hash = hashSink->finish();

-   if (hash.first != *info->narHash) {
+   if (hash.first != info->narHash) {
    corrupted++;
    act2.result(resCorruptedPath, store->printStorePath(info->path));
    logError({

@@ -107,7 +107,7 @@ struct CmdVerify : StorePathsCommand
    .hint = hintfmt(
    "path '%s' was modified! expected hash '%s', got '%s'",
    store->printStorePath(info->path),
-   info->narHash->to_string(Base32, true),
+   info->narHash.to_string(Base32, true),
    hash.first.to_string(Base32, true))
    });
    }
@@ -18,6 +18,7 @@ let
    shell = busybox;
    name = "build-remote-input-1";
    buildCommand = "echo FOO > $out";
+   requiredSystemFeatures = ["foo"];
    outputHash = "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=";
    };

@@ -25,6 +26,7 @@ let
    shell = busybox;
    name = "build-remote-input-2";
    buildCommand = "echo BAR > $out";
+   requiredSystemFeatures = ["bar"];
    outputHash = "sha256-XArauVH91AVwP9hBBQNlkX9ccuPpSYx9o0zeIHb6e+Q=";
    };

@@ -35,6 +37,7 @@ let
    read x < ${input2}
    echo $x BAZ > $out
    '';
+   requiredSystemFeatures = ["baz"];
    outputHash = "sha256-daKAcPp/+BYMQsVi/YYMlCKoNAxCNDsaivwSHgQqD2s=";
    };

@@ -43,7 +46,8 @@ in
    mkDerivation {
    shell = busybox;
    name = "build-remote";
-   buildCommand = ''
+   buildCommand =
+   ''
    read x < ${input1}
    read y < ${input3}
    echo "$x $y" > $out
tests/build-remote-content-addressed-fixed.sh (new file)
@@ -0,0 +1,5 @@
+   source common.sh
+
+   file=build-hook-ca.nix
+
+   source build-remote.sh
tests/build-remote-input-addressed.sh (new file)
@@ -0,0 +1,5 @@
+   source common.sh
+
+   file=build-hook.nix
+
+   source build-remote.sh
@@ -4,10 +4,8 @@ if ! [[ $busybox =~ busybox ]]; then exit; fi
    unset NIX_STORE_DIR
    unset NIX_STATE_DIR

-   # Note: ssh://localhost bypasses ssh, directly invoking nix-store as a
-   # child process. This allows us to test LegacySSHStore::buildDerivation().
-   # ssh-ng://... likewise allows us to test RemoteStore::buildDerivation().
+   # Note: ssh{-ng}://localhost bypasses ssh. See tests/build-remote.sh for
+   # more details.

    nix build -L -v -f $file -o $TEST_ROOT/result --max-jobs 0 \
    --arg busybox $busybox \
    --store $TEST_ROOT/local \
@@ -1,5 +1,3 @@
-   source common.sh
-
    if ! canUseSandbox; then exit; fi
    if ! [[ $busybox =~ busybox ]]; then exit; fi

@@ -19,7 +17,7 @@ builders=(
    # Note: ssh://localhost bypasses ssh, directly invoking nix-store as a
    # child process. This allows us to test LegacySSHStore::buildDerivation().
    # ssh-ng://... likewise allows us to test RemoteStore::buildDerivation().
-   nix build -L -v -f build-hook.nix -o $TEST_ROOT/result --max-jobs 0 \
+   nix build -L -v -f $file -o $TEST_ROOT/result --max-jobs 0 \
    --arg busybox $busybox \
    --store $TEST_ROOT/machine0 \
    --builders "$(join_by '; ' "${builders[@]}")"
@@ -14,7 +14,8 @@ nix_tests = \
    placeholders.sh nix-shell.sh \
    linux-sandbox.sh \
    build-dry.sh \
-   build-remote.sh \
+   build-remote-input-addressed.sh \
+   build-remote-content-addressed-fixed.sh \
    build-remote-trustless-should-pass-1.sh \
    build-remote-trustless-should-pass-2.sh \
    build-remote-trustless-should-pass-3.sh \