forked from lix-project/lix
Merge remote-tracking branch 'upstream/master' into single-ca-drv-build
commit 3c8b5b6219
@@ -370,6 +370,33 @@ false</literal>.</para>
 
   </varlistentry>
 
+  <varlistentry xml:id="conf-hashed-mirrors"><term><literal>hashed-mirrors</literal></term>
+
+    <listitem><para>A list of web servers used by
+    <function>builtins.fetchurl</function> to obtain files by hash.
+    Given a hash type <replaceable>ht</replaceable> and a base-16 hash
+    <replaceable>h</replaceable>, Nix will try to download the file
+    from
+    <literal>hashed-mirror/<replaceable>ht</replaceable>/<replaceable>h</replaceable></literal>.
+    This allows files to be downloaded even if they have disappeared
+    from their original URI. For example, given the hashed mirror
+    <literal>http://tarballs.example.com/</literal>, when building the
+    derivation
+
+<programlisting>
+builtins.fetchurl {
+  url = "https://example.org/foo-1.2.3.tar.xz";
+  sha256 = "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae";
+}
+</programlisting>
+
+    Nix will attempt to download this file from
+    <literal>http://tarballs.example.com/sha256/2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae</literal>
+    first. If it is not available there, if will try the original URI.</para></listitem>
+
+  </varlistentry>
+
+
   <varlistentry xml:id="conf-http-connections"><term><literal>http-connections</literal></term>
 
     <listitem><para>The maximum number of parallel TCP connections
@@ -80,7 +80,7 @@ SV * queryReferences(char * path)
 SV * queryPathHash(char * path)
     PPCODE:
         try {
-            auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash->to_string(Base32, true);
+            auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash.to_string(Base32, true);
             XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
         } catch (Error & e) {
             croak("%s", e.what());

@@ -106,7 +106,7 @@ SV * queryPathInfo(char * path, int base32)
                 XPUSHs(&PL_sv_undef);
             else
                 XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(*info->deriver).c_str(), 0)));
-            auto s = info->narHash->to_string(base32 ? Base32 : Base16, true);
+            auto s = info->narHash.to_string(base32 ? Base32 : Base16, true);
            XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
            mXPUSHi(info->registrationTime);
            mXPUSHi(info->narSize);

@@ -303,13 +303,12 @@ SV * derivationFromPath(char * drvPath)
            hash = newHV();

            HV * outputs = newHV();
-            for (auto & i : drv.outputs) {
-                auto pathOpt = i.second.pathOpt(*store(), drv.name);
+            for (auto & i : drv.outputsAndOptPaths(*store())) {
                hv_store(
                    outputs, i.first.c_str(), i.first.size(),
-                    !pathOpt
+                    !i.second.second
                        ? newSV(0) /* null value */
-                        : newSVpv(store()->printStorePath(*pathOpt).c_str(), 0),
+                        : newSVpv(store()->printStorePath(*i.second.second).c_str(), 0),
                    0);
            }
            hv_stores(hash, "outputs", newRV((SV *) outputs));
@@ -38,9 +38,9 @@ static AutoCloseFD openSlotLock(const Machine & m, uint64_t slot)
     return openLockFile(fmt("%s/%s-%d", currentLoad, escapeUri(m.storeUri), slot), true);
 }
 
-static bool allSupportedLocally(const std::set<std::string>& requiredFeatures) {
+static bool allSupportedLocally(Store & store, const std::set<std::string>& requiredFeatures) {
     for (auto & feature : requiredFeatures)
-        if (!settings.systemFeatures.get().count(feature)) return false;
+        if (!store.systemFeatures.get().count(feature)) return false;
     return true;
 }
 

@@ -106,7 +106,7 @@ static int _main(int argc, char * * argv)
         auto canBuildLocally = amWilling
             && ( neededSystem == settings.thisSystem
                 || settings.extraPlatforms.get().count(neededSystem) > 0)
-            && allSupportedLocally(requiredFeatures);
+            && allSupportedLocally(*store, requiredFeatures);
 
         /* Error ignored here, will be caught later */
         mkdir(currentLoad.c_str(), 0777);

@@ -201,7 +201,7 @@ static int _main(int argc, char * * argv)
                     % concatStringsSep<StringSet>(", ", m.mandatoryFeatures);
             }
 
-            logError({
+            logErrorInfo(lvlInfo, {
                 .name = "Remote build",
                 .description = "Failed to find a machine for remote build!",
                 .hint = hint

@@ -224,15 +224,7 @@ static int _main(int argc, char * * argv)
 
         Activity act(*logger, lvlTalkative, actUnknown, fmt("connecting to '%s'", bestMachine->storeUri));
 
-        Store::Params storeParams;
-        if (hasPrefix(bestMachine->storeUri, "ssh://")) {
-            storeParams["max-connections"] = "1";
-            storeParams["log-fd"] = "4";
-            if (bestMachine->sshKey != "")
-                storeParams["ssh-key"] = bestMachine->sshKey;
-        }
-
-        sshStore = openStore(bestMachine->storeUri, storeParams);
+        sshStore = bestMachine->openStore();
         sshStore->connect();
         storeUri = bestMachine->storeUri;
 
@@ -38,8 +38,9 @@ DrvInfo::DrvInfo(EvalState & state, ref<Store> store, const std::string & drvPat
     auto i = drv.outputs.find(outputName);
     if (i == drv.outputs.end())
         throw Error("derivation '%s' does not have output '%s'", store->printStorePath(drvPath), outputName);
+    auto & [outputName, output] = *i;
 
-    auto optStorePath = i->second.pathOpt(*store, drv.name);
+    auto optStorePath = output.pathOpt(*store, drv.name, outputName);
     if (optStorePath)
         outPath = store->printStorePath(*optStorePath);
 }

@@ -79,7 +79,7 @@ static void mkOutputString(EvalState & state, Value & v,
     const StorePath & drvPath, const BasicDerivation & drv,
     std::pair<string, DerivationOutput> o)
 {
-    auto optOutputPath = o.second.pathOpt(*state.store, drv.name);
+    auto optOutputPath = o.second.pathOpt(*state.store, drv.name, o.first);
     mkString(
         *state.allocAttr(v, state.symbols.create(o.first)),
         state.store->printStorePath(optOutputPath

@@ -212,7 +212,7 @@ static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
             : hashFile(htSHA256, path);
         if (hash != *expectedHash)
             throw Error((unsigned int) 102, "hash mismatch in file downloaded from '%s':\n wanted: %s\n got: %s",
-                *url, expectedHash->to_string(Base32, true), hash->to_string(Base32, true));
+                *url, expectedHash->to_string(Base32, true), hash.to_string(Base32, true));
     }
 
     if (state.allowedPaths)
@@ -130,12 +130,12 @@ std::pair<Tree, Input> Input::fetch(ref<Store> store) const
     tree.actualPath = store->toRealPath(tree.storePath);
 
     auto narHash = store->queryPathInfo(tree.storePath)->narHash;
-    input.attrs.insert_or_assign("narHash", narHash->to_string(SRI, true));
+    input.attrs.insert_or_assign("narHash", narHash.to_string(SRI, true));
 
     if (auto prevNarHash = getNarHash()) {
         if (narHash != *prevNarHash)
             throw Error((unsigned int) 102, "NAR hash mismatch in input '%s' (%s), expected '%s', got '%s'",
-                to_string(), tree.actualPath, prevNarHash->to_string(SRI, true), narHash->to_string(SRI, true));
+                to_string(), tree.actualPath, prevNarHash->to_string(SRI, true), narHash.to_string(SRI, true));
     }
 
     if (auto prevLastModified = getLastModified()) {

@@ -67,8 +67,10 @@ DownloadFileResult downloadFile(
         StringSink sink;
         dumpString(*res.data, sink);
         auto hash = hashString(htSHA256, *res.data);
-        ValidPathInfo info(store->makeFixedOutputPath(FileIngestionMethod::Flat, hash, name));
-        info.narHash = hashString(htSHA256, *sink.s);
+        ValidPathInfo info {
+            store->makeFixedOutputPath(FileIngestionMethod::Flat, hash, name),
+            hashString(htSHA256, *sink.s),
+        };
         info.narSize = sink.s->size();
         info.ca = FixedOutputHash {
             .method = FileIngestionMethod::Flat,
@@ -362,7 +362,7 @@ public:
         auto width = getWindowSize().second;
         if (width <= 0) width = std::numeric_limits<decltype(width)>::max();
 
-        writeToStderr("\r" + filterANSIEscapes(line, false, width) + "\e[K");
+        writeToStderr("\r" + filterANSIEscapes(line, false, width) + ANSI_NORMAL + "\e[K");
     }
 
     std::string getStatus(State & state)
@@ -143,7 +143,7 @@ struct FileSource : FdSource
 void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource,
     RepairFlag repair, CheckSigsFlag checkSigs)
 {
-    assert(info.narHash && info.narSize);
+    assert(info.narSize);
 
     if (!repair && isValidPath(info.path)) {
         // FIXME: copyNAR -> null sink

@@ -312,14 +312,10 @@ void BinaryCacheStore::narFromPath(const StorePath & storePath, Sink & sink)
 {
     auto info = queryPathInfo(storePath).cast<const NarInfo>();
 
-    uint64_t narSize = 0;
+    LengthSink narSize;
+    TeeSink tee { sink, narSize };
 
-    LambdaSink wrapperSink([&](const unsigned char * data, size_t len) {
-        sink(data, len);
-        narSize += len;
-    });
-
-    auto decompressor = makeDecompressionSink(info->compression, wrapperSink);
+    auto decompressor = makeDecompressionSink(info->compression, tee);
 
     try {
         getFile(info->url, *decompressor);

@@ -331,7 +327,7 @@ void BinaryCacheStore::narFromPath(const StorePath & storePath, Sink & sink)
 
     stats.narRead++;
     //stats.narReadCompressedBytes += nar->size(); // FIXME
-    stats.narReadBytes += narSize;
+    stats.narReadBytes += narSize.length;
 }
 
 void BinaryCacheStore::queryPathInfoUncached(const StorePath & storePath,

@@ -385,7 +381,10 @@ StorePath BinaryCacheStore::addToStore(const string & name, const Path & srcPath
         h = hashString(hashAlgo, s);
     }
 
-    ValidPathInfo info(makeFixedOutputPath(method, *h, name));
+    ValidPathInfo info {
+        makeFixedOutputPath(method, *h, name),
+        Hash::dummy, // Will be fixed in addToStore, which recomputes nar hash
+    };
 
     auto source = StringSource { *sink.s };
     addToStore(info, source, repair, CheckSigs);

@@ -396,7 +395,10 @@ StorePath BinaryCacheStore::addToStore(const string & name, const Path & srcPath
 StorePath BinaryCacheStore::addTextToStore(const string & name, const string & s,
     const StorePathSet & references, RepairFlag repair)
 {
-    ValidPathInfo info(computeStorePathForText(name, s, references));
+    ValidPathInfo info {
+        computeStorePathForText(name, s, references),
+        Hash::dummy, // Will be fixed in addToStore, which recomputes nar hash
+    };
     info.references = references;
 
     if (repair || !isValidPath(info.path)) {
@@ -1235,12 +1235,9 @@ void DerivationGoal::haveDerivation()
 
     retrySubstitution = false;
 
-    /* Temporarily root output paths that are known a priori building */
-    for (auto & i : drv->outputs) {
-        auto optOutputPath = i.second.pathOpt(worker.store, drv->name);
-        if (optOutputPath)
-            worker.store.addTempRoot(*optOutputPath);
-    }
+    for (auto & i : drv->outputsAndOptPaths(worker.store))
+        if (i.second.second)
+            worker.store.addTempRoot(*i.second.second);
 
     /* Check what outputs paths are not already valid. */
     checkPathValidity();

@@ -1524,11 +1521,9 @@ void DerivationGoal::tryToBuild()
     PathSet lockFiles;
     /* FIXME: Should lock something like the drv itself so we don't build same
        CA drv concurrently */
-    for (auto & i : drv->outputs) {
-        auto optPath = i.second.pathOpt(worker.store, drv->name);
-        if (optPath)
-            lockFiles.insert(worker.store.Store::toRealPath(*optPath));
-    }
+    for (auto & i : drv->outputsAndOptPaths(worker.store))
+        if (i.second.second)
+            lockFiles.insert(worker.store.Store::toRealPath(*i.second.second));
 
     if (!outputLocks.lockPaths(lockFiles, "", false)) {
         if (!actLock)

@@ -1575,7 +1570,7 @@ void DerivationGoal::tryToBuild()
     /* Don't do a remote build if the derivation has the attribute
       `preferLocalBuild' set. Also, check and repair modes are only
       supported for local builds. */
-    bool buildLocally = buildMode != bmNormal || parsedDrv->willBuildLocally();
+    bool buildLocally = buildMode != bmNormal || parsedDrv->willBuildLocally(worker.store);
 
     if (!buildLocally) {
         switch (tryBuildHook()) {

@@ -2031,15 +2026,14 @@ StorePathSet DerivationGoal::exportReferences(const StorePathSet & storePaths)
     for (auto & j : paths2) {
         if (j.isDerivation()) {
             Derivation drv = worker.store.derivationFromPath(j);
-            for (auto & k : drv.outputs) {
-                auto optPath = k.second.pathOpt(worker.store, drv.name);
-                if (!optPath)
+            for (auto & k : drv.outputsAndOptPaths(worker.store)) {
+                if (!k.second.second)
                     /* FIXME: I am confused why we are calling
                        `computeFSClosure` on the output path, rather than
                        derivation itself. That doesn't seem right to me, so I
                        won't try to implemented this for CA derivations. */
                     throw UnimplementedError("export references including CA derivations (themselves) is not yet implemented");
-                worker.store.computeFSClosure(*optPath, paths);
+                worker.store.computeFSClosure(*k.second.second, paths);
             }
         }
     }

@@ -2084,13 +2078,13 @@ void linkOrCopy(const Path & from, const Path & to)
 void DerivationGoal::startBuilder()
 {
     /* Right platform? */
-    if (!parsedDrv->canBuildLocally())
+    if (!parsedDrv->canBuildLocally(worker.store))
         throw Error("a '%s' with features {%s} is required to build '%s', but I am a '%s' with features {%s}",
             drv->platform,
             concatStringsSep(", ", parsedDrv->getRequiredSystemFeatures()),
             worker.store.printStorePath(drvPath),
             settings.thisSystem,
-            concatStringsSep<StringSet>(", ", settings.systemFeatures));
+            concatStringsSep<StringSet>(", ", worker.store.systemFeatures));
 
     if (drv->isBuiltin())
         preloadNSS();

@@ -2376,15 +2370,14 @@ void DerivationGoal::startBuilder()
            rebuilding a path that is in settings.dirsInChroot
            (typically the dependencies of /bin/sh). Throw them
            out. */
-        for (auto & i : drv->outputs) {
+        for (auto & i : drv->outputsAndOptPaths(worker.store)) {
            /* If the name isn't known a priori (i.e. floating
               content-addressed derivation), the temporary location we use
               should be fresh. Freshness means it is impossible that the path
               is already in the sandbox, so we don't need to worry about
               removing it. */
-            auto optPath = i.second.pathOpt(worker.store, drv->name);
-            if (optPath)
-                dirsInChroot.erase(worker.store.printStorePath(*optPath));
+            if (i.second.second)
+                dirsInChroot.erase(worker.store.printStorePath(*i.second.second));
         }
 
 #elif __APPLE__
@@ -3088,7 +3081,8 @@ void DerivationGoal::startDaemon()
         FdSink to(remote.get());
         try {
             daemon::processConnection(store, from, to,
-                daemon::NotTrusted, daemon::Recursive, "nobody", 65535);
+                daemon::NotTrusted, daemon::Recursive,
+                [&](Store & store) { store.createUser("nobody", 65535); });
             debug("terminated daemon connection");
         } catch (SysError &) {
             ignoreException();

@@ -3347,7 +3341,7 @@ void DerivationGoal::runChild()
             createDirs(chrootRootDir + "/dev/shm");
             createDirs(chrootRootDir + "/dev/pts");
             ss.push_back("/dev/full");
-            if (settings.systemFeatures.get().count("kvm") && pathExists("/dev/kvm"))
+            if (worker.store.systemFeatures.get().count("kvm") && pathExists("/dev/kvm"))
                 ss.push_back("/dev/kvm");
             ss.push_back("/dev/null");
             ss.push_back("/dev/random");

@@ -3767,9 +3761,8 @@ void DerivationGoal::registerOutputs()
     */
     if (hook) {
         bool allValid = true;
-        for (auto & i : drv->outputs) {
-            auto optStorePath = i.second.pathOpt(worker.store, drv->name);
-            if (!optStorePath || !worker.store.isValidPath(*optStorePath))
+        for (auto & i : drv->outputsAndOptPaths(worker.store)) {
+            if (!i.second.second || !worker.store.isValidPath(*i.second.second))
                 allValid = false;
         }
         if (allValid) return;

@@ -3997,6 +3990,9 @@ void DerivationGoal::registerOutputs()
         }
         auto got = caSink.finish().first;
         auto refs = rewriteRefs();
+        HashModuloSink narSink { htSHA256, oldHashPart };
+        dumpPath(actualPath, narSink);
+        auto narHashAndSize = narSink.finish();
         ValidPathInfo newInfo0 {
             worker.store.makeFixedOutputPath(
                 outputHash.method,

@@ -4004,7 +4000,9 @@ void DerivationGoal::registerOutputs()
                 outputPathName(drv->name, outputName),
                 refs.second,
                 refs.first),
+            narHashAndSize.first,
         };
+        newInfo0.narSize = narHashAndSize.second;
         newInfo0.ca = FixedOutputHash {
             .method = outputHash.method,
             .hash = got,

@@ -4012,13 +4010,6 @@ void DerivationGoal::registerOutputs()
         newInfo0.references = refs.second;
         if (refs.first)
             newInfo0.references.insert(newInfo0.path);
-        {
-            HashModuloSink narSink { htSHA256, oldHashPart };
-            dumpPath(actualPath, narSink);
-            auto narHashAndSize = narSink.finish();
-            newInfo0.narHash = narHashAndSize.first;
-            newInfo0.narSize = narHashAndSize.second;
-        }
 
         assert(newInfo0.ca);
         return newInfo0;

@@ -4036,8 +4027,7 @@ void DerivationGoal::registerOutputs()
             std::string { requiredFinalPath.hashPart() });
         rewriteOutput();
         auto narHashAndSize = hashPath(htSHA256, actualPath);
-        ValidPathInfo newInfo0 { requiredFinalPath };
-        newInfo0.narHash = narHashAndSize.first;
+        ValidPathInfo newInfo0 { requiredFinalPath, narHashAndSize.first };
         newInfo0.narSize = narHashAndSize.second;
         auto refs = rewriteRefs();
         newInfo0.references = refs.second;

@@ -4081,7 +4071,7 @@ void DerivationGoal::registerOutputs()
            floating CA derivations and hash-mismatching fixed-output
           derivations. */
        PathLocks dynamicOutputLock;
-        auto optFixedPath = output.pathOpt(worker.store, drv->name);
+        auto optFixedPath = output.pathOpt(worker.store, drv->name, outputName);
        if (!optFixedPath ||
            worker.store.printStorePath(*optFixedPath) != finalDestPath)
        {

@@ -4563,7 +4553,7 @@ std::map<std::string, std::optional<StorePath>> DerivationGoal::queryDerivationO
    if (drv->type() != DerivationType::CAFloating) {
        std::map<std::string, std::optional<StorePath>> res;
        for (auto & [name, output] : drv->outputs)
-            res.insert_or_assign(name, output.pathOpt(worker.store, drv->name));
+            res.insert_or_assign(name, output.pathOpt(worker.store, drv->name, name));
        return res;
    } else {
        return worker.store.queryDerivationOutputMap(drvPath);

@@ -4574,8 +4564,8 @@ OutputPathMap DerivationGoal::queryDerivationOutputMapAssumeTotal()
 {
    if (drv->type() != DerivationType::CAFloating) {
        OutputPathMap res;
-        for (auto & [name, output] : drv->outputs)
-            res.insert_or_assign(name, *output.pathOpt(worker.store, drv->name));
+        for (auto & [name, output] : drv->outputsAndOptPaths(worker.store))
+            res.insert_or_assign(name, *output.second);
        return res;
    } else {
        return worker.store.queryDerivationOutputMapAssumeTotal(drvPath);

@@ -5427,7 +5417,7 @@ bool Worker::pathContentsGood(const StorePath & path)
    if (!pathExists(store.printStorePath(path)))
        res = false;
    else {
-        HashResult current = hashPath(info->narHash->type, store.printStorePath(path));
+        HashResult current = hashPath(info->narHash.type, store.printStorePath(path));
        Hash nullHash(htSHA256);
        res = info->narHash == nullHash || info->narHash == current.first;
    }
@@ -58,6 +58,20 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData)
        }
    };
 
+    /* Try the hashed mirrors first. */
+    if (getAttr("outputHashMode") == "flat")
+        for (auto hashedMirror : settings.hashedMirrors.get())
+            try {
+                if (!hasSuffix(hashedMirror, "/")) hashedMirror += '/';
+                std::optional<HashType> ht = parseHashTypeOpt(getAttr("outputHashAlgo"));
+                Hash h = newHashAllowEmpty(getAttr("outputHash"), ht);
+                fetch(hashedMirror + printHashType(h.type) + "/" + h.to_string(Base16, false));
+                return;
+            } catch (Error & e) {
+                debug(e.what());
+            }
+
+    /* Otherwise try the specified URL. */
    fetch(mainUrl);
 }
 
@@ -289,7 +289,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
        logger->startWork();
        auto hash = store->queryPathInfo(path)->narHash;
        logger->stopWork();
-        to << hash->to_string(Base16, false);
+        to << hash.to_string(Base16, false);
        break;
    }
 

@@ -638,7 +638,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
            if (GET_PROTOCOL_MINOR(clientVersion) >= 17)
                to << 1;
            to << (info->deriver ? store->printStorePath(*info->deriver) : "")
-               << info->narHash->to_string(Base16, false);
+               << info->narHash.to_string(Base16, false);
            writeStorePaths(*store, to, info->references);
            to << info->registrationTime << info->narSize;
            if (GET_PROTOCOL_MINOR(clientVersion) >= 16) {

@@ -694,11 +694,12 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
 
    case wopAddToStoreNar: {
        bool repair, dontCheckSigs;
-        ValidPathInfo info(store->parseStorePath(readString(from)));
+        auto path = store->parseStorePath(readString(from));
        auto deriver = readString(from);
+        auto narHash = Hash::parseAny(readString(from), htSHA256);
+        ValidPathInfo info { path, narHash };
        if (deriver != "")
            info.deriver = store->parseStorePath(deriver);
-        info.narHash = Hash::parseAny(readString(from), htSHA256);
        info.references = readStorePaths<StorePathSet>(*store, from);
        from >> info.registrationTime >> info.narSize >> info.ultimate;
        info.sigs = readStrings<StringSet>(from);

@@ -817,8 +818,7 @@ void processConnection(
    FdSink & to,
    TrustedFlag trusted,
    RecursiveFlag recursive,
-    const std::string & userName,
-    uid_t userId)
+    std::function<void(Store &)> authHook)
 {
    auto monitor = !recursive ? std::make_unique<MonitorFdHup>(from.fd) : nullptr;
 

@@ -859,15 +859,7 @@ void processConnection(
 
        /* If we can't accept clientVersion, then throw an error
           *here* (not above). */
+        authHook(*store);
 
-#if 0
-        /* Prevent users from doing something very dangerous. */
-        if (geteuid() == 0 &&
-            querySetting("build-users-group", "") == "")
-            throw Error("if you run 'nix-daemon' as root, then you MUST set 'build-users-group'!");
-#endif
-
-        store->createUser(userName, userId);
-
        tunnelLogger->stopWork();
        to.flush();
 
@@ -12,7 +12,10 @@ void processConnection(
    FdSink & to,
    TrustedFlag trusted,
    RecursiveFlag recursive,
-    const std::string & userName,
-    uid_t userId);
+    /* Arbitrary hook to check authorization / initialize user data / whatever
+       after the protocol has been negotiated. The idea is that this function
+       and everything it calls doesn't know about this stuff, and the
+       `nix-daemon` handles that instead. */
+    std::function<void(Store &)> authHook);
 
 }
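Note on the hunk above: processConnection() now takes an authorization hook instead of a fixed user name and uid. A minimal usage sketch (not part of the diff); serveClient is a hypothetical wrapper, and the "nobody"/65535 values simply mirror the call in DerivationGoal::startDaemon() elsewhere in this commit:

    // Sketch only: wiring the new authHook parameter of daemon::processConnection.
    #include "daemon.hh"
    #include "store-api.hh"
    #include "serialise.hh"

    using namespace nix;

    static void serveClient(ref<Store> store, FdSource & from, FdSink & to)
    {
        daemon::processConnection(store, from, to,
            daemon::NotTrusted, daemon::Recursive,
            /* authHook: runs once the protocol has been negotiated */
            [&](Store & clientStore) { clientStore.createUser("nobody", 65535); });
    }
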
@@ -7,7 +7,7 @@
 
 namespace nix {
 
-std::optional<StorePath> DerivationOutput::pathOpt(const Store & store, std::string_view drvName) const
+std::optional<StorePath> DerivationOutput::pathOpt(const Store & store, std::string_view drvName, std::string_view outputName) const
 {
    return std::visit(overloaded {
        [](DerivationOutputInputAddressed doi) -> std::optional<StorePath> {

@@ -15,8 +15,7 @@ std::optional<StorePath> DerivationOutput::pathOpt(const Store & store, std::str
        },
        [&](DerivationOutputCAFixed dof) -> std::optional<StorePath> {
            return {
-                // FIXME if we intend to support multiple CA outputs.
-                dof.path(store, drvName, "out")
+                dof.path(store, drvName, outputName)
            };
        },
        [](DerivationOutputCAFloating dof) -> std::optional<StorePath> {

@@ -581,6 +580,16 @@ StringSet BasicDerivation::outputNames() const
    return names;
 }
 
+DerivationOutputsAndOptPaths BasicDerivation::outputsAndOptPaths(const Store & store) const {
+    DerivationOutputsAndOptPaths outsAndOptPaths;
+    for (auto output : outputs)
+        outsAndOptPaths.insert(std::make_pair(
+            output.first,
+            std::make_pair(output.second, output.second.pathOpt(store, name, output.first))
+            )
+        );
+    return outsAndOptPaths;
+}
 
 std::string_view BasicDerivation::nameFromPath(const StorePath & drvPath) {
    auto nameWithSuffix = drvPath.name();
@@ -48,11 +48,23 @@ struct DerivationOutput
        DerivationOutputCAFloating
    > output;
    std::optional<HashType> hashAlgoOpt(const Store & store) const;
-    std::optional<StorePath> pathOpt(const Store & store, std::string_view drvName) const;
+    /* Note, when you use this function you should make sure that you're passing
+       the right derivation name. When in doubt, you should use the safer
+       interface provided by BasicDerivation::outputsAndOptPaths */
+    std::optional<StorePath> pathOpt(const Store & store, std::string_view drvName, std::string_view outputName) const;
 };
 
 typedef std::map<string, DerivationOutput> DerivationOutputs;
 
+/* These are analogues to the previous DerivationOutputs data type, but they
+   also contains, for each output, the (optional) store path in which it would
+   be written. To calculate values of these types, see the corresponding
+   functions in BasicDerivation */
+typedef std::map<string, std::pair<DerivationOutput, StorePath>>
+  DerivationOutputsAndPaths;
+typedef std::map<string, std::pair<DerivationOutput, std::optional<StorePath>>>
+  DerivationOutputsAndOptPaths;
+
 /* For inputs that are sub-derivations, we specify exactly which
    output IDs we are interested in. */
 typedef std::map<StorePath, StringSet> DerivationInputs;

@@ -99,6 +111,11 @@ struct BasicDerivation
    /* Return the output names of a derivation. */
    StringSet outputNames() const;
 
+    /* Calculates the maps that contains all the DerivationOutputs, but
+       augmented with knowledge of the Store paths they would be written
+       into. */
+    DerivationOutputsAndOptPaths outputsAndOptPaths(const Store & store) const;
+
    static std::string_view nameFromPath(const StorePath & storePath);
 };
 
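Note on the new types above: outputsAndOptPaths() pairs every DerivationOutput with its optional store path, which is empty for floating content-addressed outputs until they are built. A minimal iteration sketch (not part of the diff; printKnownOutputs is a hypothetical helper):

    // Sketch only: walking DerivationOutputsAndOptPaths, i.e. output name -> (output, optional path).
    #include <iostream>
    #include "derivations.hh"
    #include "store-api.hh"

    using namespace nix;

    static void printKnownOutputs(Store & store, const BasicDerivation & drv)
    {
        for (auto & [outputName, outputAndPath] : drv.outputsAndOptPaths(store)) {
            auto & pathOpt = outputAndPath.second;   // empty for floating-CA outputs
            if (pathOpt)
                std::cout << outputName << " -> " << store.printStorePath(*pathOpt) << "\n";
            else
                std::cout << outputName << " -> (path not known until built)\n";
        }
    }
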
@@ -38,9 +38,9 @@ void Store::exportPath(const StorePath & path, Sink & sink)
       filesystem corruption from spreading to other machines.
       Don't complain if the stored hash is zero (unknown). */
    Hash hash = hashSink.currentHash().first;
-    if (hash != info->narHash && info->narHash != Hash(info->narHash->type))
+    if (hash != info->narHash && info->narHash != Hash(info->narHash.type))
        throw Error("hash of path '%s' has changed from '%s' to '%s'!",
-            printStorePath(path), info->narHash->to_string(Base32, true), hash.to_string(Base32, true));
+            printStorePath(path), info->narHash.to_string(Base32, true), hash.to_string(Base32, true));
 
    teeSink
        << exportMagic

@@ -69,17 +69,18 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs)
        if (magic != exportMagic)
            throw Error("Nix archive cannot be imported; wrong format");
 
-        ValidPathInfo info(parseStorePath(readString(source)));
+        auto path = parseStorePath(readString(source));
 
        //Activity act(*logger, lvlInfo, format("importing path '%s'") % info.path);
 
-        info.references = readStorePaths<StorePathSet>(*this, source);
+        auto references = readStorePaths<StorePathSet>(*this, source);
 
        auto deriver = readString(source);
+        auto narHash = hashString(htSHA256, *saved.s);
+
+        ValidPathInfo info { path, narHash };
        if (deriver != "")
            info.deriver = parseStorePath(deriver);
-        info.narHash = hashString(htSHA256, *saved.s);
+        info.references = references;
        info.narSize = saved.s->size();
 
        // Ignore optional legacy signature.
@@ -335,6 +335,9 @@ public:
        "setuid/setgid bits or with file capabilities."};
 #endif
 
+    Setting<Strings> hashedMirrors{this, {}, "hashed-mirrors",
+        "A list of servers used by builtins.fetchurl to fetch files by hash."};
+
    Setting<uint64_t> minFree{this, 0, "min-free",
        "Automatically run the garbage collector when free disk space drops below the specified amount."};
 
@@ -93,6 +93,9 @@ struct LegacySSHStore : public Store
        try {
            auto conn(connections->get());
 
+            /* No longer support missing NAR hash */
+            assert(GET_PROTOCOL_MINOR(conn->remoteVersion) >= 4);
+
            debug("querying remote host '%s' for info on '%s'", host, printStorePath(path));
 
            conn->to << cmdQueryPathInfos << PathSet{printStorePath(path)};

@@ -100,8 +103,10 @@ struct LegacySSHStore : public Store
 
            auto p = readString(conn->from);
            if (p.empty()) return callback(nullptr);
-            auto info = std::make_shared<ValidPathInfo>(parseStorePath(p));
-            assert(path == info->path);
+            auto path2 = parseStorePath(p);
+            assert(path == path2);
+            /* Hash will be set below. FIXME construct ValidPathInfo at end. */
+            auto info = std::make_shared<ValidPathInfo>(path, Hash::dummy);
 
            PathSet references;
            auto deriver = readString(conn->from);

@@ -111,12 +116,14 @@ struct LegacySSHStore : public Store
            readLongLong(conn->from); // download size
            info->narSize = readLongLong(conn->from);
 
-            if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 4) {
+            {
                auto s = readString(conn->from);
-                info->narHash = s.empty() ? std::optional<Hash>{} : Hash::parseAnyPrefixed(s);
+                if (s == "")
+                    throw Error("NAR hash is now mandatory");
+                info->narHash = Hash::parseAnyPrefixed(s);
+            }
            info->ca = parseContentAddressOpt(readString(conn->from));
            info->sigs = readStrings<StringSet>(conn->from);
-            }
 
            auto s = readString(conn->from);
            assert(s == "");

@@ -138,7 +145,7 @@ struct LegacySSHStore : public Store
                << cmdAddToStoreNar
                << printStorePath(info.path)
                << (info.deriver ? printStorePath(*info.deriver) : "")
-                << info.narHash->to_string(Base16, false);
+                << info.narHash.to_string(Base16, false);
            writeStorePaths(*this, conn->to, info.references);
            conn->to
                << info.registrationTime
@@ -613,7 +613,7 @@ uint64_t LocalStore::addValidPath(State & state,
 
    state.stmtRegisterValidPath.use()
        (printStorePath(info.path))
-        (info.narHash->to_string(Base16, true))
+        (info.narHash.to_string(Base16, true))
        (info.registrationTime == 0 ? time(0) : info.registrationTime)
        (info.deriver ? printStorePath(*info.deriver) : "", (bool) info.deriver)
        (info.narSize, info.narSize != 0)

@@ -637,12 +637,11 @@ uint64_t LocalStore::addValidPath(State & state,
           registration above is undone. */
        if (checkOutputs) checkDerivationOutputs(info.path, drv);
 
-        for (auto & i : drv.outputs) {
+        for (auto & i : drv.outputsAndOptPaths(*this)) {
            /* Floating CA derivations have indeterminate output paths until
               they are built, so don't register anything in that case */
-            auto optPath = i.second.pathOpt(*this, drv.name);
-            if (optPath)
-                linkDeriverToPath(state, id, i.first, *optPath);
+            if (i.second.second)
+                linkDeriverToPath(state, id, i.first, *i.second.second);
        }
    }
 

@@ -660,25 +659,28 @@ void LocalStore::queryPathInfoUncached(const StorePath & path,
    Callback<std::shared_ptr<const ValidPathInfo>> callback) noexcept
 {
    try {
-        auto info = std::make_shared<ValidPathInfo>(path);
-
        callback(retrySQLite<std::shared_ptr<ValidPathInfo>>([&]() {
            auto state(_state.lock());
 
            /* Get the path info. */
-            auto useQueryPathInfo(state->stmtQueryPathInfo.use()(printStorePath(info->path)));
+            auto useQueryPathInfo(state->stmtQueryPathInfo.use()(printStorePath(path)));
 
            if (!useQueryPathInfo.next())
                return std::shared_ptr<ValidPathInfo>();
 
-            info->id = useQueryPathInfo.getInt(0);
+            auto id = useQueryPathInfo.getInt(0);
 
+            auto narHash = Hash::dummy;
            try {
-                info->narHash = Hash::parseAnyPrefixed(useQueryPathInfo.getStr(1));
+                narHash = Hash::parseAnyPrefixed(useQueryPathInfo.getStr(1));
            } catch (BadHash & e) {
                throw Error("invalid-path entry for '%s': %s", printStorePath(path), e.what());
            }
 
+            auto info = std::make_shared<ValidPathInfo>(path, narHash);
+
+            info->id = id;
+
            info->registrationTime = useQueryPathInfo.getInt(2);
 
            auto s = (const char *) sqlite3_column_text(state->stmtQueryPathInfo, 3);
@@ -713,7 +715,7 @@ void LocalStore::updatePathInfo(State & state, const ValidPathInfo & info)
 {
    state.stmtUpdatePathInfo.use()
        (info.narSize, info.narSize != 0)
-        (info.narHash->to_string(Base16, true))
+        (info.narHash.to_string(Base16, true))
        (info.ultimate ? 1 : 0, info.ultimate)
        (concatStringsSep(" ", info.sigs), !info.sigs.empty())
        (renderContentAddress(info.ca), (bool) info.ca)

@@ -943,7 +945,7 @@ void LocalStore::registerValidPaths(const ValidPathInfos & infos)
        StorePathSet paths;
 
        for (auto & i : infos) {
-            assert(i.narHash && i.narHash->type == htSHA256);
+            assert(i.narHash.type == htSHA256);
            if (isValidPath_(*state, i.path))
                updatePathInfo(*state, i);
            else

@@ -1007,9 +1009,6 @@ const PublicKeys & LocalStore::getPublicKeys()
 void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
    RepairFlag repair, CheckSigsFlag checkSigs)
 {
-    if (!info.narHash)
-        throw Error("cannot add path '%s' because it lacks a hash", printStorePath(info.path));
-
    if (requireSigs && checkSigs && !info.checkSignatures(*this, getPublicKeys()))
        throw Error("cannot add path '%s' because it lacks a valid signature", printStorePath(info.path));
 

@@ -1044,11 +1043,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
        else
            hashSink = std::make_unique<HashModuloSink>(htSHA256, std::string(info.path.hashPart()));
 
-        LambdaSource wrapperSource([&](unsigned char * data, size_t len) -> size_t {
-            size_t n = source.read(data, len);
-            (*hashSink)(data, n);
-            return n;
-        });
+        TeeSource wrapperSource { source, *hashSink };
 
        restorePath(realPath, wrapperSource);
 

@@ -1056,7 +1051,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
 
        if (hashResult.first != info.narHash)
            throw Error("hash mismatch importing path '%s';\n wanted: %s\n got: %s",
-                printStorePath(info.path), info.narHash->to_string(Base32, true), hashResult.first.to_string(Base32, true));
+                printStorePath(info.path), info.narHash.to_string(Base32, true), hashResult.first.to_string(Base32, true));
 
        if (hashResult.second != info.narSize)
            throw Error("size mismatch importing path '%s';\n wanted: %s\n got: %s",

@@ -1178,8 +1173,7 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, const string & name,
 
            optimisePath(realPath);
 
-            ValidPathInfo info(dstPath);
-            info.narHash = narHash.first;
+            ValidPathInfo info { dstPath, narHash.first };
            info.narSize = narHash.second;
            info.ca = FixedOutputHash { .method = method, .hash = hash };
            registerValidPath(info);

@@ -1222,8 +1216,7 @@ StorePath LocalStore::addTextToStore(const string & name, const string & s,
 
            optimisePath(realPath);
 
-            ValidPathInfo info(dstPath);
-            info.narHash = narHash;
+            ValidPathInfo info { dstPath, narHash };
            info.narSize = sink.s->size();
            info.references = references;
            info.ca = TextHash { .hash = hash };

@@ -1338,9 +1331,9 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
 
                std::unique_ptr<AbstractHashSink> hashSink;
                if (!info->ca || !info->references.count(info->path))
-                    hashSink = std::make_unique<HashSink>(info->narHash->type);
+                    hashSink = std::make_unique<HashSink>(info->narHash.type);
                else
-                    hashSink = std::make_unique<HashModuloSink>(info->narHash->type, std::string(info->path.hashPart()));
+                    hashSink = std::make_unique<HashModuloSink>(info->narHash.type, std::string(info->path.hashPart()));
 
                dumpPath(Store::toRealPath(i), *hashSink);
                auto current = hashSink->finish();

@@ -1349,7 +1342,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
                    logError({
                        .name = "Invalid hash - path modified",
                        .hint = hintfmt("path '%s' was modified! expected hash '%s', got '%s'",
-                            printStorePath(i), info->narHash->to_string(Base32, true), current.first.to_string(Base32, true))
+                            printStorePath(i), info->narHash.to_string(Base32, true), current.first.to_string(Base32, true))
                    });
                    if (repair) repairPath(i); else errors = true;
                } else {
@@ -1,6 +1,7 @@
 #include "machines.hh"
 #include "util.hh"
 #include "globals.hh"
+#include "store-api.hh"
 
 #include <algorithm>
 

@@ -48,6 +49,29 @@ bool Machine::mandatoryMet(const std::set<string> & features) const {
    });
 }
 
+ref<Store> Machine::openStore() const {
+    Store::Params storeParams;
+    if (hasPrefix(storeUri, "ssh://")) {
+        storeParams["max-connections"] = "1";
+        storeParams["log-fd"] = "4";
+        if (sshKey != "")
+            storeParams["ssh-key"] = sshKey;
+    }
+    {
+        auto & fs = storeParams["system-features"];
+        auto append = [&](auto feats) {
+            for (auto & f : feats) {
+                if (fs.size() > 0) fs += ' ';
+                fs += f;
+            }
+        };
+        append(supportedFeatures);
+        append(mandatoryFeatures);
+    }
+
+    return nix::openStore(storeUri, storeParams);
+}
+
 void parseMachines(const std::string & s, Machines & machines)
 {
    for (auto line : tokenizeString<std::vector<string>>(s, "\n;")) {
@@ -4,6 +4,8 @@
 
 namespace nix {
 
+class Store;
+
 struct Machine {
 
    const string storeUri;

@@ -28,6 +30,8 @@ struct Machine {
        decltype(supportedFeatures) supportedFeatures,
        decltype(mandatoryFeatures) mandatoryFeatures,
        decltype(sshPublicHostKey) sshPublicHostKey);
+
+    ref<Store> openStore() const;
 };
 
 typedef std::vector<Machine> Machines;
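The Machine::openStore() declared above replaces the hand-rolled Store::Params setup that build-remote used to do (see its hunk earlier in this commit). A minimal, hedged sketch of a caller; connectToMachine is a hypothetical helper:

    // Sketch only: opening and connecting a remote build machine's store.
    #include "machines.hh"
    #include "store-api.hh"

    using namespace nix;

    static ref<Store> connectToMachine(const Machine & m)
    {
        auto store = m.openStore();   // sets ssh:// params and forwards the machine's system features
        store->connect();
        return store;
    }
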
@@ -189,13 +189,14 @@ public:
                return {oInvalid, 0};
 
            auto namePart = queryNAR.getStr(1);
-            auto narInfo = make_ref<NarInfo>(StorePath(hashPart + "-" + namePart));
+            auto narInfo = make_ref<NarInfo>(
+                StorePath(hashPart + "-" + namePart),
+                Hash::parseAnyPrefixed(queryNAR.getStr(6)));
            narInfo->url = queryNAR.getStr(2);
            narInfo->compression = queryNAR.getStr(3);
            if (!queryNAR.isNull(4))
                narInfo->fileHash = Hash::parseAnyPrefixed(queryNAR.getStr(4));
            narInfo->fileSize = queryNAR.getInt(5);
-            narInfo->narHash = Hash::parseAnyPrefixed(queryNAR.getStr(6));
            narInfo->narSize = queryNAR.getInt(7);
            for (auto & r : tokenizeString<Strings>(queryNAR.getStr(8), " "))
                narInfo->references.insert(StorePath(r));

@@ -232,7 +233,7 @@ public:
                (narInfo ? narInfo->compression : "", narInfo != 0)
                (narInfo && narInfo->fileHash ? narInfo->fileHash->to_string(Base32, true) : "", narInfo && narInfo->fileHash)
                (narInfo ? narInfo->fileSize : 0, narInfo != 0 && narInfo->fileSize)
-                (info->narHash->to_string(Base32, true))
+                (info->narHash.to_string(Base32, true))
                (info->narSize)
                (concatStringsSep(" ", info->shortRefs()))
                (info->deriver ? std::string(info->deriver->to_string()) : "", (bool) info->deriver)
@@ -1,10 +1,11 @@
 #include "globals.hh"
 #include "nar-info.hh"
+#include "store-api.hh"

 namespace nix {

 NarInfo::NarInfo(const Store & store, const std::string & s, const std::string & whence)
-    : ValidPathInfo(StorePath(StorePath::dummy)) // FIXME: hack
+    : ValidPathInfo(StorePath(StorePath::dummy), Hash(Hash::dummy)) // FIXME: hack
 {
     auto corrupt = [&]() {
         return Error("NAR info file '%1%' is corrupt", whence);
@@ -19,6 +20,7 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &
     };

     bool havePath = false;
+    bool haveNarHash = false;

     size_t pos = 0;
     while (pos < s.size()) {
@@ -46,8 +48,10 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &
         else if (name == "FileSize") {
             if (!string2Int(value, fileSize)) throw corrupt();
         }
-        else if (name == "NarHash")
+        else if (name == "NarHash") {
             narHash = parseHashField(value);
+            haveNarHash = true;
+        }
         else if (name == "NarSize") {
             if (!string2Int(value, narSize)) throw corrupt();
         }
@@ -76,7 +80,7 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &

     if (compression == "") compression = "bzip2";

-    if (!havePath || url.empty() || narSize == 0 || !narHash) throw corrupt();
+    if (!havePath || !haveNarHash || url.empty() || narSize == 0) throw corrupt();
 }

 std::string NarInfo::to_string(const Store & store) const
@@ -89,8 +93,8 @@ std::string NarInfo::to_string(const Store & store) const
     assert(fileHash && fileHash->type == htSHA256);
     res += "FileHash: " + fileHash->to_string(Base32, true) + "\n";
     res += "FileSize: " + std::to_string(fileSize) + "\n";
-    assert(narHash && narHash->type == htSHA256);
-    res += "NarHash: " + narHash->to_string(Base32, true) + "\n";
+    assert(narHash.type == htSHA256);
+    res += "NarHash: " + narHash.to_string(Base32, true) + "\n";
     res += "NarSize: " + std::to_string(narSize) + "\n";

     res += "References: " + concatStringsSep(" ", shortRefs()) + "\n";

@@ -2,10 +2,12 @@

 #include "types.hh"
 #include "hash.hh"
-#include "store-api.hh"
+#include "path-info.hh"

 namespace nix {

+class Store;
+
 struct NarInfo : ValidPathInfo
 {
     std::string url;
@@ -15,7 +17,7 @@ struct NarInfo : ValidPathInfo
     std::string system;

     NarInfo() = delete;
-    NarInfo(StorePath && path) : ValidPathInfo(std::move(path)) { }
+    NarInfo(StorePath && path, Hash narHash) : ValidPathInfo(std::move(path), narHash) { }
     NarInfo(const ValidPathInfo & info) : ValidPathInfo(info) { }
     NarInfo(const Store & store, const std::string & s, const std::string & whence);

@@ -94,7 +94,7 @@ StringSet ParsedDerivation::getRequiredSystemFeatures() const
     return res;
 }

-bool ParsedDerivation::canBuildLocally() const
+bool ParsedDerivation::canBuildLocally(Store & localStore) const
 {
     if (drv.platform != settings.thisSystem.get()
         && !settings.extraPlatforms.get().count(drv.platform)
@@ -102,14 +102,14 @@ bool ParsedDerivation::canBuildLocally() const
         return false;

     for (auto & feature : getRequiredSystemFeatures())
-        if (!settings.systemFeatures.get().count(feature)) return false;
+        if (!localStore.systemFeatures.get().count(feature)) return false;

     return true;
 }

-bool ParsedDerivation::willBuildLocally() const
+bool ParsedDerivation::willBuildLocally(Store & localStore) const
 {
-    return getBoolAttr("preferLocalBuild") && canBuildLocally();
+    return getBoolAttr("preferLocalBuild") && canBuildLocally(localStore);
 }

 bool ParsedDerivation::substitutesAllowed() const

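The change above makes the "can this build run here?" decision depend on the destination store's own feature set instead of the global settings. A standalone sketch of just that check, with made-up feature names and simplified types (this is not the ParsedDerivation API, only the logic it now applies):

// Standalone sketch of the per-store feature check.
#include <iostream>
#include <set>
#include <string>

static bool featuresSatisfied(
    const std::set<std::string> & required,
    const std::set<std::string> & storeFeatures)
{
    for (auto & feature : required)
        if (!storeFeatures.count(feature)) return false;
    return true;
}

int main()
{
    std::set<std::string> required {"kvm"};                              // hypothetical
    std::set<std::string> localStoreFeatures {"nixos-test", "benchmark"}; // hypothetical
    // Prints "no": the local store does not advertise "kvm", so the build
    // would have to be dispatched to a machine that does.
    std::cout << (featuresSatisfied(required, localStoreFeatures) ? "yes" : "no") << "\n";
}
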
@@ -29,9 +29,9 @@ public:

     StringSet getRequiredSystemFeatures() const;

-    bool canBuildLocally() const;
+    bool canBuildLocally(Store & localStore) const;

-    bool willBuildLocally() const;
+    bool willBuildLocally(Store & localStore) const;

     bool substitutesAllowed() const;
 };

@@ -1,5 +1,6 @@
 #pragma once

+#include "crypto.hh"
 #include "path.hh"
 #include "hash.hh"
 #include "content-address.hh"
@@ -29,7 +30,7 @@ struct ValidPathInfo
     StorePath path;
     std::optional<StorePath> deriver;
     // TODO document this
-    std::optional<Hash> narHash;
+    Hash narHash;
     StorePathSet references;
     time_t registrationTime = 0;
     uint64_t narSize = 0; // 0 = unknown
@@ -100,8 +101,8 @@ struct ValidPathInfo

     ValidPathInfo(const ValidPathInfo & other) = default;

-    ValidPathInfo(StorePath && path) : path(std::move(path)) { };
-    ValidPathInfo(const StorePath & path) : path(path) { };
+    ValidPathInfo(StorePath && path, Hash narHash) : path(std::move(path)), narHash(narHash) { };
+    ValidPathInfo(const StorePath & path, Hash narHash) : path(path), narHash(narHash) { };

     virtual ~ValidPathInfo() { }
 };

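With narHash no longer optional, every ValidPathInfo must be given its NAR hash at construction time rather than patched in afterwards. A small sketch of how a call site adapts, assuming the Nix headers shown in the diff; the helper name, the path argument and the data are illustrative, not part of the commit:

// Sketch only: constructing a ValidPathInfo under the new interface.
#include "path-info.hh"
#include "hash.hh"

using namespace nix;

ValidPathInfo makeInfo(StorePath path, const std::string & narData)
{
    // The NAR hash is now a constructor argument instead of an optional
    // field assigned later.
    Hash narHash = hashString(htSHA256, narData);
    ValidPathInfo info { std::move(path), narHash };
    info.narSize = narData.size();
    return info;
}
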
@@ -425,10 +425,10 @@ void RemoteStore::queryPathInfoUncached(const StorePath & path,
                 bool valid; conn->from >> valid;
                 if (!valid) throw InvalidPath("path '%s' is not valid", printStorePath(path));
             }
-            info = std::make_shared<ValidPathInfo>(StorePath(path));
             auto deriver = readString(conn->from);
+            auto narHash = Hash::parseAny(readString(conn->from), htSHA256);
+            info = std::make_shared<ValidPathInfo>(path, narHash);
             if (deriver != "") info->deriver = parseStorePath(deriver);
-            info->narHash = Hash::parseAny(readString(conn->from), htSHA256);
             info->references = readStorePaths<StorePathSet>(*this, conn->from);
             conn->from >> info->registrationTime >> info->narSize;
             if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 16) {
@@ -527,7 +527,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source,
         conn->to << wopAddToStoreNar
                  << printStorePath(info.path)
                  << (info.deriver ? printStorePath(*info.deriver) : "")
-                 << info.narHash->to_string(Base16, false);
+                 << info.narHash.to_string(Base16, false);
         writeStorePaths(*this, conn->to, info.references);
         conn->to << info.registrationTime << info.narSize
                  << info.ultimate << info.sigs << renderContentAddress(info.ca)

@@ -327,8 +327,10 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
     if (expectedCAHash && expectedCAHash != hash)
         throw Error("hash mismatch for '%s'", srcPath);

-    ValidPathInfo info(makeFixedOutputPath(method, hash, name));
-    info.narHash = narHash;
+    ValidPathInfo info {
+        makeFixedOutputPath(method, hash, name),
+        narHash,
+    };
     info.narSize = narSize;
     info.ca = FixedOutputHash { .method = method, .hash = hash };

@@ -583,7 +585,7 @@ string Store::makeValidityRegistration(const StorePathSet & paths,
         auto info = queryPathInfo(i);

         if (showHash) {
-            s += info->narHash->to_string(Base16, false) + "\n";
+            s += info->narHash.to_string(Base16, false) + "\n";
             s += (format("%1%\n") % info->narSize).str();
         }

@@ -615,7 +617,7 @@ void Store::pathInfoToJSON(JSONPlaceholder & jsonOut, const StorePathSet & store
         auto info = queryPathInfo(storePath);

         jsonPath
-            .attr("narHash", info->narHash->to_string(hashBase, true))
+            .attr("narHash", info->narHash.to_string(hashBase, true))
             .attr("narSize", info->narSize);

         {
@@ -743,20 +745,6 @@ void copyStorePath(ref<Store> srcStore, ref<Store> dstStore,
         info = info2;
     }

-    if (!info->narHash) {
-        StringSink sink;
-        srcStore->narFromPath({storePath}, sink);
-        auto info2 = make_ref<ValidPathInfo>(*info);
-        info2->narHash = hashString(htSHA256, *sink.s);
-        if (!info->narSize) info2->narSize = sink.s->size();
-        if (info->ultimate) info2->ultimate = false;
-        info = info2;
-
-        StringSource source(*sink.s);
-        dstStore->addToStore(*info, source, repair, checkSigs);
-        return;
-    }
-
     if (info->ultimate) {
         auto info2 = make_ref<ValidPathInfo>(*info);
         info2->ultimate = false;
@@ -764,12 +752,12 @@ void copyStorePath(ref<Store> srcStore, ref<Store> dstStore,
     }

     auto source = sinkToSource([&](Sink & sink) {
-        LambdaSink wrapperSink([&](const unsigned char * data, size_t len) {
-            sink(data, len);
+        LambdaSink progressSink([&](const unsigned char * data, size_t len) {
             total += len;
             act.progress(total, info->narSize);
         });
-        srcStore->narFromPath(storePath, wrapperSink);
+        TeeSink tee { sink, progressSink };
+        srcStore->narFromPath(storePath, tee);
     }, [&]() {
         throw EndOfFile("NAR for '%s' fetched from '%s' is incomplete", srcStore->printStorePath(storePath), srcStore->getUri());
     });
@@ -881,19 +869,22 @@ void copyClosure(ref<Store> srcStore, ref<Store> dstStore,
 }


-std::optional<ValidPathInfo> decodeValidPathInfo(const Store & store, std::istream & str, bool hashGiven)
+std::optional<ValidPathInfo> decodeValidPathInfo(const Store & store, std::istream & str, std::optional<HashResult> hashGiven)
 {
     std::string path;
     getline(str, path);
     if (str.eof()) { return {}; }
-    ValidPathInfo info(store.parseStorePath(path));
-    if (hashGiven) {
+    if (!hashGiven) {
         string s;
         getline(str, s);
-        info.narHash = Hash::parseAny(s, htSHA256);
+        auto narHash = Hash::parseAny(s, htSHA256);
         getline(str, s);
-        if (!string2Int(s, info.narSize)) throw Error("number expected");
+        uint64_t narSize;
+        if (!string2Int(s, narSize)) throw Error("number expected");
+        hashGiven = { narHash, narSize };
     }
+    ValidPathInfo info(store.parseStorePath(path), hashGiven->first);
+    info.narSize = hashGiven->second;
     std::string deriver;
     getline(str, deriver);
     if (deriver != "") info.deriver = store.parseStorePath(deriver);
@@ -928,12 +919,12 @@ string showPaths(const PathSet & paths)

 std::string ValidPathInfo::fingerprint(const Store & store) const
 {
-    if (narSize == 0 || !narHash)
-        throw Error("cannot calculate fingerprint of path '%s' because its size/hash is not known",
+    if (narSize == 0)
+        throw Error("cannot calculate fingerprint of path '%s' because its size is not known",
             store.printStorePath(path));
     return
         "1;" + store.printStorePath(path) + ";"
-        + narHash->to_string(Base32, true) + ";"
+        + narHash.to_string(Base32, true) + ";"
         + std::to_string(narSize) + ";"
        + concatStringsSep(",", store.printStorePathSet(references));
 }

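decodeValidPathInfo now takes std::optional<HashResult> (a pair of NAR hash and NAR size) instead of a bool: when the caller passes std::nullopt the hash and size are read from the stream, otherwise the supplied pair is used. A hedged usage sketch, assuming the Nix headers and the signature shown in this diff; the function name readRegistration is invented:

// Sketch only: driving the new decodeValidPathInfo() interface.
#include "store-api.hh"
#include <istream>

using namespace nix;

void readRegistration(Store & store, std::istream & in)
{
    // Pass std::nullopt when the hash/size lines are part of the stream;
    // pass a HashResult when they are already known and absent from it.
    while (auto info = decodeValidPathInfo(store, in, std::nullopt)) {
        // ... use *info (path, narHash, narSize, deriver, references) ...
    }
}
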
@@ -4,7 +4,6 @@
 #include "hash.hh"
 #include "content-address.hh"
 #include "serialise.hh"
-#include "crypto.hh"
 #include "lru-cache.hh"
 #include "sync.hh"
 #include "globals.hh"
@@ -164,6 +163,10 @@ public:

     Setting<bool> wantMassQuery{this, false, "want-mass-query", "whether this substituter can be queried efficiently for path validity"};

+    Setting<StringSet> systemFeatures{this, settings.systemFeatures,
+        "system-features",
+        "Optional features that the system this store builds on implements (like \"kvm\")."};
+
 protected:

     struct PathInfoCacheValue {
@@ -769,7 +772,7 @@ string showPaths(const PathSet & paths);
 std::optional<ValidPathInfo> decodeValidPathInfo(
     const Store & store,
     std::istream & str,
-    bool hashGiven = false);
+    std::optional<HashResult> hashGiven = std::nullopt);

 /* Split URI into protocol+hierarchy part and its parameter set. */
 std::pair<std::string, Store::Params> splitUriAndParams(const std::string & uri);

@@ -366,11 +366,7 @@ void copyNAR(Source & source, Sink & sink)

     ParseSink parseSink; /* null sink; just parse the NAR */

-    LambdaSource wrapper([&](unsigned char * data, size_t len) {
-        auto n = source.read(data, len);
-        sink(data, n);
-        return n;
-    });
+    TeeSource wrapper { source, sink };

     parseDump(parseSink, wrapper);
 }

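The hand-rolled LambdaSource above is replaced by a tee source: every byte read from the underlying source is also forwarded to a sink, so the NAR can be parsed and copied in one pass. A standalone illustration of that idea with simplified stand-in types (these are not the Nix Source/Sink classes):

// Standalone sketch of a "tee source".
#include <algorithm>
#include <cstddef>
#include <functional>
#include <iostream>
#include <string>

using Sink = std::function<void(const char *, size_t)>;

struct StringSource
{
    const std::string & data;
    size_t pos = 0;
    size_t read(char * buf, size_t len)
    {
        size_t n = std::min(len, data.size() - pos);
        std::copy(data.begin() + pos, data.begin() + pos + n, buf);
        pos += n;
        return n;
    }
};

struct TeeSource
{
    StringSource & orig;
    Sink & sink;
    size_t read(char * buf, size_t len)
    {
        auto n = orig.read(buf, len);
        sink(buf, n);   // forward whatever was just read
        return n;
    }
};

int main()
{
    std::string payload = "example NAR bytes";   // hypothetical data
    std::string copied;
    Sink sink = [&](const char * data, size_t len) { copied.append(data, len); };

    StringSource src { payload };
    TeeSource tee { src, sink };

    char buf[8];
    while (tee.read(buf, sizeof(buf)) > 0) { /* a parser would consume buf here */ }

    std::cout << (copied == payload ? "copied" : "mismatch") << "\n";
}
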
@@ -136,6 +136,8 @@ std::string Hash::to_string(Base base, bool includeType) const
     return s;
 }

+Hash Hash::dummy(htSHA256);
+
 Hash Hash::parseSRI(std::string_view original) {
     auto rest = original;

@@ -59,9 +59,6 @@ private:
     Hash(std::string_view s, HashType type, bool isSRI);

 public:
-    /* Check whether a hash is set. */
-    operator bool () const { return (bool) type; }
-
     /* Check whether two hash are equal. */
     bool operator == (const Hash & h2) const;

@@ -105,6 +102,8 @@ public:
        assert(type == htSHA1);
        return std::string(to_string(Base16, false), 0, 7);
     }

+    static Hash dummy;
+
 };

 /* Helper that defaults empty hashes to the 0 hash. */

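Removing operator bool() means a Hash can no longer represent "no hash"; callers that need that state now spell it out with std::optional<Hash>, and Hash::dummy is available as an explicit placeholder. A short sketch of the caller-side difference, assuming the Nix hash.hh header; the variable names are illustrative:

// Sketch only: "maybe a hash" after the change.
#include "hash.hh"
#include <optional>

using namespace nix;

void example()
{
    // Before: an unset Hash could be tested with `if (h)`.
    // Now the absence of a hash is expressed with std::optional:
    std::optional<Hash> fileHash;             // no hash known yet
    Hash narHash = hashString(htSHA256, "");  // a Hash is always a real hash

    if (fileHash) { /* use *fileHash */ }

    // Hash::dummy is an explicit placeholder for code paths that must
    // supply some hash before the real one is known.
    Hash placeholder = Hash::dummy;
    (void) narHash; (void) placeholder;
}
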
@@ -231,6 +231,17 @@ struct SizedSource : Source
     }
 };

+/* A sink that that just counts the number of bytes given to it */
+struct LengthSink : Sink
+{
+    uint64_t length = 0;
+
+    virtual void operator () (const unsigned char * _, size_t len)
+    {
+        length += len;
+    }
+};
+
 /* Convert a function into a sink. */
 struct LambdaSink : Sink
 {

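The new LengthSink lets callers measure how large a serialisation would be without buffering it anywhere. A hedged usage sketch, assuming the Nix serialise.hh and archive.hh headers; the path is made up:

// Sketch only: counting NAR bytes with a LengthSink.
#include "serialise.hh"
#include "archive.hh"
#include <iostream>

using namespace nix;

int main()
{
    LengthSink counter;
    // Serialise a directory as a NAR, but only count the bytes instead of
    // storing them.
    dumpPath("/tmp/some-example-path", counter);
    std::cout << "NAR size would be " << counter.length << " bytes\n";
}
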
@@ -239,7 +239,15 @@ static void daemonLoop(char * * argv)
                 // Handle the connection.
                 FdSource from(remote.get());
                 FdSink to(remote.get());
-                processConnection(openUncachedStore(), from, to, trusted, NotRecursive, user, peer.uid);
+                processConnection(openUncachedStore(), from, to, trusted, NotRecursive, [&](Store & store) {
+#if 0
+                    /* Prevent users from doing something very dangerous. */
+                    if (geteuid() == 0 &&
+                        querySetting("build-users-group", "") == "")
+                        throw Error("if you run 'nix-daemon' as root, then you MUST set 'build-users-group'!");
+#endif
+                    store.createUser(user, peer.uid);
+                });

                 exit(0);
             }, options);
@@ -324,7 +332,10 @@ static int _main(int argc, char * * argv)
         } else {
             FdSource from(STDIN_FILENO);
             FdSink to(STDOUT_FILENO);
-            processConnection(openUncachedStore(), from, to, Trusted, NotRecursive, "root", 0);
+            /* Auth hook is empty because in this mode we blindly trust the
+               standard streams. Limitting access to thoses is explicitly
+               not `nix-daemon`'s responsibility. */
+            processConnection(openUncachedStore(), from, to, Trusted, NotRecursive, [&](Store & _){});
         }
     } else {
         daemonLoop(argv);

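Instead of a user name and uid, processConnection now receives a callback that runs against the opened Store, so the caller decides whether and how the peer gets registered. A standalone sketch of that callback shape with simplified types (this is not the actual processConnection declaration, and the user/uid values are hypothetical):

// Standalone sketch of the auth-hook pattern.
#include <functional>
#include <iostream>
#include <string>

struct Store
{
    void createUser(const std::string & user, int uid)
    {
        std::cout << "registering " << user << " (uid " << uid << ")\n";
    }
};

using AuthHook = std::function<void(Store &)>;

void processConnectionLike(Store & store, AuthHook authHook)
{
    // ... protocol handshake would happen here ...
    authHook(store);   // caller-supplied policy runs against the open store
    // ... serve the connection ...
}

int main()
{
    Store store;
    std::string user = "alice"; int uid = 1000;   // hypothetical peer identity
    processConnectionLike(store, [&](Store & s) { s.createUser(user, uid); });

    // The stdin/stdout mode passes an empty hook instead:
    processConnectionLike(store, [](Store &) {});
}
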
@@ -222,11 +222,10 @@ static StorePathSet maybeUseOutputs(const StorePath & storePath, bool useOutput,
     StorePathSet outputs;
     if (forceRealise)
         return store->queryDerivationOutputs(storePath);
-    for (auto & i : drv.outputs) {
-        auto optPath = i.second.pathOpt(*store, drv.name);
-        if (!optPath)
+    for (auto & i : drv.outputsAndOptPaths(*store)) {
+        if (!i.second.second)
             throw UsageError("Cannot use output path of floating content-addressed derivation until we know what it is (e.g. by building it)");
-        outputs.insert(*optPath);
+        outputs.insert(*i.second.second);
     }
     return outputs;
 }
@@ -378,8 +377,8 @@ static void opQuery(Strings opFlags, Strings opArgs)
                 for (auto & j : maybeUseOutputs(store->followLinksToStorePath(i), useOutput, forceRealise)) {
                     auto info = store->queryPathInfo(j);
                     if (query == qHash) {
-                        assert(info->narHash && info->narHash->type == htSHA256);
-                        cout << fmt("%s\n", info->narHash->to_string(Base32, true));
+                        assert(info->narHash.type == htSHA256);
+                        cout << fmt("%s\n", info->narHash.to_string(Base32, true));
                     } else if (query == qSize)
                         cout << fmt("%d\n", info->narSize);
                 }
@@ -501,7 +500,10 @@ static void registerValidity(bool reregister, bool hashGiven, bool canonicalise)
     ValidPathInfos infos;

     while (1) {
-        auto info = decodeValidPathInfo(*store, cin, hashGiven);
+        // We use a dummy value because we'll set it below. FIXME be correct by
+        // construction and avoid dummy value.
+        auto hashResultOpt = !hashGiven ? std::optional<HashResult> { {Hash::dummy, -1} } : std::nullopt;
+        auto info = decodeValidPathInfo(*store, cin, hashResultOpt);
         if (!info) break;
         if (!store->isValidPath(info->path) || reregister) {
             /* !!! races */
@@ -729,7 +731,7 @@ static void opVerifyPath(Strings opFlags, Strings opArgs)
         auto path = store->followLinksToStorePath(i);
         printMsg(lvlTalkative, "checking path '%s'...", store->printStorePath(path));
         auto info = store->queryPathInfo(path);
-        HashSink sink(info->narHash->type);
+        HashSink sink(info->narHash.type);
         store->narFromPath(path, sink);
         auto current = sink.finish();
         if (current.first != info->narHash) {
@@ -738,7 +740,7 @@ static void opVerifyPath(Strings opFlags, Strings opArgs)
                 .hint = hintfmt(
                     "path '%s' was modified! expected hash '%s', got '%s'",
                     store->printStorePath(path),
-                    info->narHash->to_string(Base32, true),
+                    info->narHash.to_string(Base32, true),
                     current.first.to_string(Base32, true))
             });
             status = 1;
@@ -868,7 +870,7 @@ static void opServe(Strings opFlags, Strings opArgs)
                     out << info->narSize // downloadSize
                         << info->narSize;
                     if (GET_PROTOCOL_MINOR(clientVersion) >= 4)
-                        out << (info->narHash ? info->narHash->to_string(Base32, true) : "")
+                        out << info->narHash.to_string(Base32, true)
                             << renderContentAddress(info->ca)
                             << info->sigs;
                 } catch (InvalidPath &) {
@@ -950,11 +952,13 @@ static void opServe(Strings opFlags, Strings opArgs)
                 if (!writeAllowed) throw Error("importing paths is not allowed");

                 auto path = readString(in);
-                ValidPathInfo info(store->parseStorePath(path));
                 auto deriver = readString(in);
+                ValidPathInfo info {
+                    store->parseStorePath(path),
+                    Hash::parseAny(readString(in), htSHA256),
+                };
                 if (deriver != "")
                     info.deriver = store->parseStorePath(deriver);
-                info.narHash = Hash::parseAny(readString(in), htSHA256);
                 info.references = readStorePaths<StorePathSet>(*store, in);
                 in >> info.registrationTime >> info.narSize >> info.ultimate;
                 info.sigs = readStrings<StringSet>(in);

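The loop above switches from drv.outputs to drv.outputsAndOptPaths(*store), whose values pair an output description with an optional store path (absent for floating content-addressed outputs), hence the i.second.second accesses. A standalone sketch of that shape with stand-in types (these are not the Nix definitions, and the paths are made up):

// Standalone sketch of the (description, optional path) pairing.
#include <iostream>
#include <map>
#include <optional>
#include <string>

struct OutputDesc { std::string hashAlgo; };   // stands in for DerivationOutput
using StorePath = std::string;                 // stands in for nix::StorePath

int main()
{
    std::map<std::string, std::pair<OutputDesc, std::optional<StorePath>>> outputs {
        { "out", { {}, StorePath("/nix/store/aaaa-example") } },   // path known
        { "dev", { {"sha256"}, std::nullopt } },                   // path not yet known
    };

    for (auto & i : outputs) {
        if (!i.second.second) {
            std::cout << i.first << ": path not known until built\n";
            continue;
        }
        std::cout << i.first << " -> " << *i.second.second << "\n";
    }
}
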
@@ -60,8 +60,10 @@ struct CmdAddToStore : MixDryRun, StoreCommand
             hash = hsink.finish().first;
         }

-        ValidPathInfo info(store->makeFixedOutputPath(ingestionMethod, hash, *namePart));
-        info.narHash = narHash;
+        ValidPathInfo info {
+            store->makeFixedOutputPath(ingestionMethod, hash, *namePart),
+            narHash,
+        };
         info.narSize = sink.s->size();
         info.ca = std::optional { FixedOutputHash {
             .method = ingestionMethod,

@@ -304,8 +304,8 @@ struct InstallableStorePath : Installable
         if (storePath.isDerivation()) {
             std::map<std::string, std::optional<StorePath>> outputs;
             auto drv = store->readDerivation(storePath);
-            for (auto & [name, output] : drv.outputs)
-                outputs.emplace(name, output.pathOpt(*store, drv.name));
+            for (auto & [name, output] : drv.outputsAndOptPaths(*store))
+                outputs.emplace(name, output.second);
             return {
                 BuildableFromDrv {
                     .drvPath = storePath,

@@ -77,14 +77,16 @@ struct CmdMakeContentAddressable : StorePathsCommand, MixJSON

             auto narHash = hashModuloSink.finish().first;

-            ValidPathInfo info(store->makeFixedOutputPath(FileIngestionMethod::Recursive, narHash, path.name(), references, hasSelfReference));
+            ValidPathInfo info {
+                store->makeFixedOutputPath(FileIngestionMethod::Recursive, narHash, path.name(), references, hasSelfReference),
+                narHash,
+            };
             info.references = std::move(references);
             if (hasSelfReference) info.references.insert(info.path);
-            info.narHash = narHash;
             info.narSize = sink.s->size();
             info.ca = FixedOutputHash {
                 .method = FileIngestionMethod::Recursive,
-                .hash = *info.narHash,
+                .hash = info.narHash,
             };

             if (!json)

@@ -129,11 +129,13 @@ struct ProfileManifest

         auto narHash = hashString(htSHA256, *sink.s);

-        ValidPathInfo info(store->makeFixedOutputPath(FileIngestionMethod::Recursive, narHash, "profile", references));
+        ValidPathInfo info {
+            store->makeFixedOutputPath(FileIngestionMethod::Recursive, narHash, "profile", references),
+            narHash,
+        };
         info.references = std::move(references);
-        info.narHash = narHash;
         info.narSize = sink.s->size();
-        info.ca = FixedOutputHash { .method = FileIngestionMethod::Recursive, .hash = *info.narHash };
+        info.ca = FixedOutputHash { .method = FileIngestionMethod::Recursive, .hash = info.narHash };

         auto source = StringSource { *sink.s };
         store->addToStore(info, source);

@@ -67,21 +67,21 @@ struct CmdShowDerivation : InstallablesCommand

         {
             auto outputsObj(drvObj.object("outputs"));
-            for (auto & output : drv.outputs) {
-                auto outputObj(outputsObj.object(output.first));
+            for (auto & [outputName, output] : drv.outputs) {
+                auto outputObj { outputsObj.object(outputName) };
                 std::visit(overloaded {
                     [&](DerivationOutputInputAddressed doi) {
                         outputObj.attr("path", store->printStorePath(doi.path));
                     },
                     [&](DerivationOutputCAFixed dof) {
-                        outputObj.attr("path", store->printStorePath(dof.path(*store, drv.name, output.first)));
+                        outputObj.attr("path", store->printStorePath(dof.path(*store, drv.name, outputName)));
                         outputObj.attr("hashAlgo", dof.hash.printMethodAlgo());
                         outputObj.attr("hash", dof.hash.hash.to_string(Base16, false));
                     },
                     [&](DerivationOutputCAFloating dof) {
                         outputObj.attr("hashAlgo", makeFileIngestionPrefix(dof.method) + printHashType(dof.hashType));
                     },
-                }, output.second.output);
+                }, output.output);
             }
         }

@@ -91,15 +91,15 @@ struct CmdVerify : StorePathsCommand

                 std::unique_ptr<AbstractHashSink> hashSink;
                 if (!info->ca)
-                    hashSink = std::make_unique<HashSink>(info->narHash->type);
+                    hashSink = std::make_unique<HashSink>(info->narHash.type);
                 else
-                    hashSink = std::make_unique<HashModuloSink>(info->narHash->type, std::string(info->path.hashPart()));
+                    hashSink = std::make_unique<HashModuloSink>(info->narHash.type, std::string(info->path.hashPart()));

                 store->narFromPath(info->path, *hashSink);

                 auto hash = hashSink->finish();

-                if (hash.first != *info->narHash) {
+                if (hash.first != info->narHash) {
                     corrupted++;
                     act2.result(resCorruptedPath, store->printStorePath(info->path));
                     logError({
@@ -107,7 +107,7 @@ struct CmdVerify : StorePathsCommand
                         .hint = hintfmt(
                             "path '%s' was modified! expected hash '%s', got '%s'",
                             store->printStorePath(info->path),
-                            info->narHash->to_string(Base32, true),
+                            info->narHash.to_string(Base32, true),
                             hash.first.to_string(Base32, true))
                     });
                 }

@@ -23,6 +23,7 @@ let
       shell = busybox;
       name = "build-remote-input-2";
       buildCommand = "echo BAR > $out";
+      requiredSystemFeatures = ["bar"];
     };

 in
@@ -34,6 +35,6 @@ in
     ''
       read x < ${input1}
       read y < ${input2}
-      echo $x$y > $out
+      echo "$x $y" > $out
     '';
 }

@@ -1,31 +1,36 @@
 source common.sh

-clearStore
-
 if ! canUseSandbox; then exit; fi
 if ! [[ $busybox =~ busybox ]]; then exit; fi

-chmod -R u+w $TEST_ROOT/machine0 || true
-chmod -R u+w $TEST_ROOT/machine1 || true
-chmod -R u+w $TEST_ROOT/machine2 || true
-rm -rf $TEST_ROOT/machine0 $TEST_ROOT/machine1 $TEST_ROOT/machine2
-rm -f $TEST_ROOT/result
-
 unset NIX_STORE_DIR
 unset NIX_STATE_DIR

+function join_by { local d=$1; shift; echo -n "$1"; shift; printf "%s" "${@/#/$d}"; }
+
+builders=(
+  # system-features will automatically be added to the outer URL, but not inner
+  # remote-store URL.
+  "ssh://localhost?remote-store=$TEST_ROOT/machine1?system-features=foo - - 1 1 foo"
+  "$TEST_ROOT/machine2 - - 1 1 bar"
+)
+
 # Note: ssh://localhost bypasses ssh, directly invoking nix-store as a
 # child process. This allows us to test LegacySSHStore::buildDerivation().
+# ssh-ng://... likewise allows us to test RemoteStore::buildDerivation().
 nix build -L -v -f build-hook.nix -o $TEST_ROOT/result --max-jobs 0 \
   --arg busybox $busybox \
   --store $TEST_ROOT/machine0 \
-  --builders "ssh://localhost?remote-store=$TEST_ROOT/machine1; $TEST_ROOT/machine2 - - 1 1 foo" \
-  --system-features foo
+  --builders "$(join_by '; ' "${builders[@]}")"

 outPath=$(readlink -f $TEST_ROOT/result)

-cat $TEST_ROOT/machine0/$outPath | grep FOOBAR
+grep 'FOO BAR' $TEST_ROOT/machine0/$outPath

-# Ensure that input1 was built on store2 due to the required feature.
-(! nix path-info --store $TEST_ROOT/machine1 --all | grep builder-build-remote-input-1.sh)
-nix path-info --store $TEST_ROOT/machine2 --all | grep builder-build-remote-input-1.sh
+# Ensure that input1 was built on store1 due to the required feature.
+(! nix path-info --store $TEST_ROOT/machine2 --all | grep builder-build-remote-input-1.sh)
+nix path-info --store $TEST_ROOT/machine1 --all | grep builder-build-remote-input-1.sh
+
+# Ensure that input2 was built on store2 due to the required feature.
+(! nix path-info --store $TEST_ROOT/machine1 --all | grep builder-build-remote-input-2.sh)
+nix path-info --store $TEST_ROOT/machine2 --all | grep builder-build-remote-input-2.sh

@@ -1,5 +1,5 @@
 nix_tests = \
-  init.sh hash.sh lang.sh add.sh simple.sh dependencies.sh \
+  hash.sh lang.sh add.sh simple.sh dependencies.sh \
   config.sh \
   gc.sh \
   gc-concurrent.sh \

@@ -2,6 +2,9 @@ source common.sh

 clearStore

+# Ensure "fake ssh" remote store works just as legacy fake ssh would.
+nix --store ssh-ng://localhost?remote-store=$TEST_ROOT/other-store doctor
+
 startDaemon

 storeCleared=1 NIX_REMOTE_=$NIX_REMOTE $SHELL ./user-envs.sh