Merge branch 'hash-always-has-type' of github.com:obsidiansystems/nix into better-ca-parse-errors

John Ericson 2020-06-29 18:40:34 +00:00
commit a83566e5bc
43 changed files with 320 additions and 217 deletions

.github/dependabot.yml (new file)
@ -0,0 +1,6 @@
version: 2
updates:
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "weekly"

@ -10,5 +10,5 @@ jobs:
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v2
- uses: cachix/install-nix-action@v8
- uses: cachix/install-nix-action@v10
- run: nix-build release.nix --arg nix '{ outPath = ./.; revCount = 123; shortRev = "abcdefgh"; }' --arg systems '[ builtins.currentSystem ]' -A installerScript -A perlBindings

@ -97,7 +97,7 @@ $ rm -rf /nix
installation on your system:
</para>
<screen>sh &lt;(curl https://nixos.org/nix/install) --daemon</screen>
<screen>sh &lt;(curl -L https://nixos.org/nix/install) --daemon</screen>
<para>
The multi-user installation of Nix will create build users between
@ -178,7 +178,7 @@ sudo rm /Library/LaunchDaemons/org.nixos.nix-daemon.plist
is a bit of a misnomer). To use this approach, just install Nix with:
</para>
<screen>$ sh &lt;(curl https://nixos.org/nix/install) --darwin-use-unencrypted-nix-store-volume</screen>
<screen>$ sh &lt;(curl -L https://nixos.org/nix/install) --darwin-use-unencrypted-nix-store-volume</screen>
<para>
If you don't like the sound of this, you'll want to weigh the
@ -429,7 +429,7 @@ LABEL=Nix\040Store /nix apfs rw,nobrowse
NixOS.org installation script:
<screen>
sh &lt;(curl https://nixos.org/nix/install)
sh &lt;(curl -L https://nixos.org/nix/install)
</screen>
</para>

@ -80,7 +80,7 @@ SV * queryReferences(char * path)
SV * queryPathHash(char * path)
PPCODE:
try {
auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash.to_string(Base32, true);
auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash->to_string(Base32, true);
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
} catch (Error & e) {
croak("%s", e.what());
@ -106,7 +106,7 @@ SV * queryPathInfo(char * path, int base32)
XPUSHs(&PL_sv_undef);
else
XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(*info->deriver).c_str(), 0)));
auto s = info->narHash.to_string(base32 ? Base32 : Base16, true);
auto s = info->narHash->to_string(base32 ? Base32 : Base16, true);
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
mXPUSHi(info->registrationTime);
mXPUSHi(info->narSize);

@ -526,7 +526,7 @@ This script is going to call sudo a lot. Normally, it would show you
exactly what commands it is running and why. However, the script is
run in a headless fashion, like this:
$ curl https://nixos.org/nix/install | sh
$ curl -L https://nixos.org/nix/install | sh
or maybe in a CI pipeline. Because of that, we're going to skip the
verbose output in the interest of brevity.
@ -534,7 +534,7 @@ verbose output in the interest of brevity.
If you would like to
see the output, try like this:
$ curl -o install-nix https://nixos.org/nix/install
$ curl -L -o install-nix https://nixos.org/nix/install
$ sh ./install-nix
EOF

@ -113,7 +113,7 @@ if [ "$(uname -s)" = "Darwin" ]; then
(
echo ""
echo "Installing on macOS >=10.15 requires relocating the store to an apfs volume."
echo "Use sh <(curl https://nixos.org/nix/install) --darwin-use-unencrypted-nix-store-volume or run the preparation steps manually."
echo "Use sh <(curl -L https://nixos.org/nix/install) --darwin-use-unencrypted-nix-store-volume or run the preparation steps manually."
echo "See https://nixos.org/nix/manual/#sect-macos-installation"
echo ""
) >&2

@ -1122,7 +1122,7 @@ static void prim_toFile(EvalState & state, const Pos & pos, Value * * args, Valu
static void addPath(EvalState & state, const Pos & pos, const string & name, const Path & path_,
Value * filterFun, FileIngestionMethod method, const Hash & expectedHash, Value & v)
Value * filterFun, FileIngestionMethod method, const std::optional<Hash> expectedHash, Value & v)
{
const auto path = evalSettings.pureEval && expectedHash ?
path_ :
@ -1153,7 +1153,7 @@ static void addPath(EvalState & state, const Pos & pos, const string & name, con
std::optional<StorePath> expectedStorePath;
if (expectedHash)
expectedStorePath = state.store->makeFixedOutputPath(method, expectedHash, name);
expectedStorePath = state.store->makeFixedOutputPath(method, *expectedHash, name);
Path dstPath;
if (!expectedHash || !state.store->isValidPath(*expectedStorePath)) {
dstPath = state.store->printStorePath(settings.readOnlyMode
@ -1187,7 +1187,7 @@ static void prim_filterSource(EvalState & state, const Pos & pos, Value * * args
.nixCode = NixCode { .errPos = pos }
});
addPath(state, pos, std::string(baseNameOf(path)), path, args[0], FileIngestionMethod::Recursive, Hash(), v);
addPath(state, pos, std::string(baseNameOf(path)), path, args[0], FileIngestionMethod::Recursive, std::nullopt, v);
}
static void prim_path(EvalState & state, const Pos & pos, Value * * args, Value & v)
@ -1197,7 +1197,7 @@ static void prim_path(EvalState & state, const Pos & pos, Value * * args, Value
string name;
Value * filterFun = nullptr;
auto method = FileIngestionMethod::Recursive;
Hash expectedHash;
Hash expectedHash(htSHA256);
for (auto & attr : *args[0]->attrs) {
const string & n(attr.name);

@ -23,7 +23,7 @@ void emitTreeAttrs(
assert(tree.info.narHash);
mkString(*state.allocAttr(v, state.symbols.create("narHash")),
tree.info.narHash.to_string(SRI, true));
tree.info.narHash->to_string(SRI, true));
if (input->getRev()) {
mkString(*state.allocAttr(v, state.symbols.create("rev")), input->getRev()->gitRev());
@ -147,7 +147,7 @@ static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
: hashFile(htSHA256, path);
if (hash != *expectedHash)
throw Error((unsigned int) 102, "hash mismatch in file downloaded from '%s':\n wanted: %s\n got: %s",
*url, expectedHash->to_string(Base32, true), hash.to_string(Base32, true));
*url, expectedHash->to_string(Base32, true), hash->to_string(Base32, true));
}
if (state.allowedPaths)

@ -8,7 +8,7 @@ namespace nix::fetchers {
StorePath TreeInfo::computeStorePath(Store & store) const
{
assert(narHash);
return store.makeFixedOutputPath(FileIngestionMethod::Recursive, narHash, "source");
return store.makeFixedOutputPath(FileIngestionMethod::Recursive, *narHash, "source");
}
}

@ -11,7 +11,7 @@ namespace nix::fetchers {
struct TreeInfo
{
Hash narHash;
std::optional<Hash> narHash;
std::optional<uint64_t> revCount;
std::optional<time_t> lastModified;

@ -181,7 +181,7 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource
((1.0 - (double) narCompressed->size() / nar->size()) * 100.0),
duration);
narInfo->url = "nar/" + narInfo->fileHash.to_string(Base32, false) + ".nar"
narInfo->url = "nar/" + narInfo->fileHash->to_string(Base32, false) + ".nar"
+ (compression == "xz" ? ".xz" :
compression == "bzip2" ? ".bz2" :
compression == "br" ? ".br" :
@ -338,7 +338,7 @@ StorePath BinaryCacheStore::addToStore(const string & name, const Path & srcPath
method for very large paths, but `copyPath' is mainly used for
small files. */
StringSink sink;
Hash h;
std::optional<Hash> h;
if (method == FileIngestionMethod::Recursive) {
dumpPath(srcPath, sink, filter);
h = hashString(hashAlgo, *sink.s);
@ -348,7 +348,7 @@ StorePath BinaryCacheStore::addToStore(const string & name, const Path & srcPath
h = hashString(hashAlgo, s);
}
ValidPathInfo info(makeFixedOutputPath(method, h, name));
ValidPathInfo info(makeFixedOutputPath(method, *h, name));
auto source = StringSource { *sink.s };
addToStore(info, source, repair, CheckSigs, nullptr);

@ -1950,8 +1950,11 @@ void linkOrCopy(const Path & from, const Path & to)
/* Hard-linking fails if we exceed the maximum link count on a
file (e.g. 32000 of ext3), which is quite possible after a
'nix-store --optimise'. FIXME: actually, why don't we just
bind-mount in this case? */
if (errno != EMLINK)
bind-mount in this case?
It can also fail with EPERM in BeegFS v7 and earlier versions,
which don't allow hard links to other directories. */
if (errno != EMLINK && errno != EPERM)
throw SysError("linking '%s' to '%s'", to, from);
copyPath(from, to);
}
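
The comment above describes the fallback in linkOrCopy: when link(2) fails with EMLINK (too many links, e.g. ext3's limit of 32000) or, on BeegFS v7 and earlier, with EPERM, the path is copied instead of linked. A minimal standalone sketch of that pattern, with std::filesystem::copy standing in for Nix's copyPath and a hypothetical function name:

    #include <cerrno>
    #include <filesystem>
    #include <system_error>
    #include <unistd.h>

    // Hypothetical stand-in for Nix's linkOrCopy(): prefer a hard link,
    // fall back to copying when the filesystem refuses to link.
    void fallbackLinkOrCopy(const std::filesystem::path & from, const std::filesystem::path & to)
    {
        if (link(from.c_str(), to.c_str()) == 0) return;
        // EMLINK: too many links to 'from'.  EPERM: BeegFS <= v7 refuses
        // hard links into other directories.
        if (errno != EMLINK && errno != EPERM)
            throw std::system_error(errno, std::generic_category(), "linking failed");
        std::filesystem::copy(from, to, std::filesystem::copy_options::recursive);
    }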
@ -3730,8 +3733,8 @@ void DerivationGoal::registerOutputs()
/* Check the hash. In hash mode, move the path produced by
the derivation to its content-addressed location. */
Hash h2 = i.second.hash->method == FileIngestionMethod::Recursive
? hashPath(*i.second.hash->hash.type, actualPath).first
: hashFile(*i.second.hash->hash.type, actualPath);
? hashPath(i.second.hash->hash.type, actualPath).first
: hashFile(i.second.hash->hash.type, actualPath);
auto dest = worker.store.makeFixedOutputPath(i.second.hash->method, h2, i.second.path.name());
@ -3780,8 +3783,10 @@ void DerivationGoal::registerOutputs()
time. The hash is stored in the database so that we can
verify later on whether nobody has messed with the store. */
debug("scanning for references inside '%1%'", path);
HashResult hash;
auto references = worker.store.parseStorePathSet(scanForReferences(actualPath, worker.store.printStorePathSet(referenceablePaths), hash));
// HashResult hash;
auto pathSetAndHash = scanForReferences(actualPath, worker.store.printStorePathSet(referenceablePaths));
auto references = worker.store.parseStorePathSet(pathSetAndHash.first);
HashResult hash = pathSetAndHash.second;
if (buildMode == bmCheck) {
if (!worker.store.isValidPath(worker.store.parseStorePath(path))) continue;
@ -5003,7 +5008,7 @@ bool Worker::pathContentsGood(const StorePath & path)
if (!pathExists(store.printStorePath(path)))
res = false;
else {
HashResult current = hashPath(*info->narHash.type, store.printStorePath(path));
HashResult current = hashPath(info->narHash->type, store.printStorePath(path));
Hash nullHash(htSHA256);
res = info->narHash == nullHash || info->narHash == current.first;
}

@ -58,14 +58,17 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData)
}
};
/* We always have one output, and if it's a fixed-output derivation (as
checked below) it must be the only output */
auto & output = drv.outputs.begin()->second;
/* Try the hashed mirrors first. */
if (getAttr("outputHashMode") == "flat")
if (output.hash && output.hash->method == FileIngestionMethod::Flat)
for (auto hashedMirror : settings.hashedMirrors.get())
try {
if (!hasSuffix(hashedMirror, "/")) hashedMirror += '/';
auto ht = parseHashTypeOpt(getAttr("outputHashAlgo"));
auto h = Hash(getAttr("outputHash"), ht);
fetch(hashedMirror + printHashType(*h.type) + "/" + h.to_string(Base16, false));
auto & h = output.hash->hash;
fetch(hashedMirror + printHashType(h.type) + "/" + h.to_string(Base16, false));
return;
} catch (Error & e) {
debug(e.what());
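
For orientation, the hashed-mirror URL assembled above is just the printed hash type and the base-16 hash appended to the mirror prefix. A rough standalone sketch of that string construction (the mirror and hash values are illustrative only, not taken from this commit; the real list comes from settings.hashedMirrors):

    #include <iostream>
    #include <string>

    int main()
    {
        std::string hashedMirror = "https://tarballs.nixos.org"; // example mirror only
        std::string typeName = "sha256";       // printHashType(h.type)
        std::string base16 = "1b2c...";        // h.to_string(Base16, false), truncated here
        if (hashedMirror.back() != '/') hashedMirror += '/';
        std::cout << hashedMirror + typeName + "/" + base16 << "\n";
        // prints: https://tarballs.nixos.org/sha256/1b2c...
    }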

@ -4,7 +4,7 @@
namespace nix {
std::string FixedOutputHash::printMethodAlgo() const {
return makeFileIngestionPrefix(method) + printHashType(*hash.type);
return makeFileIngestionPrefix(method) + printHashType(hash.type);
}
std::string makeFileIngestionPrefix(const FileIngestionMethod m) {

@ -314,7 +314,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
logger->startWork();
auto hash = store->queryPathInfo(path)->narHash;
logger->stopWork();
to << hash.to_string(Base16, false);
to << hash->to_string(Base16, false);
break;
}
@ -646,7 +646,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
if (GET_PROTOCOL_MINOR(clientVersion) >= 17)
to << 1;
to << (info->deriver ? store->printStorePath(*info->deriver) : "")
<< info->narHash.to_string(Base16, false);
<< info->narHash->to_string(Base16, false);
writeStorePaths(*store, to, info->references);
to << info->registrationTime << info->narSize;
if (GET_PROTOCOL_MINOR(clientVersion) >= 16) {

@ -404,7 +404,7 @@ static DerivationOutput readDerivationOutput(Source & in, const Store & store)
{
auto path = store.parseStorePath(readString(in));
auto hashAlgo = readString(in);
const auto hash = readString(in);
auto hash = readString(in);
std::optional<FixedOutputHash> fsh;
if (hashAlgo != "") {
@ -413,7 +413,7 @@ static DerivationOutput readDerivationOutput(Source & in, const Store & store)
method = FileIngestionMethod::Recursive;
hashAlgo = string(hashAlgo, 2);
}
const HashType hashType = parseHashType(hashAlgo);
auto hashType = parseHashType(hashAlgo);
fsh = FixedOutputHash {
.method = std::move(method),
.hash = Hash(hash, hashType),
@ -463,11 +463,16 @@ Source & readDerivation(Source & in, const Store & store, BasicDerivation & drv)
void writeDerivation(Sink & out, const Store & store, const BasicDerivation & drv)
{
out << drv.outputs.size();
for (auto & i : drv.outputs)
for (auto & i : drv.outputs) {
out << i.first
<< store.printStorePath(i.second.path)
<< i.second.hash->printMethodAlgo()
<< i.second.hash->hash.to_string(Base16, false);
<< store.printStorePath(i.second.path);
if (i.second.hash) {
out << i.second.hash->printMethodAlgo()
<< i.second.hash->hash.to_string(Base16, false);
} else {
out << "" << "";
}
}
writeStorePaths(store, out, drv.inputSrcs);
out << drv.platform << drv.builder << drv.args;
out << drv.env.size();

@ -55,9 +55,9 @@ void Store::exportPath(const StorePath & path, Sink & sink)
filesystem corruption from spreading to other machines.
Don't complain if the stored hash is zero (unknown). */
Hash hash = hashAndWriteSink.currentHash();
if (hash != info->narHash && info->narHash != Hash(*info->narHash.type))
if (hash != info->narHash && info->narHash != Hash(info->narHash->type))
throw Error("hash of path '%s' has changed from '%s' to '%s'!",
printStorePath(path), info->narHash.to_string(Base32, true), hash.to_string(Base32, true));
printStorePath(path), info->narHash->to_string(Base32, true), hash.to_string(Base32, true));
hashAndWriteSink
<< exportMagic

@ -113,7 +113,7 @@ struct LegacySSHStore : public Store
if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 4) {
auto s = readString(conn->from);
info->narHash = s.empty() ? Hash() : Hash(s);
info->narHash = s.empty() ? std::optional<Hash>{} : Hash{s};
info->ca = parseContentAddressOpt(readString(conn->from));
info->sigs = readStrings<StringSet>(conn->from);
}
@ -139,7 +139,7 @@ struct LegacySSHStore : public Store
<< cmdAddToStoreNar
<< printStorePath(info.path)
<< (info.deriver ? printStorePath(*info.deriver) : "")
<< info.narHash.to_string(Base16, false);
<< info.narHash->to_string(Base16, false);
writeStorePaths(*this, conn->to, info.references);
conn->to
<< info.registrationTime

@ -586,7 +586,7 @@ uint64_t LocalStore::addValidPath(State & state,
state.stmtRegisterValidPath.use()
(printStorePath(info.path))
(info.narHash.to_string(Base16, true))
(info.narHash->to_string(Base16, true))
(info.registrationTime == 0 ? time(0) : info.registrationTime)
(info.deriver ? printStorePath(*info.deriver) : "", (bool) info.deriver)
(info.narSize, info.narSize != 0)
@ -686,7 +686,7 @@ void LocalStore::updatePathInfo(State & state, const ValidPathInfo & info)
{
state.stmtUpdatePathInfo.use()
(info.narSize, info.narSize != 0)
(info.narHash.to_string(Base16, true))
(info.narHash->to_string(Base16, true))
(info.ultimate ? 1 : 0, info.ultimate)
(concatStringsSep(" ", info.sigs), !info.sigs.empty())
(renderContentAddress(info.ca), (bool) info.ca)
@ -897,7 +897,7 @@ void LocalStore::registerValidPaths(const ValidPathInfos & infos)
StorePathSet paths;
for (auto & i : infos) {
assert(i.narHash.type == htSHA256);
assert(i.narHash && i.narHash->type == htSHA256);
if (isValidPath_(*state, i.path))
updatePathInfo(*state, i);
else
@ -1010,7 +1010,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
if (hashResult.first != info.narHash)
throw Error("hash mismatch importing path '%s';\n wanted: %s\n got: %s",
printStorePath(info.path), info.narHash.to_string(Base32, true), hashResult.first.to_string(Base32, true));
printStorePath(info.path), info.narHash->to_string(Base32, true), hashResult.first.to_string(Base32, true));
if (hashResult.second != info.narSize)
throw Error("size mismatch importing path '%s';\n wanted: %s\n got: %s",
@ -1067,12 +1067,12 @@ StorePath LocalStore::addToStoreFromDump(const string & dump, const string & nam
the path in the database. We may just have computed it
above (if called with recursive == true and hashAlgo ==
sha256); otherwise, compute it here. */
HashResult hash;
if (method == FileIngestionMethod::Recursive) {
hash.first = hashAlgo == htSHA256 ? h : hashString(htSHA256, dump);
hash.second = dump.size();
} else
hash = hashPath(htSHA256, realPath);
HashResult hash = method == FileIngestionMethod::Recursive
? HashResult {
hashAlgo == htSHA256 ? h : hashString(htSHA256, dump),
dump.size(),
}
: hashPath(htSHA256, realPath);
optimisePath(realPath); // FIXME: combine with hashPath()
@ -1255,9 +1255,9 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
std::unique_ptr<AbstractHashSink> hashSink;
if (!info->ca || !info->references.count(info->path))
hashSink = std::make_unique<HashSink>(*info->narHash.type);
hashSink = std::make_unique<HashSink>(info->narHash->type);
else
hashSink = std::make_unique<HashModuloSink>(*info->narHash.type, std::string(info->path.hashPart()));
hashSink = std::make_unique<HashModuloSink>(info->narHash->type, std::string(info->path.hashPart()));
dumpPath(Store::toRealPath(i), *hashSink);
auto current = hashSink->finish();
@ -1266,7 +1266,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
logError({
.name = "Invalid hash - path modified",
.hint = hintfmt("path '%s' was modified! expected hash '%s', got '%s'",
printStorePath(i), info->narHash.to_string(Base32, true), current.first.to_string(Base32, true))
printStorePath(i), info->narHash->to_string(Base32, true), current.first.to_string(Base32, true))
});
if (repair) repairPath(i); else errors = true;
} else {

@ -230,9 +230,9 @@ public:
(std::string(info->path.name()))
(narInfo ? narInfo->url : "", narInfo != 0)
(narInfo ? narInfo->compression : "", narInfo != 0)
(narInfo && narInfo->fileHash ? narInfo->fileHash.to_string(Base32, true) : "", narInfo && narInfo->fileHash)
(narInfo && narInfo->fileHash ? narInfo->fileHash->to_string(Base32, true) : "", narInfo && narInfo->fileHash)
(narInfo ? narInfo->fileSize : 0, narInfo != 0 && narInfo->fileSize)
(info->narHash.to_string(Base32, true))
(info->narHash->to_string(Base32, true))
(info->narSize)
(concatStringsSep(" ", info->shortRefs()))
(info->deriver ? std::string(info->deriver->to_string()) : "", (bool) info->deriver)

@ -7,15 +7,14 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &
: ValidPathInfo(StorePath(StorePath::dummy)) // FIXME: hack
{
auto corrupt = [&]() {
throw Error("NAR info file '%1%' is corrupt", whence);
return Error("NAR info file '%1%' is corrupt", whence);
};
auto parseHashField = [&](const string & s) {
try {
return Hash(s);
} catch (BadHash &) {
corrupt();
return Hash(); // never reached
throw corrupt();
}
};
@ -25,12 +24,12 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &
while (pos < s.size()) {
size_t colon = s.find(':', pos);
if (colon == std::string::npos) corrupt();
if (colon == std::string::npos) throw corrupt();
std::string name(s, pos, colon - pos);
size_t eol = s.find('\n', colon + 2);
if (eol == std::string::npos) corrupt();
if (eol == std::string::npos) throw corrupt();
std::string value(s, colon + 2, eol - colon - 2);
@ -45,16 +44,16 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &
else if (name == "FileHash")
fileHash = parseHashField(value);
else if (name == "FileSize") {
if (!string2Int(value, fileSize)) corrupt();
if (!string2Int(value, fileSize)) throw corrupt();
}
else if (name == "NarHash")
narHash = parseHashField(value);
else if (name == "NarSize") {
if (!string2Int(value, narSize)) corrupt();
if (!string2Int(value, narSize)) throw corrupt();
}
else if (name == "References") {
auto refs = tokenizeString<Strings>(value, " ");
if (!references.empty()) corrupt();
if (!references.empty()) throw corrupt();
for (auto & r : refs)
references.insert(StorePath(r));
}
@ -67,7 +66,7 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &
else if (name == "Sig")
sigs.insert(value);
else if (name == "CA") {
if (ca) corrupt();
if (!value.empty()) throw corrupt();
// FIXME: allow blank ca or require skipping field?
ca = parseContentAddressOpt(value);
}
@ -77,7 +76,7 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &
if (compression == "") compression = "bzip2";
if (!havePath || url.empty() || narSize == 0 || !narHash) corrupt();
if (!havePath || url.empty() || narSize == 0 || !narHash) throw corrupt();
}
std::string NarInfo::to_string(const Store & store) const
@ -87,11 +86,11 @@ std::string NarInfo::to_string(const Store & store) const
res += "URL: " + url + "\n";
assert(compression != "");
res += "Compression: " + compression + "\n";
assert(fileHash.type == htSHA256);
res += "FileHash: " + fileHash.to_string(Base32, true) + "\n";
assert(fileHash && fileHash->type == htSHA256);
res += "FileHash: " + fileHash->to_string(Base32, true) + "\n";
res += "FileSize: " + std::to_string(fileSize) + "\n";
assert(narHash.type == htSHA256);
res += "NarHash: " + narHash.to_string(Base32, true) + "\n";
assert(narHash && narHash->type == htSHA256);
res += "NarHash: " + narHash->to_string(Base32, true) + "\n";
res += "NarSize: " + std::to_string(narSize) + "\n";
res += "References: " + concatStringsSep(" ", shortRefs()) + "\n";

@ -10,7 +10,7 @@ struct NarInfo : ValidPathInfo
{
std::string url;
std::string compression;
Hash fileHash;
std::optional<Hash> fileHash;
uint64_t fileSize = 0;
std::string system;

@ -79,8 +79,8 @@ void RefScanSink::operator () (const unsigned char * data, size_t len)
}
PathSet scanForReferences(const string & path,
const PathSet & refs, HashResult & hash)
std::pair<PathSet, HashResult> scanForReferences(const string & path,
const PathSet & refs)
{
RefScanSink sink;
std::map<string, Path> backMap;
@ -112,9 +112,9 @@ PathSet scanForReferences(const string & path,
found.insert(j->second);
}
hash = sink.hashSink.finish();
auto hash = sink.hashSink.finish();
return found;
return std::pair<PathSet, HashResult>(found, hash);
}
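
scanForReferences now returns the discovered references together with the NAR hash instead of filling a HashResult out-parameter; the build.cc hunk earlier in this commit unpacks the resulting pair field by field. A small sketch of the same call written with structured bindings, using stand-in type aliases rather than the real Nix headers:

    #include <set>
    #include <string>
    #include <utility>

    using PathSet = std::set<std::string>;
    using HashResult = std::pair<std::string, unsigned long long>; // (hash, size) stand-in

    // Stand-in for the new signature declared in references.hh.
    std::pair<PathSet, HashResult> scanForReferences(const std::string & path, const PathSet & refs)
    {
        // Stub body; the real function scans the NAR at 'path' for the hash parts in 'refs'.
        return { PathSet{}, HashResult{ "sha256:...", 0 } };
    }

    void example(const std::string & actualPath, const PathSet & candidates)
    {
        auto [references, narHash] = scanForReferences(actualPath, candidates);
        // Replaces the old out-parameter style:
        //   HashResult narHash; PathSet found = scanForReferences(actualPath, candidates, narHash);
        (void) references; (void) narHash;
    }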

@ -5,8 +5,7 @@
namespace nix {
PathSet scanForReferences(const Path & path, const PathSet & refs,
HashResult & hash);
std::pair<PathSet, HashResult> scanForReferences(const Path & path, const PathSet & refs);
struct RewritingSink : Sink
{

@ -462,7 +462,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source,
conn->to << wopAddToStoreNar
<< printStorePath(info.path)
<< (info.deriver ? printStorePath(*info.deriver) : "")
<< info.narHash.to_string(Base16, false);
<< info.narHash->to_string(Base16, false);
writeStorePaths(*this, conn->to, info.references);
conn->to << info.registrationTime << info.narSize
<< info.ultimate << info.sigs << renderContentAddress(info.ca)

@ -430,7 +430,7 @@ string Store::makeValidityRegistration(const StorePathSet & paths,
auto info = queryPathInfo(i);
if (showHash) {
s += info->narHash.to_string(Base16, false) + "\n";
s += info->narHash->to_string(Base16, false) + "\n";
s += (format("%1%\n") % info->narSize).str();
}
@ -462,7 +462,7 @@ void Store::pathInfoToJSON(JSONPlaceholder & jsonOut, const StorePathSet & store
auto info = queryPathInfo(storePath);
jsonPath
.attr("narHash", info->narHash.to_string(hashBase, true))
.attr("narHash", info->narHash->to_string(hashBase, true))
.attr("narSize", info->narSize);
{
@ -505,7 +505,7 @@ void Store::pathInfoToJSON(JSONPlaceholder & jsonOut, const StorePathSet & store
if (!narInfo->url.empty())
jsonPath.attr("url", narInfo->url);
if (narInfo->fileHash)
jsonPath.attr("downloadHash", narInfo->fileHash.to_string(Base32, true));
jsonPath.attr("downloadHash", narInfo->fileHash->to_string(Base32, true));
if (narInfo->fileSize)
jsonPath.attr("downloadSize", narInfo->fileSize);
if (showClosureSize)
@ -746,7 +746,7 @@ std::string ValidPathInfo::fingerprint(const Store & store) const
store.printStorePath(path));
return
"1;" + store.printStorePath(path) + ";"
+ narHash.to_string(Base32, true) + ";"
+ narHash->to_string(Base32, true) + ";"
+ std::to_string(narSize) + ";"
+ concatStringsSep(",", store.printStorePathSet(references));
}

@ -115,7 +115,8 @@ struct ValidPathInfo
{
StorePath path;
std::optional<StorePath> deriver;
Hash narHash;
// TODO document this
std::optional<Hash> narHash;
StorePathSet references;
time_t registrationTime = 0;
uint64_t narSize = 0; // 0 = unknown
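
With narHash now an std::optional<Hash>, every consumer touched by this commit either dereferences it (info->narHash->to_string(...)) or checks it first, as LocalStore::registerValidPaths does with its assert. A minimal, self-contained illustration of that access pattern using a stub Hash type rather than the real ValidPathInfo:

    #include <cassert>
    #include <optional>
    #include <string>

    struct Hash { std::string to_string() const { return "sha256:..."; } }; // stub only

    struct PathInfoStub {
        std::optional<Hash> narHash; // may be unknown, e.g. an empty legacy-ssh reply
    };

    std::string printNarHash(const PathInfoStub & info)
    {
        assert(info.narHash);              // guard before use
        return info.narHash->to_string();  // '->' replaces the old direct member access
    }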

@ -1,6 +1,7 @@
#pragma once
#include <boost/format.hpp>
#include <boost/algorithm/string/replace.hpp>
#include <string>
#include "ansicolor.hh"
@ -103,7 +104,9 @@ class hintformat
public:
hintformat(const string &format) :fmt(format)
{
fmt.exceptions(boost::io::all_error_bits ^ boost::io::too_many_args_bit);
fmt.exceptions(boost::io::all_error_bits ^
boost::io::too_many_args_bit ^
boost::io::too_few_args_bit);
}
hintformat(const hintformat &hf)
@ -117,6 +120,13 @@ public:
return *this;
}
template<class T>
hintformat& operator%(const normaltxt<T> &value)
{
fmt % value.value;
return *this;
}
std::string str() const
{
return fmt.str();
@ -136,4 +146,9 @@ inline hintformat hintfmt(const std::string & fs, const Args & ... args)
return f;
}
inline hintformat hintfmt(std::string plain_string)
{
// We won't receive any args in this case, so just print the original string.
return hintfmt("%s", normaltxt(plain_string));
}
}

@ -16,16 +16,19 @@
namespace nix {
static size_t regularHashSize(HashType type) {
switch (type) {
case htMD5: return md5HashSize;
case htSHA1: return sha1HashSize;
case htSHA256: return sha256HashSize;
case htSHA512: return sha512HashSize;
}
abort();
}
void Hash::init()
{
if (!type) abort();
switch (*type) {
case htMD5: hashSize = md5HashSize; break;
case htSHA1: hashSize = sha1HashSize; break;
case htSHA256: hashSize = sha256HashSize; break;
case htSHA512: hashSize = sha512HashSize; break;
}
hashSize = regularHashSize(type);
assert(hashSize <= maxHashSize);
memset(hash, 0, maxHashSize);
}
@ -101,22 +104,16 @@ static string printHash32(const Hash & hash)
string printHash16or32(const Hash & hash)
{
assert(hash.type);
return hash.to_string(hash.type == htMD5 ? Base16 : Base32, false);
}
HashType assertInitHashType(const Hash & h) {
if (h.type)
return *h.type;
else
abort();
}
std::string Hash::to_string(Base base, bool includeType) const
{
std::string s;
if (base == SRI || includeType) {
s += printHashType(assertInitHashType(*this));
s += printHashType(type);
s += base == SRI ? '-' : ':';
}
switch (base) {
@ -137,60 +134,66 @@ std::string Hash::to_string(Base base, bool includeType) const
Hash::Hash(std::string_view s, HashType type) : Hash(s, std::optional { type }) { }
Hash::Hash(std::string_view s) : Hash(s, std::optional<HashType>{}) { }
Hash::Hash(std::string_view s, std::optional<HashType> type)
: type(type)
Hash::Hash(std::string_view original, std::optional<HashType> optType)
{
auto rest = original;
size_t pos = 0;
bool isSRI = false;
auto sep = s.find(':');
if (sep == string::npos) {
sep = s.find('-');
if (sep != string::npos) {
isSRI = true;
} else if (! type)
throw BadHash("hash '%s' does not include a type", s);
// Parse the hash type before the separator, if there was one.
std::optional<HashType> optParsedType;
{
auto sep = rest.find(':');
if (sep == std::string_view::npos) {
sep = rest.find('-');
if (sep != std::string_view::npos)
isSRI = true;
}
if (sep != std::string_view::npos) {
auto hashRaw = rest.substr(0, sep);
optParsedType = parseHashType(hashRaw);
rest = rest.substr(sep + 1);
}
}
if (sep != string::npos) {
string hts = string(s, 0, sep);
this->type = parseHashType(hts);
if (!this->type)
throw BadHash("unknown hash type '%s'", hts);
if (type && type != this->type)
throw BadHash("hash '%s' should have type '%s'", s, printHashType(*type));
pos = sep + 1;
// Either the string or the caller must provide the type; if both do, they
// must agree.
if (!optParsedType && !optType) {
throw BadHash("hash '%s' does not include a type, nor is the type otherwise known from context.", rest);
} else {
this->type = optParsedType ? *optParsedType : *optType;
if (optParsedType && optType && *optParsedType != *optType)
throw BadHash("hash '%s' should have type '%s'", original, printHashType(*optType));
}
init();
size_t size = s.size() - pos;
if (!isSRI && size == base16Len()) {
if (!isSRI && rest.size() == base16Len()) {
auto parseHexDigit = [&](char c) {
if (c >= '0' && c <= '9') return c - '0';
if (c >= 'A' && c <= 'F') return c - 'A' + 10;
if (c >= 'a' && c <= 'f') return c - 'a' + 10;
throw BadHash("invalid base-16 hash '%s'", s);
throw BadHash("invalid base-16 hash '%s'", original);
};
for (unsigned int i = 0; i < hashSize; i++) {
hash[i] =
parseHexDigit(s[pos + i * 2]) << 4
| parseHexDigit(s[pos + i * 2 + 1]);
parseHexDigit(rest[pos + i * 2]) << 4
| parseHexDigit(rest[pos + i * 2 + 1]);
}
}
else if (!isSRI && size == base32Len()) {
else if (!isSRI && rest.size() == base32Len()) {
for (unsigned int n = 0; n < size; ++n) {
char c = s[pos + size - n - 1];
for (unsigned int n = 0; n < rest.size(); ++n) {
char c = rest[rest.size() - n - 1];
unsigned char digit;
for (digit = 0; digit < base32Chars.size(); ++digit) /* !!! slow */
if (base32Chars[digit] == c) break;
if (digit >= 32)
throw BadHash("invalid base-32 hash '%s'", s);
throw BadHash("invalid base-32 hash '%s'", original);
unsigned int b = n * 5;
unsigned int i = b / 8;
unsigned int j = b % 8;
@ -200,21 +203,21 @@ Hash::Hash(std::string_view s, std::optional<HashType> type)
hash[i + 1] |= digit >> (8 - j);
} else {
if (digit >> (8 - j))
throw BadHash("invalid base-32 hash '%s'", s);
throw BadHash("invalid base-32 hash '%s'", original);
}
}
}
else if (isSRI || size == base64Len()) {
auto d = base64Decode(s.substr(pos));
else if (isSRI || rest.size() == base64Len()) {
auto d = base64Decode(rest);
if (d.size() != hashSize)
throw BadHash("invalid %s hash '%s'", isSRI ? "SRI" : "base-64", s);
throw BadHash("invalid %s hash '%s'", isSRI ? "SRI" : "base-64", original);
assert(hashSize);
memcpy(hash, d.data(), hashSize);
}
else
throw BadHash("hash '%s' has wrong length for hash type '%s'", s, printHashType(*type));
throw BadHash("hash '%s' has wrong length for hash type '%s'", rest, printHashType(this->type));
}
Hash newHashAllowEmpty(std::string hashStr, std::optional<HashType> ht)
@ -267,7 +270,7 @@ static void finish(HashType ht, Ctx & ctx, unsigned char * hash)
}
Hash hashString(HashType ht, const string & s)
Hash hashString(HashType ht, std::string_view s)
{
Ctx ctx;
Hash hash(ht);
@ -334,7 +337,7 @@ HashResult hashPath(
Hash compressHash(const Hash & hash, unsigned int newSize)
{
Hash h;
Hash h(hash.type);
h.hashSize = newSize;
for (unsigned int i = 0; i < hash.hashSize; ++i)
h.hash[i % newSize] ^= hash.hash[i];
@ -363,14 +366,15 @@ HashType parseHashType(std::string_view s)
string printHashType(HashType ht)
{
switch (ht) {
case htMD5: return "md5"; break;
case htSHA1: return "sha1"; break;
case htSHA256: return "sha256"; break;
case htSHA512: return "sha512"; break;
case htMD5: return "md5";
case htSHA1: return "sha1";
case htSHA256: return "sha256";
case htSHA512: return "sha512";
default:
// illegal hash type enum value internally, as opposed to external input
// which should be validated with nice error message.
abort();
}
// illegal hash type enum value internally, as opposed to external input
// which should be validated with nice error message.
abort();
}
}
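
The rewritten constructor above resolves the hash type from either a "type:" / SRI "type-" prefix or from the caller-supplied type, and rejects a disagreement between the two. A standalone sketch of just that type-resolution step, using plain std types instead of the real Hash class (parseHashTypeStub and resolveHashType are simplified stand-ins, not the actual API):

    #include <optional>
    #include <stdexcept>
    #include <string>
    #include <string_view>

    enum class HT { md5, sha1, sha256, sha512 };

    HT parseHashTypeStub(std::string_view s)
    {
        if (s == "md5") return HT::md5;
        if (s == "sha1") return HT::sha1;
        if (s == "sha256") return HT::sha256;
        if (s == "sha512") return HT::sha512;
        throw std::invalid_argument("unknown hash type '" + std::string(s) + "'");
    }

    // Mirrors the resolution logic in the constructor: a parsed prefix and an
    // explicitly requested type must agree, and at least one must be present.
    HT resolveHashType(std::string_view original, std::optional<HT> optType)
    {
        std::string_view rest = original;
        std::optional<HT> optParsedType;
        auto sep = rest.find(':');
        if (sep == std::string_view::npos) sep = rest.find('-'); // SRI form
        if (sep != std::string_view::npos) {
            optParsedType = parseHashTypeStub(rest.substr(0, sep));
            rest = rest.substr(sep + 1);
        }
        if (!optParsedType && !optType)
            throw std::invalid_argument("hash string does not include a type");
        if (optParsedType && optType && *optParsedType != *optType)
            throw std::invalid_argument("hash type mismatch");
        return optParsedType ? *optParsedType : *optType;
    }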

@ -10,7 +10,7 @@ namespace nix {
MakeError(BadHash, Error);
enum HashType : char { htMD5, htSHA1, htSHA256, htSHA512 };
enum HashType : char { htMD5 = 42, htSHA1, htSHA256, htSHA512 };
const int md5HashSize = 16;
@ -25,14 +25,11 @@ enum Base : int { Base64, Base32, Base16, SRI };
struct Hash
{
static const unsigned int maxHashSize = 64;
unsigned int hashSize = 0;
unsigned char hash[maxHashSize] = {};
constexpr static size_t maxHashSize = 64;
size_t hashSize = 0;
uint8_t hash[maxHashSize] = {};
std::optional<HashType> type = {};
/* Create an unset hash object. */
Hash() { };
HashType type;
/* Create a zero-filled hash object. */
Hash(HashType type) : type(type) { init(); };
@ -105,7 +102,7 @@ Hash newHashAllowEmpty(std::string hashStr, std::optional<HashType> ht);
string printHash16or32(const Hash & hash);
/* Compute the hash of the given string. */
Hash hashString(HashType ht, const string & s);
Hash hashString(HashType ht, std::string_view s);
/* Compute the hash of the given file. */
Hash hashFile(HashType ht, const Path & path);

@ -1,6 +1,7 @@
#include "logging.hh"
#include "nixexpr.hh"
#include "util.hh"
#include <fstream>
#include <gtest/gtest.h>
@ -42,7 +43,7 @@ namespace nix {
logger->logEI(ei);
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- TestError --- error-unit-test\x1B[0m\n\x1B[33;1m\x1B[0minitial error\x1B[0m; subsequent error message.\n");
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- TestError --- error-unit-test\x1B[0m\ninitial error; subsequent error message.\n");
}
}
@ -60,8 +61,7 @@ namespace nix {
logError(e.info());
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- SysError --- error-unit-test\x1B[0m\n\x1B[33;1m\x1B[0mstatting file\x1B[0m: \x1B[33;1mBad file descriptor\x1B[0m\n");
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- SysError --- error-unit-test\x1B[0m\nstatting file: \x1B[33;1mBad file descriptor\x1B[0m\n");
}
}
@ -69,9 +69,9 @@ namespace nix {
testing::internal::CaptureStderr();
logger->logEI({ .level = lvlInfo,
.name = "Info name",
.description = "Info description",
});
.name = "Info name",
.description = "Info description",
});
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[32;1minfo:\x1B[0m\x1B[34;1m --- Info name --- error-unit-test\x1B[0m\nInfo description\n");
@ -85,7 +85,7 @@ namespace nix {
logger->logEI({ .level = lvlTalkative,
.name = "Talkative name",
.description = "Talkative description",
});
});
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[32;1mtalk:\x1B[0m\x1B[34;1m --- Talkative name --- error-unit-test\x1B[0m\nTalkative description\n");
@ -99,7 +99,7 @@ namespace nix {
logger->logEI({ .level = lvlChatty,
.name = "Chatty name",
.description = "Talkative description",
});
});
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[32;1mchat:\x1B[0m\x1B[34;1m --- Chatty name --- error-unit-test\x1B[0m\nTalkative description\n");
@ -113,7 +113,7 @@ namespace nix {
logger->logEI({ .level = lvlDebug,
.name = "Debug name",
.description = "Debug description",
});
});
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[33;1mdebug:\x1B[0m\x1B[34;1m --- Debug name --- error-unit-test\x1B[0m\nDebug description\n");
@ -127,7 +127,7 @@ namespace nix {
logger->logEI({ .level = lvlVomit,
.name = "Vomit name",
.description = "Vomit description",
});
});
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[32;1mvomit:\x1B[0m\x1B[34;1m --- Vomit name --- error-unit-test\x1B[0m\nVomit description\n");
@ -144,7 +144,7 @@ namespace nix {
logError({
.name = "name",
.description = "error description",
});
});
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- name --- error-unit-test\x1B[0m\nerror description\n");
@ -160,13 +160,13 @@ namespace nix {
.name = "error name",
.description = "error with code lines",
.hint = hintfmt("this hint has %1% templated %2%!!",
"yellow",
"values"),
"yellow",
"values"),
.nixCode = NixCode {
.errPos = Pos(problem_file, 40, 13),
.prevLineOfCode = "previous line of code",
.errLineOfCode = "this is the problem line of code",
.nextLineOfCode = "next line of code",
.errPos = Pos(problem_file, 40, 13),
.prevLineOfCode = "previous line of code",
.errLineOfCode = "this is the problem line of code",
.nextLineOfCode = "next line of code",
}});
@ -183,10 +183,10 @@ namespace nix {
.name = "error name",
.description = "error without any code lines.",
.hint = hintfmt("this hint has %1% templated %2%!!",
"yellow",
"values"),
"yellow",
"values"),
.nixCode = NixCode {
.errPos = Pos(problem_file, 40, 13)
.errPos = Pos(problem_file, 40, 13)
}});
auto str = testing::internal::GetCapturedStderr();
@ -202,7 +202,7 @@ namespace nix {
.name = "error name",
.hint = hintfmt("hint %1%", "only"),
.nixCode = NixCode {
.errPos = Pos(problem_file, 40, 13)
.errPos = Pos(problem_file, 40, 13)
}});
auto str = testing::internal::GetCapturedStderr();
@ -218,10 +218,10 @@ namespace nix {
testing::internal::CaptureStderr();
logWarning({
.name = "name",
.description = "error description",
.hint = hintfmt("there was a %1%", "warning"),
});
.name = "name",
.description = "error description",
.hint = hintfmt("there was a %1%", "warning"),
});
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[33;1mwarning:\x1B[0m\x1B[34;1m --- name --- error-unit-test\x1B[0m\nerror description\n\nthere was a \x1B[33;1mwarning\x1B[0m\n");
@ -238,13 +238,13 @@ namespace nix {
.name = "warning name",
.description = "warning description",
.hint = hintfmt("this hint has %1% templated %2%!!",
"yellow",
"values"),
"yellow",
"values"),
.nixCode = NixCode {
.errPos = Pos(problem_file, 40, 13),
.prevLineOfCode = std::nullopt,
.errLineOfCode = "this is the problem line of code",
.nextLineOfCode = std::nullopt
.errPos = Pos(problem_file, 40, 13),
.prevLineOfCode = std::nullopt,
.errLineOfCode = "this is the problem line of code",
.nextLineOfCode = std::nullopt
}});
@ -252,4 +252,41 @@ namespace nix {
ASSERT_STREQ(str.c_str(), "\x1B[33;1mwarning:\x1B[0m\x1B[34;1m --- warning name --- error-unit-test\x1B[0m\nin file: \x1B[34;1mmyfile.nix (40:13)\x1B[0m\n\nwarning description\n\n 40| this is the problem line of code\n | \x1B[31;1m^\x1B[0m\n\nthis hint has \x1B[33;1myellow\x1B[0m templated \x1B[33;1mvalues\x1B[0m!!\n");
}
/* ----------------------------------------------------------------------------
* hintfmt
* --------------------------------------------------------------------------*/
TEST(hintfmt, percentStringWithoutArgs) {
const char *teststr = "this is 100%s correct!";
ASSERT_STREQ(
hintfmt(teststr).str().c_str(),
teststr);
}
TEST(hintfmt, fmtToHintfmt) {
ASSERT_STREQ(
hintfmt(fmt("the color of this this text is %1%", "not yellow")).str().c_str(),
"the color of this this text is not yellow");
}
TEST(hintfmt, tooFewArguments) {
ASSERT_STREQ(
hintfmt("only one arg %1% %2%", "fulfilled").str().c_str(),
"only one arg " ANSI_YELLOW "fulfilled" ANSI_NORMAL " ");
}
TEST(hintfmt, tooManyArguments) {
ASSERT_STREQ(
hintfmt("what about this %1% %2%", "%3%", "one", "two").str().c_str(),
"what about this " ANSI_YELLOW "%3%" ANSI_NORMAL " " ANSI_YELLOW "one" ANSI_NORMAL);
}
}

@ -153,7 +153,7 @@ static int _main(int argc, char * * argv)
/* If an expected hash is given, the file may already exist in
the store. */
Hash hash, expectedHash(ht);
Hash hash(ht), expectedHash(ht);
std::optional<StorePath> storePath;
if (args.size() == 2) {
expectedHash = Hash(args[1], ht);

@ -372,8 +372,8 @@ static void opQuery(Strings opFlags, Strings opArgs)
for (auto & j : maybeUseOutputs(store->followLinksToStorePath(i), useOutput, forceRealise)) {
auto info = store->queryPathInfo(j);
if (query == qHash) {
assert(info->narHash.type == htSHA256);
cout << fmt("%s\n", info->narHash.to_string(Base32, true));
assert(info->narHash && info->narHash->type == htSHA256);
cout << fmt("%s\n", info->narHash->to_string(Base32, true));
} else if (query == qSize)
cout << fmt("%d\n", info->narSize);
}
@ -725,7 +725,7 @@ static void opVerifyPath(Strings opFlags, Strings opArgs)
auto path = store->followLinksToStorePath(i);
printMsg(lvlTalkative, "checking path '%s'...", store->printStorePath(path));
auto info = store->queryPathInfo(path);
HashSink sink(*info->narHash.type);
HashSink sink(info->narHash->type);
store->narFromPath(path, sink);
auto current = sink.finish();
if (current.first != info->narHash) {
@ -734,7 +734,7 @@ static void opVerifyPath(Strings opFlags, Strings opArgs)
.hint = hintfmt(
"path '%s' was modified! expected hash '%s', got '%s'",
store->printStorePath(path),
info->narHash.to_string(Base32, true),
info->narHash->to_string(Base32, true),
current.first.to_string(Base32, true))
});
status = 1;
@ -864,7 +864,9 @@ static void opServe(Strings opFlags, Strings opArgs)
out << info->narSize // downloadSize
<< info->narSize;
if (GET_PROTOCOL_MINOR(clientVersion) >= 4)
out << (info->narHash ? info->narHash.to_string(Base32, true) : "") << renderContentAddress(info->ca) << info->sigs;
out << (info->narHash ? info->narHash->to_string(Base32, true) : "")
<< renderContentAddress(info->ca)
<< info->sigs;
} catch (InvalidPath &) {
}
}

@ -50,7 +50,7 @@ struct CmdAddToStore : MixDryRun, StoreCommand
info.narSize = sink.s->size();
info.ca = std::optional { FixedOutputHash {
.method = FileIngestionMethod::Recursive,
.hash = info.narHash,
.hash = *info.narHash,
} };
if (!dryRun) {

@ -139,7 +139,7 @@ StorePath getDerivationEnvironment(ref<Store> store, const StorePath & drvPath)
.path = shellOutPath,
.hash = FixedOutputHash {
.method = FileIngestionMethod::Flat,
.hash = Hash { },
.hash = Hash { htSHA256 },
},
});
drv.env["out"] = store->printStorePath(shellOutPath);

@ -84,7 +84,7 @@ struct CmdMakeContentAddressable : StorePathsCommand, MixJSON
info.narSize = sink.s->size();
info.ca = FixedOutputHash {
.method = FileIngestionMethod::Recursive,
.hash = info.narHash,
.hash = *info.narHash,
};
if (!json)

@ -88,15 +88,15 @@ struct CmdVerify : StorePathsCommand
std::unique_ptr<AbstractHashSink> hashSink;
if (!info->ca)
hashSink = std::make_unique<HashSink>(*info->narHash.type);
hashSink = std::make_unique<HashSink>(info->narHash->type);
else
hashSink = std::make_unique<HashModuloSink>(*info->narHash.type, std::string(info->path.hashPart()));
hashSink = std::make_unique<HashModuloSink>(info->narHash->type, std::string(info->path.hashPart()));
store->narFromPath(info->path, *hashSink);
auto hash = hashSink->finish();
if (hash.first != info->narHash) {
if (hash.first != *info->narHash) {
corrupted++;
act2.result(resCorruptedPath, store->printStorePath(info->path));
logError({
@ -104,7 +104,7 @@ struct CmdVerify : StorePathsCommand
.hint = hintfmt(
"path '%s' was modified! expected hash '%s', got '%s'",
store->printStorePath(info->path),
info->narHash.to_string(Base32, true),
info->narHash->to_string(Base32, true),
hash.first.to_string(Base32, true))
});
}

@ -1,23 +1,39 @@
{ busybox }:
with import ./config.nix;
let
mkDerivation = args:
derivation ({
inherit system;
builder = busybox;
args = ["sh" "-e" args.builder or (builtins.toFile "builder-${args.name}.sh" "if [ -e .attrs.sh ]; then source .attrs.sh; fi; eval \"$buildCommand\"")];
} // removeAttrs args ["builder" "meta"])
// { meta = args.meta or {}; };
input1 = mkDerivation {
name = "build-hook-input-1";
buildCommand = "mkdir $out; echo FOO > $out/foo";
shell = busybox;
name = "build-remote-input-1";
buildCommand = "echo FOO > $out";
requiredSystemFeatures = ["foo"];
};
input2 = mkDerivation {
name = "build-hook-input-2";
buildCommand = "mkdir $out; echo BAR > $out/bar";
shell = busybox;
name = "build-remote-input-2";
buildCommand = "echo BAR > $out";
};
in
mkDerivation {
name = "build-hook";
builder = ./dependencies.builder0.sh;
input1 = " " + input1 + "/.";
input2 = " ${input2}/.";
shell = busybox;
name = "build-remote";
buildCommand =
''
read x < ${input1}
read y < ${input2}
echo $x$y > $out
'';
}

@ -3,22 +3,29 @@ source common.sh
clearStore
if ! canUseSandbox; then exit; fi
if [[ ! $SHELL =~ /nix/store ]]; then exit; fi
if ! [[ $busybox =~ busybox ]]; then exit; fi
chmod -R u+w $TEST_ROOT/store0 || true
chmod -R u+w $TEST_ROOT/store1 || true
rm -rf $TEST_ROOT/store0 $TEST_ROOT/store1
chmod -R u+w $TEST_ROOT/machine0 || true
chmod -R u+w $TEST_ROOT/machine1 || true
chmod -R u+w $TEST_ROOT/machine2 || true
rm -rf $TEST_ROOT/machine0 $TEST_ROOT/machine1 $TEST_ROOT/machine2
rm -f $TEST_ROOT/result
nix build -f build-hook.nix -o $TEST_ROOT/result --max-jobs 0 \
--sandbox-paths /nix/store --sandbox-build-dir /build-tmp \
--builders "$TEST_ROOT/store0; $TEST_ROOT/store1 - - 1 1 foo" \
unset NIX_STORE_DIR
unset NIX_STATE_DIR
# Note: ssh://localhost bypasses ssh, directly invoking nix-store as a
# child process. This allows us to test LegacySSHStore::buildDerivation().
nix build -L -v -f build-hook.nix -o $TEST_ROOT/result --max-jobs 0 \
--arg busybox $busybox \
--store $TEST_ROOT/machine0 \
--builders "ssh://localhost?remote-store=$TEST_ROOT/machine1; $TEST_ROOT/machine2 - - 1 1 foo" \
--system-features foo
outPath=$TEST_ROOT/result
outPath=$(readlink -f $TEST_ROOT/result)
cat $outPath/foobar | grep FOOBAR
cat $TEST_ROOT/machine0/$outPath | grep FOOBAR
# Ensure that input1 was built on store1 due to the required feature.
p=$(readlink -f $outPath/input-2)
(! nix path-info --store $TEST_ROOT/store0 --all | grep builder-build-hook-input-1.sh)
nix path-info --store $TEST_ROOT/store1 --all | grep builder-build-hook-input-1.sh
# Ensure that input1 was built on store2 due to the required feature.
(! nix path-info --store $TEST_ROOT/machine1 --all | grep builder-build-remote-input-1.sh)
nix path-info --store $TEST_ROOT/machine2 --all | grep builder-build-remote-input-1.sh

@ -35,6 +35,7 @@ export xmllint="@xmllint@"
export SHELL="@bash@"
export PAGER=cat
export HAVE_SODIUM="@HAVE_SODIUM@"
export busybox="@sandbox_shell@"
export version=@PACKAGE_VERSION@
export system=@system@

@ -2,6 +2,8 @@ source common.sh
clearStore
rm -f $TEST_ROOT/result
export REMOTE_STORE=$TEST_ROOT/remote_store
# Build the dependencies and push them to the remote store

@ -5,6 +5,8 @@ if [[ $(uname) != Linux ]]; then exit; fi
clearStore
rm -f $TEST_ROOT/result
export unreachable=$(nix add-to-store ./recursive.sh)
nix --experimental-features 'nix-command recursive-nix' build -o $TEST_ROOT/result -L '(

@ -2,6 +2,8 @@ source common.sh
clearStore
rm -f $TEST_ROOT/result
nix-build structured-attrs.nix -A all -o $TEST_ROOT/result
[[ $(cat $TEST_ROOT/result/foo) = bar ]]