Merge branch 'misc-ca' of github.com:obsidiansystems/nix into new-interface-for-path-pathOpt
commit c318d398f3
@@ -80,7 +80,7 @@ SV * queryReferences(char * path)
 SV * queryPathHash(char * path)
     PPCODE:
         try {
-            auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash.to_string(Base32, true);
+            auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash->to_string(Base32, true);
             XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
         } catch (Error & e) {
             croak("%s", e.what());

@@ -106,7 +106,7 @@ SV * queryPathInfo(char * path, int base32)
                 XPUSHs(&PL_sv_undef);
             else
                 XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(*info->deriver).c_str(), 0)));
-            auto s = info->narHash.to_string(base32 ? Base32 : Base16, true);
+            auto s = info->narHash->to_string(base32 ? Base32 : Base16, true);
             XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
             mXPUSHi(info->registrationTime);
             mXPUSHi(info->narSize);

@@ -285,11 +285,10 @@ static std::shared_ptr<AttrDb> makeAttrDb(const Hash & fingerprint)
 }
 
 EvalCache::EvalCache(
-    bool useCache,
-    const Hash & fingerprint,
+    std::optional<std::reference_wrapper<const Hash>> useCache,
     EvalState & state,
     RootLoader rootLoader)
-    : db(useCache ? makeAttrDb(fingerprint) : nullptr)
+    : db(useCache ? makeAttrDb(*useCache) : nullptr)
     , state(state)
     , rootLoader(rootLoader)
 {

@@ -4,6 +4,7 @@
 #include "hash.hh"
 #include "eval.hh"
 
+#include <functional>
 #include <variant>
 
 namespace nix::eval_cache {

@@ -26,8 +27,7 @@ class EvalCache : public std::enable_shared_from_this<EvalCache>
 public:
 
     EvalCache(
-        bool useCache,
-        const Hash & fingerprint,
+        std::optional<std::reference_wrapper<const Hash>> useCache,
         EvalState & state,
         RootLoader rootLoader);
 
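Note (not part of the diff): the EvalCache constructor now selects the on-disk attribute cache from a single optional fingerprint argument instead of a separate bool plus Hash. A minimal sketch of a call site, assuming a `useCache` flag, a `fingerprint`, `state` and `rootLoader` already in scope and the headers shown above:

    #include <functional>   // std::cref
    #include <optional>

    auto cache = std::make_shared<nix::eval_cache::EvalCache>(
        useCache
            ? std::optional { std::cref(fingerprint) }   // enables makeAttrDb(fingerprint)
            : std::nullopt,                              // no on-disk cache
        state,
        rootLoader);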
@@ -1126,7 +1126,7 @@ static void prim_toFile(EvalState & state, const Pos & pos, Value * * args, Value & v)
 
 
 static void addPath(EvalState & state, const Pos & pos, const string & name, const Path & path_,
-    Value * filterFun, FileIngestionMethod method, const Hash & expectedHash, Value & v)
+    Value * filterFun, FileIngestionMethod method, const std::optional<Hash> expectedHash, Value & v)
 {
     const auto path = evalSettings.pureEval && expectedHash ?
         path_ :

@@ -1157,7 +1157,7 @@ static void addPath(EvalState & state, const Pos & pos, const string & name, const Path & path_,
 
     std::optional<StorePath> expectedStorePath;
     if (expectedHash)
-        expectedStorePath = state.store->makeFixedOutputPath(method, expectedHash, name);
+        expectedStorePath = state.store->makeFixedOutputPath(method, *expectedHash, name);
     Path dstPath;
     if (!expectedHash || !state.store->isValidPath(*expectedStorePath)) {
         dstPath = state.store->printStorePath(settings.readOnlyMode

@@ -1191,7 +1191,7 @@ static void prim_filterSource(EvalState & state, const Pos & pos, Value * * args
             .errPos = pos
         });
 
-    addPath(state, pos, std::string(baseNameOf(path)), path, args[0], FileIngestionMethod::Recursive, Hash(), v);
+    addPath(state, pos, std::string(baseNameOf(path)), path, args[0], FileIngestionMethod::Recursive, std::nullopt, v);
 }
 
 static void prim_path(EvalState & state, const Pos & pos, Value * * args, Value & v)

@@ -1201,7 +1201,7 @@ static void prim_path(EvalState & state, const Pos & pos, Value * * args, Value & v)
     string name;
     Value * filterFun = nullptr;
     auto method = FileIngestionMethod::Recursive;
-    Hash expectedHash;
+    Hash expectedHash(htSHA256);
 
     for (auto & attr : *args[0]->attrs) {
         const string & n(attr.name);
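Note (not part of the diff): addPath now takes std::optional<Hash> for the expected hash, so "no expected hash" is spelled std::nullopt rather than a default-constructed Hash. A hedged sketch of the two kinds of call sites, where `expectedHashStr` is a hypothetical user-supplied string and the other variables are assumed to be in scope:

    // no fixed-output hash expected (as in prim_filterSource above)
    addPath(state, pos, name, path, filterFun, FileIngestionMethod::Recursive, std::nullopt, v);

    // expected hash supplied, e.g. parsed from an attribute
    std::optional<Hash> expectedHash = Hash(expectedHashStr, htSHA256);
    addPath(state, pos, name, path, filterFun, FileIngestionMethod::Recursive, expectedHash, v);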
@@ -159,7 +159,7 @@ static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
             : hashFile(htSHA256, path);
         if (hash != *expectedHash)
             throw Error((unsigned int) 102, "hash mismatch in file downloaded from '%s':\n wanted: %s\n got: %s",
-                *url, expectedHash->to_string(Base32, true), hash.to_string(Base32, true));
+                *url, expectedHash->to_string(Base32, true), hash->to_string(Base32, true));
     }
 
     if (state.allowedPaths)

@@ -130,12 +130,12 @@ std::pair<Tree, Input> Input::fetch(ref<Store> store) const
     tree.actualPath = store->toRealPath(tree.storePath);
 
     auto narHash = store->queryPathInfo(tree.storePath)->narHash;
-    input.attrs.insert_or_assign("narHash", narHash.to_string(SRI, true));
+    input.attrs.insert_or_assign("narHash", narHash->to_string(SRI, true));
 
     if (auto prevNarHash = getNarHash()) {
         if (narHash != *prevNarHash)
             throw Error("NAR hash mismatch in input '%s' (%s), expected '%s', got '%s'",
-                to_string(), tree.actualPath, prevNarHash->to_string(SRI, true), narHash.to_string(SRI, true));
+                to_string(), tree.actualPath, prevNarHash->to_string(SRI, true), narHash->to_string(SRI, true));
     }
 
     if (auto prevLastModified = getLastModified()) {

@@ -178,7 +178,7 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource
     auto [fileHash, fileSize] = fileHashSink.finish();
     narInfo->fileHash = fileHash;
     narInfo->fileSize = fileSize;
-    narInfo->url = "nar/" + narInfo->fileHash.to_string(Base32, false) + ".nar"
+    narInfo->url = "nar/" + narInfo->fileHash->to_string(Base32, false) + ".nar"
         + (compression == "xz" ? ".xz" :
            compression == "bzip2" ? ".bz2" :
            compression == "br" ? ".br" :

@@ -372,7 +372,7 @@ StorePath BinaryCacheStore::addToStore(const string & name, const Path & srcPath
        method for very large paths, but `copyPath' is mainly used for
        small files. */
     StringSink sink;
-    Hash h;
+    std::optional<Hash> h;
     if (method == FileIngestionMethod::Recursive) {
         dumpPath(srcPath, sink, filter);
         h = hashString(hashAlgo, *sink.s);

@@ -382,7 +382,7 @@ StorePath BinaryCacheStore::addToStore(const string & name, const Path & srcPath
         h = hashString(hashAlgo, s);
     }
 
-    ValidPathInfo info(makeFixedOutputPath(method, h, name));
+    ValidPathInfo info(makeFixedOutputPath(method, *h, name));
 
     auto source = StringSource { *sink.s };
     addToStore(info, source, repair, CheckSigs);
@@ -3730,7 +3730,7 @@ void DerivationGoal::registerOutputs()
                 [&](DerivationOutputFixed dof) {
                     outputHash = DerivationOutputFloating {
                         .method = dof.hash.method,
-                        .hashType = *dof.hash.hash.type,
+                        .hashType = dof.hash.hash.type,
                     };
                 },
                 [&](DerivationOutputFloating dof) {

@@ -3811,8 +3811,10 @@ void DerivationGoal::registerOutputs()
                time. The hash is stored in the database so that we can
                verify later on whether nobody has messed with the store. */
             debug("scanning for references inside '%1%'", path);
-            HashResult hash;
-            auto references = worker.store.parseStorePathSet(scanForReferences(actualPath, worker.store.printStorePathSet(referenceablePaths), hash));
+            // HashResult hash;
+            auto pathSetAndHash = scanForReferences(actualPath, worker.store.printStorePathSet(referenceablePaths));
+            auto references = worker.store.parseStorePathSet(pathSetAndHash.first);
+            HashResult hash = pathSetAndHash.second;
 
             if (buildMode == bmCheck) {
                 if (!worker.store.isValidPath(worker.store.parseStorePath(path))) continue;

@@ -5034,7 +5036,7 @@ bool Worker::pathContentsGood(const StorePath & path)
     if (!pathExists(store.printStorePath(path)))
         res = false;
     else {
-        HashResult current = hashPath(*info->narHash.type, store.printStorePath(path));
+        HashResult current = hashPath(info->narHash->type, store.printStorePath(path));
         Hash nullHash(htSHA256);
        res = info->narHash == nullHash || info->narHash == current.first;
     }
@@ -68,7 +68,7 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData)
         for (auto hashedMirror : settings.hashedMirrors.get()) {
             try {
                 if (!hasSuffix(hashedMirror, "/")) hashedMirror += '/';
-                fetch(hashedMirror + printHashType(*hash->hash.hash.type) + "/" + hash->hash.hash.to_string(Base16, false));
+                fetch(hashedMirror + printHashType(hash->hash.hash.type) + "/" + hash->hash.hash.to_string(Base16, false));
                 return;
             } catch (Error & e) {
                 debug(e.what());

@@ -3,7 +3,7 @@
 namespace nix {
 
 std::string FixedOutputHash::printMethodAlgo() const {
-    return makeFileIngestionPrefix(method) + printHashType(*hash.type);
+    return makeFileIngestionPrefix(method) + printHashType(hash.type);
 }
 
 std::string makeFileIngestionPrefix(const FileIngestionMethod m) {

@@ -42,7 +42,7 @@ ContentAddress parseContentAddress(std::string_view rawCa) {
     if (prefix == "text") {
         auto hashTypeAndHash = rawCa.substr(prefixSeparator+1, string::npos);
         Hash hash = Hash(string(hashTypeAndHash));
-        if (*hash.type != htSHA256) {
+        if (hash.type != htSHA256) {
             throw Error("parseContentAddress: the text hash should have type SHA256");
         }
         return TextHash { hash };
@@ -289,7 +289,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
         logger->startWork();
         auto hash = store->queryPathInfo(path)->narHash;
         logger->stopWork();
-        to << hash.to_string(Base16, false);
+        to << hash->to_string(Base16, false);
         break;
     }
 

@@ -632,7 +632,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
             if (GET_PROTOCOL_MINOR(clientVersion) >= 17)
                 to << 1;
             to << (info->deriver ? store->printStorePath(*info->deriver) : "")
-               << info->narHash.to_string(Base16, false);
+               << info->narHash->to_string(Base16, false);
             writeStorePaths(*store, to, info->references);
             to << info->registrationTime << info->narSize;
             if (GET_PROTOCOL_MINOR(clientVersion) >= 16) {

@@ -38,9 +38,9 @@ void Store::exportPath(const StorePath & path, Sink & sink)
        filesystem corruption from spreading to other machines.
        Don't complain if the stored hash is zero (unknown). */
     Hash hash = hashSink.currentHash().first;
-    if (hash != info->narHash && info->narHash != Hash(*info->narHash.type))
+    if (hash != info->narHash && info->narHash != Hash(info->narHash->type))
         throw Error("hash of path '%s' has changed from '%s' to '%s'!",
-            printStorePath(path), info->narHash.to_string(Base32, true), hash.to_string(Base32, true));
+            printStorePath(path), info->narHash->to_string(Base32, true), hash.to_string(Base32, true));
 
     teeSink
         << exportMagic
@@ -113,7 +113,7 @@ struct LegacySSHStore : public Store
 
         if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 4) {
             auto s = readString(conn->from);
-            info->narHash = s.empty() ? Hash() : Hash(s);
+            info->narHash = s.empty() ? std::optional<Hash>{} : Hash{s};
             info->ca = parseContentAddressOpt(readString(conn->from));
             info->sigs = readStrings<StringSet>(conn->from);
         }

@@ -138,7 +138,7 @@ struct LegacySSHStore : public Store
                 << cmdAddToStoreNar
                 << printStorePath(info.path)
                 << (info.deriver ? printStorePath(*info.deriver) : "")
-                << info.narHash.to_string(Base16, false);
+                << info.narHash->to_string(Base16, false);
             writeStorePaths(*this, conn->to, info.references);
             conn->to
                 << info.registrationTime
@@ -594,7 +594,7 @@ uint64_t LocalStore::addValidPath(State & state,
 
     state.stmtRegisterValidPath.use()
         (printStorePath(info.path))
-        (info.narHash.to_string(Base16, true))
+        (info.narHash->to_string(Base16, true))
         (info.registrationTime == 0 ? time(0) : info.registrationTime)
         (info.deriver ? printStorePath(*info.deriver) : "", (bool) info.deriver)
         (info.narSize, info.narSize != 0)

@@ -694,7 +694,7 @@ void LocalStore::updatePathInfo(State & state, const ValidPathInfo & info)
 {
     state.stmtUpdatePathInfo.use()
         (info.narSize, info.narSize != 0)
-        (info.narHash.to_string(Base16, true))
+        (info.narHash->to_string(Base16, true))
         (info.ultimate ? 1 : 0, info.ultimate)
         (concatStringsSep(" ", info.sigs), !info.sigs.empty())
         (renderContentAddress(info.ca), (bool) info.ca)

@@ -908,7 +908,7 @@ void LocalStore::registerValidPaths(const ValidPathInfos & infos)
         StorePathSet paths;
 
         for (auto & i : infos) {
-            assert(i.narHash.type == htSHA256);
+            assert(i.narHash && i.narHash->type == htSHA256);
             if (isValidPath_(*state, i.path))
                 updatePathInfo(*state, i);
             else

@@ -1021,7 +1021,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
 
         if (hashResult.first != info.narHash)
             throw Error("hash mismatch importing path '%s';\n wanted: %s\n got: %s",
-                printStorePath(info.path), info.narHash.to_string(Base32, true), hashResult.first.to_string(Base32, true));
+                printStorePath(info.path), info.narHash->to_string(Base32, true), hashResult.first.to_string(Base32, true));
 
         if (hashResult.second != info.narSize)
             throw Error("size mismatch importing path '%s';\n wanted: %s\n got: %s",

@@ -1317,9 +1317,9 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
 
                 std::unique_ptr<AbstractHashSink> hashSink;
                 if (!info->ca || !info->references.count(info->path))
-                    hashSink = std::make_unique<HashSink>(*info->narHash.type);
+                    hashSink = std::make_unique<HashSink>(info->narHash->type);
                 else
-                    hashSink = std::make_unique<HashModuloSink>(*info->narHash.type, std::string(info->path.hashPart()));
+                    hashSink = std::make_unique<HashModuloSink>(info->narHash->type, std::string(info->path.hashPart()));
 
                 dumpPath(Store::toRealPath(i), *hashSink);
                 auto current = hashSink->finish();

@@ -1328,7 +1328,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
                     logError({
                         .name = "Invalid hash - path modified",
                         .hint = hintfmt("path '%s' was modified! expected hash '%s', got '%s'",
-                            printStorePath(i), info->narHash.to_string(Base32, true), current.first.to_string(Base32, true))
+                            printStorePath(i), info->narHash->to_string(Base32, true), current.first.to_string(Base32, true))
                     });
                     if (repair) repairPath(i); else errors = true;
                 } else {
@@ -230,9 +230,9 @@ public:
                 (std::string(info->path.name()))
                 (narInfo ? narInfo->url : "", narInfo != 0)
                 (narInfo ? narInfo->compression : "", narInfo != 0)
-                (narInfo && narInfo->fileHash ? narInfo->fileHash.to_string(Base32, true) : "", narInfo && narInfo->fileHash)
+                (narInfo && narInfo->fileHash ? narInfo->fileHash->to_string(Base32, true) : "", narInfo && narInfo->fileHash)
                 (narInfo ? narInfo->fileSize : 0, narInfo != 0 && narInfo->fileSize)
-                (info->narHash.to_string(Base32, true))
+                (info->narHash->to_string(Base32, true))
                 (info->narSize)
                 (concatStringsSep(" ", info->shortRefs()))
                 (info->deriver ? std::string(info->deriver->to_string()) : "", (bool) info->deriver)

@@ -7,15 +7,14 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string & whence)
     : ValidPathInfo(StorePath(StorePath::dummy)) // FIXME: hack
 {
     auto corrupt = [&]() {
-        throw Error("NAR info file '%1%' is corrupt", whence);
+        return Error("NAR info file '%1%' is corrupt", whence);
     };
 
     auto parseHashField = [&](const string & s) {
         try {
             return Hash(s);
         } catch (BadHash &) {
-            corrupt();
-            return Hash(); // never reached
+            throw corrupt();
         }
     };
 
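Note (not part of the diff): the corrupt() helper now returns the Error object instead of throwing it, so every caller writes `throw corrupt();`. That keeps the throw visible at the call site and lets parseHashField drop its unreachable `return Hash()`, which no longer compiles once Hash has no default constructor. A self-contained illustration of the idiom using only the standard library (all names are made up for the example):

    #include <stdexcept>
    #include <string>

    static std::runtime_error makeCorrupt(const std::string & whence) {
        return std::runtime_error("file '" + whence + "' is corrupt");
    }

    static int parseField(const std::string & s, const std::string & whence) {
        try {
            return std::stoi(s);
        } catch (std::invalid_argument &) {
            throw makeCorrupt(whence);   // no dummy return needed after this
        }
    }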
@@ -25,12 +24,12 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string & whence)
     while (pos < s.size()) {
 
         size_t colon = s.find(':', pos);
-        if (colon == std::string::npos) corrupt();
+        if (colon == std::string::npos) throw corrupt();
 
         std::string name(s, pos, colon - pos);
 
         size_t eol = s.find('\n', colon + 2);
-        if (eol == std::string::npos) corrupt();
+        if (eol == std::string::npos) throw corrupt();
 
         std::string value(s, colon + 2, eol - colon - 2);
 

@@ -45,16 +44,16 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string & whence)
         else if (name == "FileHash")
             fileHash = parseHashField(value);
         else if (name == "FileSize") {
-            if (!string2Int(value, fileSize)) corrupt();
+            if (!string2Int(value, fileSize)) throw corrupt();
         }
         else if (name == "NarHash")
             narHash = parseHashField(value);
         else if (name == "NarSize") {
-            if (!string2Int(value, narSize)) corrupt();
+            if (!string2Int(value, narSize)) throw corrupt();
         }
         else if (name == "References") {
             auto refs = tokenizeString<Strings>(value, " ");
-            if (!references.empty()) corrupt();
+            if (!references.empty()) throw corrupt();
             for (auto & r : refs)
                 references.insert(StorePath(r));
         }

@@ -67,7 +66,7 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string & whence)
         else if (name == "Sig")
             sigs.insert(value);
         else if (name == "CA") {
-            if (ca) corrupt();
+            if (ca) throw corrupt();
             // FIXME: allow blank ca or require skipping field?
             ca = parseContentAddressOpt(value);
         }

@@ -77,7 +76,7 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string & whence)
 
     if (compression == "") compression = "bzip2";
 
-    if (!havePath || url.empty() || narSize == 0 || !narHash) corrupt();
+    if (!havePath || url.empty() || narSize == 0 || !narHash) throw corrupt();
 }
 
 std::string NarInfo::to_string(const Store & store) const

@@ -87,11 +86,11 @@ std::string NarInfo::to_string(const Store & store) const
     res += "URL: " + url + "\n";
     assert(compression != "");
     res += "Compression: " + compression + "\n";
-    assert(fileHash.type == htSHA256);
-    res += "FileHash: " + fileHash.to_string(Base32, true) + "\n";
+    assert(fileHash && fileHash->type == htSHA256);
+    res += "FileHash: " + fileHash->to_string(Base32, true) + "\n";
     res += "FileSize: " + std::to_string(fileSize) + "\n";
-    assert(narHash.type == htSHA256);
-    res += "NarHash: " + narHash.to_string(Base32, true) + "\n";
+    assert(narHash && narHash->type == htSHA256);
+    res += "NarHash: " + narHash->to_string(Base32, true) + "\n";
     res += "NarSize: " + std::to_string(narSize) + "\n";
 
     res += "References: " + concatStringsSep(" ", shortRefs()) + "\n";
@@ -10,7 +10,7 @@ struct NarInfo : ValidPathInfo
 {
     std::string url;
     std::string compression;
-    Hash fileHash;
+    std::optional<Hash> fileHash;
     uint64_t fileSize = 0;
     std::string system;
 

@@ -76,8 +76,8 @@ void RefScanSink::operator () (const unsigned char * data, size_t len)
 }
 
 
-PathSet scanForReferences(const string & path,
-    const PathSet & refs, HashResult & hash)
+std::pair<PathSet, HashResult> scanForReferences(const string & path,
+    const PathSet & refs)
 {
     RefScanSink refsSink;
     HashSink hashSink { htSHA256 };

@@ -111,9 +111,9 @@ PathSet scanForReferences(const string & path,
             found.insert(j->second);
         }
 
-    hash = hashSink.finish();
+    auto hash = hashSink.finish();
 
-    return found;
+    return std::pair<PathSet, HashResult>(found, hash);
 }
 
 

@@ -5,8 +5,7 @@
 
 namespace nix {
 
-PathSet scanForReferences(const Path & path, const PathSet & refs,
-    HashResult & hash);
+std::pair<PathSet, HashResult> scanForReferences(const Path & path, const PathSet & refs);
 
 struct RewritingSink : Sink
 {
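Note (not part of the diff): scanForReferences no longer fills a HashResult out-parameter; it returns the reference set and the hash together, as the build.cc hunk above uses. A sketch of the new calling convention, assuming the declaration above and an `actualPath` / `referenceablePaths` in scope (HashResult is the usual pair of NAR hash and NAR size):

    auto [refs, hashResult] = scanForReferences(actualPath, referenceablePaths);
    Hash narHash = hashResult.first;    // previously delivered via the out-parameter
    auto narSize = hashResult.second;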
@@ -498,7 +498,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source,
     conn->to << wopAddToStoreNar
              << printStorePath(info.path)
              << (info.deriver ? printStorePath(*info.deriver) : "")
-             << info.narHash.to_string(Base16, false);
+             << info.narHash->to_string(Base16, false);
     writeStorePaths(*this, conn->to, info.references);
     conn->to << info.registrationTime << info.narSize
              << info.ultimate << info.sigs << renderContentAddress(info.ca)

@@ -538,7 +538,7 @@ string Store::makeValidityRegistration(const StorePathSet & paths,
         auto info = queryPathInfo(i);
 
         if (showHash) {
-            s += info->narHash.to_string(Base16, false) + "\n";
+            s += info->narHash->to_string(Base16, false) + "\n";
             s += (format("%1%\n") % info->narSize).str();
         }
 

@@ -570,7 +570,7 @@ void Store::pathInfoToJSON(JSONPlaceholder & jsonOut, const StorePathSet & store
             auto info = queryPathInfo(storePath);
 
             jsonPath
-                .attr("narHash", info->narHash.to_string(hashBase, true))
+                .attr("narHash", info->narHash->to_string(hashBase, true))
                 .attr("narSize", info->narSize);
 
             {

@@ -613,7 +613,7 @@ void Store::pathInfoToJSON(JSONPlaceholder & jsonOut, const StorePathSet & store
                     if (!narInfo->url.empty())
                         jsonPath.attr("url", narInfo->url);
                     if (narInfo->fileHash)
-                        jsonPath.attr("downloadHash", narInfo->fileHash.to_string(hashBase, true));
+                        jsonPath.attr("downloadHash", narInfo->fileHash->to_string(hashBase, true));
                     if (narInfo->fileSize)
                         jsonPath.attr("downloadSize", narInfo->fileSize);
                     if (showClosureSize)

@@ -854,7 +854,7 @@ std::string ValidPathInfo::fingerprint(const Store & store) const
             store.printStorePath(path));
     return
         "1;" + store.printStorePath(path) + ";"
-        + narHash.to_string(Base32, true) + ";"
+        + narHash->to_string(Base32, true) + ";"
         + std::to_string(narSize) + ";"
         + concatStringsSep(",", store.printStorePathSet(references));
 }

@@ -115,7 +115,8 @@ struct ValidPathInfo
 {
     StorePath path;
     std::optional<StorePath> deriver;
-    Hash narHash;
+    // TODO document this
+    std::optional<Hash> narHash;
     StorePathSet references;
     time_t registrationTime = 0;
     uint64_t narSize = 0; // 0 = unknown
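Note (not part of the diff): with ValidPathInfo::narHash now std::optional<Hash>, callers must establish that the hash is known before using it, as the assert in registerValidPaths above does. A minimal sketch, assuming a ValidPathInfo `info` in scope:

    if (info.narHash)
        printMsg(lvlInfo, "NAR hash: %s", info.narHash->to_string(Base32, true));
    else
        printMsg(lvlInfo, "NAR hash unknown");

    assert(info.narHash);    // alternatively, treat a missing hash as a bug
    Hash h = *info.narHash;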
@@ -15,19 +15,22 @@
 
 namespace nix {
 
+static size_t regularHashSize(HashType type) {
+    switch (type) {
+    case htMD5: return md5HashSize;
+    case htSHA1: return sha1HashSize;
+    case htSHA256: return sha256HashSize;
+    case htSHA512: return sha512HashSize;
+    }
+    abort();
+}
+
 std::set<std::string> hashTypes = { "md5", "sha1", "sha256", "sha512" };
 
 
 void Hash::init()
 {
-    assert(type);
-    switch (*type) {
-    case htMD5: hashSize = md5HashSize; break;
-    case htSHA1: hashSize = sha1HashSize; break;
-    case htSHA256: hashSize = sha256HashSize; break;
-    case htSHA512: hashSize = sha512HashSize; break;
-    }
+    hashSize = regularHashSize(type);
     assert(hashSize <= maxHashSize);
     memset(hash, 0, maxHashSize);
 }

@@ -108,17 +111,11 @@ string printHash16or32(const Hash & hash)
 }
 
 
-HashType assertInitHashType(const Hash & h)
-{
-    assert(h.type);
-    return *h.type;
-}
-
 std::string Hash::to_string(Base base, bool includeType) const
 {
     std::string s;
     if (base == SRI || includeType) {
-        s += printHashType(assertInitHashType(*this));
+        s += printHashType(type);
         s += base == SRI ? '-' : ':';
     }
     switch (base) {

@@ -139,60 +136,66 @@ std::string Hash::to_string(Base base, bool includeType) const
 Hash::Hash(std::string_view s, HashType type) : Hash(s, std::optional { type }) { }
 Hash::Hash(std::string_view s) : Hash(s, std::optional<HashType>{}) { }
 
-Hash::Hash(std::string_view s, std::optional<HashType> type)
-    : type(type)
+Hash::Hash(std::string_view original, std::optional<HashType> optType)
 {
+    auto rest = original;
+
     size_t pos = 0;
     bool isSRI = false;
 
-    auto sep = s.find(':');
-    if (sep == string::npos) {
-        sep = s.find('-');
-        if (sep != string::npos) {
-            isSRI = true;
-        } else if (! type)
-            throw BadHash("hash '%s' does not include a type", s);
+    // Parse the has type before the separater, if there was one.
+    std::optional<HashType> optParsedType;
+    {
+        auto sep = rest.find(':');
+        if (sep == std::string_view::npos) {
+            sep = rest.find('-');
+            if (sep != std::string_view::npos)
+                isSRI = true;
+        }
+        if (sep != std::string_view::npos) {
+            auto hashRaw = rest.substr(0, sep);
+            optParsedType = parseHashType(hashRaw);
+            rest = rest.substr(sep + 1);
+        }
     }
 
-    if (sep != string::npos) {
-        string hts = string(s, 0, sep);
-        this->type = parseHashType(hts);
-        if (!this->type)
-            throw BadHash("unknown hash type '%s'", hts);
-        if (type && type != this->type)
-            throw BadHash("hash '%s' should have type '%s'", s, printHashType(*type));
-        pos = sep + 1;
+    // Either the string or user must provide the type, if they both do they
+    // must agree.
+    if (!optParsedType && !optType) {
+        throw BadHash("hash '%s' does not include a type, nor is the type otherwise known from context.", rest);
+    } else {
+        this->type = optParsedType ? *optParsedType : *optType;
+        if (optParsedType && optType && *optParsedType != *optType)
+            throw BadHash("hash '%s' should have type '%s'", original, printHashType(*optType));
     }
 
     init();
 
-    size_t size = s.size() - pos;
-
-    if (!isSRI && size == base16Len()) {
+    if (!isSRI && rest.size() == base16Len()) {
 
         auto parseHexDigit = [&](char c) {
             if (c >= '0' && c <= '9') return c - '0';
             if (c >= 'A' && c <= 'F') return c - 'A' + 10;
             if (c >= 'a' && c <= 'f') return c - 'a' + 10;
-            throw BadHash("invalid base-16 hash '%s'", s);
+            throw BadHash("invalid base-16 hash '%s'", original);
         };
 
         for (unsigned int i = 0; i < hashSize; i++) {
             hash[i] =
-                parseHexDigit(s[pos + i * 2]) << 4
-                | parseHexDigit(s[pos + i * 2 + 1]);
+                parseHexDigit(rest[pos + i * 2]) << 4
+                | parseHexDigit(rest[pos + i * 2 + 1]);
         }
     }
 
-    else if (!isSRI && size == base32Len()) {
+    else if (!isSRI && rest.size() == base32Len()) {
 
-        for (unsigned int n = 0; n < size; ++n) {
-            char c = s[pos + size - n - 1];
+        for (unsigned int n = 0; n < rest.size(); ++n) {
+            char c = rest[rest.size() - n - 1];
             unsigned char digit;
             for (digit = 0; digit < base32Chars.size(); ++digit) /* !!! slow */
                 if (base32Chars[digit] == c) break;
             if (digit >= 32)
-                throw BadHash("invalid base-32 hash '%s'", s);
+                throw BadHash("invalid base-32 hash '%s'", original);
             unsigned int b = n * 5;
             unsigned int i = b / 8;
             unsigned int j = b % 8;

@@ -202,21 +205,21 @@ Hash::Hash(std::string_view s, std::optional<HashType> type)
                 hash[i + 1] |= digit >> (8 - j);
             } else {
                 if (digit >> (8 - j))
-                    throw BadHash("invalid base-32 hash '%s'", s);
+                    throw BadHash("invalid base-32 hash '%s'", original);
             }
         }
     }
 
-    else if (isSRI || size == base64Len()) {
-        auto d = base64Decode(s.substr(pos));
+    else if (isSRI || rest.size() == base64Len()) {
+        auto d = base64Decode(rest);
         if (d.size() != hashSize)
-            throw BadHash("invalid %s hash '%s'", isSRI ? "SRI" : "base-64", s);
+            throw BadHash("invalid %s hash '%s'", isSRI ? "SRI" : "base-64", original);
         assert(hashSize);
         memcpy(hash, d.data(), hashSize);
     }
 
     else
-        throw BadHash("hash '%s' has wrong length for hash type '%s'", s, printHashType(*type));
+        throw BadHash("hash '%s' has wrong length for hash type '%s'", rest, printHashType(this->type));
 }
 
 Hash newHashAllowEmpty(std::string hashStr, std::optional<HashType> ht)
@@ -269,7 +272,7 @@ static void finish(HashType ht, Ctx & ctx, unsigned char * hash)
 }
 
 
-Hash hashString(HashType ht, const string & s)
+Hash hashString(HashType ht, std::string_view s)
 {
     Ctx ctx;
     Hash hash(ht);

@@ -336,7 +339,7 @@ HashResult hashPath(
 
 Hash compressHash(const Hash & hash, unsigned int newSize)
 {
-    Hash h;
+    Hash h(hash.type);
     h.hashSize = newSize;
     for (unsigned int i = 0; i < hash.hashSize; ++i)
         h.hash[i % newSize] ^= hash.hash[i];

@@ -344,7 +347,7 @@ Hash compressHash(const Hash & hash, unsigned int newSize)
 }
 
 
-std::optional<HashType> parseHashTypeOpt(const string & s)
+std::optional<HashType> parseHashTypeOpt(std::string_view s)
 {
     if (s == "md5") return htMD5;
     else if (s == "sha1") return htSHA1;

@@ -353,7 +356,7 @@ std::optional<HashType> parseHashTypeOpt(const string & s)
     else return std::optional<HashType> {};
 }
 
-HashType parseHashType(const string & s)
+HashType parseHashType(std::string_view s)
 {
     auto opt_h = parseHashTypeOpt(s);
     if (opt_h)
@@ -27,14 +27,11 @@ enum Base : int { Base64, Base32, Base16, SRI };
 
 struct Hash
 {
-    static const unsigned int maxHashSize = 64;
-    unsigned int hashSize = 0;
-    unsigned char hash[maxHashSize] = {};
+    constexpr static size_t maxHashSize = 64;
+    size_t hashSize = 0;
+    uint8_t hash[maxHashSize] = {};
 
-    std::optional<HashType> type = {};
-
-    /* Create an unset hash object. */
-    Hash() { };
+    HashType type;
 
     /* Create a zero-filled hash object. */
     Hash(HashType type) : type(type) { init(); };
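Note (not part of the diff): Hash::type is now a plain HashType and the default constructor is gone, so every Hash carries a known type from the moment it is created; "not yet known" is expressed with std::optional<Hash> at the use site instead. A short sketch, assuming hash.hh as shown above:

    Hash zero(htSHA256);                          // zero-filled, type fixed up front
    Hash computed = hashString(htSHA256, "foo");  // hashString now takes std::string_view
    std::optional<Hash> maybe;                    // replaces the old default-constructed Hash()
    // Hash h;                                    // no longer compiles: no default constructor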
@@ -107,7 +104,7 @@ Hash newHashAllowEmpty(std::string hashStr, std::optional<HashType> ht);
 string printHash16or32(const Hash & hash);
 
 /* Compute the hash of the given string. */
-Hash hashString(HashType ht, const string & s);
+Hash hashString(HashType ht, std::string_view s);
 
 /* Compute the hash of the given file. */
 Hash hashFile(HashType ht, const Path & path);

@@ -123,10 +120,10 @@ HashResult hashPath(HashType ht, const Path & path,
 Hash compressHash(const Hash & hash, unsigned int newSize);
 
 /* Parse a string representing a hash type. */
-HashType parseHashType(const string & s);
+HashType parseHashType(std::string_view s);
 
 /* Will return nothing on parse error */
-std::optional<HashType> parseHashTypeOpt(const string & s);
+std::optional<HashType> parseHashTypeOpt(std::string_view s);
 
 /* And the reverse. */
 string printHashType(HashType ht);
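Note (not part of the diff): parseHashType and parseHashTypeOpt now accept std::string_view, so a caller can hand over a slice of a larger string without building a temporary std::string. A small sketch with a made-up input:

    std::string_view ca = "fixed:r:sha256";
    auto ht = parseHashTypeOpt(ca.substr(ca.rfind(':') + 1));   // "sha256", no copy
    assert(ht && *ht == htSHA256);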
@@ -154,7 +154,7 @@ static int _main(int argc, char * * argv)
         /* If an expected hash is given, the file may already exist in
            the store. */
         std::optional<Hash> expectedHash;
-        Hash hash;
+        Hash hash(ht);
         std::optional<StorePath> storePath;
         if (args.size() == 2) {
             expectedHash = Hash(args[1], ht);

@@ -372,8 +372,8 @@ static void opQuery(Strings opFlags, Strings opArgs)
                 for (auto & j : maybeUseOutputs(store->followLinksToStorePath(i), useOutput, forceRealise)) {
                     auto info = store->queryPathInfo(j);
                     if (query == qHash) {
-                        assert(info->narHash.type == htSHA256);
-                        cout << fmt("%s\n", info->narHash.to_string(Base32, true));
+                        assert(info->narHash && info->narHash->type == htSHA256);
+                        cout << fmt("%s\n", info->narHash->to_string(Base32, true));
                     } else if (query == qSize)
                         cout << fmt("%d\n", info->narSize);
                 }

@@ -725,7 +725,7 @@ static void opVerifyPath(Strings opFlags, Strings opArgs)
         auto path = store->followLinksToStorePath(i);
         printMsg(lvlTalkative, "checking path '%s'...", store->printStorePath(path));
         auto info = store->queryPathInfo(path);
-        HashSink sink(*info->narHash.type);
+        HashSink sink(info->narHash->type);
         store->narFromPath(path, sink);
         auto current = sink.finish();
         if (current.first != info->narHash) {

@@ -734,7 +734,7 @@ static void opVerifyPath(Strings opFlags, Strings opArgs)
                 .hint = hintfmt(
                     "path '%s' was modified! expected hash '%s', got '%s'",
                     store->printStorePath(path),
-                    info->narHash.to_string(Base32, true),
+                    info->narHash->to_string(Base32, true),
                     current.first.to_string(Base32, true))
             });
             status = 1;

@@ -864,7 +864,9 @@ static void opServe(Strings opFlags, Strings opArgs)
                     out << info->narSize // downloadSize
                         << info->narSize;
                     if (GET_PROTOCOL_MINOR(clientVersion) >= 4)
-                        out << (info->narHash ? info->narHash.to_string(Base32, true) : "") << renderContentAddress(info->ca) << info->sigs;
+                        out << (info->narHash ? info->narHash->to_string(Base32, true) : "")
+                            << renderContentAddress(info->ca)
+                            << info->sigs;
                 } catch (InvalidPath &) {
                 }
             }
@@ -50,7 +50,7 @@ struct CmdAddToStore : MixDryRun, StoreCommand
         info.narSize = sink.s->size();
         info.ca = std::optional { FixedOutputHash {
             .method = FileIngestionMethod::Recursive,
-            .hash = info.narHash,
+            .hash = *info.narHash,
         } };
 
         if (!dryRun) {

@@ -276,7 +276,7 @@ std::vector<std::pair<std::shared_ptr<eval_cache::AttrCursor>, std::string>>
 Installable::getCursors(EvalState & state, bool useEvalCache)
 {
     auto evalCache =
-        std::make_shared<nix::eval_cache::EvalCache>(false, Hash(), state,
+        std::make_shared<nix::eval_cache::EvalCache>(std::nullopt, state,
             [&]() { return toValue(state).first; });
     return {{evalCache->getRoot(), ""}};
 }

@@ -437,9 +437,11 @@ ref<eval_cache::EvalCache> openEvalCache(
     std::shared_ptr<flake::LockedFlake> lockedFlake,
     bool useEvalCache)
 {
-    return ref(std::make_shared<nix::eval_cache::EvalCache>(
-        useEvalCache && evalSettings.pureEval,
-        lockedFlake->getFingerprint(),
+    auto fingerprint = lockedFlake->getFingerprint();
+    return make_ref<nix::eval_cache::EvalCache>(
+        useEvalCache && evalSettings.pureEval
+            ? std::optional { std::cref(fingerprint) }
+            : std::nullopt,
         state,
         [&state, lockedFlake]()
         {

@@ -457,7 +459,7 @@ ref<eval_cache::EvalCache> openEvalCache(
             assert(aOutputs);
 
             return aOutputs->value;
-        }));
+        });
 }
 
 static std::string showAttrPaths(const std::vector<std::string> & paths)
@@ -84,7 +84,7 @@ struct CmdMakeContentAddressable : StorePathsCommand, MixJSON
             info.narSize = sink.s->size();
             info.ca = FixedOutputHash {
                 .method = FileIngestionMethod::Recursive,
-                .hash = info.narHash,
+                .hash = *info.narHash,
             };
 
             if (!json)

@@ -133,7 +133,7 @@ struct ProfileManifest
         info.references = std::move(references);
         info.narHash = narHash;
         info.narSize = sink.s->size();
-        info.ca = FixedOutputHash { .method = FileIngestionMethod::Recursive, .hash = info.narHash };
+        info.ca = FixedOutputHash { .method = FileIngestionMethod::Recursive, .hash = *info.narHash };
 
         auto source = StringSource { *sink.s };
         store->addToStore(info, source);

@@ -91,15 +91,15 @@ struct CmdVerify : StorePathsCommand
 
                 std::unique_ptr<AbstractHashSink> hashSink;
                 if (!info->ca)
-                    hashSink = std::make_unique<HashSink>(*info->narHash.type);
+                    hashSink = std::make_unique<HashSink>(info->narHash->type);
                 else
-                    hashSink = std::make_unique<HashModuloSink>(*info->narHash.type, std::string(info->path.hashPart()));
+                    hashSink = std::make_unique<HashModuloSink>(info->narHash->type, std::string(info->path.hashPart()));
 
                 store->narFromPath(info->path, *hashSink);
 
                 auto hash = hashSink->finish();
 
-                if (hash.first != info->narHash) {
+                if (hash.first != *info->narHash) {
                     corrupted++;
                     act2.result(resCorruptedPath, store->printStorePath(info->path));
                     logError({

@@ -107,7 +107,7 @@ struct CmdVerify : StorePathsCommand
                         .hint = hintfmt(
                             "path '%s' was modified! expected hash '%s', got '%s'",
                             store->printStorePath(info->path),
-                            info->narHash.to_string(Base32, true),
+                            info->narHash->to_string(Base32, true),
                             hash.first.to_string(Base32, true))
                     });
                 }