* Store the size of a store path in the database (to be precise, the
  size of the NAR serialisation of the path, i.e., `nix-store --dump
  PATH').  This is useful for Hydra.
Eelco Dolstra 2010-11-16 17:11:46 +00:00
parent fb9368b5a0
commit a3883cbd28
16 changed files with 144 additions and 91 deletions
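
The recorded size can be read back by clients such as Hydra through the queryPathInfo() method that this commit adds to StoreAPI. A minimal sketch of that usage (an illustration, not code from this commit; note that RemoteStore::queryPathInfo() still throws "not implemented" here, so this only works against a LocalStore):

    #include <iostream>
    #include "store-api.hh"

    // Sketch only: print the NAR size recorded for a valid path.  `store' and
    // `path' are assumed to be obtained elsewhere (e.g. via openStore()).
    // A narSize of 0 means the size is unknown (e.g. a row predating the column).
    void printNarSize(nix::StoreAPI & store, const nix::Path & path)
    {
        nix::ValidPathInfo info = store.queryPathInfo(path);
        std::cout << info.path << " " << info.narSize << std::endl;
    }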

View file

@@ -47,7 +47,7 @@ build-sqlite:
 else
 build-sqlite: $(SQLITE)
 	(cd $(SQLITE) && \
-	CC="$(CC)" ./configure --disable-static --prefix=$(pkglibdir)/dummy --libdir=${pkglibdir} $(SUB_CONFIGURE_FLAGS) && \
+	CC="$(CC)" CFLAGS="-DSQLITE_ENABLE_COLUMN_METADATA=1" ./configure --disable-static --prefix=$(pkglibdir)/dummy --libdir=${pkglibdir} $(SUB_CONFIGURE_FLAGS) && \
 	$(MAKE) )
 	touch build-sqlite

View file

@@ -1547,7 +1547,7 @@ void DerivationGoal::startBuilder()
         /* Write closure info to `fileName'. */
         writeFile(tmpDir + "/" + fileName,
-            makeValidityRegistration(paths, false, false));
+            worker.store.makeValidityRegistration(paths, false, false));
     }
@@ -1870,7 +1870,7 @@ PathSet parseReferenceSpecifiers(const Derivation & drv, string attr)
 void DerivationGoal::computeClosure()
 {
     map<Path, PathSet> allReferences;
-    map<Path, Hash> contentHashes;
+    map<Path, HashResult> contentHashes;
 
     /* When using a build hook, the build hook can register the output
        as valid (by doing `nix-store --import').  If so we don't have
@@ -1927,7 +1927,7 @@ void DerivationGoal::computeClosure()
             if (ht == htUnknown)
                 throw BuildError(format("unknown hash algorithm `%1%'") % algo);
             Hash h = parseHash(ht, i->second.hash);
-            Hash h2 = recursive ? hashPath(ht, path) : hashFile(ht, path);
+            Hash h2 = recursive ? hashPath(ht, path).first : hashFile(ht, path);
             if (h != h2)
                 throw BuildError(
                     format("output path `%1%' should have %2% hash `%3%', instead has `%4%'")
@@ -1941,7 +1941,7 @@ void DerivationGoal::computeClosure()
            contained in it.  Compute the SHA-256 NAR hash at the same
            time.  The hash is stored in the database so that we can
            verify later on whether nobody has messed with the store. */
-        Hash hash;
+        HashResult hash;
         PathSet references = scanForReferences(path, allPaths, hash);
         contentHashes[path] = hash;
@@ -1970,14 +1970,18 @@ void DerivationGoal::computeClosure()
     }
 
     /* Register each output path as valid, and register the sets of
-       paths referenced by each of them.  !!! this should be
-       atomic so that either all paths are registered as valid, or
-       none are. */
-    foreach (DerivationOutputs::iterator, i, drv.outputs)
-        worker.store.registerValidPath(i->second.path,
-            contentHashes[i->second.path],
-            allReferences[i->second.path],
-            drvPath);
+       paths referenced by each of them. */
+    ValidPathInfos infos;
+    foreach (DerivationOutputs::iterator, i, drv.outputs) {
+        ValidPathInfo info;
+        info.path = i->second.path;
+        info.hash = contentHashes[i->second.path].first;
+        info.narSize = contentHashes[i->second.path].second;
+        info.references = allReferences[i->second.path];
+        info.deriver = drvPath;
+        infos.push_back(info);
+    }
+    worker.store.registerValidPaths(infos);
 
     /* It is now safe to delete the lock files, since all future
        lockers will see that the output paths are valid; they will not
@@ -2385,10 +2389,15 @@ void SubstitutionGoal::finished()
     canonicalisePathMetaData(storePath);
 
-    Hash contentHash = hashPath(htSHA256, storePath);
-    worker.store.registerValidPath(storePath, contentHash,
-        info.references, info.deriver);
+    HashResult hash = hashPath(htSHA256, storePath);
+
+    ValidPathInfo info2;
+    info2.path = storePath;
+    info2.hash = hash.first;
+    info2.narSize = hash.second;
+    info2.references = info.references;
+    info2.deriver = info.deriver;
+    worker.store.registerValidPath(info2);
 
     outputLock->setDeletion(true);

View file

@@ -327,9 +327,16 @@ void LocalStore::openDB(bool create)
             throw SQLiteError(db, "initialising database schema");
     }
 
+    /* Backwards compatibility with old (pre-release) databases.  Can
+       remove this eventually. */
+    if (sqlite3_table_column_metadata(db, 0, "ValidPaths", "narSize", 0, 0, 0, 0, 0) != SQLITE_OK) {
+        if (sqlite3_exec(db, "alter table ValidPaths add column narSize integer" , 0, 0, 0) != SQLITE_OK)
+            throw SQLiteError(db, "adding column narSize");
+    }
+
     /* Prepare SQL statements. */
     stmtRegisterValidPath.create(db,
-        "insert into ValidPaths (path, hash, registrationTime, deriver) values (?, ?, ?, ?);");
+        "insert into ValidPaths (path, hash, registrationTime, deriver, narSize) values (?, ?, ?, ?, ?);");
     stmtAddReference.create(db,
         "insert or replace into Refs (referrer, reference) values (?, ?);");
     stmtQueryPathInfo.create(db,
@@ -431,19 +438,6 @@ void canonicalisePathMetaData(const Path & path)
 }
 
 
-void LocalStore::registerValidPath(const Path & path,
-    const Hash & hash, const PathSet & references,
-    const Path & deriver)
-{
-    ValidPathInfo info;
-    info.path = path;
-    info.hash = hash;
-    info.references = references;
-    info.deriver = deriver;
-    registerValidPath(info);
-}
-
-
 unsigned long long LocalStore::addValidPath(const ValidPathInfo & info)
 {
     SQLiteStmtUse use(stmtRegisterValidPath);
@@ -454,6 +448,10 @@ unsigned long long LocalStore::addValidPath(const ValidPathInfo & info)
         stmtRegisterValidPath.bind(info.deriver);
     else
         stmtRegisterValidPath.bind(); // null
+    if (info.narSize != 0)
+        stmtRegisterValidPath.bind(info.narSize);
+    else
+        stmtRegisterValidPath.bind(); // null
     if (sqlite3_step(stmtRegisterValidPath) != SQLITE_DONE)
         throw SQLiteError(db, format("registering valid path `%1%' in database") % info.path);
     unsigned long long id = sqlite3_last_insert_rowid(db);
@@ -920,10 +918,18 @@ Path LocalStore::addToStoreFromDump(const string & dump, const string & name,
                the path in the database.  We may just have computed it
                above (if called with recursive == true and hashAlgo ==
                sha256); otherwise, compute it here. */
-            registerValidPath(dstPath,
-                (recursive && hashAlgo == htSHA256) ? h :
-                (recursive ? hashString(htSHA256, dump) : hashPath(htSHA256, dstPath)),
-                PathSet(), "");
+            HashResult hash;
+            if (recursive) {
+                hash.first = hashAlgo == htSHA256 ? h : hashString(htSHA256, dump);
+                hash.second = dump.size();
+            } else
+                hash = hashPath(htSHA256, dstPath);
+
+            ValidPathInfo info;
+            info.path = dstPath;
+            info.hash = hash.first;
+            info.narSize = hash.second;
+            registerValidPath(info);
         }
 
         outputLock.setDeletion(true);
@@ -970,9 +976,15 @@ Path LocalStore::addTextToStore(const string & name, const string & s,
             writeFile(dstPath, s);
 
             canonicalisePathMetaData(dstPath);
 
-            registerValidPath(dstPath,
-                hashPath(htSHA256, dstPath), references, "");
+            HashResult hash = hashPath(htSHA256, dstPath);
+
+            ValidPathInfo info;
+            info.path = dstPath;
+            info.hash = hash.first;
+            info.narSize = hash.second;
+            info.references = references;
+            registerValidPath(info);
         }
 
         outputLock.setDeletion(true);
@@ -998,7 +1010,7 @@ struct HashAndWriteSink : Sink
     Hash currentHash()
     {
         HashSink hashSinkClone(hashSink);
-        return hashSinkClone.finish();
+        return hashSinkClone.finish().first;
     }
 };
@@ -1136,7 +1148,7 @@ Path LocalStore::importPath(bool requireSignature, Source & source)
     Path deriver = readString(hashAndReadSource);
     if (deriver != "") assertStorePath(deriver);
 
-    Hash hash = hashAndReadSource.hashSink.finish();
+    Hash hash = hashAndReadSource.hashSink.finish().first;
     hashAndReadSource.hashing = false;
 
     bool haveSignature = readInt(hashAndReadSource) == 1;
@@ -1200,9 +1212,15 @@ Path LocalStore::importPath(bool requireSignature, Source & source)
             /* !!! if we were clever, we could prevent the hashPath()
                here. */
-            if (deriver != "" && !isValidPath(deriver)) deriver = "";
-            registerValidPath(dstPath,
-                hashPath(htSHA256, dstPath), references, deriver);
+            HashResult hash = hashPath(htSHA256, dstPath);
+
+            ValidPathInfo info;
+            info.path = dstPath;
+            info.hash = hash.first;
+            info.narSize = hash.second;
+            info.references = references;
+            info.deriver = deriver != "" && isValidPath(deriver) ? deriver : "";
+            registerValidPath(info);
         }
 
         outputLock.setDeletion(true);
@@ -1263,12 +1281,14 @@ void LocalStore::verifyStore(bool checkContents)
                 /* Check the content hash (optionally - slow). */
                 printMsg(lvlTalkative, format("checking contents of `%1%'") % *i);
-                Hash current = hashPath(info.hash.type, *i);
+                Hash current = hashPath(info.hash.type, *i).first;
                 if (current != info.hash) {
                     printMsg(lvlError, format("path `%1%' was modified! "
                             "expected hash `%2%', got `%3%'")
                         % *i % printHash(info.hash) % printHash(current));
                 }
+
+                /* !!! Check info.narSize */
             }
         }
     }
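
The verifyStore() hunk above only notes "!!! Check info.narSize" for now; a possible shape for that check, sketched here as an assumption rather than as part of this commit (rows whose narSize is still 0, i.e. unknown, are skipped):

    /* Hypothetical follow-up to the `!!! Check info.narSize' comment; not in
       this commit.  Reuses the surrounding verifyStore() variables (info, *i). */
    HashResult current2 = hashPath(info.hash.type, *i);
    if (info.narSize != 0 && info.narSize != current2.second)
        printMsg(lvlError, format("path `%1%' has size %2%, expected %3%")
            % *i % current2.second % info.narSize);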

View file

@@ -97,6 +97,8 @@ public:
     PathSet queryValidPaths();
 
+    ValidPathInfo queryPathInfo(const Path & path);
+
     Hash queryPathHash(const Path & path);
 
     void queryReferences(const Path & path, PathSet & references);
@@ -173,8 +175,7 @@ public:
        execution of the derivation (or something equivalent).  Also
        register the hash of the file system contents of the path.  The
        hash must be a SHA-256 hash. */
-    void registerValidPath(const Path & path,
-        const Hash & hash, const PathSet & references, const Path & deriver);
+    void registerValidPath(const ValidPathInfo & info);
 
     void registerValidPaths(const ValidPathInfos & infos);
@@ -224,10 +225,6 @@ private:
     void addReference(unsigned long long referrer, unsigned long long reference);
 
-    void registerValidPath(const ValidPathInfo & info);
-
-    ValidPathInfo queryPathInfo(const Path & path);
-
     void appendReferrer(const Path & from, const Path & to, bool lock);
 
     void rewriteReferrers(const Path & path, bool purge, PathSet referrers);

View file

@@ -68,7 +68,7 @@ static void hashAndLink(bool dryRun, HashToPath & hashToPath,
        the contents of the symlink (i.e. the result of
        readlink()), not the contents of the target (which may not
        even exist). */
-    Hash hash = hashPath(htSHA256, path);
+    Hash hash = hashPath(htSHA256, path).first;
     stats.totalFiles++;
     printMsg(lvlDebug, format("`%1%' has hash `%2%'") % path % printHash(hash));

View file

@@ -81,7 +81,7 @@ void RefScanSink::operator () (const unsigned char * data, unsigned int len)
 
 PathSet scanForReferences(const string & path,
-    const PathSet & refs, Hash & hash)
+    const PathSet & refs, HashResult & hash)
 {
     RefScanSink sink;
     std::map<string, Path> backMap;

View file

@@ -7,7 +7,7 @@
 namespace nix {
 
 PathSet scanForReferences(const Path & path, const PathSet & refs,
-    Hash & hash);
+    HashResult & hash);
 
 }

View file

@@ -247,6 +247,12 @@ bool RemoteStore::querySubstitutablePathInfo(const Path & path,
 }
 
 
+ValidPathInfo RemoteStore::queryPathInfo(const Path & path)
+{
+    throw Error("not implemented");
+}
+
+
 Hash RemoteStore::queryPathHash(const Path & path)
 {
     openConnection();

View file

@@ -29,6 +29,8 @@ public:
     PathSet queryValidPaths();
 
+    ValidPathInfo queryPathInfo(const Path & path);
+
     Hash queryPathHash(const Path & path);
 
     void queryReferences(const Path & path, PathSet & references);

View file

@@ -3,7 +3,8 @@ create table if not exists ValidPaths (
     path text unique not null,
     hash text not null,
     registrationTime integer not null,
-    deriver text
+    deriver text,
+    narSize integer
 );
 
 create table if not exists Refs (
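
For ad-hoc inspection, the new column can also be read straight from the Nix database with the SQLite C API. A sketch under assumptions (the database is at the default /nix/var/nix/db/db.sqlite and the store path is a placeholder):

    #include <cstdio>
    #include <sqlite3.h>

    // Sketch: print the recorded NAR size of one store path.  Minimal error
    // handling; the database and store paths are placeholders.
    int main()
    {
        sqlite3 * db;
        if (sqlite3_open("/nix/var/nix/db/db.sqlite", &db) != SQLITE_OK) return 1;
        sqlite3_stmt * stmt;
        sqlite3_prepare_v2(db, "select narSize from ValidPaths where path = ?;", -1, &stmt, 0);
        sqlite3_bind_text(stmt, 1, "/nix/store/<hash>-example", -1, SQLITE_TRANSIENT);
        if (sqlite3_step(stmt) == SQLITE_ROW)
            std::printf("narSize = %lld\n", (long long) sqlite3_column_int64(stmt, 0));
        sqlite3_finalize(stmt);
        sqlite3_close(db);
        return 0;
    }

A NULL (or 0) value simply means the size is unknown, which is what the ALTER TABLE migration in openDB() leaves behind for paths registered before this change.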

View file

@@ -190,7 +190,7 @@ std::pair<Path, Hash> computeStorePathForPath(const Path & srcPath,
     bool recursive, HashType hashAlgo, PathFilter & filter)
 {
     HashType ht(hashAlgo);
-    Hash h = recursive ? hashPath(ht, srcPath, filter) : hashFile(ht, srcPath);
+    Hash h = recursive ? hashPath(ht, srcPath, filter).first : hashFile(ht, srcPath);
     string name = baseNameOf(srcPath);
     Path dstPath = makeFixedOutputPath(recursive, hashAlgo, h, name);
     return std::pair<Path, Hash>(dstPath, h);
@@ -216,7 +216,7 @@ Path computeStorePathForText(const string & name, const string & s,
 /* Return a string accepted by decodeValidPathInfo() that
    registers the specified paths as valid.  Note: it's the
    responsibility of the caller to provide a closure. */
-string makeValidityRegistration(const PathSet & paths,
+string StoreAPI::makeValidityRegistration(const PathSet & paths,
     bool showDerivers, bool showHash)
 {
     string s = "";
@@ -224,18 +224,19 @@ string makeValidityRegistration(const PathSet & paths,
     foreach (PathSet::iterator, i, paths) {
         s += *i + "\n";
 
-        if (showHash)
-            s += printHash(store->queryPathHash(*i)) + "\n";
+        ValidPathInfo info = queryPathInfo(*i);
+
+        if (showHash) {
+            s += printHash(info.hash) + "\n";
+            s += (format("%1%\n") % info.narSize).str();
+        }
 
-        Path deriver = showDerivers ? store->queryDeriver(*i) : "";
+        Path deriver = showDerivers ? info.deriver : "";
         s += deriver + "\n";
 
-        PathSet references;
-        store->queryReferences(*i, references);
-
-        s += (format("%1%\n") % references.size()).str();
+        s += (format("%1%\n") % info.references.size()).str();
 
-        foreach (PathSet::iterator, j, references)
+        foreach (PathSet::iterator, j, info.references)
             s += *j + "\n";
     }
@@ -252,6 +253,8 @@ ValidPathInfo decodeValidPathInfo(std::istream & str, bool hashGiven)
         string s;
         getline(str, s);
         info.hash = parseHash(htSHA256, s);
+        getline(str, s);
+        if (!string2Int(s, info.narSize)) throw Error("number expected");
     }
     getline(str, info.deriver);
     string s; int n;
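
Taken together, StoreAPI::makeValidityRegistration() and decodeValidPathInfo() now agree on a record of the following shape when hashes are requested (one field per line, repeated for each path; the angle-bracketed names are descriptions, not literal output):

    <store path>
    <SHA-256 hash of the NAR serialisation, as printed by printHash()>
    <NAR size in bytes>
    <deriver path, or an empty line>
    <number of references>
    <one reference per line>

The extra size line is why decodeValidPathInfo() gains the string2Int() check above; registration data produced without the size (e.g. by an older nix-store --dump-db) will now fail with `number expected' when hashGiven is set.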

View file

@@ -90,6 +90,21 @@ struct SubstitutablePathInfo
 };
 
 
+struct ValidPathInfo
+{
+    Path path;
+    Path deriver;
+    Hash hash;
+    PathSet references;
+    time_t registrationTime;
+    unsigned long long narSize; // 0 = unknown
+    unsigned long long id; // internal use only
+    ValidPathInfo() : registrationTime(0), narSize(0) { }
+};
+
+typedef list<ValidPathInfo> ValidPathInfos;
+
+
 class StoreAPI
 {
 public:
@@ -102,6 +117,9 @@ public:
     /* Query the set of valid paths. */
     virtual PathSet queryValidPaths() = 0;
 
+    /* Query information about a valid path. */
+    virtual ValidPathInfo queryPathInfo(const Path & path) = 0;
+
     /* Queries the hash of a valid path. */
     virtual Hash queryPathHash(const Path & path) = 0;
@@ -214,6 +232,12 @@ public:
     /* Clear the "failed" status of the given paths.  The special
        value `*' causes all failed paths to be cleared. */
     virtual void clearFailedPaths(const PathSet & paths) = 0;
+
+    /* Return a string representing information about the path that
+       can be loaded into the database using `nix-store --load-db' or
+       `nix-store --register-validity'. */
+    string makeValidityRegistration(const PathSet & paths,
+        bool showDerivers, bool showHash);
 };
@@ -307,22 +331,6 @@ boost::shared_ptr<StoreAPI> openStore();
 string showPaths(const PathSet & paths);
 
-string makeValidityRegistration(const PathSet & paths,
-    bool showDerivers, bool showHash);
-
-struct ValidPathInfo
-{
-    Path path;
-    Path deriver;
-    Hash hash;
-    PathSet references;
-    time_t registrationTime;
-    unsigned long long id; // internal use only
-    ValidPathInfo() : registrationTime(0) { }
-};
-
-typedef list<ValidPathInfo> ValidPathInfos;
-
 ValidPathInfo decodeValidPathInfo(std::istream & str,
     bool hashGiven = false);

View file

@@ -286,12 +286,14 @@ Hash hashFile(HashType ht, const Path & path)
 HashSink::HashSink(HashType ht) : ht(ht)
 {
     ctx = new Ctx;
+    bytes = 0;
     start(ht, *ctx);
 }
 
 HashSink::HashSink(const HashSink & h)
 {
     ht = h.ht;
+    bytes = h.bytes;
     ctx = new Ctx;
     *ctx = *h.ctx;
 }
@@ -304,18 +306,20 @@ HashSink::~HashSink()
 void HashSink::operator ()
     (const unsigned char * data, unsigned int len)
 {
+    bytes += len;
     update(ht, *ctx, data, len);
 }
 
-Hash HashSink::finish()
+HashResult HashSink::finish()
 {
     Hash hash(ht);
     nix::finish(ht, *ctx, hash.hash);
-    return hash;
+    return HashResult(hash, bytes);
 }
 
-Hash hashPath(HashType ht, const Path & path, PathFilter & filter)
+HashResult hashPath(
+    HashType ht, const Path & path, PathFilter & filter)
 {
     HashSink sink(ht);
     dumpPath(path, sink, filter);
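
Since hashPath() now returns a (hash, size) pair, callers that only need the digest take .first, while callers such as scanForReferences() can propagate the whole HashResult. A small usage sketch (an illustration only; the path argument is a placeholder):

    #include <iostream>
    #include "hash.hh"

    // Sketch: compute both the SHA-256 NAR hash and the NAR size of a path.
    void report(const nix::Path & somePath)
    {
        nix::HashResult r = nix::hashPath(nix::htSHA256, somePath);
        std::cout << nix::printHash(r.first) << " " << r.second << " bytes" << std::endl;
    }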

View file

@@ -40,7 +40,6 @@ struct Hash
     /* For sorting. */
     bool operator < (const Hash & h) const;
 };
@@ -72,7 +71,8 @@ Hash hashFile(HashType ht, const Path & path);
    (essentially) hashString(ht, dumpPath(path)). */
 struct PathFilter;
 extern PathFilter defaultPathFilter;
-Hash hashPath(HashType ht, const Path & path,
+typedef std::pair<Hash, unsigned long long> HashResult;
+HashResult hashPath(HashType ht, const Path & path,
     PathFilter & filter = defaultPathFilter);
 
 /* Compress a hash to the specified number of bytes by cyclically
@@ -93,13 +93,14 @@ class HashSink : public Sink
 private:
     HashType ht;
     Ctx * ctx;
+    unsigned long long bytes;
 
 public:
     HashSink(HashType ht);
     HashSink(const HashSink & h);
     ~HashSink();
 
     virtual void operator () (const unsigned char * data, unsigned int len);
-    Hash finish();
+    HashResult finish();
 };

View file

@@ -44,7 +44,7 @@ void run(Strings args)
     if (op == opHash) {
         for (Strings::iterator i = ss.begin(); i != ss.end(); ++i) {
-            Hash h = flat ? hashFile(ht, *i) : hashPath(ht, *i);
+            Hash h = flat ? hashFile(ht, *i) : hashPath(ht, *i).first;
             if (truncate && h.hashSize > 20) h = compressHash(h, 20);
             std::cout << format("%1%\n") %
                 (base32 ? printHash32(h) : printHash(h));

View file

@@ -393,9 +393,8 @@ static void opDumpDB(Strings opFlags, Strings opArgs)
     if (!opArgs.empty())
         throw UsageError("no arguments expected");
     PathSet validPaths = store->queryValidPaths();
-    foreach (PathSet::iterator, i, validPaths) {
-        cout << makeValidityRegistration(singleton<PathSet>(*i), true, true);
-    }
+    foreach (PathSet::iterator, i, validPaths)
+        cout << store->makeValidityRegistration(singleton<PathSet>(*i), true, true);
 }
@@ -410,8 +409,11 @@ static void registerValidity(bool reregister, bool hashGiven, bool canonicalise)
             /* !!! races */
             if (canonicalise)
                 canonicalisePathMetaData(info.path);
-            if (!hashGiven)
-                info.hash = hashPath(htSHA256, info.path);
+            if (!hashGiven) {
+                HashResult hash = hashPath(htSHA256, info.path);
+                info.hash = hash.first;
+                info.narSize = hash.second;
+            }
             infos.push_back(info);
         }
     }
} }