Merge pull request #9325 from NixOS/accessor-add-to-store
Content addressing and adding to store cleanup
commit 7cfd6c0efe
@@ -13,6 +13,7 @@
 #include "globals.hh"
 #include "store-api.hh"
 #include "crypto.hh"
+#include "posix-source-accessor.hh"

 #include <sodium.h>
 #include <nlohmann/json.hpp>

@@ -205,7 +206,10 @@ void importPaths(int fd, int dontCheckSigs)
 SV * hashPath(char * algo, int base32, char * path)
     PPCODE:
         try {
-            Hash h = hashPath(parseHashAlgo(algo), path).first;
+            PosixSourceAccessor accessor;
+            Hash h = hashPath(
+                accessor, CanonPath::fromCwd(path),
+                FileIngestionMethod::Recursive, parseHashAlgo(algo)).first;
             auto s = h.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, false);
             XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
         } catch (Error & e) {

@@ -281,7 +285,11 @@ SV * addToStore(char * srcPath, int recursive, char * algo)
     PPCODE:
         try {
             auto method = recursive ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat;
-            auto path = store()->addToStore(std::string(baseNameOf(srcPath)), srcPath, method, parseHashAlgo(algo));
+            PosixSourceAccessor accessor;
+            auto path = store()->addToStore(
+                std::string(baseNameOf(srcPath)),
+                accessor, CanonPath::fromCwd(srcPath),
+                method, parseHashAlgo(algo));
             XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(path).c_str(), 0)));
         } catch (Error & e) {
             croak("%s", e.what());
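The two bindings above now hash and copy through a SourceAccessor rather than a raw filesystem path. A minimal illustrative sketch of the same call pattern from ordinary C++ (not taken from the commit; headers and error handling omitted, variable names made up):

    // Hash the NAR serialisation of a local path with SHA-256,
    // using the accessor-based hashPath introduced in this commit.
    PosixSourceAccessor accessor;
    Hash h = hashPath(
        accessor, CanonPath::fromCwd("some-path"),
        FileIngestionMethod::Recursive, HashAlgorithm::SHA256).first;
    std::string printable = h.to_string(HashFormat::Nix32, false);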
@@ -44,7 +44,7 @@ ref<InstallableValue> InstallableValue::require(ref<Installable> installable)
 std::optional<DerivedPathWithInfo> InstallableValue::trySinglePathToDerivedPaths(Value & v, const PosIdx pos, std::string_view errorCtx)
 {
     if (v.type() == nPath) {
-        auto storePath = v.path().fetchToStore(state->store);
+        auto storePath = v.path().fetchToStore(*state->store);
         return {{
             .path = DerivedPath::Opaque {
                 .path = std::move(storePath),
@@ -2317,7 +2317,7 @@ StorePath EvalState::copyPathToStore(NixStringContext & context, const SourcePat
     auto dstPath = i != srcToStore.end()
         ? i->second
         : [&]() {
-            auto dstPath = path.fetchToStore(store, path.baseName(), FileIngestionMethod::Recursive, nullptr, repair);
+            auto dstPath = path.fetchToStore(*store, path.baseName(), FileIngestionMethod::Recursive, nullptr, repair);
             allowPath(dstPath);
             srcToStore.insert_or_assign(path, dstPath);
             printMsg(lvlChatty, "copied source '%1%' -> '%2%'", path, store->printStorePath(dstPath));
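Both call sites above reflect the same signature change: fetchToStore now takes a plain Store & instead of ref<Store>, so callers holding a ref simply dereference it. An illustrative sketch (assumed helper, not from the commit):

    // `store` is a ref<Store>; the new overload wants a Store reference.
    StorePath copyToStore(ref<Store> store, const SourcePath & src)
    {
        return src.fetchToStore(*store, src.baseName());
    }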
@@ -2072,8 +2072,14 @@ static void prim_toFile(EvalState & state, const PosIdx pos, Value * * args, Val
     }

     auto storePath = settings.readOnlyMode
-        ? state.store->computeStorePathForText(name, contents, refs)
-        : state.store->addTextToStore(name, contents, refs, state.repair);
+        ? state.store->makeFixedOutputPathFromCA(name, TextInfo {
+            .hash = hashString(HashAlgorithm::SHA256, contents),
+            .references = std::move(refs),
+        })
+        : ({
+            StringSource s { contents };
+            state.store->addToStoreFromDump(s, name, TextIngestionMethod {}, HashAlgorithm::SHA256, refs, state.repair);
+        });

     /* Note: we don't need to add `context' to the context of the
        result, since `storePath' itself has references to the paths

@@ -2229,7 +2235,7 @@ static void addPath(
     });

     if (!expectedHash || !state.store->isValidPath(*expectedStorePath)) {
-        auto dstPath = path.fetchToStore(state.store, name, method, filter.get(), state.repair);
+        auto dstPath = path.fetchToStore(*state.store, name, method, filter.get(), state.repair);
         if (expectedHash && expectedStorePath != dstPath)
             state.debugThrowLastTrace(Error("store path mismatch in (possibly filtered) path added from '%s'", path));
         state.allowAndSetStorePathString(dstPath, v);
@@ -106,7 +106,7 @@ struct CacheImpl : Cache
     }

     void add(
-        ref<Store> store,
+        Store & store,
         const Attrs & inAttrs,
         const Attrs & infoAttrs,
         const StorePath & storePath,

@@ -115,13 +115,13 @@ struct CacheImpl : Cache
         _state.lock()->add.use()
             (attrsToJSON(inAttrs).dump())
             (attrsToJSON(infoAttrs).dump())
-            (store->printStorePath(storePath))
+            (store.printStorePath(storePath))
             (locked)
             (time(0)).exec();
     }

     std::optional<std::pair<Attrs, StorePath>> lookup(
-        ref<Store> store,
+        Store & store,
         const Attrs & inAttrs) override
     {
         if (auto res = lookupExpired(store, inAttrs)) {

@@ -134,7 +134,7 @@ struct CacheImpl : Cache
     }

     std::optional<Result> lookupExpired(
-        ref<Store> store,
+        Store & store,
         const Attrs & inAttrs) override
     {
         auto state(_state.lock());

@@ -148,19 +148,19 @@ struct CacheImpl : Cache
         }

         auto infoJSON = stmt.getStr(0);
-        auto storePath = store->parseStorePath(stmt.getStr(1));
+        auto storePath = store.parseStorePath(stmt.getStr(1));
         auto locked = stmt.getInt(2) != 0;
         auto timestamp = stmt.getInt(3);

-        store->addTempRoot(storePath);
-        if (!store->isValidPath(storePath)) {
+        store.addTempRoot(storePath);
+        if (!store.isValidPath(storePath)) {
             // FIXME: we could try to substitute 'storePath'.
             debug("ignoring disappeared cache entry '%s'", inAttrsJSON);
             return {};
         }

         debug("using cache entry '%s' -> '%s', '%s'",
-            inAttrsJSON, infoJSON, store->printStorePath(storePath));
+            inAttrsJSON, infoJSON, store.printStorePath(storePath));

         return Result {
             .expired = !locked && (settings.tarballTtl.get() == 0 || timestamp + settings.tarballTtl < time(0)),
@@ -50,14 +50,14 @@ struct Cache

     /* Old cache for things that have a store path. */
     virtual void add(
-        ref<Store> store,
+        Store & store,
         const Attrs & inAttrs,
         const Attrs & infoAttrs,
         const StorePath & storePath,
         bool locked) = 0;

     virtual std::optional<std::pair<Attrs, StorePath>> lookup(
-        ref<Store> store,
+        Store & store,
         const Attrs & inAttrs) = 0;

     struct Result

@@ -68,7 +68,7 @@ struct Cache
     };

     virtual std::optional<Result> lookupExpired(
-        ref<Store> store,
+        Store & store,
         const Attrs & inAttrs) = 0;
 };
@@ -374,7 +374,7 @@ void InputScheme::clone(const Input & input, const Path & destDir) const
 std::pair<StorePath, Input> InputScheme::fetch(ref<Store> store, const Input & input)
 {
     auto [accessor, input2] = getAccessor(store, input);
-    auto storePath = SourcePath(accessor).fetchToStore(store, input2.getName());
+    auto storePath = SourcePath(accessor).fetchToStore(*store, input2.getName());
     return {storePath, input2};
 }
@@ -368,14 +368,14 @@ struct GitInputScheme : InputScheme

     RepoInfo getRepoInfo(const Input & input) const
     {
-        auto checkHashType = [&](const std::optional<Hash> & hash)
+        auto checkHashAlgorithm = [&](const std::optional<Hash> & hash)
         {
             if (hash.has_value() && !(hash->algo == HashAlgorithm::SHA1 || hash->algo == HashAlgorithm::SHA256))
                 throw Error("Hash '%s' is not supported by Git. Supported types are sha1 and sha256.", hash->to_string(HashFormat::Base16, true));
         };

         if (auto rev = input.getRev())
-            checkHashType(rev);
+            checkHashAlgorithm(rev);

         RepoInfo repoInfo;
@@ -201,7 +201,7 @@ struct GitArchiveInputScheme : InputScheme
             {"rev", rev->gitRev()},
         });

-        if (auto res = getCache()->lookup(store, lockedAttrs)) {
+        if (auto res = getCache()->lookup(*store, lockedAttrs)) {
             input.attrs.insert_or_assign("lastModified", getIntAttr(res->first, "lastModified"));
             return {std::move(res->second), input};
         }

@@ -213,7 +213,7 @@ struct GitArchiveInputScheme : InputScheme
         input.attrs.insert_or_assign("lastModified", uint64_t(result.lastModified));

         getCache()->add(
-            store,
+            *store,
             lockedAttrs,
             {
                 {"rev", rev->gitRev()},
@@ -5,10 +5,10 @@
 namespace nix {

 StorePath InputAccessor::fetchToStore(
-    ref<Store> store,
+    Store & store,
     const CanonPath & path,
     std::string_view name,
-    FileIngestionMethod method,
+    ContentAddressMethod method,
     PathFilter * filter,
     RepairFlag repair)
 {

@@ -20,10 +20,24 @@ StorePath InputAccessor::fetchToStore(
     if (!filter && fingerprint) {
         cacheKey = fetchers::Attrs{
             {"_what", "fetchToStore"},
-            {"store", store->storeDir},
+            {"store", store.storeDir},
             {"name", std::string(name)},
             {"fingerprint", *fingerprint},
-            {"method", (uint8_t) method},
+            {
+                "method",
+                std::visit(overloaded {
+                    [](const TextIngestionMethod &) {
+                        return "text";
+                    },
+                    [](const FileIngestionMethod & fim) {
+                        switch (fim) {
+                        case FileIngestionMethod::Flat: return "flat";
+                        case FileIngestionMethod::Recursive: return "nar";
+                        default: assert(false);
+                        }
+                    },
+                }, method.raw),
+            },
             {"path", path.abs()}
         };
         if (auto res = fetchers::getCache()->lookup(store, *cacheKey)) {

@@ -35,17 +49,14 @@ StorePath InputAccessor::fetchToStore(

     Activity act(*logger, lvlChatty, actUnknown, fmt("copying '%s' to the store", showPath(path)));

-    auto source = sinkToSource([&](Sink & sink) {
-        if (method == FileIngestionMethod::Recursive)
-            dumpPath(path, sink, filter ? *filter : defaultPathFilter);
-        else
-            readFile(path, sink);
-    });
+    auto filter2 = filter ? *filter : defaultPathFilter;

     auto storePath =
         settings.readOnlyMode
-        ? store->computeStorePathFromDump(*source, name, method, HashAlgorithm::SHA256).first
-        : store->addToStoreFromDump(*source, name, method, HashAlgorithm::SHA256, repair);
+        ? store.computeStorePath(
+            name, *this, path, method, HashAlgorithm::SHA256, {}, filter2).first
+        : store.addToStore(
+            name, *this, path, method, HashAlgorithm::SHA256, {}, filter2, repair);

     if (cacheKey)
         fetchers::getCache()->add(store, *cacheKey, {}, storePath, true);

@@ -60,9 +71,9 @@ std::ostream & operator << (std::ostream & str, const SourcePath & path)
 }

 StorePath SourcePath::fetchToStore(
-    ref<Store> store,
+    Store & store,
     std::string_view name,
-    FileIngestionMethod method,
+    ContentAddressMethod method,
     PathFilter * filter,
     RepairFlag repair) const
 {
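fetchToStore now hashes and copies directly from the accessor: in read-only mode it only computes the would-be store path via Store::computeStorePath, otherwise it calls Store::addToStore. A hedged sketch of that split using the new signatures (illustrative helper, not from the commit, all arguments passed explicitly):

    StorePath ingest(
        Store & store, SourceAccessor & accessor, const CanonPath & path,
        std::string_view name, bool dryRun)
    {
        ContentAddressMethod method = FileIngestionMethod::Recursive;
        return dryRun
            ? store.computeStorePath(
                name, accessor, path, method, HashAlgorithm::SHA256, {}, defaultPathFilter).first
            : store.addToStore(
                name, accessor, path, method, HashAlgorithm::SHA256, {}, defaultPathFilter, NoRepair);
    }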
@@ -30,10 +30,10 @@ struct InputAccessor : virtual SourceAccessor, std::enable_shared_from_this<Inpu
     }

     StorePath fetchToStore(
-        ref<Store> store,
+        Store & store,
         const CanonPath & path,
         std::string_view name = "source",
-        FileIngestionMethod method = FileIngestionMethod::Recursive,
+        ContentAddressMethod method = FileIngestionMethod::Recursive,
         PathFilter * filter = nullptr,
         RepairFlag repair = NoRepair);
 };

@@ -116,9 +116,9 @@ struct SourcePath
      * Copy this `SourcePath` to the Nix store.
      */
     StorePath fetchToStore(
-        ref<Store> store,
+        Store & store,
         std::string_view name = "source",
-        FileIngestionMethod method = FileIngestionMethod::Recursive,
+        ContentAddressMethod method = FileIngestionMethod::Recursive,
         PathFilter * filter = nullptr,
         RepairFlag repair = NoRepair) const;
@@ -6,6 +6,7 @@
 #include "tarfile.hh"
 #include "store-api.hh"
 #include "url-parts.hh"
+#include "posix-source-accessor.hh"

 #include "fetch-settings.hh"

@@ -210,7 +211,12 @@ struct MercurialInputScheme : InputScheme
                 return files.count(file);
             };

-            auto storePath = store->addToStore(input.getName(), actualPath, FileIngestionMethod::Recursive, HashAlgorithm::SHA256, filter);
+            PosixSourceAccessor accessor;
+            auto storePath = store->addToStore(
+                input.getName(),
+                accessor, CanonPath { actualPath },
+                FileIngestionMethod::Recursive, HashAlgorithm::SHA256, {},
+                filter);

             return {std::move(storePath), input};
         }

@@ -218,7 +224,7 @@ struct MercurialInputScheme : InputScheme

         if (!input.getRef()) input.attrs.insert_or_assign("ref", "default");

-        auto checkHashType = [&](const std::optional<Hash> & hash)
+        auto checkHashAlgorithm = [&](const std::optional<Hash> & hash)
         {
             if (hash.has_value() && hash->algo != HashAlgorithm::SHA1)
                 throw Error("Hash '%s' is not supported by Mercurial. Only sha1 is supported.", hash->to_string(HashFormat::Base16, true));

@@ -227,7 +233,7 @@ struct MercurialInputScheme : InputScheme

         auto getLockedAttrs = [&]()
         {
-            checkHashType(input.getRev());
+            checkHashAlgorithm(input.getRev());

             return Attrs({
                 {"type", "hg"},

@@ -246,7 +252,7 @@ struct MercurialInputScheme : InputScheme
         };

         if (input.getRev()) {
-            if (auto res = getCache()->lookup(store, getLockedAttrs()))
+            if (auto res = getCache()->lookup(*store, getLockedAttrs()))
                 return makeResult(res->first, std::move(res->second));
         }

@@ -259,7 +265,7 @@ struct MercurialInputScheme : InputScheme
             {"ref", *input.getRef()},
         });

-        if (auto res = getCache()->lookup(store, unlockedAttrs)) {
+        if (auto res = getCache()->lookup(*store, unlockedAttrs)) {
             auto rev2 = Hash::parseAny(getStrAttr(res->first, "rev"), HashAlgorithm::SHA1);
             if (!input.getRev() || input.getRev() == rev2) {
                 input.attrs.insert_or_assign("rev", rev2.gitRev());

@@ -305,7 +311,7 @@ struct MercurialInputScheme : InputScheme
         auto revCount = std::stoull(tokens[1]);
         input.attrs.insert_or_assign("ref", tokens[2]);

-        if (auto res = getCache()->lookup(store, getLockedAttrs()))
+        if (auto res = getCache()->lookup(*store, getLockedAttrs()))
             return makeResult(res->first, std::move(res->second));

         Path tmpDir = createTempDir();

@@ -315,7 +321,8 @@ struct MercurialInputScheme : InputScheme

         deletePath(tmpDir + "/.hg_archival.txt");

-        auto storePath = store->addToStore(name, tmpDir);
+        PosixSourceAccessor accessor;
+        auto storePath = store->addToStore(name, accessor, CanonPath { tmpDir });

         Attrs infoAttrs({
             {"rev", input.getRev()->gitRev()},

@@ -324,14 +331,14 @@ struct MercurialInputScheme : InputScheme

         if (!_input.getRev())
             getCache()->add(
-                store,
+                *store,
                 unlockedAttrs,
                 infoAttrs,
                 storePath,
                 false);

         getCache()->add(
-            store,
+            *store,
             getLockedAttrs(),
             infoAttrs,
             storePath,
@@ -8,6 +8,7 @@
 #include "tarfile.hh"
 #include "types.hh"
 #include "split.hh"
+#include "posix-source-accessor.hh"

 namespace nix::fetchers {

@@ -26,7 +27,7 @@ DownloadFileResult downloadFile(
         {"name", name},
     });

-    auto cached = getCache()->lookupExpired(store, inAttrs);
+    auto cached = getCache()->lookupExpired(*store, inAttrs);

     auto useCached = [&]() -> DownloadFileResult
     {

@@ -91,7 +92,7 @@ DownloadFileResult downloadFile(
     }

     getCache()->add(
-        store,
+        *store,
         inAttrs,
         infoAttrs,
         *storePath,

@@ -99,7 +100,7 @@ DownloadFileResult downloadFile(

     if (url != res.effectiveUri)
         getCache()->add(
-            store,
+            *store,
             {
                 {"type", "file"},
                 {"url", res.effectiveUri},

@@ -130,7 +131,7 @@ DownloadTarballResult downloadTarball(
         {"name", name},
     });

-    auto cached = getCache()->lookupExpired(store, inAttrs);
+    auto cached = getCache()->lookupExpired(*store, inAttrs);

     if (cached && !cached->expired)
         return {

@@ -156,7 +157,8 @@ DownloadTarballResult downloadTarball(
             throw nix::Error("tarball '%s' contains an unexpected number of top-level files", url);
         auto topDir = tmpDir + "/" + members.begin()->name;
         lastModified = lstat(topDir).st_mtime;
-        unpackedStorePath = store->addToStore(name, topDir, FileIngestionMethod::Recursive, HashAlgorithm::SHA256, defaultPathFilter, NoRepair);
+        PosixSourceAccessor accessor;
+        unpackedStorePath = store->addToStore(name, accessor, CanonPath { topDir }, FileIngestionMethod::Recursive, HashAlgorithm::SHA256, {}, defaultPathFilter, NoRepair);
     }

     Attrs infoAttrs({

@@ -168,7 +170,7 @@ DownloadTarballResult downloadTarball(
         infoAttrs.emplace("immutableUrl", *res.immutableUrl);

     getCache()->add(
-        store,
+        *store,
         inAttrs,
         infoAttrs,
         *unpackedStorePath,
@@ -12,6 +12,7 @@
 #include "thread-pool.hh"
 #include "callback.hh"
 #include "signals.hh"
+#include "archive.hh"

 #include <chrono>
 #include <future>

@@ -300,24 +301,60 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource
     }});
 }

-StorePath BinaryCacheStore::addToStoreFromDump(Source & dump, std::string_view name,
-    FileIngestionMethod method, HashAlgorithm hashAlgo, RepairFlag repair, const StorePathSet & references)
+StorePath BinaryCacheStore::addToStoreFromDump(
+    Source & dump,
+    std::string_view name,
+    ContentAddressMethod method,
+    HashAlgorithm hashAlgo,
+    const StorePathSet & references,
+    RepairFlag repair)
 {
-    if (method != FileIngestionMethod::Recursive || hashAlgo != HashAlgorithm::SHA256)
-        unsupported("addToStoreFromDump");
-    return addToStoreCommon(dump, repair, CheckSigs, [&](HashResult nar) {
+    std::optional<Hash> caHash;
+    std::string nar;
+
+    if (auto * dump2p = dynamic_cast<StringSource *>(&dump)) {
+        auto & dump2 = *dump2p;
+        // Hack, this gives us a "replayable" source so we can compute
+        // multiple hashes more easily.
+        caHash = hashString(HashAlgorithm::SHA256, dump2.s);
+        switch (method.getFileIngestionMethod()) {
+        case FileIngestionMethod::Recursive:
+            // The dump is already NAR in this case, just use it.
+            nar = dump2.s;
+            break;
+        case FileIngestionMethod::Flat:
+            // The dump is Flat, so we need to convert it to NAR with a
+            // single file.
+            StringSink s;
+            dumpString(dump2.s, s);
+            nar = std::move(s.s);
+            break;
+        }
+    } else {
+        // Otherwise, we have to do th same hashing as NAR so our single
+        // hash will suffice for both purposes.
+        if (method != FileIngestionMethod::Recursive || hashAlgo != HashAlgorithm::SHA256)
+            unsupported("addToStoreFromDump");
+    }
+    StringSource narDump { nar };
+
+    // Use `narDump` if we wrote to `nar`.
+    Source & narDump2 = nar.size() > 0
+        ? static_cast<Source &>(narDump)
+        : dump;
+
+    return addToStoreCommon(narDump2, repair, CheckSigs, [&](HashResult nar) {
         ValidPathInfo info {
             *this,
             name,
-            FixedOutputInfo {
-                .method = method,
-                .hash = nar.first,
-                .references = {
+            ContentAddressWithReferences::fromParts(
+                method,
+                caHash ? *caHash : nar.first,
+                {
                     .others = references,
                     // caller is not capable of creating a self-reference, because this is content-addressed without modulus
                     .self = false,
                 },
-            },
+            }),
             nar.first,
         };
         info.narSize = nar.second;

@@ -399,72 +436,36 @@ void BinaryCacheStore::queryPathInfoUncached(const StorePath & storePath,
 }

 StorePath BinaryCacheStore::addToStore(
-    std::string_view name,
-    const Path & srcPath,
-    FileIngestionMethod method,
-    HashAlgorithm hashAlgo,
-    PathFilter & filter,
-    RepairFlag repair,
-    const StorePathSet & references)
+    std::string_view name,
+    SourceAccessor & accessor,
+    const CanonPath & path,
+    ContentAddressMethod method,
+    HashAlgorithm hashAlgo,
+    const StorePathSet & references,
+    PathFilter & filter,
+    RepairFlag repair)
 {
     /* FIXME: Make BinaryCacheStore::addToStoreCommon support
        non-recursive+sha256 so we can just use the default
        implementation of this method in terms of addToStoreFromDump. */

-    HashSink sink { hashAlgo };
-    if (method == FileIngestionMethod::Recursive) {
-        dumpPath(srcPath, sink, filter);
-    } else {
-        readFile(srcPath, sink);
-    }
-    auto h = sink.finish().first;
+    auto h = hashPath(accessor, path, method.getFileIngestionMethod(), hashAlgo, filter).first;

     auto source = sinkToSource([&](Sink & sink) {
-        dumpPath(srcPath, sink, filter);
+        accessor.dumpPath(path, sink, filter);
     });
     return addToStoreCommon(*source, repair, CheckSigs, [&](HashResult nar) {
         ValidPathInfo info {
             *this,
             name,
-            FixedOutputInfo {
-                .method = method,
-                .hash = h,
-                .references = {
+            ContentAddressWithReferences::fromParts(
+                method,
+                h,
+                {
                     .others = references,
                     // caller is not capable of creating a self-reference, because this is content-addressed without modulus
                     .self = false,
                 },
-            },
+            }),
             nar.first,
         };
         info.narSize = nar.second;
         return info;
     })->path;
 }

-StorePath BinaryCacheStore::addTextToStore(
-    std::string_view name,
-    std::string_view s,
-    const StorePathSet & references,
-    RepairFlag repair)
-{
-    auto textHash = hashString(HashAlgorithm::SHA256, s);
-    auto path = makeTextPath(name, TextInfo { { textHash }, references });
-
-    if (!repair && isValidPath(path))
-        return path;
-
-    StringSink sink;
-    dumpString(s, sink);
-    StringSource source(sink.s);
-    return addToStoreCommon(source, repair, CheckSigs, [&](HashResult nar) {
-        ValidPathInfo info {
-            *this,
-            std::string { name },
-            TextInfo {
-                .hash = textHash,
-                .references = references,
-            },
-            nar.first,
-        };
-        info.narSize = nar.second;
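The StringSource branch in the new addToStoreFromDump exists so that a flat dump can be re-wrapped as a single-file NAR, letting one pass produce both the content-address hash and the NAR hash. A standalone sketch of just that conversion (illustrative, mirroring the dumpString call used above):

    // Serialise raw file contents as a NAR that contains one regular file.
    std::string flatToNar(const std::string & contents)
    {
        StringSink sink;
        dumpString(contents, sink);
        return std::move(sink.s);
    }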
@@ -123,22 +123,22 @@ public:
     void addToStore(const ValidPathInfo & info, Source & narSource,
         RepairFlag repair, CheckSigsFlag checkSigs) override;

-    StorePath addToStoreFromDump(Source & dump, std::string_view name,
-        FileIngestionMethod method, HashAlgorithm hashAlgo, RepairFlag repair, const StorePathSet & references) override;
+    StorePath addToStoreFromDump(
+        Source & dump,
+        std::string_view name,
+        ContentAddressMethod method,
+        HashAlgorithm hashAlgo,
+        const StorePathSet & references,
+        RepairFlag repair) override;

     StorePath addToStore(
         std::string_view name,
-        const Path & srcPath,
-        FileIngestionMethod method,
-        HashAlgorithm hashAlgo,
-        PathFilter & filter,
-        RepairFlag repair,
-        const StorePathSet & references) override;
-
-    StorePath addTextToStore(
-        std::string_view name,
-        std::string_view s,
-        const StorePathSet & references,
-        RepairFlag repair) override;
+        SourceAccessor & accessor,
+        const CanonPath & srcPath,
+        ContentAddressMethod method,
+        HashAlgorithm hashAlgo,
+        const StorePathSet & references,
+        PathFilter & filter,
+        RepairFlag repair) override;

     void registerDrvOutput(const Realisation & info) override;
@@ -20,6 +20,7 @@
 #include "child.hh"
 #include "unix-domain-socket.hh"
 #include "posix-fs-canonicalise.hh"
+#include "posix-source-accessor.hh"

 #include <regex>
 #include <queue>

@@ -1290,13 +1291,14 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual In
     { throw Error("queryPathFromHashPart"); }

     StorePath addToStore(
-        std::string_view name,
-        const Path & srcPath,
-        FileIngestionMethod method,
-        HashAlgorithm hashAlgo,
-        PathFilter & filter,
-        RepairFlag repair,
-        const StorePathSet & references) override
+        std::string_view name,
+        SourceAccessor & accessor,
+        const CanonPath & srcPath,
+        ContentAddressMethod method,
+        HashAlgorithm hashAlgo,
+        const StorePathSet & references,
+        PathFilter & filter,
+        RepairFlag repair) override
     { throw Error("addToStore"); }

     void addToStore(const ValidPathInfo & info, Source & narSource,

@@ -1306,26 +1308,15 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual In
         goal.addDependency(info.path);
     }

-    StorePath addTextToStore(
-        std::string_view name,
-        std::string_view s,
-        const StorePathSet & references,
-        RepairFlag repair = NoRepair) override
-    {
-        auto path = next->addTextToStore(name, s, references, repair);
-        goal.addDependency(path);
-        return path;
-    }
-
     StorePath addToStoreFromDump(
-        Source & dump,
-        std::string_view name,
-        FileIngestionMethod method,
-        HashAlgorithm hashAlgo,
-        RepairFlag repair,
-        const StorePathSet & references) override
+        Source & dump,
+        std::string_view name,
+        ContentAddressMethod method,
+        HashAlgorithm hashAlgo,
+        const StorePathSet & references,
+        RepairFlag repair) override
     {
-        auto path = next->addToStoreFromDump(dump, name, method, hashAlgo, repair, references);
+        auto path = next->addToStoreFromDump(dump, name, method, hashAlgo, references, repair);
         goal.addDependency(path);
         return path;
     }

@@ -2453,8 +2444,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
                 throw BuildError(
                     "output path %1% without valid stats info",
                     actualPath);
-            if (outputHash.method == ContentAddressMethod { FileIngestionMethod::Flat } ||
-                outputHash.method == ContentAddressMethod { TextIngestionMethod {} })
+            if (outputHash.method.getFileIngestionMethod() == FileIngestionMethod::Flat)
             {
                 /* The output path should be a regular file without execute permission. */
                 if (!S_ISREG(st->st_mode) || (st->st_mode & S_IXUSR) != 0)

@@ -2466,38 +2456,23 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
             rewriteOutput(outputRewrites);
             /* FIXME optimize and deduplicate with addToStore */
             std::string oldHashPart { scratchPath->hashPart() };
-            HashModuloSink caSink {outputHash.hashAlgo, oldHashPart };
-            std::visit(overloaded {
-                [&](const TextIngestionMethod &) {
-                    readFile(actualPath, caSink);
-                },
-                [&](const FileIngestionMethod & m2) {
-                    switch (m2) {
-                    case FileIngestionMethod::Recursive:
-                        dumpPath(actualPath, caSink);
-                        break;
-                    case FileIngestionMethod::Flat:
-                        readFile(actualPath, caSink);
-                        break;
-                    }
-                },
-            }, outputHash.method.raw);
-            auto got = caSink.finish().first;
+            auto got = ({
+                HashModuloSink caSink { outputHash.hashAlgo, oldHashPart };
+                PosixSourceAccessor accessor;
+                dumpPath(
+                    accessor, CanonPath { actualPath },
+                    caSink,
+                    outputHash.method.getFileIngestionMethod());
+                caSink.finish().first;
+            });

-            auto optCA = ContentAddressWithReferences::fromPartsOpt(
-                outputHash.method,
-                std::move(got),
-                rewriteRefs());
-            if (!optCA) {
-                // TODO track distinct failure modes separately (at the time of
-                // writing there is just one but `nullopt` is unclear) so this
-                // message can't get out of sync.
-                throw BuildError("output path '%s' has illegal content address, probably a spurious self-reference with text hashing");
-            }
             ValidPathInfo newInfo0 {
                 worker.store,
                 outputPathName(drv->name, outputName),
-                std::move(*optCA),
+                ContentAddressWithReferences::fromParts(
+                    outputHash.method,
+                    std::move(got),
+                    rewriteRefs()),
                 Hash::dummy,
             };
             if (*scratchPath != newInfo0.path) {

@@ -2511,9 +2486,14 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
                     std::string(newInfo0.path.hashPart())}});
             }

-            HashResult narHashAndSize = hashPath(HashAlgorithm::SHA256, actualPath);
-            newInfo0.narHash = narHashAndSize.first;
-            newInfo0.narSize = narHashAndSize.second;
+            {
+                PosixSourceAccessor accessor;
+                HashResult narHashAndSize = hashPath(
+                    accessor, CanonPath { actualPath },
+                    FileIngestionMethod::Recursive, HashAlgorithm::SHA256);
+                newInfo0.narHash = narHashAndSize.first;
+                newInfo0.narSize = narHashAndSize.second;
+            }

             assert(newInfo0.ca);
             return newInfo0;

@@ -2531,7 +2511,10 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
                 std::string { scratchPath->hashPart() },
                 std::string { requiredFinalPath.hashPart() });
             rewriteOutput(outputRewrites);
-            auto narHashAndSize = hashPath(HashAlgorithm::SHA256, actualPath);
+            PosixSourceAccessor accessor;
+            HashResult narHashAndSize = hashPath(
+                accessor, CanonPath { actualPath },
+                FileIngestionMethod::Recursive, HashAlgorithm::SHA256);
             ValidPathInfo newInfo0 { requiredFinalPath, narHashAndSize.first };
             newInfo0.narSize = narHashAndSize.second;
             auto refs = rewriteRefs();
@@ -519,7 +519,9 @@ bool Worker::pathContentsGood(const StorePath & path)
     if (!pathExists(store.printStorePath(path)))
         res = false;
     else {
-        HashResult current = hashPath(info->narHash.algo, store.printStorePath(path));
+        HashResult current = hashPath(
+            *store.getFSAccessor(), CanonPath { store.printStorePath(path) },
+            FileIngestionMethod::Recursive, info->narHash.algo);
         Hash nullHash(HashAlgorithm::SHA256);
         res = info->narHash == nullHash || info->narHash == current.first;
     }
@@ -50,6 +50,18 @@ std::string ContentAddressMethod::render(HashAlgorithm ha) const
     }, raw);
 }

+FileIngestionMethod ContentAddressMethod::getFileIngestionMethod() const
+{
+    return std::visit(overloaded {
+        [&](const TextIngestionMethod & th) {
+            return FileIngestionMethod::Flat;
+        },
+        [&](const FileIngestionMethod & fim) {
+            return fim;
+        }
+    }, raw);
+}
+
 std::string ContentAddress::render() const
 {
     return std::visit(overloaded {

@@ -79,7 +91,7 @@ static std::pair<ContentAddressMethod, HashAlgorithm> parseContentAddressMethodP
         prefix = *optPrefix;
     }

-    auto parseHashType_ = [&](){
+    auto parseHashAlgorithm_ = [&](){
         auto hashTypeRaw = splitPrefixTo(rest, ':');
         if (!hashTypeRaw)
             throw UsageError("content address hash must be in form '<algo>:<hash>', but found: %s", wholeInput);

@@ -90,7 +102,7 @@ static std::pair<ContentAddressMethod, HashAlgorithm> parseContentAddressMethodP
     // Switch on prefix
     if (prefix == "text") {
         // No parsing of the ingestion method, "text" only support flat.
-        HashAlgorithm hashAlgo = parseHashType_();
+        HashAlgorithm hashAlgo = parseHashAlgorithm_();
         return {
             TextIngestionMethod {},
             std::move(hashAlgo),

@@ -100,7 +112,7 @@ static std::pair<ContentAddressMethod, HashAlgorithm> parseContentAddressMethodP
         auto method = FileIngestionMethod::Flat;
         if (splitPrefix(rest, "r:"))
             method = FileIngestionMethod::Recursive;
-        HashAlgorithm hashAlgo = parseHashType_();
+        HashAlgorithm hashAlgo = parseHashAlgorithm_();
         return {
             std::move(method),
             std::move(hashAlgo),

@@ -176,13 +188,13 @@ ContentAddressWithReferences ContentAddressWithReferences::withoutRefs(const Con
     }, ca.method.raw);
 }

-std::optional<ContentAddressWithReferences> ContentAddressWithReferences::fromPartsOpt(
-    ContentAddressMethod method, Hash hash, StoreReferences refs) noexcept
+ContentAddressWithReferences ContentAddressWithReferences::fromParts(
+    ContentAddressMethod method, Hash hash, StoreReferences refs)
 {
     return std::visit(overloaded {
-        [&](TextIngestionMethod _) -> std::optional<ContentAddressWithReferences> {
+        [&](TextIngestionMethod _) -> ContentAddressWithReferences {
             if (refs.self)
-                return std::nullopt;
+                throw Error("self-reference not allowed with text hashing");
             return ContentAddressWithReferences {
                 TextInfo {
                     .hash = std::move(hash),

@@ -190,7 +202,7 @@ std::optional<ContentAddressWithReferences> ContentAddressWithReferences::fromPa
             }
             };
         },
-        [&](FileIngestionMethod m2) -> std::optional<ContentAddressWithReferences> {
+        [&](FileIngestionMethod m2) -> ContentAddressWithReferences {
             return ContentAddressWithReferences {
                 FixedOutputInfo {
                     .method = m2,
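getFileIngestionMethod() collapses the three content-addressing methods onto the two file-level serialisations; text hashing reads the object flat. Illustrative checks of that mapping (not from the commit):

    assert(ContentAddressMethod { TextIngestionMethod {} }.getFileIngestionMethod()
        == FileIngestionMethod::Flat);
    assert(ContentAddressMethod { FileIngestionMethod::Recursive }.getFileIngestionMethod()
        == FileIngestionMethod::Recursive);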
@@ -4,6 +4,7 @@
 #include <variant>
 #include "hash.hh"
 #include "path.hh"
+#include "file-content-address.hh"
 #include "comparator.hh"
 #include "variant-wrapper.hh"

@@ -31,22 +32,6 @@ namespace nix {
  */
 struct TextIngestionMethod : std::monostate { };

-/**
- * An enumeration of the main ways we can serialize file system
- * objects.
- */
-enum struct FileIngestionMethod : uint8_t {
-    /**
-     * Flat-file hashing. Directly ingest the contents of a single file
-     */
-    Flat = 0,
-    /**
-     * Recursive (or NAR) hashing. Serializes the file-system object in Nix
-     * Archive format and ingest that
-     */
-    Recursive = 1
-};
-
 /**
  * Compute the prefix to the hash algorithm which indicates how the
  * files were ingested.

@@ -54,7 +39,7 @@ enum struct FileIngestionMethod : uint8_t {
 std::string makeFileIngestionPrefix(FileIngestionMethod m);

 /**
- * An enumeration of all the ways we can serialize file system objects.
+ * An enumeration of all the ways we can content-address store objects.
  *
  * Just the type of a content address. Combine with the hash itself, and
  * we have a `ContentAddress` as defined below. Combine that, in turn,

@@ -102,7 +87,15 @@ struct ContentAddressMethod
      *
      * The rough inverse of `parse()`.
      */
-    std::string render(HashAlgorithm ha) const;
+    std::string render(HashAlgorithm ht) const;
+
+    /**
+     * Get the underlying way to content-address file system objects.
+     *
+     * Different ways of hashing store objects may use the same method
+     * for hashing file systeme objects.
+     */
+    FileIngestionMethod getFileIngestionMethod() const;
 };

@@ -116,11 +109,11 @@ struct ContentAddressMethod
  * serialisation methods (flat file vs NAR). Thus, ‘ca’ has one of the
  * following forms:
  *
- * - ‘text:sha256:<sha256 hash of file contents>’: For paths
- *   computed by Store::makeTextPath() / Store::addTextToStore().
+ * - `TextIngestionMethod`:
+ *   ‘text:sha256:<sha256 hash of file contents>’
  *
- * - ‘fixed:<r?>:<ht>:<h>’: For paths computed by
- *   Store::makeFixedOutputPath() / Store::addToStore().
+ * - `FixedIngestionMethod`:
+ *   ‘fixed:<r?>:<hash type>:<hash of file contents>’
  */
 struct ContentAddress
 {

@@ -266,11 +259,12 @@ struct ContentAddressWithReferences
      *
      * @param refs References to other store objects or oneself.
      *
-     * Do note that not all combinations are supported; `nullopt` is
-     * returns for invalid combinations.
+     * @note note that all combinations are supported. This is a
+     * *partial function* and exceptions will be thrown for invalid
+     * combinations.
      */
-    static std::optional<ContentAddressWithReferences> fromPartsOpt(
-        ContentAddressMethod method, Hash hash, StoreReferences refs) noexcept;
+    static ContentAddressWithReferences fromParts(
+        ContentAddressMethod method, Hash hash, StoreReferences refs);

     ContentAddressMethod getMethod() const;
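fromParts() replaces fromPartsOpt(): the one invalid combination (a self-reference together with text hashing) now throws instead of yielding nullopt. A caller that still wants the optional behaviour could wrap it, roughly as follows (illustrative sketch based on the declarations above):

    std::optional<ContentAddressWithReferences> tryFromParts(
        ContentAddressMethod method, Hash hash, StoreReferences refs)
    {
        try {
            return ContentAddressWithReferences::fromParts(
                method, std::move(hash), std::move(refs));
        } catch (Error &) {
            return std::nullopt;
        }
    }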
@@ -403,22 +403,9 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
         auto [contentAddressMethod, hashAlgo_] = ContentAddressMethod::parse(camStr);
         auto hashAlgo = hashAlgo_; // work around clang bug
         FramedSource source(from);
-        // TODO this is essentially RemoteStore::addCAToStore. Move it up to Store.
-        return std::visit(overloaded {
-            [&](const TextIngestionMethod &) {
-                if (hashAlgo != HashAlgorithm::SHA256)
-                    throw UnimplementedError("When adding text-hashed data called '%s', only SHA-256 is supported but '%s' was given",
-                        name, printHashAlgo(hashAlgo));
-                // We could stream this by changing Store
-                std::string contents = source.drain();
-                auto path = store->addTextToStore(name, contents, refs, repair);
-                return store->queryPathInfo(path);
-            },
-            [&](const FileIngestionMethod & fim) {
-                auto path = store->addToStoreFromDump(source, name, fim, hashAlgo, repair, refs);
-                return store->queryPathInfo(path);
-            },
-        }, contentAddressMethod.raw);
+        // TODO these two steps are essentially RemoteStore::addCAToStore. Move it up to Store.
+        auto path = store->addToStoreFromDump(source, name, contentAddressMethod, hashAlgo, refs, repair);
+        return store->queryPathInfo(path);
     }();
     logger->stopWork();

@@ -496,7 +483,10 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
         std::string s = readString(from);
         auto refs = WorkerProto::Serialise<StorePathSet>::read(*store, rconn);
         logger->startWork();
-        auto path = store->addTextToStore(suffix, s, refs, NoRepair);
+        auto path = ({
+            StringSource source { s };
+            store->addToStoreFromDump(source, suffix, TextIngestionMethod {}, HashAlgorithm::SHA256, refs, NoRepair);
+        });
         logger->stopWork();
         to << store->printStorePath(path);
         break;
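With addTextToStore gone, text-hashed data goes through the generic addToStoreFromDump path: wrap the string in a StringSource and pass TextIngestionMethod, exactly as both daemon call sites above now do. Condensed into an illustrative helper (not part of the commit):

    StorePath addText(
        Store & store, std::string_view name, std::string_view s,
        const StorePathSet & references)
    {
        StringSource source { s };
        return store.addToStoreFromDump(
            source, name, TextIngestionMethod {}, HashAlgorithm::SHA256,
            references, NoRepair);
    }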
@@ -143,8 +143,14 @@ StorePath writeDerivation(Store & store,
     auto suffix = std::string(drv.name) + drvExtension;
     auto contents = drv.unparse(store, false);
     return readOnly || settings.readOnlyMode
-        ? store.computeStorePathForText(suffix, contents, references)
-        : store.addTextToStore(suffix, contents, references, repair);
+        ? store.makeFixedOutputPathFromCA(suffix, TextInfo {
+            .hash = hashString(HashAlgorithm::SHA256, contents),
+            .references = std::move(references),
+        })
+        : ({
+            StringSource s { contents };
+            store.addToStoreFromDump(s, suffix, TextIngestionMethod {}, HashAlgorithm::SHA256, references, repair);
+        });
 }
|
|||
RepairFlag repair, CheckSigsFlag checkSigs) override
|
||||
{ unsupported("addToStore"); }
|
||||
|
||||
StorePath addTextToStore(
|
||||
std::string_view name,
|
||||
std::string_view s,
|
||||
const StorePathSet & references,
|
||||
RepairFlag repair) override
|
||||
{ unsupported("addTextToStore"); }
|
||||
|
||||
void narFromPath(const StorePath & path, Sink & sink) override
|
||||
{ unsupported("narFromPath"); }
|
||||
|
||||
|
|
|
@ -59,21 +59,15 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
|
|||
{ unsupported("queryPathFromHashPart"); }
|
||||
|
||||
StorePath addToStore(
|
||||
std::string_view name,
|
||||
const Path & srcPath,
|
||||
FileIngestionMethod method,
|
||||
HashAlgorithm hashAlgo,
|
||||
PathFilter & filter,
|
||||
RepairFlag repair,
|
||||
const StorePathSet & references) override
|
||||
{ unsupported("addToStore"); }
|
||||
|
||||
StorePath addTextToStore(
|
||||
std::string_view name,
|
||||
std::string_view s,
|
||||
SourceAccessor & accessor,
|
||||
const CanonPath & srcPath,
|
||||
ContentAddressMethod method,
|
||||
HashAlgorithm hashAlgo,
|
||||
const StorePathSet & references,
|
||||
PathFilter & filter,
|
||||
RepairFlag repair) override
|
||||
{ unsupported("addTextToStore"); }
|
||||
{ unsupported("addToStore"); }
|
||||
|
||||
private:
|
||||
|
||||
|
|
|
@ -13,6 +13,7 @@
|
|||
#include "compression.hh"
|
||||
#include "signals.hh"
|
||||
#include "posix-fs-canonicalise.hh"
|
||||
#include "posix-source-accessor.hh"
|
||||
|
||||
#include <iostream>
|
||||
#include <algorithm>
|
||||
|
@ -1088,11 +1089,22 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
|
|||
|
||||
if (info.ca) {
|
||||
auto & specified = *info.ca;
|
||||
auto actualHash = hashCAPath(
|
||||
specified.method,
|
||||
specified.hash.algo,
|
||||
info.path
|
||||
);
|
||||
auto actualHash = ({
|
||||
HashModuloSink caSink {
|
||||
specified.hash.algo,
|
||||
std::string { info.path.hashPart() },
|
||||
};
|
||||
PosixSourceAccessor accessor;
|
||||
dumpPath(
|
||||
*getFSAccessor(false),
|
||||
CanonPath { printStorePath(info.path) },
|
||||
caSink,
|
||||
specified.method.getFileIngestionMethod());
|
||||
ContentAddress {
|
||||
.method = specified.method,
|
||||
.hash = caSink.finish().first,
|
||||
};
|
||||
});
|
||||
if (specified.hash != actualHash.hash) {
|
||||
throw Error("ca hash mismatch importing path '%s';\n specified: %s\n got: %s",
|
||||
printStorePath(info.path),
|
||||
|
@ -1115,8 +1127,13 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
|
|||
}
|
||||
|
||||
|
||||
StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name,
|
||||
FileIngestionMethod method, HashAlgorithm hashAlgo, RepairFlag repair, const StorePathSet & references)
|
||||
StorePath LocalStore::addToStoreFromDump(
|
||||
Source & source0,
|
||||
std::string_view name,
|
||||
ContentAddressMethod method,
|
||||
HashAlgorithm hashAlgo,
|
||||
const StorePathSet & references,
|
||||
RepairFlag repair)
|
||||
{
|
||||
/* For computing the store path. */
|
||||
auto hashSink = std::make_unique<HashSink>(hashAlgo);
|
||||
|
@ -1166,25 +1183,21 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name
|
|||
delTempDir = std::make_unique<AutoDelete>(tempDir);
|
||||
tempPath = tempDir + "/x";
|
||||
|
||||
if (method == FileIngestionMethod::Recursive)
|
||||
restorePath(tempPath, bothSource);
|
||||
else
|
||||
writeFile(tempPath, bothSource);
|
||||
restorePath(tempPath, bothSource, method.getFileIngestionMethod());
|
||||
|
||||
dump.clear();
|
||||
}
|
||||
|
||||
auto [hash, size] = hashSink->finish();
|
||||
|
||||
ContentAddressWithReferences desc = FixedOutputInfo {
|
||||
.method = method,
|
||||
.hash = hash,
|
||||
.references = {
|
||||
auto desc = ContentAddressWithReferences::fromParts(
|
||||
method,
|
||||
hash,
|
||||
{
|
||||
.others = references,
|
||||
// caller is not capable of creating a self-reference, because this is content-addressed without modulus
|
||||
.self = false,
|
||||
},
|
||||
};
|
||||
});
|
||||
|
||||
auto dstPath = makeFixedOutputPathFromCA(name, desc);
|
||||
|
||||
|
@ -1207,11 +1220,8 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name
|
|||
|
||||
if (inMemory) {
|
||||
StringSource dumpSource { dump };
|
||||
/* Restore from the NAR in memory. */
|
||||
if (method == FileIngestionMethod::Recursive)
|
||||
restorePath(realPath, dumpSource);
|
||||
else
|
||||
writeFile(realPath, dumpSource);
|
||||
/* Restore from the buffer in memory. */
|
||||
restorePath(realPath, dumpSource, method.getFileIngestionMethod());
|
||||
} else {
|
||||
/* Move the temporary path we restored above. */
|
||||
moveFile(tempPath, realPath);
|
||||
|
@ -1247,58 +1257,6 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name
|
|||
}
|
||||
|
||||
|
||||
StorePath LocalStore::addTextToStore(
|
||||
std::string_view name,
|
||||
std::string_view s,
|
||||
const StorePathSet & references, RepairFlag repair)
|
||||
{
|
||||
auto hash = hashString(HashAlgorithm::SHA256, s);
|
||||
auto dstPath = makeTextPath(name, TextInfo {
|
||||
.hash = hash,
|
||||
.references = references,
|
||||
});
|
||||
|
||||
addTempRoot(dstPath);
|
||||
|
||||
if (repair || !isValidPath(dstPath)) {
|
||||
|
||||
auto realPath = Store::toRealPath(dstPath);
|
||||
|
||||
PathLocks outputLock({realPath});
|
||||
|
||||
if (repair || !isValidPath(dstPath)) {
|
||||
|
||||
deletePath(realPath);
|
||||
|
||||
autoGC();
|
||||
|
||||
writeFile(realPath, s);
|
||||
|
||||
canonicalisePathMetaData(realPath, {});
|
||||
|
||||
StringSink sink;
|
||||
dumpString(s, sink);
|
||||
auto narHash = hashString(HashAlgorithm::SHA256, sink.s);
|
||||
|
||||
optimisePath(realPath, repair);
|
||||
|
||||
ValidPathInfo info { dstPath, narHash };
|
||||
info.narSize = sink.s.size();
|
||||
info.references = references;
|
||||
info.ca = {
|
||||
.method = TextIngestionMethod {},
|
||||
.hash = hash,
|
||||
};
|
||||
registerValidPath(info);
|
||||
}
|
||||
|
||||
outputLock.setDeletion(true);
|
||||
}
|
||||
|
||||
return dstPath;
|
||||
}
|
||||
|
||||
|
||||
/* Create a temporary directory in the store that won't be
|
||||
garbage-collected until the returned FD is closed. */
|
||||
std::pair<Path, AutoCloseFD> LocalStore::createTempDirInStore()
|
||||
|
@ -1389,7 +1347,10 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
|
|||
for (auto & link : readDirectory(linksDir)) {
|
||||
printMsg(lvlTalkative, "checking contents of '%s'", link.name);
|
||||
Path linkPath = linksDir + "/" + link.name;
|
||||
std::string hash = hashPath(HashAlgorithm::SHA256, linkPath).first.to_string(HashFormat::Nix32, false);
|
||||
PosixSourceAccessor accessor;
|
||||
std::string hash = hashPath(
|
||||
accessor, CanonPath { linkPath },
|
||||
FileIngestionMethod::Recursive, HashAlgorithm::SHA256).first.to_string(HashFormat::Nix32, false);
|
||||
if (hash != link.name) {
|
||||
printError("link '%s' was modified! expected hash '%s', got '%s'",
|
||||
linkPath, link.name, hash);
|
||||
|
@ -1696,42 +1657,6 @@ void LocalStore::queryRealisationUncached(const DrvOutput & id,
|
|||
}
|
||||
}
|
||||
|
||||
ContentAddress LocalStore::hashCAPath(
|
||||
const ContentAddressMethod & method, const HashAlgorithm & hashAlgo,
|
||||
const StorePath & path)
|
||||
{
|
||||
return hashCAPath(method, hashAlgo, Store::toRealPath(path), path.hashPart());
|
||||
}
|
||||
|
||||
ContentAddress LocalStore::hashCAPath(
|
||||
const ContentAddressMethod & method,
|
||||
const HashAlgorithm & hashAlgo,
|
||||
const Path & path,
|
||||
const std::string_view pathHash
|
||||
)
|
||||
{
|
||||
HashModuloSink caSink ( hashAlgo, std::string(pathHash) );
|
||||
std::visit(overloaded {
|
||||
[&](const TextIngestionMethod &) {
|
||||
readFile(path, caSink);
|
||||
},
|
||||
[&](const FileIngestionMethod & m2) {
|
||||
switch (m2) {
|
||||
case FileIngestionMethod::Recursive:
|
||||
dumpPath(path, caSink);
|
||||
break;
|
||||
case FileIngestionMethod::Flat:
|
||||
readFile(path, caSink);
|
||||
break;
|
||||
}
|
||||
},
|
||||
}, method.raw);
|
||||
return ContentAddress {
|
||||
.method = method,
|
||||
.hash = caSink.finish().first,
|
||||
};
|
||||
}
|
||||
|
||||
void LocalStore::addBuildLog(const StorePath & drvPath, std::string_view log)
|
||||
{
|
||||
assert(drvPath.isDerivation());
|
||||
|
|
|
@@ -177,12 +177,11 @@ public:
     void addToStore(const ValidPathInfo & info, Source & source,
         RepairFlag repair, CheckSigsFlag checkSigs) override;

-    StorePath addToStoreFromDump(Source & dump, std::string_view name,
-        FileIngestionMethod method, HashAlgorithm hashAlgo, RepairFlag repair, const StorePathSet & references) override;
-
-    StorePath addTextToStore(
+    StorePath addToStoreFromDump(
+        Source & dump,
         std::string_view name,
-        std::string_view s,
+        ContentAddressMethod method,
+        HashAlgorithm hashAlgo,
         const StorePathSet & references,
         RepairFlag repair) override;

@@ -350,19 +349,6 @@ private:
     void signPathInfo(ValidPathInfo & info);
     void signRealisation(Realisation &);

-    // XXX: Make a generic `Store` method
-    ContentAddress hashCAPath(
-        const ContentAddressMethod & method,
-        const HashAlgorithm & hashAlgo,
-        const StorePath & path);
-
-    ContentAddress hashCAPath(
-        const ContentAddressMethod & method,
-        const HashAlgorithm & hashAlgo,
-        const Path & path,
-        const std::string_view pathHash
-    );
-
     void addBuildLog(const StorePath & drvPath, std::string_view log) override;

     friend struct LocalDerivationGoal;
@@ -2,6 +2,7 @@
 #include "globals.hh"
 #include "signals.hh"
 #include "posix-fs-canonicalise.hh"
+#include "posix-source-accessor.hh"

 #include <cstdlib>
 #include <cstring>

@@ -146,7 +147,12 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
        Also note that if `path' is a symlink, then we're hashing the
        contents of the symlink (i.e. the result of readlink()), not
        the contents of the target (which may not even exist). */
-    Hash hash = hashPath(HashAlgorithm::SHA256, path).first;
+    Hash hash = ({
+        PosixSourceAccessor accessor;
+        hashPath(
+            accessor, CanonPath { path },
+            FileIngestionMethod::Recursive, HashAlgorithm::SHA256).first;
+    });
     debug("'%1%' has hash '%2%'", path, hash.to_string(HashFormat::Nix32, true));

     /* Check if this is a known hash. */

@@ -156,7 +162,12 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
     if (pathExists(linkPath)) {
         auto stLink = lstat(linkPath);
         if (st.st_size != stLink.st_size
-            || (repair && hash != hashPath(HashAlgorithm::SHA256, linkPath).first))
+            || (repair && hash != ({
+                PosixSourceAccessor accessor;
+                hashPath(
+                    accessor, CanonPath { linkPath },
+                    FileIngestionMethod::Recursive, HashAlgorithm::SHA256).first;
+            })))
         {
             // XXX: Consider overwriting linkPath with our valid version.
             warn("removing corrupted link '%s'", linkPath);
@@ -502,8 +502,13 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(
 }


-StorePath RemoteStore::addToStoreFromDump(Source & dump, std::string_view name,
-    FileIngestionMethod method, HashAlgorithm hashAlgo, RepairFlag repair, const StorePathSet & references)
+StorePath RemoteStore::addToStoreFromDump(
+    Source & dump,
+    std::string_view name,
+    ContentAddressMethod method,
+    HashAlgorithm hashAlgo,
+    const StorePathSet & references,
+    RepairFlag repair)
 {
     return addCAToStore(dump, name, method, hashAlgo, references, repair)->path;
 }

@@ -603,16 +608,6 @@ void RemoteStore::addMultipleToStore(
 }


-StorePath RemoteStore::addTextToStore(
-    std::string_view name,
-    std::string_view s,
-    const StorePathSet & references,
-    RepairFlag repair)
-{
-    StringSource source(s);
-    return addCAToStore(source, name, TextIngestionMethod {}, HashAlgorithm::SHA256, references, repair)->path;
-}
-
 void RemoteStore::registerDrvOutput(const Realisation & info)
 {
     auto conn(getConnection());
@@ -82,10 +82,15 @@ public:
         RepairFlag repair);

     /**
-     * Add a content-addressable store path. Does not support references. `dump` will be drained.
+     * Add a content-addressable store path. `dump` will be drained.
      */
-    StorePath addToStoreFromDump(Source & dump, std::string_view name,
-        FileIngestionMethod method = FileIngestionMethod::Recursive, HashAlgorithm hashAlgo = HashAlgorithm::SHA256, RepairFlag repair = NoRepair, const StorePathSet & references = StorePathSet()) override;
+    StorePath addToStoreFromDump(
+        Source & dump,
+        std::string_view name,
+        ContentAddressMethod method = FileIngestionMethod::Recursive,
+        HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
+        const StorePathSet & references = StorePathSet(),
+        RepairFlag repair = NoRepair) override;

     void addToStore(const ValidPathInfo & info, Source & nar,
         RepairFlag repair, CheckSigsFlag checkSigs) override;

@@ -101,12 +106,6 @@ public:
         RepairFlag repair,
         CheckSigsFlag checkSigs) override;

-    StorePath addTextToStore(
-        std::string_view name,
-        std::string_view s,
-        const StorePathSet & references,
-        RepairFlag repair) override;
-
     void registerDrvOutput(const Realisation & info) override;

     void queryRealisationUncached(const DrvOutput &,
@ -205,25 +205,19 @@ StorePath StoreDirConfig::makeFixedOutputPath(std::string_view name, const Fixed
|
|||
}
|
||||
|
||||
|
||||
StorePath StoreDirConfig::makeTextPath(std::string_view name, const TextInfo & info) const
|
||||
{
|
||||
assert(info.hash.algo == HashAlgorithm::SHA256);
|
||||
return makeStorePath(
|
||||
makeType(*this, "text", StoreReferences {
|
||||
.others = info.references,
|
||||
.self = false,
|
||||
}),
|
||||
info.hash,
|
||||
name);
|
||||
}
|
||||
|
||||
|
||||
StorePath StoreDirConfig::makeFixedOutputPathFromCA(std::string_view name, const ContentAddressWithReferences & ca) const
|
||||
{
|
||||
// New template
|
||||
return std::visit(overloaded {
|
||||
[&](const TextInfo & ti) {
|
||||
return makeTextPath(name, ti);
|
||||
assert(ti.hash.algo == HashAlgorithm::SHA256);
|
||||
return makeStorePath(
|
||||
makeType(*this, "text", StoreReferences {
|
||||
.others = ti.references,
|
||||
.self = false,
|
||||
}),
|
||||
ti.hash,
|
||||
name);
|
||||
},
|
||||
[&](const FixedOutputInfo & foi) {
|
||||
return makeFixedOutputPath(name, foi);
|
||||
|
@@ -232,54 +226,45 @@ StorePath StoreDirConfig::makeFixedOutputPathFromCA(std::string_view name, const
}

std::pair<StorePath, Hash> StoreDirConfig::computeStorePathFromDump(
    Source & dump,
    std::string_view name,
    FileIngestionMethod method,
    HashAlgorithm hashAlgo,
    const StorePathSet & references) const
{
    HashSink sink(hashAlgo);
    dump.drainInto(sink);
    auto h = sink.finish().first;
    FixedOutputInfo caInfo {
        .method = method,
        .hash = h,
        .references = {},
    };
    return std::make_pair(makeFixedOutputPath(name, caInfo), h);
}

StorePath StoreDirConfig::computeStorePathForText(
std::pair<StorePath, Hash> StoreDirConfig::computeStorePath(
    std::string_view name,
    std::string_view s,
    const StorePathSet & references) const
    SourceAccessor & accessor,
    const CanonPath & path,
    ContentAddressMethod method,
    HashAlgorithm hashAlgo,
    const StorePathSet & references,
    PathFilter & filter) const
{
    return makeTextPath(name, TextInfo {
        .hash = hashString(HashAlgorithm::SHA256, s),
        .references = references,
    });
    auto h = hashPath(accessor, path, method.getFileIngestionMethod(), hashAlgo, filter).first;
    return {
        makeFixedOutputPathFromCA(
            name,
            ContentAddressWithReferences::fromParts(
                method,
                h,
                {
                    .others = references,
                    .self = false,
                })),
        h,
    };
}

StorePath Store::addToStore(
    std::string_view name,
    const Path & _srcPath,
    FileIngestionMethod method,
    HashAlgorithm hashAlgo,
    PathFilter & filter,
    RepairFlag repair,
    const StorePathSet & references)
    std::string_view name,
    SourceAccessor & accessor,
    const CanonPath & path,
    ContentAddressMethod method,
    HashAlgorithm hashAlgo,
    const StorePathSet & references,
    PathFilter & filter,
    RepairFlag repair)
{
    Path srcPath(absPath(_srcPath));
    auto source = sinkToSource([&](Sink & sink) {
        if (method == FileIngestionMethod::Recursive)
            dumpPath(srcPath, sink, filter);
        else
            readFile(srcPath, sink);
        dumpPath(accessor, path, sink, method.getFileIngestionMethod(), filter);
    });
    return addToStoreFromDump(*source, name, method, hashAlgo, repair, references);
    return addToStoreFromDump(*source, name, method, hashAlgo, references, repair);
}

void Store::addMultipleToStore(
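For orientation, here is a minimal usage sketch of the new accessor-based Store::addToStore(); it is not part of this commit. The signature and the PosixSourceAccessor / CanonPath::fromCwd usage mirror the nix-store hunks later in this diff, while openStore(), the example name and the literal path are assumptions.

#include <iostream>

#include "store-api.hh"
#include "posix-source-accessor.hh"

using namespace nix;

// Illustrative only: ingest ./foo with the new signature (hypothetical example path).
void exampleAdd()
{
    auto store = openStore();
    PosixSourceAccessor accessor;            // reads the real file system
    auto storePath = store->addToStore(
        "foo",                                // name part of the store path
        accessor,
        CanonPath::fromCwd("./foo"),          // canonicalised relative to the cwd
        FileIngestionMethod::Recursive,       // NAR serialization, the default
        HashAlgorithm::SHA256);
    std::cout << store->printStorePath(storePath) << "\n";
}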
@@ -404,9 +389,13 @@ digraph graphname {
   fileSink -> caHashSink
}
*/
ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
    FileIngestionMethod method, HashAlgorithm hashAlgo,
    std::optional<Hash> expectedCAHash)
ValidPathInfo Store::addToStoreSlow(
    std::string_view name,
    SourceAccessor & accessor,
    const CanonPath & srcPath,
    ContentAddressMethod method, HashAlgorithm hashAlgo,
    const StorePathSet & references,
    std::optional<Hash> expectedCAHash)
{
    HashSink narHashSink { HashAlgorithm::SHA256 };
    HashSink caHashSink { hashAlgo };

@@ -425,7 +414,7 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
       srcPath. The fact that we use scratchpadSink as a temporary buffer here
       is an implementation detail. */
    auto fileSource = sinkToSource([&](Sink & scratchpadSink) {
        dumpPath(srcPath, scratchpadSink);
        accessor.dumpPath(srcPath, scratchpadSink);
    });

    /* tapped provides the same data as fileSource, but we also write all the

@@ -433,9 +422,11 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
    TeeSource tapped { *fileSource, narSink };

    NullParseSink blank;
    auto & parseSink = method == FileIngestionMethod::Flat
    auto & parseSink = method.getFileIngestionMethod() == FileIngestionMethod::Flat
        ? (ParseSink &) fileSink
        : (ParseSink &) blank;
        : method.getFileIngestionMethod() == FileIngestionMethod::Recursive
        ? (ParseSink &) blank
        : (abort(), (ParseSink &)*(ParseSink *)nullptr); // handled both cases

    /* The information that flows from tapped (besides being replicated in
       narSink), is now put in parseSink. */

@@ -452,21 +443,24 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
    if (expectedCAHash && expectedCAHash != hash)
        throw Error("hash mismatch for '%s'", srcPath);

    ValidPathInfo info {
        *this,
        name,
        FixedOutputInfo {
            .method = method,
            .hash = hash,
            .references = {},
        },
        ContentAddressWithReferences::fromParts(
            method,
            hash,
            {
                .others = references,
                .self = false,
            }),
        narHash,
    };
    info.narSize = narSize;

    if (!isValidPath(info.path)) {
        auto source = sinkToSource([&](Sink & scratchpadSink) {
            dumpPath(srcPath, scratchpadSink);
            accessor.dumpPath(srcPath, scratchpadSink);
        });
        addToStore(info, *source);
    }
@@ -427,22 +427,28 @@ public:
     * libutil/archive.hh).
     */
    virtual StorePath addToStore(
        std::string_view name,
        const Path & srcPath,
        FileIngestionMethod method = FileIngestionMethod::Recursive,
        HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
        PathFilter & filter = defaultPathFilter,
        RepairFlag repair = NoRepair,
        const StorePathSet & references = StorePathSet());
        std::string_view name,
        SourceAccessor & accessor,
        const CanonPath & path,
        ContentAddressMethod method = FileIngestionMethod::Recursive,
        HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
        const StorePathSet & references = StorePathSet(),
        PathFilter & filter = defaultPathFilter,
        RepairFlag repair = NoRepair);

    /**
     * Copy the contents of a path to the store and register the
     * validity the resulting path, using a constant amount of
     * memory.
     */
    ValidPathInfo addToStoreSlow(std::string_view name, const Path & srcPath,
        FileIngestionMethod method = FileIngestionMethod::Recursive, HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
        std::optional<Hash> expectedCAHash = {});
    ValidPathInfo addToStoreSlow(
        std::string_view name,
        SourceAccessor & accessor,
        const CanonPath & path,
        ContentAddressMethod method = FileIngestionMethod::Recursive,
        HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
        const StorePathSet & references = StorePathSet(),
        std::optional<Hash> expectedCAHash = {});

    /**
     * Like addToStore(), but the contents of the path are contained

@@ -453,20 +459,14 @@ public:
     *
     * \todo remove?
     */
    virtual StorePath addToStoreFromDump(Source & dump, std::string_view name,
        FileIngestionMethod method = FileIngestionMethod::Recursive, HashAlgorithm hashAlgo = HashAlgorithm::SHA256, RepairFlag repair = NoRepair,
        const StorePathSet & references = StorePathSet())
    { unsupported("addToStoreFromDump"); }

    /**
     * Like addToStore, but the contents written to the output path is a
     * regular file containing the given string.
     */
    virtual StorePath addTextToStore(
    virtual StorePath addToStoreFromDump(
        Source & dump,
        std::string_view name,
        std::string_view s,
        const StorePathSet & references,
        RepairFlag repair = NoRepair) = 0;
        ContentAddressMethod method = FileIngestionMethod::Recursive,
        HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
        const StorePathSet & references = StorePathSet(),
        RepairFlag repair = NoRepair)
    { unsupported("addToStoreFromDump"); }

    /**
     * Add a mapping indicating that `deriver!outputName` maps to the output path
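With addTextToStore() dropped from the Store interface, string contents now go through addToStoreFromDump() with a TextIngestionMethod, as the user-env and 'nix develop' hunks below do. A minimal sketch of that migration pattern follows; the helper name is hypothetical and the StringSource header is an assumption.

#include "store-api.hh"
#include "serialise.hh"   // assumed header for StringSource

using namespace nix;

// Hypothetical helper: what an old addTextToStore(name, s, refs) call becomes.
StorePath addTextExample(
    Store & store,
    std::string_view name,
    const std::string & contents,
    const StorePathSet & references)
{
    StringSource source { contents };
    return store.addToStoreFromDump(
        source, name,
        TextIngestionMethod {},        // keep "text:" content addressing
        HashAlgorithm::SHA256,
        references);
}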
@@ -86,41 +86,20 @@ struct StoreDirConfig : public Config

    StorePath makeFixedOutputPath(std::string_view name, const FixedOutputInfo & info) const;

    StorePath makeTextPath(std::string_view name, const TextInfo & info) const;

    StorePath makeFixedOutputPathFromCA(std::string_view name, const ContentAddressWithReferences & ca) const;

    /**
     * Read-only variant of addToStoreFromDump(). It returns the store
     * path to which a NAR or flat file would be written.
     * Read-only variant of addToStore(). It returns the store
     * path for the given file sytem object.
     */
    std::pair<StorePath, Hash> computeStorePathFromDump(
        Source & dump,
    std::pair<StorePath, Hash> computeStorePath(
        std::string_view name,
        FileIngestionMethod method = FileIngestionMethod::Recursive,
        SourceAccessor & accessor,
        const CanonPath & path,
        ContentAddressMethod method = FileIngestionMethod::Recursive,
        HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
        const StorePathSet & references = {}) const;

    /**
     * Preparatory part of addTextToStore().
     *
     * !!! Computation of the path should take the references given to
     * addTextToStore() into account, otherwise we have a (relatively
     * minor) security hole: a caller can register a source file with
     * bogus references. If there are too many references, the path may
     * not be garbage collected when it has to be (not really a problem,
     * the caller could create a root anyway), or it may be garbage
     * collected when it shouldn't be (more serious).
     *
     * Hashing the references would solve this (bogus references would
     * simply yield a different store path, so other users wouldn't be
     * affected), but it has some backwards compatibility issues (the
     * hashing scheme changes), so I'm not doing that for now.
     */
    StorePath computeStorePathForText(
        std::string_view name,
        std::string_view s,
        const StorePathSet & references) const;
        const StorePathSet & references = {},
        PathFilter & filter = defaultPathFilter) const;
};

}
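A sketch contrasting the read-only computeStorePath() with an actual ingestion, following the dry-run logic of 'nix store add' further down in this diff. The function and variable names are illustrative, and Store is assumed to inherit the StoreDirConfig helpers as it does in this tree.

#include <cassert>

#include "store-api.hh"
#include "posix-source-accessor.hh"

using namespace nix;

// Illustrative: a dry run computes the store path without writing anything;
// a real ingestion via addToStoreSlow() should land on the same path.
void exampleDryRun(Store & store, const std::string & p)
{
    PosixSourceAccessor accessor;
    auto path = CanonPath::fromCwd(p);

    auto [dryRunPath, hash] = store.computeStorePath(
        "example", accessor, path,
        FileIngestionMethod::Recursive, HashAlgorithm::SHA256, {});
    (void) hash;

    auto info = store.addToStoreSlow(
        "example", accessor, path,
        FileIngestionMethod::Recursive, HashAlgorithm::SHA256, {});

    assert(info.path == dryRunPath);   // same content address either way
}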
49 src/libutil/file-content-address.cc Normal file
@@ -0,0 +1,49 @@
#include "file-content-address.hh"
#include "archive.hh"

namespace nix {

void dumpPath(
    SourceAccessor & accessor, const CanonPath & path,
    Sink & sink,
    FileIngestionMethod method,
    PathFilter & filter)
{
    switch (method) {
    case FileIngestionMethod::Flat:
        accessor.readFile(path, sink);
        break;
    case FileIngestionMethod::Recursive:
        accessor.dumpPath(path, sink, filter);
        break;
    }
}


void restorePath(
    const Path & path,
    Source & source,
    FileIngestionMethod method)
{
    switch (method) {
    case FileIngestionMethod::Flat:
        writeFile(path, source);
        break;
    case FileIngestionMethod::Recursive:
        restorePath(path, source);
        break;
    }
}


HashResult hashPath(
    SourceAccessor & accessor, const CanonPath & path,
    FileIngestionMethod method, HashAlgorithm ht,
    PathFilter & filter)
{
    HashSink sink { ht };
    dumpPath(accessor, path, sink, method, filter);
    return sink.finish();
}

}
56 src/libutil/file-content-address.hh Normal file
@@ -0,0 +1,56 @@
#pragma once
///@file

#include "source-accessor.hh"
#include "fs-sink.hh"
#include "util.hh"

namespace nix {

/**
 * An enumeration of the main ways we can serialize file system
 * objects.
 */
enum struct FileIngestionMethod : uint8_t {
    /**
     * Flat-file hashing. Directly ingest the contents of a single file
     */
    Flat = 0,
    /**
     * Recursive (or NAR) hashing. Serializes the file-system object in
     * Nix Archive format and ingest that.
     */
    Recursive = 1,
};

/**
 * Dump a serialization of the given file system object.
 */
void dumpPath(
    SourceAccessor & accessor, const CanonPath & path,
    Sink & sink,
    FileIngestionMethod method,
    PathFilter & filter = defaultPathFilter);

/**
 * Restore a serialization of the given file system object.
 *
 * @TODO use an arbitrary `ParseSink`.
 */
void restorePath(
    const Path & path,
    Source & source,
    FileIngestionMethod method);

/**
 * Compute the hash of the given file system object according to the
 * given method.
 *
 * The hash is defined as (essentially) hashString(ht, dumpPath(path)).
 */
HashResult hashPath(
    SourceAccessor & accessor, const CanonPath & path,
    FileIngestionMethod method, HashAlgorithm ht,
    PathFilter & filter = defaultPathFilter);

}
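To make the new libutil helpers concrete, here is a small sketch that hashes a path and then streams the same serialization into a sink; it is not part of the commit. PosixSourceAccessor comes from elsewhere in this diff, and everything else here is assumed glue code.

#include <iostream>

#include "file-content-address.hh"
#include "posix-source-accessor.hh"
#include "hash.hh"

using namespace nix;

// Illustrative: hashPath() hashes the serialization selected by the method;
// dumpPath() writes that same serialization to any Sink.
void exampleHashAndDump(const CanonPath & path, Sink & sink)
{
    PosixSourceAccessor accessor;

    auto [hash, size] = hashPath(
        accessor, path,
        FileIngestionMethod::Recursive, HashAlgorithm::SHA256);
    std::cout << hash.to_string(HashFormat::Nix32, true)
              << " (" << size << " bytes)\n";

    dumpPath(accessor, path, sink, FileIngestionMethod::Recursive);
}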
@@ -367,15 +367,6 @@ HashResult HashSink::currentHash()
}


HashResult hashPath(
    HashAlgorithm ha, const Path & path, PathFilter & filter)
{
    HashSink sink(ha);
    dumpPath(path, sink, filter);
    return sink.finish();
}


Hash compressHash(const Hash & hash, unsigned int newSize)
{
    Hash h(hash.algo);
@@ -168,14 +168,11 @@ Hash hashString(HashAlgorithm ha, std::string_view s);
Hash hashFile(HashAlgorithm ha, const Path & path);

/**
 * Compute the hash of the given path, serializing as a Nix Archive and
 * then hashing that.
 * The final hash and the number of bytes digested.
 *
 * The hash is defined as (essentially) hashString(ht, dumpPath(path)).
 * @todo Convert to proper struct
 */
typedef std::pair<Hash, uint64_t> HashResult;
HashResult hashPath(HashAlgorithm ha, const Path & path,
    PathFilter & filter = defaultPathFilter);

/**
 * Compress a hash to the specified number of bytes by cyclically
@@ -104,10 +104,15 @@ bool createUserEnv(EvalState & state, DrvInfos & elems,
    /* Also write a copy of the list of user environment elements to
       the store; we need it for future modifications of the
       environment. */
    std::ostringstream str;
    manifest.print(state.symbols, str, true);
    auto manifestFile = state.store->addTextToStore("env-manifest.nix",
        str.str(), references);
    auto manifestFile = ({
        std::ostringstream str;
        manifest.print(state.symbols, str, true);
        // TODO with C++20 we can use str.view() instead and avoid copy.
        std::string str2 = str.str();
        StringSource source { str2 };
        state.store->addToStoreFromDump(
            source, "env-manifest.nix", TextIngestionMethod {}, HashAlgorithm::SHA256, references);
    });

    /* Get the environment builder expression. */
    Value envBuilder;
@@ -13,6 +13,7 @@
#include "shared.hh"
#include "graphml.hh"
#include "legacy.hh"
#include "posix-source-accessor.hh"
#include "path-with-outputs.hh"
#include "posix-fs-canonicalise.hh"

@@ -175,8 +176,12 @@ static void opAdd(Strings opFlags, Strings opArgs)
{
    if (!opFlags.empty()) throw UsageError("unknown flag");

    PosixSourceAccessor accessor;
    for (auto & i : opArgs)
        cout << fmt("%s\n", store->printStorePath(store->addToStore(std::string(baseNameOf(i)), i)));
        cout << fmt("%s\n", store->printStorePath(store->addToStore(
            std::string(baseNameOf(i)),
            accessor,
            CanonPath::fromCwd(i))));
}

@@ -196,8 +201,14 @@ static void opAddFixed(Strings opFlags, Strings opArgs)
    HashAlgorithm hashAlgo = parseHashAlgo(opArgs.front());
    opArgs.pop_front();

    PosixSourceAccessor accessor;
    for (auto & i : opArgs)
        std::cout << fmt("%s\n", store->printStorePath(store->addToStoreSlow(baseNameOf(i), i, method, hashAlgo).path));
        std::cout << fmt("%s\n", store->printStorePath(store->addToStoreSlow(
            baseNameOf(i),
            accessor,
            CanonPath::fromCwd(i),
            method,
            hashAlgo).path));
}

@@ -541,7 +552,10 @@ static void registerValidity(bool reregister, bool hashGiven, bool canonicalise)
    if (canonicalise)
        canonicalisePathMetaData(store->printStorePath(info->path), {});
    if (!hashGiven) {
        HashResult hash = hashPath(HashAlgorithm::SHA256, store->printStorePath(info->path));
        HashResult hash = hashPath(
            *store->getFSAccessor(false), CanonPath { store->printStorePath(info->path) },
            FileIngestionMethod::Recursive, HashAlgorithm::SHA256);
        info->narHash = hash.first;
        info->narSize = hash.second;
    }
@@ -2,6 +2,7 @@
#include "common-args.hh"
#include "store-api.hh"
#include "archive.hh"
#include "posix-source-accessor.hh"

using namespace nix;

@@ -20,7 +21,7 @@ struct CmdAddToStore : MixDryRun, StoreCommand
{
    Path path;
    std::optional<std::string> namePart;
    FileIngestionMethod ingestionMethod = FileIngestionMethod::Recursive;
    ContentAddressMethod caMethod = FileIngestionMethod::Recursive;

    CmdAddToStore()
    {

@@ -48,7 +49,7 @@ struct CmdAddToStore : MixDryRun, StoreCommand
            )",
            .labels = {"hash-mode"},
            .handler = {[this](std::string s) {
                this->ingestionMethod = parseIngestionMethod(s);
                this->caMethod = parseIngestionMethod(s);
            }},
        });
    }

@@ -57,36 +58,17 @@ struct CmdAddToStore : MixDryRun, StoreCommand
    {
        if (!namePart) namePart = baseNameOf(path);

        StringSink sink;
        dumpPath(path, sink);
        PosixSourceAccessor accessor;

        auto narHash = hashString(HashAlgorithm::SHA256, sink.s);
        auto path2 = CanonPath::fromCwd(path);

        Hash hash = narHash;
        if (ingestionMethod == FileIngestionMethod::Flat) {
            HashSink hsink(HashAlgorithm::SHA256);
            readFile(path, hsink);
            hash = hsink.finish().first;
        }
        auto storePath = dryRun
            ? store->computeStorePath(
                *namePart, accessor, path2, caMethod, HashAlgorithm::SHA256, {}).first
            : store->addToStoreSlow(
                *namePart, accessor, path2, caMethod, HashAlgorithm::SHA256, {}).path;

        ValidPathInfo info {
            *store,
            std::move(*namePart),
            FixedOutputInfo {
                .method = std::move(ingestionMethod),
                .hash = std::move(hash),
                .references = {},
            },
            narHash,
        };
        info.narSize = sink.s.size();

        if (!dryRun) {
            auto source = StringSource(sink.s);
            store->addToStore(info, source);
        }

        logger->cout("%s", store->printStorePath(info.path));
        logger->cout("%s", store->printStorePath(storePath));
    }
};

@@ -110,7 +92,7 @@ struct CmdAddFile : CmdAddToStore
{
    CmdAddFile()
    {
        ingestionMethod = FileIngestionMethod::Flat;
        caMethod = FileIngestionMethod::Flat;
    }

    std::string description() override
@@ -223,7 +223,11 @@ static StorePath getDerivationEnvironment(ref<Store> store, ref<Store> evalStore
    if (builder != "bash")
        throw Error("'nix develop' only works on derivations that use 'bash' as their builder");

    auto getEnvShPath = evalStore->addTextToStore("get-env.sh", getEnvSh, {});
    auto getEnvShPath = ({
        StringSource source { getEnvSh };
        evalStore->addToStoreFromDump(
            source, "get-env.sh", TextIngestionMethod {}, HashAlgorithm::SHA256, {});
    });

    drv.args = {store->printStorePath(getEnvShPath)};
@@ -5,6 +5,7 @@
#include "shared.hh"
#include "references.hh"
#include "archive.hh"
#include "posix-source-accessor.hh"

using namespace nix;

@@ -88,14 +89,8 @@ struct CmdHashBase : Command
        else
            hashSink = std::make_unique<HashSink>(ha);

        switch (mode) {
        case FileIngestionMethod::Flat:
            readFile(path, *hashSink);
            break;
        case FileIngestionMethod::Recursive:
            dumpPath(path, *hashSink);
            break;
        }
        PosixSourceAccessor accessor;
        dumpPath(accessor, CanonPath::fromCwd(path), *hashSink, mode);

        Hash h = hashSink->finish().first;
        if (truncate && h.hashSize > 20) h = compressHash(h, 20);
@@ -9,6 +9,7 @@
#include "attr-path.hh"
#include "eval-inline.hh"
#include "legacy.hh"
#include "posix-source-accessor.hh"

#include <nlohmann/json.hpp>

@@ -122,7 +123,11 @@ std::tuple<StorePath, Hash> prefetchFile(
        Activity act(*logger, lvlChatty, actUnknown,
            fmt("adding '%s' to the store", url));

        auto info = store->addToStoreSlow(*name, tmpFile, ingestionMethod, hashAlgo, expectedHash);
        PosixSourceAccessor accessor;
        auto info = store->addToStoreSlow(
            *name,
            accessor, CanonPath::fromCwd(tmpFile),
            ingestionMethod, hashAlgo, {}, expectedHash);
        storePath = info.path;
        assert(info.ca);
        hash = info.ca->hash;
@@ -604,7 +604,7 @@ namespace nix {
        ASSERT_THAT(v, IsStringEq("401b09eab3c013d4ca54922bb802bec8fd5318192b0a75f201d8b3727429080fb337591abd3e44453b954555b7a0812e1081c39b740293f765eae731f5a65ed1"));
    }

    TEST_F(PrimOpTest, hashStringInvalidHashType) {
    TEST_F(PrimOpTest, hashStringInvalidHashAlgorithm) {
        ASSERT_THROW(eval("builtins.hashString \"foobar\" \"asdf\""), Error);
    }