commit c20591ddc3
Merge remote-tracking branch 'origin/master' into flakes

@@ -274,7 +274,8 @@ int checkSignature(SV * publicKey_, SV * sig_, char * msg)
 SV * addToStore(char * srcPath, int recursive, char * algo)
     PPCODE:
         try {
-            auto path = store()->addToStore(std::string(baseNameOf(srcPath)), srcPath, recursive, parseHashType(algo));
+            auto method = recursive ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat;
+            auto path = store()->addToStore(std::string(baseNameOf(srcPath)), srcPath, method, parseHashType(algo));
             XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(path).c_str(), 0)));
         } catch (Error & e) {
             croak("%s", e.what());

@@ -285,7 +286,8 @@ SV * makeFixedOutputPath(int recursive, char * algo, char * hash, char * name)
     PPCODE:
         try {
             Hash h(hash, parseHashType(algo));
-            auto path = store()->makeFixedOutputPath(recursive, h, name);
+            auto method = recursive ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat;
+            auto path = store()->makeFixedOutputPath(method, h, name);
             XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(path).c_str(), 0)));
         } catch (Error & e) {
             croak("%s", e.what());

@@ -1711,7 +1711,7 @@ string EvalState::copyPathToStore(PathSet & context, const Path & path)
     else {
         auto p = settings.readOnlyMode
             ? store->computeStorePathForPath(std::string(baseNameOf(path)), checkSourcePath(path)).first
-            : store->addToStore(std::string(baseNameOf(path)), checkSourcePath(path), true, htSHA256, defaultPathFilter, repair);
+            : store->addToStore(std::string(baseNameOf(path)), checkSourcePath(path), FileIngestionMethod::Recursive, htSHA256, defaultPathFilter, repair);
         dstPath = store->printStorePath(p);
         srcToStore.insert_or_assign(path, std::move(p));
         printMsg(lvlChatty, "copied source '%1%' -> '%2%'", path, dstPath);

@@ -563,7 +563,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *

     std::optional<std::string> outputHash;
     std::string outputHashAlgo;
-    bool outputHashRecursive = false;
+    auto ingestionMethod = FileIngestionMethod::Flat;

     StringSet outputs;
     outputs.insert("out");

@@ -574,8 +574,8 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
         vomit("processing attribute '%1%'", key);

         auto handleHashMode = [&](const std::string & s) {
-            if (s == "recursive") outputHashRecursive = true;
-            else if (s == "flat") outputHashRecursive = false;
+            if (s == "recursive") ingestionMethod = FileIngestionMethod::Recursive;
+            else if (s == "flat") ingestionMethod = FileIngestionMethod::Flat;
             else throw EvalError("invalid value '%s' for 'outputHashMode' attribute, at %s", s, posDrvName);
         };

@@ -722,11 +722,14 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
             HashType ht = outputHashAlgo.empty() ? htUnknown : parseHashType(outputHashAlgo);
             Hash h(*outputHash, ht);

-            auto outPath = state.store->makeFixedOutputPath(outputHashRecursive, h, drvName);
+            auto outPath = state.store->makeFixedOutputPath(ingestionMethod, h, drvName);
             if (!jsonObject) drv.env["out"] = state.store->printStorePath(outPath);
-            drv.outputs.insert_or_assign("out", DerivationOutput(std::move(outPath),
-                (outputHashRecursive ? "r:" : "") + printHashType(h.type),
-                h.to_string(Base16, false)));
+            drv.outputs.insert_or_assign("out", DerivationOutput {
+                std::move(outPath),
+                (ingestionMethod == FileIngestionMethod::Recursive ? "r:" : "")
+                    + printHashType(h.type),
+                h.to_string(Base16, false),
+            });
         }

         else {

@@ -1040,7 +1043,7 @@ static void prim_toFile(EvalState & state, const Pos & pos, Value * * args, Valu


 static void addPath(EvalState & state, const Pos & pos, const string & name, const Path & path_,
-    Value * filterFun, bool recursive, const Hash & expectedHash, Value & v)
+    Value * filterFun, FileIngestionMethod method, const Hash & expectedHash, Value & v)
 {
     const auto path = evalSettings.pureEval && expectedHash ?
         path_ :

@@ -1071,12 +1074,12 @@ static void addPath(EvalState & state, const Pos & pos, const string & name, con

         std::optional<StorePath> expectedStorePath;
         if (expectedHash)
-            expectedStorePath = state.store->makeFixedOutputPath(recursive, expectedHash, name);
+            expectedStorePath = state.store->makeFixedOutputPath(method, expectedHash, name);
         Path dstPath;
         if (!expectedHash || !state.store->isValidPath(*expectedStorePath)) {
             dstPath = state.store->printStorePath(settings.readOnlyMode
-                ? state.store->computeStorePathForPath(name, path, recursive, htSHA256, filter).first
-                : state.store->addToStore(name, path, recursive, htSHA256, filter, state.repair));
+                ? state.store->computeStorePathForPath(name, path, method, htSHA256, filter).first
+                : state.store->addToStore(name, path, method, htSHA256, filter, state.repair));
             if (expectedHash && expectedStorePath != state.store->parseStorePath(dstPath))
                 throw Error("store path mismatch in (possibly filtered) path added from '%s'", path);
         } else

@@ -1097,7 +1100,7 @@ static void prim_filterSource(EvalState & state, const Pos & pos, Value * * args
     if (args[0]->type != tLambda)
         throw TypeError(format("first argument in call to 'filterSource' is not a function but %1%, at %2%") % showType(*args[0]) % pos);

-    addPath(state, pos, std::string(baseNameOf(path)), path, args[0], true, Hash(), v);
+    addPath(state, pos, std::string(baseNameOf(path)), path, args[0], FileIngestionMethod::Recursive, Hash(), v);
 }

 static void prim_path(EvalState & state, const Pos & pos, Value * * args, Value & v)

@@ -1106,7 +1109,7 @@ static void prim_path(EvalState & state, const Pos & pos, Value * * args, Value
     Path path;
     string name;
     Value * filterFun = nullptr;
-    auto recursive = true;
+    auto method = FileIngestionMethod::Recursive;
     Hash expectedHash;

     for (auto & attr : *args[0]->attrs) {

@@ -1122,7 +1125,7 @@ static void prim_path(EvalState & state, const Pos & pos, Value * * args, Value
             state.forceValue(*attr.value, pos);
             filterFun = attr.value;
         } else if (n == "recursive")
-            recursive = state.forceBool(*attr.value, *attr.pos);
+            method = FileIngestionMethod { state.forceBool(*attr.value, *attr.pos) };
         else if (n == "sha256")
             expectedHash = Hash(state.forceStringNoCtx(*attr.value, *attr.pos), htSHA256);
         else

@@ -1133,7 +1136,7 @@ static void prim_path(EvalState & state, const Pos & pos, Value * * args, Value
     if (name.empty())
         name = baseNameOf(path);

-    addPath(state, pos, name, path, filterFun, recursive, expectedHash, v);
+    addPath(state, pos, name, path, filterFun, method, expectedHash, v);
 }

@@ -189,7 +189,7 @@ StorePath Input::computeStorePath(Store & store) const
     auto narHash = getNarHash();
     if (!narHash)
         throw Error("cannot compute store path for mutable input '%s'", to_string());
-    return store.makeFixedOutputPath(true, *narHash, "source");
+    return store.makeFixedOutputPath(FileIngestionMethod::Recursive, *narHash, "source");
 }

 std::string Input::getType() const

@@ -64,7 +64,7 @@ struct GitInputScheme : InputScheme
         maybeGetBoolAttr(attrs, "submodules");

         if (auto ref = maybeGetStrAttr(attrs, "ref")) {
-            if (!std::regex_match(*ref, refRegex))
+            if (std::regex_search(*ref, badGitRefRegex))
                 throw BadURL("invalid Git branch/tag name '%s'", *ref);
         }

@@ -261,7 +261,7 @@ struct GitInputScheme : InputScheme
                 return files.count(file);
             };

-            auto storePath = store->addToStore("source", actualUrl, true, htSHA256, filter);
+            auto storePath = store->addToStore("source", actualUrl, FileIngestionMethod::Recursive, htSHA256, filter);

             // FIXME: maybe we should use the timestamp of the last
             // modified dirty file?

@@ -348,7 +348,11 @@ struct GitInputScheme : InputScheme
             // FIXME: git stderr messes up our progress indicator, so
             // we're using --quiet for now. Should process its stderr.
             try {
-                runProgram("git", true, { "-C", repoDir, "fetch", "--quiet", "--force", "--", actualUrl, fmt("%s:%s", *input.getRef(), *input.getRef()) });
+                auto ref = input.getRef();
+                auto fetchRef = ref->compare(0, 5, "refs/") == 0
+                    ? *ref
+                    : "refs/heads/" + *ref;
+                runProgram("git", true, { "-C", repoDir, "fetch", "--quiet", "--force", "--", actualUrl, fmt("%s:%s", fetchRef, fetchRef) });
             } catch (Error & e) {
                 if (!pathExists(localRefFile)) throw;
                 warn("could not update local clone of Git repository '%s'; continuing with the most recent version", actualUrl);

@@ -413,7 +417,7 @@ struct GitInputScheme : InputScheme
             unpackTarfile(*source, tmpDir);
         }

-        auto storePath = store->addToStore(name, tmpDir, true, htSHA256, filter);
+        auto storePath = store->addToStore(name, tmpDir, FileIngestionMethod::Recursive, htSHA256, filter);

         auto lastModified = std::stoull(runProgram("git", true, { "-C", repoDir, "log", "-1", "--format=%ct", input.getRev()->gitRev() }));

@@ -162,7 +162,7 @@ struct MercurialInputScheme : InputScheme
                 return files.count(file);
             };

-            auto storePath = store->addToStore("source", actualUrl, true, htSHA256, filter);
+            auto storePath = store->addToStore("source", actualUrl, FileIngestionMethod::Recursive, htSHA256, filter);

             return {
                 Tree(store->printStorePath(storePath), std::move(storePath)),

@@ -67,11 +67,12 @@ DownloadFileResult downloadFile(
         StringSink sink;
         dumpString(*res.data, sink);
         auto hash = hashString(htSHA256, *res.data);
-        ValidPathInfo info(store->makeFixedOutputPath(false, hash, name));
+        ValidPathInfo info(store->makeFixedOutputPath(FileIngestionMethod::Flat, hash, name));
         info.narHash = hashString(htSHA256, *sink.s);
         info.narSize = sink.s->size();
-        info.ca = makeFixedOutputCA(false, hash);
-        store->addToStore(info, sink.s, NoRepair, NoCheckSigs);
+        info.ca = makeFixedOutputCA(FileIngestionMethod::Flat, hash);
+        auto source = StringSource { *sink.s };
+        store->addToStore(info, source, NoRepair, NoCheckSigs);
         storePath = std::move(info.path);
     }

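
Aside (not part of the diff): the StringSink / StringSource pairing seen above recurs throughout this commit, because the NAR-importing addToStore() overload now takes a Source & instead of a ref<std::string>. A minimal sketch of the pattern, assuming Nix's own serialise.hh and store-api.hh as they appear in these hunks and compilation inside the Nix source tree:

    #include "serialise.hh"
    #include "store-api.hh"

    using namespace nix;

    // A NAR is first accumulated in memory by a StringSink, then wrapped in a
    // StringSource so it can be handed to the Source&-based addToStore() overload.
    static void addNarFromSink(Store & store, const ValidPathInfo & info, StringSink & sink)
    {
        auto source = StringSource { *sink.s };   // replay the bytes the sink captured
        store.addToStore(info, source, NoRepair, CheckSigs, nullptr);
    }
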
@@ -138,7 +139,7 @@ std::pair<Tree, time_t> downloadTarball(
             throw nix::Error("tarball '%s' contains an unexpected number of top-level files", url);
         auto topDir = tmpDir + "/" + members.begin()->name;
         lastModified = lstat(topDir).st_mtime;
-        unpackedStorePath = store->addToStore(name, topDir, true, htSHA256, defaultPathFilter, NoRepair);
+        unpackedStorePath = store->addToStore(name, topDir, FileIngestionMethod::Recursive, htSHA256, defaultPathFilter, NoRepair);
     }

     Attrs infoAttrs({

@@ -113,9 +113,12 @@ void BinaryCacheStore::writeNarInfo(ref<NarInfo> narInfo)
         diskCache->upsertNarInfo(getUri(), hashPart, std::shared_ptr<NarInfo>(narInfo));
 }

-void BinaryCacheStore::addToStore(const ValidPathInfo & info, const ref<std::string> & nar,
+void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource,
     RepairFlag repair, CheckSigsFlag checkSigs, std::shared_ptr<FSAccessor> accessor)
 {
+    // FIXME: See if we can use the original source to reduce memory usage.
+    auto nar = make_ref<std::string>(narSource.drain());
+
     if (!repair && isValidPath(info.path)) return;

     /* Verify that all references are valid. This may do some .narinfo

@@ -327,7 +330,7 @@ void BinaryCacheStore::queryPathInfoUncached(const StorePath & storePath,
 }

 StorePath BinaryCacheStore::addToStore(const string & name, const Path & srcPath,
-    bool recursive, HashType hashAlgo, PathFilter & filter, RepairFlag repair)
+    FileIngestionMethod method, HashType hashAlgo, PathFilter & filter, RepairFlag repair)
 {
     // FIXME: some cut&paste from LocalStore::addToStore().

@@ -336,7 +339,7 @@ StorePath BinaryCacheStore::addToStore(const string & name, const Path & srcPath
        small files. */
     StringSink sink;
     Hash h;
-    if (recursive) {
+    if (method == FileIngestionMethod::Recursive) {
         dumpPath(srcPath, sink, filter);
         h = hashString(hashAlgo, *sink.s);
     } else {

@@ -345,9 +348,10 @@ StorePath BinaryCacheStore::addToStore(const string & name, const Path & srcPath
         h = hashString(hashAlgo, s);
     }

-    ValidPathInfo info(makeFixedOutputPath(recursive, h, name));
+    ValidPathInfo info(makeFixedOutputPath(method, h, name));

-    addToStore(info, sink.s, repair, CheckSigs, nullptr);
+    auto source = StringSource { *sink.s };
+    addToStore(info, source, repair, CheckSigs, nullptr);

     return std::move(info.path);
 }

@@ -361,7 +365,8 @@ StorePath BinaryCacheStore::addTextToStore(const string & name, const string & s
     if (repair || !isValidPath(info.path)) {
         StringSink sink;
         dumpString(s, sink);
-        addToStore(info, sink.s, repair, CheckSigs, nullptr);
+        auto source = StringSource { *sink.s };
+        addToStore(info, source, repair, CheckSigs, nullptr);
     }

     return std::move(info.path);

@@ -74,12 +74,12 @@ public:
     std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override
     { unsupported("queryPathFromHashPart"); }

-    void addToStore(const ValidPathInfo & info, const ref<std::string> & nar,
+    void addToStore(const ValidPathInfo & info, Source & narSource,
         RepairFlag repair, CheckSigsFlag checkSigs,
         std::shared_ptr<FSAccessor> accessor) override;

     StorePath addToStore(const string & name, const Path & srcPath,
-        bool recursive, HashType hashAlgo,
+        FileIngestionMethod method, HashType hashAlgo,
         PathFilter & filter, RepairFlag repair) override;

     StorePath addTextToStore(const string & name, const string & s,

@@ -2734,7 +2734,7 @@ struct RestrictedStore : public LocalFSStore
     { throw Error("queryPathFromHashPart"); }

     StorePath addToStore(const string & name, const Path & srcPath,
-        bool recursive = true, HashType hashAlgo = htSHA256,
+        FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256,
         PathFilter & filter = defaultPathFilter, RepairFlag repair = NoRepair) override
     { throw Error("addToStore"); }

@@ -2747,9 +2747,9 @@ struct RestrictedStore : public LocalFSStore
     }

     StorePath addToStoreFromDump(const string & dump, const string & name,
-        bool recursive = true, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair) override
+        FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair) override
     {
-        auto path = next->addToStoreFromDump(dump, name, recursive, hashAlgo, repair);
+        auto path = next->addToStoreFromDump(dump, name, method, hashAlgo, repair);
         goal.addDependency(path);
         return path;
     }

@@ -3692,10 +3692,10 @@ void DerivationGoal::registerOutputs()

         if (fixedOutput) {

-            bool recursive; Hash h;
-            i.second.parseHashInfo(recursive, h);
+            FileIngestionMethod outputHashMode; Hash h;
+            i.second.parseHashInfo(outputHashMode, h);

-            if (!recursive) {
+            if (outputHashMode == FileIngestionMethod::Flat) {
                 /* The output path should be a regular file without execute permission. */
                 if (!S_ISREG(st.st_mode) || (st.st_mode & S_IXUSR) != 0)
                     throw BuildError(

@@ -3705,9 +3705,11 @@ void DerivationGoal::registerOutputs()

             /* Check the hash. In hash mode, move the path produced by
               the derivation to its content-addressed location. */
-            Hash h2 = recursive ? hashPath(h.type, actualPath).first : hashFile(h.type, actualPath);
+            Hash h2 = outputHashMode == FileIngestionMethod::Recursive
+                ? hashPath(h.type, actualPath).first
+                : hashFile(h.type, actualPath);

-            auto dest = worker.store.makeFixedOutputPath(recursive, h2, i.second.path.name());
+            auto dest = worker.store.makeFixedOutputPath(outputHashMode, h2, i.second.path.name());

             if (h != h2) {

@@ -3736,7 +3738,7 @@ void DerivationGoal::registerOutputs()
             else
                 assert(worker.store.parseStorePath(path) == dest);

-            ca = makeFixedOutputCA(recursive, h2);
+            ca = makeFixedOutputCA(outputHashMode, h2);
         }

         /* Get rid of all weird permissions. This also checks that

@@ -3957,7 +3959,9 @@ void DerivationGoal::checkOutputs(const std::map<Path, ValidPathInfo> & outputs)

             auto spec = parseReferenceSpecifiers(worker.store, *drv, *value);

-            auto used = recursive ? cloneStorePathSet(getClosure(info.path).first) : cloneStorePathSet(info.references);
+            auto used = recursive
+                ? cloneStorePathSet(getClosure(info.path).first)
+                : cloneStorePathSet(info.references);

             if (recursive && checks.ignoreSelfRefs)
                 used.erase(info.path);

@@ -355,20 +355,24 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
     }

     case wopAddToStore: {
-        bool fixed, recursive;
         std::string s, baseName;
-        from >> baseName >> fixed /* obsolete */ >> recursive >> s;
-        /* Compatibility hack. */
-        if (!fixed) {
-            s = "sha256";
-            recursive = true;
+        FileIngestionMethod method;
+        {
+            bool fixed, recursive;
+            from >> baseName >> fixed /* obsolete */ >> recursive >> s;
+            method = FileIngestionMethod { recursive };
+            /* Compatibility hack. */
+            if (!fixed) {
+                s = "sha256";
+                method = FileIngestionMethod::Recursive;
+            }
         }
         HashType hashAlgo = parseHashType(s);

         TeeSource savedNAR(from);
         RetrieveRegularNARSink savedRegular;

-        if (recursive) {
+        if (method == FileIngestionMethod::Recursive) {
            /* Get the entire NAR dump from the client and save it to
               a string so that we can pass it to
               addToStoreFromDump(). */

@@ -380,7 +384,11 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
         logger->startWork();
         if (!savedRegular.regular) throw Error("regular file expected");

-        auto path = store->addToStoreFromDump(recursive ? *savedNAR.data : savedRegular.s, baseName, recursive, hashAlgo);
+        auto path = store->addToStoreFromDump(
+            method == FileIngestionMethod::Recursive ? *savedNAR.data : savedRegular.s,
+            baseName,
+            method,
+            hashAlgo);
         logger->stopWork();

         to << store->printStorePath(path);

@@ -9,13 +9,13 @@
 namespace nix {


-void DerivationOutput::parseHashInfo(bool & recursive, Hash & hash) const
+void DerivationOutput::parseHashInfo(FileIngestionMethod & recursive, Hash & hash) const
 {
-    recursive = false;
+    recursive = FileIngestionMethod::Flat;
     string algo = hashAlgo;

     if (string(algo, 0, 2) == "r:") {
-        recursive = true;
+        recursive = FileIngestionMethod::Recursive;
         algo = string(algo, 2);
     }

@@ -22,7 +22,7 @@ struct DerivationOutput
         , hashAlgo(std::move(hashAlgo))
         , hash(std::move(hash))
     { }
-    void parseHashInfo(bool & recursive, Hash & hash) const;
+    void parseHashInfo(FileIngestionMethod & recursive, Hash & hash) const;
 };

 typedef std::map<string, DerivationOutput> DerivationOutputs;

@@ -1,3 +1,4 @@
+#include "serialise.hh"
 #include "store-api.hh"
 #include "archive.hh"
 #include "worker-protocol.hh"

@@ -100,7 +101,9 @@ StorePaths Store::importPaths(Source & source, std::shared_ptr<FSAccessor> acces
         if (readInt(source) == 1)
             readString(source);

-        addToStore(info, tee.source.data, NoRepair, checkSigs, accessor);
+        // Can't use underlying source, which would have been exhausted
+        auto source = StringSource { *tee.source.data };
+        addToStore(info, source, NoRepair, checkSigs, accessor);

         res.push_back(info.path.clone());
     }

@@ -195,7 +195,7 @@ struct LegacySSHStore : public Store
     { unsupported("queryPathFromHashPart"); }

     StorePath addToStore(const string & name, const Path & srcPath,
-        bool recursive, HashType hashAlgo,
+        FileIngestionMethod method, HashType hashAlgo,
         PathFilter & filter, RepairFlag repair) override
     { unsupported("addToStore"); }

@@ -557,10 +557,10 @@ void LocalStore::checkDerivationOutputs(const StorePath & drvPath, const Derivat
         if (out == drv.outputs.end())
             throw Error("derivation '%s' does not have an output named 'out'", printStorePath(drvPath));

-        bool recursive; Hash h;
-        out->second.parseHashInfo(recursive, h);
+        FileIngestionMethod method; Hash h;
+        out->second.parseHashInfo(method, h);

-        check(makeFixedOutputPath(recursive, h, drvName), out->second.path, "out");
+        check(makeFixedOutputPath(method, h, drvName), out->second.path, "out");
     }

     else {

@@ -1043,11 +1043,11 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,


 StorePath LocalStore::addToStoreFromDump(const string & dump, const string & name,
-    bool recursive, HashType hashAlgo, RepairFlag repair)
+    FileIngestionMethod method, HashType hashAlgo, RepairFlag repair)
 {
     Hash h = hashString(hashAlgo, dump);

-    auto dstPath = makeFixedOutputPath(recursive, h, name);
+    auto dstPath = makeFixedOutputPath(method, h, name);

     addTempRoot(dstPath);

@@ -1067,7 +1067,7 @@ StorePath LocalStore::addToStoreFromDump(const string & dump, const string & nam

             autoGC();

-            if (recursive) {
+            if (method == FileIngestionMethod::Recursive) {
                 StringSource source(dump);
                 restorePath(realPath, source);
             } else

@@ -1080,7 +1080,7 @@ StorePath LocalStore::addToStoreFromDump(const string & dump, const string & nam
               above (if called with recursive == true and hashAlgo ==
               sha256); otherwise, compute it here. */
            HashResult hash;
-           if (recursive) {
+           if (method == FileIngestionMethod::Recursive) {
                hash.first = hashAlgo == htSHA256 ? h : hashString(htSHA256, dump);
                hash.second = dump.size();
            } else

@@ -1091,7 +1091,7 @@ StorePath LocalStore::addToStoreFromDump(const string & dump, const string & nam
            ValidPathInfo info(dstPath.clone());
            info.narHash = hash.first;
            info.narSize = hash.second;
-           info.ca = makeFixedOutputCA(recursive, h);
+           info.ca = makeFixedOutputCA(method, h);
            registerValidPath(info);
        }

@@ -1103,7 +1103,7 @@ StorePath LocalStore::addToStoreFromDump(const string & dump, const string & nam


 StorePath LocalStore::addToStore(const string & name, const Path & _srcPath,
-    bool recursive, HashType hashAlgo, PathFilter & filter, RepairFlag repair)
+    FileIngestionMethod method, HashType hashAlgo, PathFilter & filter, RepairFlag repair)
 {
     Path srcPath(absPath(_srcPath));

@@ -1111,12 +1111,12 @@ StorePath LocalStore::addToStore(const string & name, const Path & _srcPath,
        method for very large paths, but `copyPath' is mainly used for
        small files. */
     StringSink sink;
-    if (recursive)
+    if (method == FileIngestionMethod::Recursive)
         dumpPath(srcPath, sink, filter);
     else
         sink.s = make_ref<std::string>(readFile(srcPath));

-    return addToStoreFromDump(*sink.s, name, recursive, hashAlgo, repair);
+    return addToStoreFromDump(*sink.s, name, method, hashAlgo, repair);
 }

@@ -149,7 +149,7 @@ public:
         std::shared_ptr<FSAccessor> accessor) override;

     StorePath addToStore(const string & name, const Path & srcPath,
-        bool recursive, HashType hashAlgo,
+        FileIngestionMethod method, HashType hashAlgo,
         PathFilter & filter, RepairFlag repair) override;

     /* Like addToStore(), but the contents of the path are contained

@@ -157,7 +157,7 @@ public:
       true) or simply the contents of a regular file (if recursive ==
      false). */
     StorePath addToStoreFromDump(const string & dump, const string & name,
-        bool recursive = true, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair) override;
+        FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair) override;

     StorePath addTextToStore(const string & name, const string & s,
         const StorePathSet & references, RepairFlag repair) override;

@@ -73,6 +73,11 @@ const size_t storePathHashLen = 32; // i.e. 160 bits
 /* Extension of derivations in the Nix store. */
 const std::string drvExtension = ".drv";

+enum struct FileIngestionMethod : uint8_t {
+    Flat = false,
+    Recursive = true
+};
+
 struct StorePathWithOutputs
 {
     StorePath path;
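
Aside (not part of the diff): Flat and Recursive are deliberately given the old boolean's values, so the wire protocol and the remaining bool-based call sites (the daemon handler and builtins.path above) can convert with a braced initializer rather than a lookup. A small self-contained sketch of that round trip:

    #include <cassert>
    #include <cstdint>

    // Same shape as the enum added in this hunk.
    enum struct FileIngestionMethod : uint8_t {
        Flat = false,
        Recursive = true
    };

    int main()
    {
        bool recursive = true;                            // legacy flag, e.g. read off the wire
        auto method = FileIngestionMethod { recursive };  // bool -> enum, no narrowing
        assert(method == FileIngestionMethod::Recursive);

        // enum -> integer again, as RemoteStore::addToStore() does further down.
        assert((method == FileIngestionMethod::Recursive ? 1 : 0) == 1);
        return 0;
    }
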
@@ -484,7 +484,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source,


 StorePath RemoteStore::addToStore(const string & name, const Path & _srcPath,
-    bool recursive, HashType hashAlgo, PathFilter & filter, RepairFlag repair)
+    FileIngestionMethod method, HashType hashAlgo, PathFilter & filter, RepairFlag repair)
 {
     if (repair) throw Error("repairing is not supported when building through the Nix daemon");

@@ -492,10 +492,12 @@ StorePath RemoteStore::addToStore(const string & name, const Path & _srcPath,

     Path srcPath(absPath(_srcPath));

-    conn->to << wopAddToStore << name
-        << ((hashAlgo == htSHA256 && recursive) ? 0 : 1) /* backwards compatibility hack */
-        << (recursive ? 1 : 0)
-        << printHashType(hashAlgo);
+    conn->to
+        << wopAddToStore
+        << name
+        << ((hashAlgo == htSHA256 && method == FileIngestionMethod::Recursive) ? 0 : 1) /* backwards compatibility hack */
+        << (method == FileIngestionMethod::Recursive ? 1 : 0)
+        << printHashType(hashAlgo);

     try {
         conn->to.written = 0;

@@ -65,7 +65,7 @@ public:
         std::shared_ptr<FSAccessor> accessor) override;

     StorePath addToStore(const string & name, const Path & srcPath,
-        bool recursive = true, HashType hashAlgo = htSHA256,
+        FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256,
         PathFilter & filter = defaultPathFilter, RepairFlag repair = NoRepair) override;

     StorePath addTextToStore(const string & name, const string & s,

@@ -172,19 +172,22 @@ static std::string makeType(


 StorePath Store::makeFixedOutputPath(
-    bool recursive,
+    FileIngestionMethod recursive,
     const Hash & hash,
     std::string_view name,
     const StorePathSet & references,
     bool hasSelfReference) const
 {
-    if (hash.type == htSHA256 && recursive) {
+    if (hash.type == htSHA256 && recursive == FileIngestionMethod::Recursive) {
         return makeStorePath(makeType(*this, "source", references, hasSelfReference), hash, name);
     } else {
         assert(references.empty());
-        return makeStorePath("output:out", hashString(htSHA256,
-            "fixed:out:" + (recursive ? (string) "r:" : "") +
-            hash.to_string(Base16) + ":"), name);
+        return makeStorePath("output:out",
+            hashString(htSHA256,
+                "fixed:out:"
+                + (recursive == FileIngestionMethod::Recursive ? (string) "r:" : "")
+                + hash.to_string(Base16) + ":"),
+            name);
     }
 }

@@ -201,10 +204,12 @@ StorePath Store::makeTextPath(std::string_view name, const Hash & hash,


 std::pair<StorePath, Hash> Store::computeStorePathForPath(std::string_view name,
-    const Path & srcPath, bool recursive, HashType hashAlgo, PathFilter & filter) const
+    const Path & srcPath, FileIngestionMethod method, HashType hashAlgo, PathFilter & filter) const
 {
-    Hash h = recursive ? hashPath(hashAlgo, srcPath, filter).first : hashFile(hashAlgo, srcPath);
-    return std::make_pair(makeFixedOutputPath(recursive, h, name), h);
+    Hash h = method == FileIngestionMethod::Recursive
+        ? hashPath(hashAlgo, srcPath, filter).first
+        : hashFile(hashAlgo, srcPath);
+    return std::make_pair(makeFixedOutputPath(method, h, name), h);
 }


@@ -782,8 +787,8 @@ bool ValidPathInfo::isContentAddressed(const Store & store) const
     }

     else if (hasPrefix(ca, "fixed:")) {
-        bool recursive = ca.compare(6, 2, "r:") == 0;
-        Hash hash(std::string(ca, recursive ? 8 : 6));
+        FileIngestionMethod recursive { ca.compare(6, 2, "r:") == 0 };
+        Hash hash(std::string(ca, recursive == FileIngestionMethod::Recursive ? 8 : 6));
         auto refs = cloneStorePathSet(references);
         bool hasSelfReference = false;
         if (refs.count(path)) {

@@ -827,27 +832,14 @@ Strings ValidPathInfo::shortRefs() const
 }


-std::string makeFixedOutputCA(bool recursive, const Hash & hash)
+std::string makeFixedOutputCA(FileIngestionMethod recursive, const Hash & hash)
 {
-    return "fixed:" + (recursive ? (std::string) "r:" : "") + hash.to_string();
+    return "fixed:"
+        + (recursive == FileIngestionMethod::Recursive ? (std::string) "r:" : "")
+        + hash.to_string();
 }


-void Store::addToStore(const ValidPathInfo & info, Source & narSource,
-    RepairFlag repair, CheckSigsFlag checkSigs,
-    std::shared_ptr<FSAccessor> accessor)
-{
-    addToStore(info, make_ref<std::string>(narSource.drain()), repair, checkSigs, accessor);
-}
-
-void Store::addToStore(const ValidPathInfo & info, const ref<std::string> & nar,
-    RepairFlag repair, CheckSigsFlag checkSigs,
-    std::shared_ptr<FSAccessor> accessor)
-{
-    StringSource source(*nar);
-    addToStore(info, source, repair, checkSigs, accessor);
-}
-
 }

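
Aside (not part of the diff): only the parameter type of makeFixedOutputCA() changes here; the textual content-address keeps the "fixed:[r:]<hash>" shape that isContentAddressed() above parses back (the "r:" marker at offset 6). A hedged sketch of that string assembly, with a made-up hash string standing in for Hash::to_string():

    #include <cstdint>
    #include <iostream>
    #include <string>

    // Stand-in for the enum introduced by this commit.
    enum struct FileIngestionMethod : uint8_t { Flat = false, Recursive = true };

    // Same string shape as the function in the hunk: "fixed:" [+ "r:"] + <hash>.
    std::string makeFixedOutputCA(FileIngestionMethod recursive, const std::string & hashStr)
    {
        return "fixed:"
            + (recursive == FileIngestionMethod::Recursive ? (std::string) "r:" : "")
            + hashStr;
    }

    int main()
    {
        // "sha256:1xyz..." is illustrative only; real values come from Hash::to_string().
        std::cout << makeFixedOutputCA(FileIngestionMethod::Recursive, "sha256:1xyz...") << "\n";
        // Prints: fixed:r:sha256:1xyz...
        return 0;
    }
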
@@ -44,7 +44,6 @@ enum CheckSigsFlag : bool { NoCheckSigs = false, CheckSigs = true };
 enum SubstituteFlag : bool { NoSubstitute = false, Substitute = true };
 enum AllowInvalidFlag : bool { DisallowInvalid = false, AllowInvalid = true };

-
 /* Magic header of exportPath() output (obsolete). */
 const uint32_t exportMagic = 0x4558494e;

@@ -347,7 +346,7 @@ public:
     StorePath makeOutputPath(const string & id,
         const Hash & hash, std::string_view name) const;

-    StorePath makeFixedOutputPath(bool recursive,
+    StorePath makeFixedOutputPath(FileIngestionMethod method,
         const Hash & hash, std::string_view name,
         const StorePathSet & references = {},
         bool hasSelfReference = false) const;

@@ -359,7 +358,7 @@ public:
       store path to which srcPath is to be copied. Returns the store
       path and the cryptographic hash of the contents of srcPath. */
     std::pair<StorePath, Hash> computeStorePathForPath(std::string_view name,
-        const Path & srcPath, bool recursive = true,
+        const Path & srcPath, FileIngestionMethod method = FileIngestionMethod::Recursive,
         HashType hashAlgo = htSHA256, PathFilter & filter = defaultPathFilter) const;

     /* Preparatory part of addTextToStore().

@@ -451,24 +450,19 @@ public:
     /* Import a path into the store. */
     virtual void addToStore(const ValidPathInfo & info, Source & narSource,
         RepairFlag repair = NoRepair, CheckSigsFlag checkSigs = CheckSigs,
-        std::shared_ptr<FSAccessor> accessor = 0);
+        std::shared_ptr<FSAccessor> accessor = 0) = 0;

-    // FIXME: remove
-    virtual void addToStore(const ValidPathInfo & info, const ref<std::string> & nar,
-        RepairFlag repair = NoRepair, CheckSigsFlag checkSigs = CheckSigs,
-        std::shared_ptr<FSAccessor> accessor = 0);
-
     /* Copy the contents of a path to the store and register the
        validity the resulting path. The resulting path is returned.
        The function object `filter' can be used to exclude files (see
       libutil/archive.hh). */
     virtual StorePath addToStore(const string & name, const Path & srcPath,
-        bool recursive = true, HashType hashAlgo = htSHA256,
+        FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256,
         PathFilter & filter = defaultPathFilter, RepairFlag repair = NoRepair) = 0;

     // FIXME: remove?
     virtual StorePath addToStoreFromDump(const string & dump, const string & name,
-        bool recursive = true, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair)
+        FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair)
     {
         throw Error("addToStoreFromDump() is not supported by this store");
     }

@@ -851,7 +845,7 @@ std::optional<ValidPathInfo> decodeValidPathInfo(

 /* Compute the content-addressability assertion (ValidPathInfo::ca)
    for paths created by makeFixedOutputPath() / addToStore(). */
-std::string makeFixedOutputCA(bool recursive, const Hash & hash);
+std::string makeFixedOutputCA(FileIngestionMethod method, const Hash & hash);


 /* Split URI into protocol+hierarchy part and its parameter set. */

@@ -65,60 +65,63 @@ void Config::getSettings(std::map<std::string, SettingInfo> & res, bool override
         res.emplace(opt.first, SettingInfo{opt.second.setting->to_string(), opt.second.setting->description});
 }

+void AbstractConfig::applyConfig(const std::string & contents, const std::string & path) {
+    unsigned int pos = 0;
+
+    while (pos < contents.size()) {
+        string line;
+        while (pos < contents.size() && contents[pos] != '\n')
+            line += contents[pos++];
+        pos++;
+
+        string::size_type hash = line.find('#');
+        if (hash != string::npos)
+            line = string(line, 0, hash);
+
+        vector<string> tokens = tokenizeString<vector<string> >(line);
+        if (tokens.empty()) continue;
+
+        if (tokens.size() < 2)
+            throw UsageError("illegal configuration line '%1%' in '%2%'", line, path);
+
+        auto include = false;
+        auto ignoreMissing = false;
+        if (tokens[0] == "include")
+            include = true;
+        else if (tokens[0] == "!include") {
+            include = true;
+            ignoreMissing = true;
+        }
+
+        if (include) {
+            if (tokens.size() != 2)
+                throw UsageError("illegal configuration line '%1%' in '%2%'", line, path);
+            auto p = absPath(tokens[1], dirOf(path));
+            if (pathExists(p)) {
+                applyConfigFile(p);
+            } else if (!ignoreMissing) {
+                throw Error("file '%1%' included from '%2%' not found", p, path);
+            }
+            continue;
+        }
+
+        if (tokens[1] != "=")
+            throw UsageError("illegal configuration line '%1%' in '%2%'", line, path);
+
+        string name = tokens[0];
+
+        vector<string>::iterator i = tokens.begin();
+        advance(i, 2);
+
+        set(name, concatStringsSep(" ", Strings(i, tokens.end()))); // FIXME: slow
+    };
+}
+
 void AbstractConfig::applyConfigFile(const Path & path)
 {
     try {
         string contents = readFile(path);
+        applyConfig(contents, path);
-        unsigned int pos = 0;
-
-        while (pos < contents.size()) {
-            string line;
-            while (pos < contents.size() && contents[pos] != '\n')
-                line += contents[pos++];
-            pos++;
-
-            string::size_type hash = line.find('#');
-            if (hash != string::npos)
-                line = string(line, 0, hash);
-
-            vector<string> tokens = tokenizeString<vector<string> >(line);
-            if (tokens.empty()) continue;
-
-            if (tokens.size() < 2)
-                throw UsageError("illegal configuration line '%1%' in '%2%'", line, path);
-
-            auto include = false;
-            auto ignoreMissing = false;
-            if (tokens[0] == "include")
-                include = true;
-            else if (tokens[0] == "!include") {
-                include = true;
-                ignoreMissing = true;
-            }
-
-            if (include) {
-                if (tokens.size() != 2)
-                    throw UsageError("illegal configuration line '%1%' in '%2%'", line, path);
-                auto p = absPath(tokens[1], dirOf(path));
-                if (pathExists(p)) {
-                    applyConfigFile(p);
-                } else if (!ignoreMissing) {
-                    throw Error("file '%1%' included from '%2%' not found", p, path);
-                }
-                continue;
-            }
-
-            if (tokens[1] != "=")
-                throw UsageError("illegal configuration line '%1%' in '%2%'", line, path);
-
-            string name = tokens[0];
-
-            vector<string>::iterator i = tokens.begin();
-            advance(i, 2);
-
-            set(name, concatStringsSep(" ", Strings(i, tokens.end()))); // FIXME: slow
-        };
     } catch (SysError &) { }
 }

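
Aside (not part of the diff): extracting applyConfig() from applyConfigFile() means configuration text can now be parsed from a plain string, which is what the new gtest cases at the end of this commit rely on. A minimal usage sketch, assuming Nix's config.hh (Config, Setting, getSettings) as shown in the header and test hunks below, compiled inside the Nix tree:

    #include "config.hh"

    #include <cassert>
    #include <map>
    #include <string>

    using namespace nix;

    int main()
    {
        Config config;
        Setting<std::string> system{&config, "x86_64-linux", "system", "the current system"};

        // Parse settings from an in-memory string instead of a file on disk.
        config.applyConfig("system = aarch64-linux\n", "<test>");

        // Read the value back through getSettings(), as the new unit tests do.
        std::map<std::string, Config::SettingInfo> settings;
        config.getSettings(settings);
        assert(settings.at("system").value == "aarch64-linux");
        return 0;
    }
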
@@ -7,6 +7,38 @@

 namespace nix {

+/**
+ * The Config class provides Nix runtime configurations.
+ *
+ * What is a Configuration?
+ *   A collection of uniquely named Settings.
+ *
+ * What is a Setting?
+ *   Each property that you can set in a configuration corresponds to a
+ *   `Setting`. A setting records value and description of a property
+ *   with a default and optional aliases.
+ *
+ * A valid configuration consists of settings that are registered to a
+ * `Config` object instance:
+ *
+ *   Config config;
+ *   Setting<std::string> systemSetting{&config, "x86_64-linux", "system", "the current system"};
+ *
+ * The above creates a `Config` object and registers a setting called "system"
+ * via the variable `systemSetting` with it. The setting defaults to the string
+ * "x86_64-linux", it's description is "the current system". All of the
+ * registered settings can then be accessed as shown below:
+ *
+ *   std::map<std::string, Config::SettingInfo> settings;
+ *   config.getSettings(settings);
+ *   config["system"].description == "the current system"
+ *   config["system"].value == "x86_64-linux"
+ *
+ *
+ * The above retrieves all currently known settings from the `Config` object
+ * and adds them to the `settings` map.
+ */
+
 class Args;
 class AbstractSetting;
 class JSONPlaceholder;

@@ -23,6 +55,10 @@ protected:

 public:

+    /**
+     * Sets the value referenced by `name` to `value`. Returns true if the
+     * setting is known, false otherwise.
+     */
     virtual bool set(const std::string & name, const std::string & value) = 0;

     struct SettingInfo

@@ -31,18 +67,52 @@ public:
         std::string description;
     };

+    /**
+     * Adds the currently known settings to the given result map `res`.
+     * - res: map to store settings in
+     * - overridenOnly: when set to true only overridden settings will be added to `res`
+     */
     virtual void getSettings(std::map<std::string, SettingInfo> & res, bool overridenOnly = false) = 0;

+    /**
+     * Parses the configuration in `contents` and applies it
+     * - contents: configuration contents to be parsed and applied
+     * - path: location of the configuration file
+     */
+    void applyConfig(const std::string & contents, const std::string & path = "<unknown>");
+
+    /**
+     * Applies a nix configuration file
+     * - path: the location of the config file to apply
+     */
     void applyConfigFile(const Path & path);

+    /**
+     * Resets the `overridden` flag of all Settings
+     */
     virtual void resetOverriden() = 0;

+    /**
+     * Outputs all settings to JSON
+     * - out: JSONObject to write the configuration to
+     */
     virtual void toJSON(JSONObject & out) = 0;

+    /**
+     * Converts settings to `Args` to be used on the command line interface
+     * - args: args to write to
+     * - category: category of the settings
+     */
     virtual void convertToArgs(Args & args, const std::string & category) = 0;

+    /**
+     * Logs a warning for each unregistered setting
+     */
     void warnUnknownSettings();

+    /**
+     * Re-applies all previously attempted changes to unknown settings
+     */
     void reapplyUnknownSettings();
 };

@@ -1,5 +1,6 @@
 #pragma once

+#include <cassert>
 #include <map>
 #include <list>
 #include <optional>


src/libutil/tests/config.cc (new file, 264 lines)
@@ -0,0 +1,264 @@
#include "json.hh"
#include "config.hh"
#include "args.hh"

#include <sstream>
#include <gtest/gtest.h>

namespace nix {

    /* ----------------------------------------------------------------------------
     * Config
     * --------------------------------------------------------------------------*/

    TEST(Config, setUndefinedSetting) {
        Config config;
        ASSERT_EQ(config.set("undefined-key", "value"), false);
    }

    TEST(Config, setDefinedSetting) {
        Config config;
        std::string value;
        Setting<std::string> foo{&config, value, "name-of-the-setting", "description"};
        ASSERT_EQ(config.set("name-of-the-setting", "value"), true);
    }

    TEST(Config, getDefinedSetting) {
        Config config;
        std::string value;
        std::map<std::string, Config::SettingInfo> settings;
        Setting<std::string> foo{&config, value, "name-of-the-setting", "description"};

        config.getSettings(settings, /* overridenOnly = */ false);
        const auto iter = settings.find("name-of-the-setting");
        ASSERT_NE(iter, settings.end());
        ASSERT_EQ(iter->second.value, "");
        ASSERT_EQ(iter->second.description, "description");
    }

    TEST(Config, getDefinedOverridenSettingNotSet) {
        Config config;
        std::string value;
        std::map<std::string, Config::SettingInfo> settings;
        Setting<std::string> foo{&config, value, "name-of-the-setting", "description"};

        config.getSettings(settings, /* overridenOnly = */ true);
        const auto e = settings.find("name-of-the-setting");
        ASSERT_EQ(e, settings.end());
    }

    TEST(Config, getDefinedSettingSet1) {
        Config config;
        std::string value;
        std::map<std::string, Config::SettingInfo> settings;
        Setting<std::string> setting{&config, value, "name-of-the-setting", "description"};

        setting.assign("value");

        config.getSettings(settings, /* overridenOnly = */ false);
        const auto iter = settings.find("name-of-the-setting");
        ASSERT_NE(iter, settings.end());
        ASSERT_EQ(iter->second.value, "value");
        ASSERT_EQ(iter->second.description, "description");
    }

    TEST(Config, getDefinedSettingSet2) {
        Config config;
        std::map<std::string, Config::SettingInfo> settings;
        Setting<std::string> setting{&config, "", "name-of-the-setting", "description"};

        ASSERT_TRUE(config.set("name-of-the-setting", "value"));

        config.getSettings(settings, /* overridenOnly = */ false);
        const auto e = settings.find("name-of-the-setting");
        ASSERT_NE(e, settings.end());
        ASSERT_EQ(e->second.value, "value");
        ASSERT_EQ(e->second.description, "description");
    }

    TEST(Config, addSetting) {
        class TestSetting : public AbstractSetting {
            public:
            TestSetting() : AbstractSetting("test", "test", {}) {}
            void set(const std::string & value) {}
            std::string to_string() const { return {}; }
        };

        Config config;
        TestSetting setting;

        ASSERT_FALSE(config.set("test", "value"));
        config.addSetting(&setting);
        ASSERT_TRUE(config.set("test", "value"));
    }

    TEST(Config, withInitialValue) {
        const StringMap initials = {
            { "key", "value" },
        };
        Config config(initials);

        {
            std::map<std::string, Config::SettingInfo> settings;
            config.getSettings(settings, /* overridenOnly = */ false);
            ASSERT_EQ(settings.find("key"), settings.end());
        }

        Setting<std::string> setting{&config, "default-value", "key", "description"};

        {
            std::map<std::string, Config::SettingInfo> settings;
            config.getSettings(settings, /* overridenOnly = */ false);
            ASSERT_EQ(settings["key"].value, "value");
        }
    }

    TEST(Config, resetOverriden) {
        Config config;
        config.resetOverriden();
    }

    TEST(Config, resetOverridenWithSetting) {
        Config config;
        Setting<std::string> setting{&config, "", "name-of-the-setting", "description"};

        {
            std::map<std::string, Config::SettingInfo> settings;

            setting.set("foo");
            ASSERT_EQ(setting.get(), "foo");
            config.getSettings(settings, /* overridenOnly = */ true);
            ASSERT_TRUE(settings.empty());
        }

        {
            std::map<std::string, Config::SettingInfo> settings;

            setting.override("bar");
            ASSERT_TRUE(setting.overriden);
            ASSERT_EQ(setting.get(), "bar");
            config.getSettings(settings, /* overridenOnly = */ true);
            ASSERT_FALSE(settings.empty());
        }

        {
            std::map<std::string, Config::SettingInfo> settings;

            config.resetOverriden();
            ASSERT_FALSE(setting.overriden);
            config.getSettings(settings, /* overridenOnly = */ true);
            ASSERT_TRUE(settings.empty());
        }
    }

    TEST(Config, toJSONOnEmptyConfig) {
        std::stringstream out;
        { // Scoped to force the destructor of JSONObject to write the final `}`
            JSONObject obj(out);
            Config config;
            config.toJSON(obj);
        }

        ASSERT_EQ(out.str(), "{}");
    }

    TEST(Config, toJSONOnNonEmptyConfig) {
        std::stringstream out;
        { // Scoped to force the destructor of JSONObject to write the final `}`
            JSONObject obj(out);

            Config config;
            std::map<std::string, Config::SettingInfo> settings;
            Setting<std::string> setting{&config, "", "name-of-the-setting", "description"};
            setting.assign("value");

            config.toJSON(obj);
        }
        ASSERT_EQ(out.str(), R"#({"name-of-the-setting":{"description":"description","value":"value"}})#");
    }

    TEST(Config, setSettingAlias) {
        Config config;
        Setting<std::string> setting{&config, "", "some-int", "best number", { "another-int" }};
        ASSERT_TRUE(config.set("some-int", "1"));
        ASSERT_EQ(setting.get(), "1");
        ASSERT_TRUE(config.set("another-int", "2"));
        ASSERT_EQ(setting.get(), "2");
        ASSERT_TRUE(config.set("some-int", "3"));
        ASSERT_EQ(setting.get(), "3");
    }

    /* FIXME: The reapplyUnknownSettings method doesn't seem to do anything
     * useful (these days). Whenever we add a new setting to Config the
     * unknown settings are always considered. In which case is this function
     * actually useful? Is there some way to register a Setting without calling
     * addSetting? */
    TEST(Config, DISABLED_reapplyUnknownSettings) {
        Config config;
        ASSERT_FALSE(config.set("name-of-the-setting", "unknownvalue"));
        Setting<std::string> setting{&config, "default", "name-of-the-setting", "description"};
        ASSERT_EQ(setting.get(), "default");
        config.reapplyUnknownSettings();
        ASSERT_EQ(setting.get(), "unknownvalue");
    }

    TEST(Config, applyConfigEmpty) {
        Config config;
        std::map<std::string, Config::SettingInfo> settings;
        config.applyConfig("");
        config.getSettings(settings);
        ASSERT_TRUE(settings.empty());
    }

    TEST(Config, applyConfigEmptyWithComment) {
        Config config;
        std::map<std::string, Config::SettingInfo> settings;
        config.applyConfig("# just a comment");
        config.getSettings(settings);
        ASSERT_TRUE(settings.empty());
    }

    TEST(Config, applyConfigAssignment) {
        Config config;
        std::map<std::string, Config::SettingInfo> settings;
        Setting<std::string> setting{&config, "", "name-of-the-setting", "description"};
        config.applyConfig(
            "name-of-the-setting = value-from-file #useful comment\n"
            "# name-of-the-setting = foo\n"
        );
        config.getSettings(settings);
        ASSERT_FALSE(settings.empty());
        ASSERT_EQ(settings["name-of-the-setting"].value, "value-from-file");
    }

    TEST(Config, applyConfigWithReassignedSetting) {
        Config config;
        std::map<std::string, Config::SettingInfo> settings;
        Setting<std::string> setting{&config, "", "name-of-the-setting", "description"};
        config.applyConfig(
            "name-of-the-setting = first-value\n"
            "name-of-the-setting = second-value\n"
        );
        config.getSettings(settings);
        ASSERT_FALSE(settings.empty());
        ASSERT_EQ(settings["name-of-the-setting"].value, "second-value");
    }

    TEST(Config, applyConfigFailsOnMissingIncludes) {
        Config config;
        std::map<std::string, Config::SettingInfo> settings;
        Setting<std::string> setting{&config, "", "name-of-the-setting", "description"};

        ASSERT_THROW(config.applyConfig(
            "name-of-the-setting = value-from-file\n"
            "# name-of-the-setting = foo\n"
            "include /nix/store/does/not/exist.nix"
        ), Error);
    }

    TEST(Config, applyConfigInvalidThrows) {
        Config config;
        ASSERT_THROW(config.applyConfig("value == key"), UsageError);
        ASSERT_THROW(config.applyConfig("value "), UsageError);
    }
}

src/libutil/tests/lru-cache.cc (new file, 130 lines)
@@ -0,0 +1,130 @@
#include "lru-cache.hh"
#include <gtest/gtest.h>

namespace nix {

    /* ----------------------------------------------------------------------------
     * size
     * --------------------------------------------------------------------------*/

    TEST(LRUCache, sizeOfEmptyCacheIsZero) {
        LRUCache<std::string, std::string> c(10);
        ASSERT_EQ(c.size(), 0);
    }

    TEST(LRUCache, sizeOfSingleElementCacheIsOne) {
        LRUCache<std::string, std::string> c(10);
        c.upsert("foo", "bar");
        ASSERT_EQ(c.size(), 1);
    }

    /* ----------------------------------------------------------------------------
     * upsert / get
     * --------------------------------------------------------------------------*/

    TEST(LRUCache, getFromEmptyCache) {
        LRUCache<std::string, std::string> c(10);
        auto val = c.get("x");
        ASSERT_EQ(val.has_value(), false);
    }

    TEST(LRUCache, getExistingValue) {
        LRUCache<std::string, std::string> c(10);
        c.upsert("foo", "bar");
        auto val = c.get("foo");
        ASSERT_EQ(val, "bar");
    }

    TEST(LRUCache, getNonExistingValueFromNonEmptyCache) {
        LRUCache<std::string, std::string> c(10);
        c.upsert("foo", "bar");
        auto val = c.get("another");
        ASSERT_EQ(val.has_value(), false);
    }

    TEST(LRUCache, upsertOnZeroCapacityCache) {
        LRUCache<std::string, std::string> c(0);
        c.upsert("foo", "bar");
        auto val = c.get("foo");
        ASSERT_EQ(val.has_value(), false);
    }

    TEST(LRUCache, updateExistingValue) {
        LRUCache<std::string, std::string> c(1);
        c.upsert("foo", "bar");

        auto val = c.get("foo");
        ASSERT_EQ(val.value_or("error"), "bar");
        ASSERT_EQ(c.size(), 1);

        c.upsert("foo", "changed");
        val = c.get("foo");
        ASSERT_EQ(val.value_or("error"), "changed");
        ASSERT_EQ(c.size(), 1);
    }

    TEST(LRUCache, overwriteOldestWhenCapacityIsReached) {
        LRUCache<std::string, std::string> c(3);
        c.upsert("one", "eins");
        c.upsert("two", "zwei");
        c.upsert("three", "drei");

        ASSERT_EQ(c.size(), 3);
        ASSERT_EQ(c.get("one").value_or("error"), "eins");

        // exceed capacity
        c.upsert("another", "whatever");

        ASSERT_EQ(c.size(), 3);
        // Retrieving "one" makes it the most recent element thus
        // two will be the oldest one and thus replaced.
        ASSERT_EQ(c.get("two").has_value(), false);
        ASSERT_EQ(c.get("another").value(), "whatever");
    }

    /* ----------------------------------------------------------------------------
     * clear
     * --------------------------------------------------------------------------*/

    TEST(LRUCache, clearEmptyCache) {
        LRUCache<std::string, std::string> c(10);
        c.clear();
        ASSERT_EQ(c.size(), 0);
    }

    TEST(LRUCache, clearNonEmptyCache) {
        LRUCache<std::string, std::string> c(10);
        c.upsert("one", "eins");
        c.upsert("two", "zwei");
        c.upsert("three", "drei");
        ASSERT_EQ(c.size(), 3);
        c.clear();
        ASSERT_EQ(c.size(), 0);
    }

    /* ----------------------------------------------------------------------------
     * erase
     * --------------------------------------------------------------------------*/

    TEST(LRUCache, eraseFromEmptyCache) {
        LRUCache<std::string, std::string> c(10);
        ASSERT_EQ(c.erase("foo"), false);
        ASSERT_EQ(c.size(), 0);
    }

    TEST(LRUCache, eraseMissingFromNonEmptyCache) {
        LRUCache<std::string, std::string> c(10);
        c.upsert("one", "eins");
        ASSERT_EQ(c.erase("foo"), false);
        ASSERT_EQ(c.size(), 1);
        ASSERT_EQ(c.get("one").value_or("error"), "eins");
    }

    TEST(LRUCache, eraseFromNonEmptyCache) {
        LRUCache<std::string, std::string> c(10);
        c.upsert("one", "eins");
        ASSERT_EQ(c.erase("one"), true);
        ASSERT_EQ(c.size(), 0);
        ASSERT_EQ(c.get("one").value_or("empty"), "empty");
    }
}
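
The tests above pin down the cache's observable behaviour. The interface they assume, inferred purely from the calls (treat it as a sketch rather than the exact declaration in lru-cache.hh), is roughly:

    template<typename Key, typename Value>
    class LRUCache
    {
    public:
        LRUCache(size_t capacity);
        void upsert(const Key & key, const Value & value); // insert or overwrite; evicts the least recently used entry at capacity
        std::optional<Value> get(const Key & key);         // nullopt on miss; a hit marks the entry as most recently used
        bool erase(const Key & key);                       // true if something was removed
        void clear();
        size_t size() const;
    };
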

@@ -4,6 +4,7 @@
 namespace nix {

 std::regex refRegex(refRegexS, std::regex::ECMAScript);
+std::regex badGitRefRegex(badGitRefRegexS, std::regex::ECMAScript);
 std::regex revRegex(revRegexS, std::regex::ECMAScript);
 std::regex flakeIdRegex(flakeIdRegexS, std::regex::ECMAScript);

@@ -49,6 +49,12 @@ const static std::string pathRegex = "(?:" + segmentRegex + "(?:/" + segmentRege
 const static std::string refRegexS = "[a-zA-Z0-9][a-zA-Z0-9_.-]*"; // FIXME: check
 extern std::regex refRegex;

+// Instead of defining what a good Git Ref is, we define what a bad Git Ref is
+// This is because of the definition of a ref in refs.c in https://github.com/git/git
+// See tests/fetchGitRefs.sh for the full definition
+const static std::string badGitRefRegexS = "//|^[./]|/\\.|\\.\\.|[[:cntrl:][:space:]:?^~\[]|\\\\|\\*|\\.lock$|\\.lock/|@\\{|[/.]$|^@$|^$";
+extern std::regex badGitRefRegex;
+
 // A Git revision (a SHA-1 commit hash).
 const static std::string revRegexS = "[0-9a-fA-F]{40}";
 extern std::regex revRegex;
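
As the comment notes, the regex blacklists bad refs instead of whitelisting good ones. A caller-side sketch of how the new `badGitRefRegex` might be applied (illustrative only; the helper name and error handling here are hypothetical, and the actual check lives in the Git fetcher, exercised by tests/fetchGitRefs.sh below):

    #include <regex>

    // Hypothetical helper: reject a user-supplied ref before handing it to git.
    void checkGitRef(const std::string & ref)
    {
        if (std::regex_search(ref, badGitRefRegex))
            throw Error("invalid Git branch/tag name '" + ref + "'");
    }
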

@@ -159,7 +159,8 @@ static int _main(int argc, char * * argv)
         std::optional<StorePath> storePath;
         if (args.size() == 2) {
             expectedHash = Hash(args[1], ht);
-            storePath = store->makeFixedOutputPath(unpack, expectedHash, name);
+            const auto recursive = unpack ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat;
+            storePath = store->makeFixedOutputPath(recursive, expectedHash, name);
             if (store->isValidPath(*storePath))
                 hash = expectedHash;
             else

@@ -208,13 +209,15 @@ static int _main(int argc, char * * argv)
         if (expectedHash != Hash(ht) && expectedHash != hash)
             throw Error(format("hash mismatch for '%1%'") % uri);

+        const auto recursive = unpack ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat;
+
         /* Copy the file to the Nix store. FIXME: if RemoteStore
            implemented addToStoreFromDump() and downloadFile()
            supported a sink, we could stream the download directly
            into the Nix store. */
-        storePath = store->addToStore(name, tmpFile, unpack, ht);
+        storePath = store->addToStore(name, tmpFile, recursive, ht);

-        assert(*storePath == store->makeFixedOutputPath(unpack, hash, name));
+        assert(*storePath == store->makeFixedOutputPath(recursive, hash, name));
     }

     stopProgressBar();
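
These hunks, like the ones that follow, replace the old boolean `recursive`/`unpack` flag with the `FileIngestionMethod` enum. The enum itself is defined in libstore rather than in this excerpt; presumably it looks something like the following sketch (not shown in this diff):

    // Presumed shape of the enum the boolean flag is migrated to: Flat hashes the
    // file contents as-is, Recursive hashes the NAR serialisation of the path.
    enum struct FileIngestionMethod : uint8_t {
        Flat = false,
        Recursive = true,
    };
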

@@ -174,10 +174,10 @@ static void opAdd(Strings opFlags, Strings opArgs)
    store. */
 static void opAddFixed(Strings opFlags, Strings opArgs)
 {
-    bool recursive = false;
+    auto recursive = FileIngestionMethod::Flat;

     for (auto & i : opFlags)
-        if (i == "--recursive") recursive = true;
+        if (i == "--recursive") recursive = FileIngestionMethod::Recursive;
         else throw UsageError(format("unknown flag '%1%'") % i);

     if (opArgs.empty())

@@ -194,10 +194,10 @@ static void opAddFixed(Strings opFlags, Strings opArgs)
 /* Hack to support caching in `nix-prefetch-url'. */
 static void opPrintFixedPath(Strings opFlags, Strings opArgs)
 {
-    bool recursive = false;
+    auto recursive = FileIngestionMethod::Flat;

     for (auto i : opFlags)
-        if (i == "--recursive") recursive = true;
+        if (i == "--recursive") recursive = FileIngestionMethod::Recursive;
         else throw UsageError(format("unknown flag '%1%'") % i);

     if (opArgs.size() != 3)

@@ -45,13 +45,15 @@ struct CmdAddToStore : MixDryRun, StoreCommand

         auto narHash = hashString(htSHA256, *sink.s);

-        ValidPathInfo info(store->makeFixedOutputPath(true, narHash, *namePart));
+        ValidPathInfo info(store->makeFixedOutputPath(FileIngestionMethod::Recursive, narHash, *namePart));
         info.narHash = narHash;
         info.narSize = sink.s->size();
-        info.ca = makeFixedOutputCA(true, info.narHash);
+        info.ca = makeFixedOutputCA(FileIngestionMethod::Recursive, info.narHash);

-        if (!dryRun)
-            store->addToStore(info, sink.s);
+        if (!dryRun) {
+            auto source = StringSource { *sink.s };
+            store->addToStore(info, source);
+        }

         logger->stdout("%s", store->printStorePath(info.path));
     }

@@ -9,15 +9,14 @@ using namespace nix;

 struct CmdHash : Command
 {
-    enum Mode { mFile, mPath };
-    Mode mode;
+    FileIngestionMethod mode;
     Base base = SRI;
     bool truncate = false;
     HashType ht = htSHA256;
     std::vector<std::string> paths;
     std::optional<std::string> modulus;

-    CmdHash(Mode mode) : mode(mode)
+    CmdHash(FileIngestionMethod mode) : mode(mode)
     {
         mkFlag(0, "sri", "print hash in SRI format", &base, SRI);
         mkFlag(0, "base64", "print hash in base-64", &base, Base64);

@@ -40,9 +39,14 @@ struct CmdHash

     std::string description() override
     {
-        return mode == mFile
-            ? "print cryptographic hash of a regular file"
-            : "print cryptographic hash of the NAR serialisation of a path";
+        const char* d;
+        switch (mode) {
+        case FileIngestionMethod::Flat:
+            d = "print cryptographic hash of a regular file";
+        case FileIngestionMethod::Recursive:
+            d = "print cryptographic hash of the NAR serialisation of a path";
+        };
+        return d;
     }

     Category category() override { return catUtility; }

@@ -57,10 +61,14 @@ struct CmdHash
         else
             hashSink = std::make_unique<HashSink>(ht);

-        if (mode == mFile)
+        switch (mode) {
+        case FileIngestionMethod::Flat:
             readFile(path, *hashSink);
-        else
+            break;
+        case FileIngestionMethod::Recursive:
             dumpPath(path, *hashSink);
+            break;
+        }

         Hash h = hashSink->finish().first;
         if (truncate && h.hashSize > 20) h = compressHash(h, 20);

@@ -69,8 +77,8 @@ struct CmdHash
     }
 };

-static RegisterCommand r1("hash-file", [](){ return make_ref<CmdHash>(CmdHash::mFile); });
-static RegisterCommand r2("hash-path", [](){ return make_ref<CmdHash>(CmdHash::mPath); });
+static RegisterCommand r1("hash-file", [](){ return make_ref<CmdHash>(FileIngestionMethod::Flat); });
+static RegisterCommand r2("hash-path", [](){ return make_ref<CmdHash>(FileIngestionMethod::Recursive); });

 struct CmdToBase : Command
 {

@@ -141,7 +149,7 @@ static int compatNixHash(int argc, char * * argv)
     });

     if (op == opHash) {
-        CmdHash cmd(flat ? CmdHash::mFile : CmdHash::mPath);
+        CmdHash cmd(flat ? FileIngestionMethod::Flat : FileIngestionMethod::Recursive);
         cmd.ht = ht;
         cmd.base = base32 ? Base32 : Base16;
         cmd.truncate = truncate;

@@ -77,12 +77,12 @@ struct CmdMakeContentAddressable : StorePathsCommand, MixJSON

         auto narHash = hashModuloSink.finish().first;

-        ValidPathInfo info(store->makeFixedOutputPath(true, narHash, path.name(), references, hasSelfReference));
+        ValidPathInfo info(store->makeFixedOutputPath(FileIngestionMethod::Recursive, narHash, path.name(), references, hasSelfReference));
         info.references = std::move(references);
         if (hasSelfReference) info.references.insert(info.path.clone());
         info.narHash = narHash;
         info.narSize = sink.s->size();
-        info.ca = makeFixedOutputCA(true, info.narHash);
+        info.ca = makeFixedOutputCA(FileIngestionMethod::Recursive, info.narHash);

         if (!json)
             printError("rewrote '%s' to '%s'", pathS, store->printStorePath(info.path));

@@ -128,13 +128,14 @@ struct ProfileManifest

         auto narHash = hashString(htSHA256, *sink.s);

-        ValidPathInfo info(store->makeFixedOutputPath(true, narHash, "profile", references));
+        ValidPathInfo info(store->makeFixedOutputPath(FileIngestionMethod::Recursive, narHash, "profile", references));
         info.references = std::move(references);
         info.narHash = narHash;
         info.narSize = sink.s->size();
-        info.ca = makeFixedOutputCA(true, info.narHash);
+        info.ca = makeFixedOutputCA(FileIngestionMethod::Recursive, info.narHash);

-        store->addToStore(info, sink.s);
+        auto source = StringSource { *sink.s };
+        store->addToStore(info, source);

         return std::move(info.path);
     }

tests/fetchGitRefs.sh (new file, 111 lines)
@@ -0,0 +1,111 @@
source common.sh

if [[ -z $(type -p git) ]]; then
    echo "Git not installed; skipping Git tests"
    exit 99
fi

clearStore

repo="$TEST_ROOT/git"

rm -rf "$repo" "${repo}-tmp" "$TEST_HOME/.cache/nix"

git init "$repo"
git -C "$repo" config user.email "foobar@example.com"
git -C "$repo" config user.name "Foobar"

echo utrecht > "$repo"/hello
git -C "$repo" add hello
git -C "$repo" commit -m 'Bla1'

path=$(nix eval --raw --impure --expr "(builtins.fetchGit { url = $repo; ref = \"master\"; }).outPath")

# Test various combinations of ref names
# (taken from the git project)

# git help check-ref-format
# Git imposes the following rules on how references are named:
#
# 1. They can include slash / for hierarchical (directory) grouping, but no slash-separated component can begin with a dot . or end with the sequence .lock.
# 2. They must contain at least one /. This enforces the presence of a category like heads/, tags/ etc. but the actual names are not restricted. If the --allow-onelevel option is used, this rule is waived.
# 3. They cannot have two consecutive dots .. anywhere.
# 4. They cannot have ASCII control characters (i.e. bytes whose values are lower than \040, or \177 DEL), space, tilde ~, caret ^, or colon : anywhere.
# 5. They cannot have question-mark ?, asterisk *, or open bracket [ anywhere. See the --refspec-pattern option below for an exception to this rule.
# 6. They cannot begin or end with a slash / or contain multiple consecutive slashes (see the --normalize option below for an exception to this rule)
# 7. They cannot end with a dot ..
# 8. They cannot contain a sequence @{.
# 9. They cannot be the single character @.
# 10. They cannot contain a \.

valid_ref() {
    { set +x; printf >&2 '\n>>>>>>>>>> valid_ref %s\b <<<<<<<<<<\n' $(printf %s "$1" | sed -n -e l); set -x; }
    git check-ref-format --branch "$1" >/dev/null
    git -C "$repo" branch "$1" master >/dev/null
    path1=$(nix eval --raw --impure --expr "(builtins.fetchGit { url = $repo; ref = ''$1''; }).outPath")
    [[ $path1 = $path ]]
    git -C "$repo" branch -D "$1" >/dev/null
}

invalid_ref() {
    { set +x; printf >&2 '\n>>>>>>>>>> invalid_ref %s\b <<<<<<<<<<\n' $(printf %s "$1" | sed -n -e l); set -x; }
    # special case for a sole @:
    # --branch @ will try to interpret @ as a branch reference and not fail. Thus we need --allow-onelevel
    if [ "$1" = "@" ]; then
        (! git check-ref-format --allow-onelevel "$1" >/dev/null 2>&1)
    else
        (! git check-ref-format --branch "$1" >/dev/null 2>&1)
    fi
    nix --debug eval --raw --impure --expr "(builtins.fetchGit { url = $repo; ref = ''$1''; }).outPath" 2>&1 | grep 'error: invalid Git branch/tag name' >/dev/null
}


valid_ref 'foox'
valid_ref '1337'
valid_ref 'foo.baz'
valid_ref 'foo/bar/baz'
valid_ref 'foo./bar'
valid_ref 'heads/foo@bar'
valid_ref "$(printf 'heads/fu\303\237')"
valid_ref 'foo-bar-baz'
valid_ref '$1'
valid_ref 'foo.locke'

invalid_ref 'refs///heads/foo'
invalid_ref 'heads/foo/'
invalid_ref '///heads/foo'
invalid_ref '.foo'
invalid_ref './foo'
invalid_ref './foo/bar'
invalid_ref 'foo/./bar'
invalid_ref 'foo/bar/.'
invalid_ref 'foo bar'
invalid_ref 'foo?bar'
invalid_ref 'foo^bar'
invalid_ref 'foo~bar'
invalid_ref 'foo:bar'
invalid_ref 'foo[bar'
invalid_ref 'foo/bar/.'
invalid_ref '.refs/foo'
invalid_ref 'refs/heads/foo.'
invalid_ref 'heads/foo..bar'
invalid_ref 'heads/foo?bar'
invalid_ref 'heads/foo.lock'
invalid_ref 'heads///foo.lock'
invalid_ref 'foo.lock/bar'
invalid_ref 'foo.lock///bar'
invalid_ref 'heads/v@{ation'
invalid_ref 'heads/foo\.ar' # should fail due to \
invalid_ref 'heads/foo\bar' # should fail due to \
invalid_ref "$(printf 'heads/foo\t')" # should fail because it has a TAB
invalid_ref "$(printf 'heads/foo\177')"
invalid_ref '@'

invalid_ref 'foo/*'
invalid_ref '*/foo'
invalid_ref 'foo/*/bar'
invalid_ref '*'
invalid_ref 'foo/*/*'
invalid_ref '*/foo/*'
invalid_ref '/foo'
invalid_ref ''

@@ -18,6 +18,7 @@ nix_tests = \
   nar-access.sh \
   structured-attrs.sh \
   fetchGit.sh \
+  fetchGitRefs.sh \
   fetchGitSubmodules.sh \
   fetchMercurial.sh \
   signing.sh \