forked from lix-project/lix
Revert the enum struct change
Not a regular git revert as there have been many merges and things.
parent bbbf3602a3
commit 15abb2aa2b
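For readers skimming the diff: the change being reverted had renamed several plain C-style enums to scoped enums, so the revert mostly swaps spellings such as Base::Base32 back to Base32, Verbosity::Talkative back to lvlTalkative, ActivityType::Build back to actBuild, and HashType::SHA256 back to htSHA256. A minimal illustrative sketch of the two styles (not taken from the Lix sources; the namespace wrappers exist only so both variants can coexist in one example):

    // Scoped style removed by this commit: enumerators must be qualified.
    namespace scoped {
        enum class Base { Base16, Base32, SRI };
        constexpr Base b = Base::Base32;
    }

    // Unscoped style restored by this commit: enumerators are visible directly.
    namespace unscoped {
        enum Base { Base16, Base32, SRI };
        constexpr Base b = Base32;
    }

The same mechanical rename applies throughout the hunks below.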
@@ -106,7 +106,7 @@ SV * queryPathInfo(char * path, int base32)
 XPUSHs(&PL_sv_undef);
 else
 XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(*info->deriver).c_str(), 0)));
-auto s = info->narHash.to_string(base32 ? Base::Base32 : Base::Base16, true);
+auto s = info->narHash.to_string(base32 ? Base32 : Base16, true);
 XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
 mXPUSHi(info->registrationTime);
 mXPUSHi(info->narSize);
@@ -192,7 +192,7 @@ SV * hashPath(char * algo, int base32, char * path)
 PPCODE:
 try {
 Hash h = hashPath(parseHashType(algo), path).first;
-auto s = h.to_string(base32 ? Base::Base32 : Base::Base16, false);
+auto s = h.to_string(base32 ? Base32 : Base16, false);
 XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
 } catch (Error & e) {
 croak("%s", e.what());
@@ -203,7 +203,7 @@ SV * hashFile(char * algo, int base32, char * path)
 PPCODE:
 try {
 Hash h = hashFile(parseHashType(algo), path);
-auto s = h.to_string(base32 ? Base::Base32 : Base::Base16, false);
+auto s = h.to_string(base32 ? Base32 : Base16, false);
 XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
 } catch (Error & e) {
 croak("%s", e.what());
@@ -214,7 +214,7 @@ SV * hashString(char * algo, int base32, char * s)
 PPCODE:
 try {
 Hash h = hashString(parseHashType(algo), s);
-auto s = h.to_string(base32 ? Base::Base32 : Base::Base16, false);
+auto s = h.to_string(base32 ? Base32 : Base16, false);
 XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
 } catch (Error & e) {
 croak("%s", e.what());
@@ -225,7 +225,7 @@ SV * convertHash(char * algo, char * s, int toBase32)
 PPCODE:
 try {
 Hash h(s, parseHashType(algo));
-string s = h.to_string(toBase32 ? Base::Base32 : Base::Base16, false);
+string s = h.to_string(toBase32 ? Base32 : Base16, false);
 XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
 } catch (Error & e) {
 croak("%s", e.what());
@@ -184,7 +184,7 @@ static int _main(int argc, char * * argv)

 try {

-Activity act(*logger, Verbosity::Talkative, ActivityType::Unknown, fmt("connecting to '%s'", bestMachine->storeUri));
+Activity act(*logger, lvlTalkative, actUnknown, fmt("connecting to '%s'", bestMachine->storeUri));

 Store::Params storeParams;
 if (hasPrefix(bestMachine->storeUri, "ssh://")) {
@@ -225,7 +225,7 @@ connected:
 AutoCloseFD uploadLock = openLockFile(currentLoad + "/" + escapeUri(storeUri) + ".upload-lock", true);

 {
-Activity act(*logger, Verbosity::Talkative, ActivityType::Unknown, fmt("waiting for the upload lock to '%s'", storeUri));
+Activity act(*logger, lvlTalkative, actUnknown, fmt("waiting for the upload lock to '%s'", storeUri));

 auto old = signal(SIGALRM, handleAlarm);
 alarm(15 * 60);
@@ -238,7 +238,7 @@ connected:
 auto substitute = settings.buildersUseSubstitutes ? Substitute : NoSubstitute;

 {
-Activity act(*logger, Verbosity::Talkative, ActivityType::Unknown, fmt("copying dependencies to '%s'", storeUri));
+Activity act(*logger, lvlTalkative, actUnknown, fmt("copying dependencies to '%s'", storeUri));
 copyPaths(store, ref<Store>(sshStore), store->parseStorePathSet(inputs), NoRepair, NoCheckSigs, substitute);
 }

@@ -257,7 +257,7 @@ connected:
 if (!store->isValidPath(store->parseStorePath(path))) missing.insert(store->parseStorePath(path));

 if (!missing.empty()) {
-Activity act(*logger, Verbosity::Talkative, ActivityType::Unknown, fmt("copying outputs from '%s'", storeUri));
+Activity act(*logger, lvlTalkative, actUnknown, fmt("copying outputs from '%s'", storeUri));
 for (auto & i : missing)
 store->locksHeld.insert(store->printStorePath(i)); /* FIXME: ugly */
 copyPaths(ref<Store>(sshStore), store, missing, NoRepair, NoCheckSigs, NoSubstitute);
@@ -51,7 +51,7 @@ using string_to_base_map
 = std::unordered_map<std::string, std::shared_ptr<base>>;
 #endif

-// if defined, `base` will retain type information in form of an enum struct
+// if defined, `base` will retain type information in form of an enum class
 // such that static_cast can be used instead of dynamic_cast
 // #define CPPTOML_NO_RTTI

@@ -405,7 +405,7 @@ inline std::shared_ptr<table_array> make_table_array(bool is_inline = false);

 #if defined(CPPTOML_NO_RTTI)
 /// Base type used to store underlying data type explicitly if RTTI is disabled
-enum struct base_type
+enum class base_type
 {
 NONE,
 STRING,
@@ -2268,7 +2268,7 @@ class parser
 return key;
 }

-enum struct parse_type
+enum class parse_type
 {
 STRING = 1,
 LOCAL_TIME,
@@ -1712,10 +1712,10 @@ string EvalState::copyPathToStore(PathSet & context, const Path & path)
 else {
 auto p = settings.readOnlyMode
 ? store->computeStorePathForPath(std::string(baseNameOf(path)), checkSourcePath(path)).first
-: store->addToStore(std::string(baseNameOf(path)), checkSourcePath(path), FileIngestionMethod::Recursive, HashType::SHA256, defaultPathFilter, repair);
+: store->addToStore(std::string(baseNameOf(path)), checkSourcePath(path), FileIngestionMethod::Recursive, htSHA256, defaultPathFilter, repair);
 dstPath = store->printStorePath(p);
 srcToStore.insert_or_assign(path, std::move(p));
-printMsg(Verbosity::Chatty, "copied source '%1%' -> '%2%'", path, dstPath);
+printMsg(lvlChatty, "copied source '%1%' -> '%2%'", path, dstPath);
 }

 context.insert(dstPath);
@@ -6,13 +6,13 @@ namespace nix {
 FunctionCallTrace::FunctionCallTrace(const Pos & pos) : pos(pos) {
 auto duration = std::chrono::high_resolution_clock::now().time_since_epoch();
 auto ns = std::chrono::duration_cast<std::chrono::nanoseconds>(duration);
-printMsg(Verbosity::Info, "function-trace entered %1% at %2%", pos, ns.count());
+printMsg(lvlInfo, "function-trace entered %1% at %2%", pos, ns.count());
 }

 FunctionCallTrace::~FunctionCallTrace() {
 auto duration = std::chrono::high_resolution_clock::now().time_since_epoch();
 auto ns = std::chrono::duration_cast<std::chrono::nanoseconds>(duration);
-printMsg(Verbosity::Info, "function-trace exited %1% at %2%", pos, ns.count());
+printMsg(lvlInfo, "function-trace exited %1% at %2%", pos, ns.count());
 }

 }
@@ -643,7 +643,7 @@ Expr * EvalState::parseExprFromString(std::string_view s, const Path & basePath)

 Expr * EvalState::parseStdin()
 {
-//Activity act(*logger, Verbosity::Talkative, format("parsing standard input"));
+//Activity act(*logger, lvlTalkative, format("parsing standard input"));
 return parseExprFromString(drainFD(0), absPath("."));
 }

@@ -778,7 +778,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
 std::move(outPath),
 (ingestionMethod == FileIngestionMethod::Recursive ? "r:" : "")
 + printHashType(*h.type),
-h.to_string(Base::Base16, false),
+h.to_string(Base16, false),
 });
 }

@@ -809,7 +809,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
 auto drvPath = writeDerivation(state.store, drv, drvName, state.repair);
 auto drvPathS = state.store->printStorePath(drvPath);

-printMsg(Verbosity::Chatty, "instantiated '%1%' -> '%2%'", drvName, drvPathS);
+printMsg(lvlChatty, "instantiated '%1%' -> '%2%'", drvName, drvPathS);

 /* Optimisation, but required in read-only mode! because in that
 case we don't actually write store derivations, so we can't
@@ -1008,7 +1008,7 @@ static void prim_hashFile(EvalState & state, const Pos & pos, Value * * args, Va
 PathSet context; // discarded
 Path p = state.coerceToPath(pos, *args[1], context);

-mkString(v, hashFile(*ht, state.checkSourcePath(p)).to_string(Base::Base16, false), context);
+mkString(v, hashFile(*ht, state.checkSourcePath(p)).to_string(Base16, false), context);
 }

 /* Read a directory (without . or ..) */
@@ -1150,8 +1150,8 @@ static void addPath(EvalState & state, const Pos & pos, const string & name, con
 Path dstPath;
 if (!expectedHash || !state.store->isValidPath(*expectedStorePath)) {
 dstPath = state.store->printStorePath(settings.readOnlyMode
-? state.store->computeStorePathForPath(name, path, method, HashType::SHA256, filter).first
-: state.store->addToStore(name, path, method, HashType::SHA256, filter, state.repair));
+? state.store->computeStorePathForPath(name, path, method, htSHA256, filter).first
+: state.store->addToStore(name, path, method, htSHA256, filter, state.repair));
 if (expectedHash && expectedStorePath != state.store->parseStorePath(dstPath))
 throw Error("store path mismatch in (possibly filtered) path added from '%s'", path);
 } else
@@ -1210,7 +1210,7 @@ static void prim_path(EvalState & state, const Pos & pos, Value * * args, Value
 } else if (n == "recursive")
 method = FileIngestionMethod { state.forceBool(*attr.value, *attr.pos) };
 else if (n == "sha256")
-expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, *attr.pos), HashType::SHA256);
+expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, *attr.pos), htSHA256);
 else
 throw EvalError({
 .hint = hintfmt("unsupported argument '%1%' to 'addPath'", attr.name),
@@ -1945,7 +1945,7 @@ static void prim_hashString(EvalState & state, const Pos & pos, Value * * args,
 PathSet context; // discarded
 string s = state.forceString(*args[1], context, pos);

-mkString(v, hashString(*ht, s).to_string(Base::Base16, false), context);
+mkString(v, hashString(*ht, s).to_string(Base16, false), context);
 }

@@ -29,7 +29,7 @@ static void prim_fetchGit(EvalState & state, const Pos & pos, Value * * args, Va
 else if (n == "ref")
 ref = state.forceStringNoCtx(*attr.value, *attr.pos);
 else if (n == "rev")
-rev = Hash(state.forceStringNoCtx(*attr.value, *attr.pos), HashType::SHA1);
+rev = Hash(state.forceStringNoCtx(*attr.value, *attr.pos), htSHA1);
 else if (n == "name")
 name = state.forceStringNoCtx(*attr.value, *attr.pos);
 else if (n == "submodules")
@@ -73,7 +73,7 @@ static void prim_fetchGit(EvalState & state, const Pos & pos, Value * * args, Va
 mkString(*state.allocAttr(v, state.sOutPath), storePath, PathSet({storePath}));
 // Backward compatibility: set 'rev' to
 // 0000000000000000000000000000000000000000 for a dirty tree.
-auto rev2 = input2->getRev().value_or(Hash(HashType::SHA1));
+auto rev2 = input2->getRev().value_or(Hash(htSHA1));
 mkString(*state.allocAttr(v, state.symbols.create("rev")), rev2.gitRev());
 mkString(*state.allocAttr(v, state.symbols.create("shortRev")), rev2.gitShortRev());
 // Backward compatibility: set 'revCount' to 0 for a dirty tree.
@@ -31,7 +31,7 @@ static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * ar
 // be both a revision or a branch/tag name.
 auto value = state.forceStringNoCtx(*attr.value, *attr.pos);
 if (std::regex_match(value, revRegex))
-rev = Hash(value, HashType::SHA1);
+rev = Hash(value, htSHA1);
 else
 ref = value;
 }
@@ -77,7 +77,7 @@ static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * ar
 mkString(*state.allocAttr(v, state.symbols.create("branch")), *input2->getRef());
 // Backward compatibility: set 'rev' to
 // 0000000000000000000000000000000000000000 for a dirty tree.
-auto rev2 = input2->getRev().value_or(Hash(HashType::SHA1));
+auto rev2 = input2->getRev().value_or(Hash(htSHA1));
 mkString(*state.allocAttr(v, state.symbols.create("rev")), rev2.gitRev());
 mkString(*state.allocAttr(v, state.symbols.create("shortRev")), std::string(rev2.gitRev(), 0, 12));
 if (tree.info.revCount)
@@ -23,7 +23,7 @@ void emitTreeAttrs(

 assert(tree.info.narHash);
 mkString(*state.allocAttr(v, state.symbols.create("narHash")),
-tree.info.narHash.to_string(Base::SRI, true));
+tree.info.narHash.to_string(SRI, true));

 if (input->getRev()) {
 mkString(*state.allocAttr(v, state.symbols.create("rev")), input->getRev()->gitRev());
@@ -106,7 +106,7 @@ static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
 if (n == "url")
 url = state.forceStringNoCtx(*attr.value, *attr.pos);
 else if (n == "sha256")
-expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, *attr.pos), HashType::SHA256);
+expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, *attr.pos), htSHA256);
 else if (n == "name")
 name = state.forceStringNoCtx(*attr.value, *attr.pos);
 else
@@ -144,10 +144,10 @@ static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
 if (expectedHash) {
 auto hash = unpack
 ? state.store->queryPathInfo(storePath)->narHash
-: hashFile(HashType::SHA256, path);
+: hashFile(htSHA256, path);
 if (hash != *expectedHash)
 throw Error((unsigned int) 102, "hash mismatch in file downloaded from '%s':\n wanted: %s\n got: %s",
-*url, expectedHash->to_string(Base::Base32, true), hash.to_string(Base::Base32, true));
+*url, expectedHash->to_string(Base32, true), hash.to_string(Base32, true));
 }

 if (state.allowedPaths)
@@ -47,7 +47,7 @@ Attrs Input::toAttrs() const
 {
 auto attrs = toAttrsInternal();
 if (narHash)
-attrs.emplace("narHash", narHash->to_string(Base::SRI, true));
+attrs.emplace("narHash", narHash->to_string(SRI, true));
 attrs.emplace("type", type());
 return attrs;
 }
@@ -67,7 +67,7 @@ std::pair<Tree, std::shared_ptr<const Input>> Input::fetchTree(ref<Store> store)

 if (narHash && narHash != input->narHash)
 throw Error("NAR hash mismatch in input '%s' (%s), expected '%s', got '%s'",
-to_string(), tree.actualPath, narHash->to_string(Base::SRI, true), input->narHash->to_string(Base::SRI, true));
+to_string(), tree.actualPath, narHash->to_string(SRI, true), input->narHash->to_string(SRI, true));

 return {std::move(tree), input};
 }
@@ -95,7 +95,7 @@ struct GitInput : Input

 auto input = std::make_shared<GitInput>(*this);

-assert(!rev || rev->type == HashType::SHA1);
+assert(!rev || rev->type == htSHA1);

 std::string cacheType = "git";
 if (shallow) cacheType += "-shallow";
@@ -195,7 +195,7 @@ struct GitInput : Input
 return files.count(file);
 };

-auto storePath = store->addToStore("source", actualUrl, FileIngestionMethod::Recursive, HashType::SHA256, filter);
+auto storePath = store->addToStore("source", actualUrl, FileIngestionMethod::Recursive, htSHA256, filter);

 auto tree = Tree {
 .actualPath = store->printStorePath(storePath),
@@ -225,21 +225,21 @@ struct GitInput : Input
 if (isLocal) {

 if (!input->rev)
-input->rev = Hash(chomp(runProgram("git", true, { "-C", actualUrl, "rev-parse", *input->ref })), HashType::SHA1);
+input->rev = Hash(chomp(runProgram("git", true, { "-C", actualUrl, "rev-parse", *input->ref })), htSHA1);

 repoDir = actualUrl;

 } else {

 if (auto res = getCache()->lookup(store, mutableAttrs)) {
-auto rev2 = Hash(getStrAttr(res->first, "rev"), HashType::SHA1);
+auto rev2 = Hash(getStrAttr(res->first, "rev"), htSHA1);
 if (!rev || rev == rev2) {
 input->rev = rev2;
 return makeResult(res->first, std::move(res->second));
 }
 }

-Path cacheDir = getCacheDir() + "/nix/gitv3/" + hashString(HashType::SHA256, actualUrl).to_string(Base::Base32, false);
+Path cacheDir = getCacheDir() + "/nix/gitv3/" + hashString(htSHA256, actualUrl).to_string(Base32, false);
 repoDir = cacheDir;

 if (!pathExists(cacheDir)) {
@@ -277,7 +277,7 @@ struct GitInput : Input
 }

 if (doFetch) {
-Activity act(*logger, Verbosity::Talkative, ActivityType::Unknown, fmt("fetching Git repository '%s'", actualUrl));
+Activity act(*logger, lvlTalkative, actUnknown, fmt("fetching Git repository '%s'", actualUrl));

 // FIXME: git stderr messes up our progress indicator, so
 // we're using --quiet for now. Should process its stderr.
@@ -301,7 +301,7 @@ struct GitInput : Input
 }

 if (!input->rev)
-input->rev = Hash(chomp(readFile(localRefFile)), HashType::SHA1);
+input->rev = Hash(chomp(readFile(localRefFile)), htSHA1);
 }

 bool isShallow = chomp(runProgram("git", true, { "-C", repoDir, "rev-parse", "--is-shallow-repository" })) == "true";
@@ -350,7 +350,7 @@ struct GitInput : Input
 unpackTarfile(*source, tmpDir);
 }

-auto storePath = store->addToStore(name, tmpDir, FileIngestionMethod::Recursive, HashType::SHA256, filter);
+auto storePath = store->addToStore(name, tmpDir, FileIngestionMethod::Recursive, htSHA256, filter);

 auto lastModified = std::stoull(runProgram("git", true, { "-C", repoDir, "log", "-1", "--format=%ct", input->rev->gitRev() }));

@@ -426,7 +426,7 @@ struct GitInputScheme : InputScheme
 input->ref = *ref;
 }
 if (auto rev = maybeGetStrAttr(attrs, "rev"))
-input->rev = Hash(*rev, HashType::SHA1);
+input->rev = Hash(*rev, htSHA1);

 input->shallow = maybeGetBoolAttr(attrs, "shallow").value_or(false);

@@ -45,7 +45,7 @@ struct GitHubInput : Input
 auto path = owner + "/" + repo;
 assert(!(ref && rev));
 if (ref) path += "/" + *ref;
-if (rev) path += "/" + rev->to_string(Base::Base16, false);
+if (rev) path += "/" + rev->to_string(Base16, false);
 return ParsedURL {
 .scheme = "github",
 .path = path,
@@ -76,7 +76,7 @@ struct GitHubInput : Input
 readFile(
 store->toRealPath(
 downloadFile(store, url, "source", false).storePath)));
-rev = Hash(std::string { json["sha"] }, HashType::SHA1);
+rev = Hash(std::string { json["sha"] }, htSHA1);
 debug("HEAD revision for '%s' is %s", url, rev->gitRev());
 }

@@ -106,7 +106,7 @@ struct GitHubInput : Input
 // might have stricter rate limits.

 auto url = fmt("https://api.github.com/repos/%s/%s/tarball/%s",
-owner, repo, rev->to_string(Base::Base16, false));
+owner, repo, rev->to_string(Base16, false));

 std::string accessToken = settings.githubAccessToken.get();
 if (accessToken != "")
@@ -140,7 +140,7 @@ struct GitHubInputScheme : InputScheme
 if (path.size() == 2) {
 } else if (path.size() == 3) {
 if (std::regex_match(path[2], revRegex))
-input->rev = Hash(path[2], HashType::SHA1);
+input->rev = Hash(path[2], htSHA1);
 else if (std::regex_match(path[2], refRegex))
 input->ref = path[2];
 else
@@ -152,7 +152,7 @@ struct GitHubInputScheme : InputScheme
 if (name == "rev") {
 if (input->rev)
 throw BadURL("GitHub URL '%s' contains multiple commit hashes", url.url);
-input->rev = Hash(value, HashType::SHA1);
+input->rev = Hash(value, htSHA1);
 }
 else if (name == "ref") {
 if (!std::regex_match(value, refRegex))
@@ -185,7 +185,7 @@ struct GitHubInputScheme : InputScheme
 input->repo = getStrAttr(attrs, "repo");
 input->ref = maybeGetStrAttr(attrs, "ref");
 if (auto rev = maybeGetStrAttr(attrs, "rev"))
-input->rev = Hash(*rev, HashType::SHA1);
+input->rev = Hash(*rev, htSHA1);
 return input;
 }
 };
@@ -114,7 +114,7 @@ struct MercurialInput : Input
 return files.count(file);
 };

-auto storePath = store->addToStore("source", actualUrl, FileIngestionMethod::Recursive, HashType::SHA256, filter);
+auto storePath = store->addToStore("source", actualUrl, FileIngestionMethod::Recursive, htSHA256, filter);

 return {Tree {
 .actualPath = store->printStorePath(storePath),
@@ -167,14 +167,14 @@ struct MercurialInput : Input
 });

 if (auto res = getCache()->lookup(store, mutableAttrs)) {
-auto rev2 = Hash(getStrAttr(res->first, "rev"), HashType::SHA1);
+auto rev2 = Hash(getStrAttr(res->first, "rev"), htSHA1);
 if (!rev || rev == rev2) {
 input->rev = rev2;
 return makeResult(res->first, std::move(res->second));
 }
 }

-Path cacheDir = fmt("%s/nix/hg/%s", getCacheDir(), hashString(HashType::SHA256, actualUrl).to_string(Base::Base32, false));
+Path cacheDir = fmt("%s/nix/hg/%s", getCacheDir(), hashString(htSHA256, actualUrl).to_string(Base32, false));

 /* If this is a commit hash that we already have, we don't
 have to pull again. */
@@ -184,7 +184,7 @@ struct MercurialInput : Input
 RunOptions("hg", { "log", "-R", cacheDir, "-r", input->rev->gitRev(), "--template", "1" })
 .killStderr(true)).second == "1"))
 {
-Activity act(*logger, Verbosity::Talkative, ActivityType::Unknown, fmt("fetching Mercurial repository '%s'", actualUrl));
+Activity act(*logger, lvlTalkative, actUnknown, fmt("fetching Mercurial repository '%s'", actualUrl));

 if (pathExists(cacheDir)) {
 try {
@@ -210,7 +210,7 @@ struct MercurialInput : Input
 runProgram("hg", true, { "log", "-R", cacheDir, "-r", revOrRef, "--template", "{node} {rev} {branch}" }));
 assert(tokens.size() == 3);

-input->rev = Hash(tokens[0], HashType::SHA1);
+input->rev = Hash(tokens[0], htSHA1);
 auto revCount = std::stoull(tokens[1]);
 input->ref = tokens[2];

@@ -293,7 +293,7 @@ struct MercurialInputScheme : InputScheme
 input->ref = *ref;
 }
 if (auto rev = maybeGetStrAttr(attrs, "rev"))
-input->rev = Hash(*rev, HashType::SHA1);
+input->rev = Hash(*rev, htSHA1);
 return input;
 }
 };
@@ -101,7 +101,7 @@ struct PathInputScheme : InputScheme

 for (auto & [name, value] : url.query)
 if (name == "rev")
-input->rev = Hash(value, HashType::SHA1);
+input->rev = Hash(value, htSHA1);
 else if (name == "revCount") {
 uint64_t revCount;
 if (!string2Int(value, revCount))
@@ -129,7 +129,7 @@ struct PathInputScheme : InputScheme

 for (auto & [name, value] : attrs)
 if (name == "rev")
-input->rev = Hash(getStrAttr(attrs, "rev"), HashType::SHA1);
+input->rev = Hash(getStrAttr(attrs, "rev"), htSHA1);
 else if (name == "revCount")
 input->revCount = getIntAttr(attrs, "revCount");
 else if (name == "lastModified")
@@ -66,9 +66,9 @@ DownloadFileResult downloadFile(
 } else {
 StringSink sink;
 dumpString(*res.data, sink);
-auto hash = hashString(HashType::SHA256, *res.data);
+auto hash = hashString(htSHA256, *res.data);
 ValidPathInfo info(store->makeFixedOutputPath(FileIngestionMethod::Flat, hash, name));
-info.narHash = hashString(HashType::SHA256, *sink.s);
+info.narHash = hashString(htSHA256, *sink.s);
 info.narSize = sink.s->size();
 info.ca = makeFixedOutputCA(FileIngestionMethod::Flat, hash);
 auto source = StringSource { *sink.s };
@@ -142,7 +142,7 @@ Tree downloadTarball(
 throw nix::Error("tarball '%s' contains an unexpected number of top-level files", url);
 auto topDir = tmpDir + "/" + members.begin()->name;
 lastModified = lstat(topDir).st_mtime;
-unpackedStorePath = store->addToStore(name, topDir, FileIngestionMethod::Recursive, HashType::SHA256, defaultPathFilter, NoRepair);
+unpackedStorePath = store->addToStore(name, topDir, FileIngestionMethod::Recursive, htSHA256, defaultPathFilter, NoRepair);
 }

 Attrs infoAttrs({
@@ -196,9 +196,9 @@ struct TarballInput : Input
 // NAR hashes are preferred over file hashes since tar/zip files
 // don't have a canonical representation.
 if (narHash)
-url2.query.insert_or_assign("narHash", narHash->to_string(Base::SRI, true));
+url2.query.insert_or_assign("narHash", narHash->to_string(SRI, true));
 else if (hash)
-url2.query.insert_or_assign("hash", hash->to_string(Base::SRI, true));
+url2.query.insert_or_assign("hash", hash->to_string(SRI, true));
 return url2;
 }

@@ -207,7 +207,7 @@ struct TarballInput : Input
 Attrs attrs;
 attrs.emplace("url", url.to_string());
 if (hash)
-attrs.emplace("hash", hash->to_string(Base::SRI, true));
+attrs.emplace("hash", hash->to_string(SRI, true));
 return attrs;
 }

@@ -11,21 +11,19 @@ MixCommonArgs::MixCommonArgs(const string & programName)
 .longName = "verbose",
 .shortName = 'v',
 .description = "increase verbosity level",
-.handler = {[]() { verbosity = (Verbosity) ((uint64_t) verbosity + 1); }},
+.handler = {[]() { verbosity = (Verbosity) (verbosity + 1); }},
 });

 addFlag({
 .longName = "quiet",
 .description = "decrease verbosity level",
-.handler = {[]() { verbosity = verbosity > Verbosity::Error
-? (Verbosity) ((uint64_t) verbosity - 1)
-: Verbosity::Error; }},
+.handler = {[]() { verbosity = verbosity > lvlError ? (Verbosity) (verbosity - 1) : lvlError; }},
 });

 addFlag({
 .longName = "debug",
 .description = "enable debug output",
-.handler = {[]() { verbosity = Verbosity::Debug; }},
+.handler = {[]() { verbosity = lvlDebug; }},
 });

 addFlag({
@@ -39,7 +39,7 @@ private:
 struct ActInfo
 {
 std::string s, lastLine, phase;
-ActivityType type = ActivityType::Unknown;
+ActivityType type = actUnknown;
 uint64_t done = 0;
 uint64_t expected = 0;
 uint64_t running = 0;
@@ -153,7 +153,7 @@ public:
 {
 auto state(state_.lock());

-if (lvl <= verbosity && !s.empty() && type != ActivityType::BuildWaiting)
+if (lvl <= verbosity && !s.empty() && type != actBuildWaiting)
 log(*state, lvl, s + "...");

 state->activities.emplace_back(ActInfo());
@@ -164,7 +164,7 @@ public:
 state->its.emplace(act, i);
 state->activitiesByType[type].its.emplace(act, i);

-if (type == ActivityType::Build) {
+if (type == actBuild) {
 std::string name(storePathToName(getS(fields, 0)));
 if (hasSuffix(name, ".drv"))
 name = name.substr(0, name.size() - 4);
@@ -179,7 +179,7 @@ public:
 i->name = DrvName(name).name;
 }

-if (type == ActivityType::Substitute) {
+if (type == actSubstitute) {
 auto name = storePathToName(getS(fields, 0));
 auto sub = getS(fields, 1);
 i->s = fmt(
@@ -189,7 +189,7 @@ public:
 name, sub);
 }

-if (type == ActivityType::PostBuildHook) {
+if (type == actPostBuildHook) {
 auto name = storePathToName(getS(fields, 0));
 if (hasSuffix(name, ".drv"))
 name = name.substr(0, name.size() - 4);
@@ -197,14 +197,14 @@ public:
 i->name = DrvName(name).name;
 }

-if (type == ActivityType::QueryPathInfo) {
+if (type == actQueryPathInfo) {
 auto name = storePathToName(getS(fields, 0));
 i->s = fmt("querying " ANSI_BOLD "%s" ANSI_NORMAL " on %s", name, getS(fields, 1));
 }

-if ((type == ActivityType::Download && hasAncestor(*state, ActivityType::CopyPath, parent))
-|| (type == ActivityType::Download && hasAncestor(*state, ActivityType::QueryPathInfo, parent))
-|| (type == ActivityType::CopyPath && hasAncestor(*state, ActivityType::Substitute, parent)))
+if ((type == actFileTransfer && hasAncestor(*state, actCopyPath, parent))
+|| (type == actFileTransfer && hasAncestor(*state, actQueryPathInfo, parent))
+|| (type == actCopyPath && hasAncestor(*state, actSubstitute, parent)))
 i->visible = false;

 update(*state);
@@ -249,13 +249,13 @@ public:
 {
 auto state(state_.lock());

-if (type == ResultType::FileLinked) {
+if (type == resFileLinked) {
 state->filesLinked++;
 state->bytesLinked += getI(fields, 0);
 update(*state);
 }

-else if (type == ResultType::BuildLogLine || type == ResultType::PostBuildLogLine) {
+else if (type == resBuildLogLine || type == resPostBuildLogLine) {
 auto lastLine = trim(getS(fields, 0));
 if (!lastLine.empty()) {
 auto i = state->its.find(act);
@@ -263,10 +263,10 @@ public:
 ActInfo info = *i->second;
 if (printBuildLogs) {
 auto suffix = "> ";
-if (type == ResultType::PostBuildLogLine) {
+if (type == resPostBuildLogLine) {
 suffix = " (post)> ";
 }
-log(*state, Verbosity::Info, ANSI_FAINT + info.name.value_or("unnamed") + suffix + ANSI_NORMAL + lastLine);
+log(*state, lvlInfo, ANSI_FAINT + info.name.value_or("unnamed") + suffix + ANSI_NORMAL + lastLine);
 } else {
 state->activities.erase(i->second);
 info.lastLine = lastLine;
@@ -277,24 +277,24 @@ public:
 }
 }

-else if (type == ResultType::UntrustedPath) {
+else if (type == resUntrustedPath) {
 state->untrustedPaths++;
 update(*state);
 }

-else if (type == ResultType::CorruptedPath) {
+else if (type == resCorruptedPath) {
 state->corruptedPaths++;
 update(*state);
 }

-else if (type == ResultType::SetPhase) {
+else if (type == resSetPhase) {
 auto i = state->its.find(act);
 assert(i != state->its.end());
 i->second->phase = getS(fields, 0);
 update(*state);
 }

-else if (type == ResultType::Progress) {
+else if (type == resProgress) {
 auto i = state->its.find(act);
 assert(i != state->its.end());
 ActInfo & actInfo = *i->second;
@@ -305,7 +305,7 @@ public:
 update(*state);
 }

-else if (type == ResultType::SetExpected) {
+else if (type == resSetExpected) {
 auto i = state->its.find(act);
 assert(i != state->its.end());
 ActInfo & actInfo = *i->second;
@@ -417,10 +417,10 @@ public:
 res += s;
 };

-showActivity(ActivityType::Builds, "%s built");
+showActivity(actBuilds, "%s built");

-auto s1 = renderActivity(ActivityType::CopyPaths, "%s copied");
-auto s2 = renderActivity(ActivityType::CopyPath, "%s MiB", "%.1f", MiB);
+auto s1 = renderActivity(actCopyPaths, "%s copied");
+auto s2 = renderActivity(actCopyPath, "%s MiB", "%.1f", MiB);

 if (!s1.empty() || !s2.empty()) {
 if (!res.empty()) res += ", ";
@@ -428,10 +428,10 @@ public:
 if (!s2.empty()) { res += " ("; res += s2; res += ')'; }
 }

-showActivity(ActivityType::Download, "%s MiB DL", "%.1f", MiB);
+showActivity(actFileTransfer, "%s MiB DL", "%.1f", MiB);

 {
-auto s = renderActivity(ActivityType::OptimiseStore, "%s paths optimised");
+auto s = renderActivity(actOptimiseStore, "%s paths optimised");
 if (s != "") {
 s += fmt(", %.1f MiB / %d inodes freed", state.bytesLinked / MiB, state.filesLinked);
 if (!res.empty()) res += ", ";
@@ -440,7 +440,7 @@ public:
 }

 // FIXME: don't show "done" paths in green.
-showActivity(ActivityType::VerifyPaths, "%s paths verified");
+showActivity(actVerifyPaths, "%s paths verified");

 if (state.corruptedPaths) {
 if (!res.empty()) res += ", ";
@@ -269,7 +269,7 @@ void parseCmdLine(const string & programName, const Strings & args,
 void printVersion(const string & programName)
 {
 std::cout << format("%1% (Nix) %2%") % programName % nixVersion << std::endl;
-if (verbosity > Verbosity::Info) {
+if (verbosity > lvlInfo) {
 Strings cfg;
 #if HAVE_BOEHMGC
 cfg.push_back("gc");
@@ -43,11 +43,11 @@ struct StorePathWithOutputs;
 void printMissing(
 ref<Store> store,
 const std::vector<StorePathWithOutputs> & paths,
-Verbosity lvl = Verbosity::Info);
+Verbosity lvl = lvlInfo);

 void printMissing(ref<Store> store, const StorePathSet & willBuild,
 const StorePathSet & willSubstitute, const StorePathSet & unknown,
-unsigned long long downloadSize, unsigned long long narSize, Verbosity lvl = Verbosity::Info);
+unsigned long long downloadSize, unsigned long long narSize, Verbosity lvl = lvlInfo);

 string getArg(const string & opt,
 Strings::iterator & i, const Strings::iterator & end);
@@ -137,7 +137,7 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource
 auto narInfo = make_ref<NarInfo>(info);

 narInfo->narSize = nar->size();
-narInfo->narHash = hashString(HashType::SHA256, *nar);
+narInfo->narHash = hashString(htSHA256, *nar);

 if (info.narHash && info.narHash != narInfo->narHash)
 throw Error("refusing to copy corrupted path '%1%' to binary cache", printStorePath(info.path));
@@ -172,16 +172,16 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource
 auto now1 = std::chrono::steady_clock::now();
 auto narCompressed = compress(compression, *nar, parallelCompression);
 auto now2 = std::chrono::steady_clock::now();
-narInfo->fileHash = hashString(HashType::SHA256, *narCompressed);
+narInfo->fileHash = hashString(htSHA256, *narCompressed);
 narInfo->fileSize = narCompressed->size();

 auto duration = std::chrono::duration_cast<std::chrono::milliseconds>(now2 - now1).count();
-printMsg(Verbosity::Talkative, "copying path '%1%' (%2% bytes, compressed %3$.1f%% in %4% ms) to binary cache",
+printMsg(lvlTalkative, "copying path '%1%' (%2% bytes, compressed %3$.1f%% in %4% ms) to binary cache",
 printStorePath(narInfo->path), narInfo->narSize,
 ((1.0 - (double) narCompressed->size() / nar->size()) * 100.0),
 duration);

-narInfo->url = "nar/" + narInfo->fileHash.to_string(Base::Base32, false) + ".nar"
+narInfo->url = "nar/" + narInfo->fileHash.to_string(Base32, false) + ".nar"
 + (compression == "xz" ? ".xz" :
 compression == "bzip2" ? ".bz2" :
 compression == "br" ? ".br" :
@@ -209,7 +209,7 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource
 // to a GC'ed file, so overwriting might be useful...
 if (fileExists(key)) return;

-printMsg(Verbosity::Talkative, "creating debuginfo link from '%s' to '%s'", key, target);
+printMsg(lvlTalkative, "creating debuginfo link from '%s' to '%s'", key, target);

 upsertFile(key, json.dump(), "application/json");
 };
@@ -302,7 +302,7 @@ void BinaryCacheStore::queryPathInfoUncached(const StorePath & storePath,
 {
 auto uri = getUri();
 auto storePathS = printStorePath(storePath);
-auto act = std::make_shared<Activity>(*logger, Verbosity::Talkative, ActivityType::QueryPathInfo,
+auto act = std::make_shared<Activity>(*logger, lvlTalkative, actQueryPathInfo,
 fmt("querying info about '%s' on '%s'", storePathS, uri), Logger::Fields{storePathS, uri});
 PushActivity pact(act->id);

@ -106,13 +106,7 @@ typedef std::map<StorePath, WeakGoalPtr> WeakGoalMap;
|
||||||
|
|
||||||
struct Goal : public std::enable_shared_from_this<Goal>
|
struct Goal : public std::enable_shared_from_this<Goal>
|
||||||
{
|
{
|
||||||
enum struct ExitCode {
|
typedef enum {ecBusy, ecSuccess, ecFailed, ecNoSubstituters, ecIncompleteClosure} ExitCode;
|
||||||
Busy,
|
|
||||||
Success,
|
|
||||||
Failed,
|
|
||||||
NoSubstituters,
|
|
||||||
IncompleteClosure,
|
|
||||||
};
|
|
||||||
|
|
||||||
/* Backlink to the worker. */
|
/* Backlink to the worker. */
|
||||||
Worker & worker;
|
Worker & worker;
|
||||||
|
@ -147,7 +141,7 @@ struct Goal : public std::enable_shared_from_this<Goal>
|
||||||
Goal(Worker & worker) : worker(worker)
|
Goal(Worker & worker) : worker(worker)
|
||||||
{
|
{
|
||||||
nrFailed = nrNoSubstituters = nrIncompleteClosure = 0;
|
nrFailed = nrNoSubstituters = nrIncompleteClosure = 0;
|
||||||
exitCode = ExitCode::Busy;
|
exitCode = ecBusy;
|
||||||
}
|
}
|
||||||
|
|
||||||
virtual ~Goal()
|
virtual ~Goal()
|
||||||
|
@ -359,8 +353,8 @@ public:
|
||||||
{
|
{
|
||||||
actDerivations.progress(doneBuilds, expectedBuilds + doneBuilds, runningBuilds, failedBuilds);
|
actDerivations.progress(doneBuilds, expectedBuilds + doneBuilds, runningBuilds, failedBuilds);
|
||||||
actSubstitutions.progress(doneSubstitutions, expectedSubstitutions + doneSubstitutions, runningSubstitutions, failedSubstitutions);
|
actSubstitutions.progress(doneSubstitutions, expectedSubstitutions + doneSubstitutions, runningSubstitutions, failedSubstitutions);
|
||||||
act.setExpected(ActivityType::Download, expectedDownloadSize + doneDownloadSize);
|
act.setExpected(actFileTransfer, expectedDownloadSize + doneDownloadSize);
|
||||||
act.setExpected(ActivityType::CopyPath, expectedNarSize + doneNarSize);
|
act.setExpected(actCopyPath, expectedNarSize + doneNarSize);
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -392,13 +386,13 @@ void Goal::waiteeDone(GoalPtr waitee, ExitCode result)
|
||||||
|
|
||||||
trace(fmt("waitee '%s' done; %d left", waitee->name, waitees.size()));
|
trace(fmt("waitee '%s' done; %d left", waitee->name, waitees.size()));
|
||||||
|
|
||||||
if (result == ExitCode::Failed || result == ExitCode::NoSubstituters || result == ExitCode::IncompleteClosure) ++nrFailed;
|
if (result == ecFailed || result == ecNoSubstituters || result == ecIncompleteClosure) ++nrFailed;
|
||||||
|
|
||||||
if (result == ExitCode::NoSubstituters) ++nrNoSubstituters;
|
if (result == ecNoSubstituters) ++nrNoSubstituters;
|
||||||
|
|
||||||
if (result == ExitCode::IncompleteClosure) ++nrIncompleteClosure;
|
if (result == ecIncompleteClosure) ++nrIncompleteClosure;
|
||||||
|
|
||||||
if (waitees.empty() || (result == ExitCode::Failed && !settings.keepGoing)) {
|
if (waitees.empty() || (result == ecFailed && !settings.keepGoing)) {
|
||||||
|
|
||||||
/* If we failed and keepGoing is not set, we remove all
|
/* If we failed and keepGoing is not set, we remove all
|
||||||
remaining waitees. */
|
remaining waitees. */
|
||||||
|
@ -418,8 +412,8 @@ void Goal::waiteeDone(GoalPtr waitee, ExitCode result)
|
||||||
void Goal::amDone(ExitCode result, std::optional<Error> ex)
|
void Goal::amDone(ExitCode result, std::optional<Error> ex)
|
||||||
{
|
{
|
||||||
trace("done");
|
trace("done");
|
||||||
assert(exitCode == ExitCode::Busy);
|
assert(exitCode == ecBusy);
|
||||||
assert(result == ExitCode::Success || result == ExitCode::Failed || result == ExitCode::NoSubstituters || result == ExitCode::IncompleteClosure);
|
assert(result == ecSuccess || result == ecFailed || result == ecNoSubstituters || result == ecIncompleteClosure);
|
||||||
exitCode = result;
|
exitCode = result;
|
||||||
|
|
||||||
if (ex) {
|
if (ex) {
|
||||||
|
@@ -684,7 +678,7 @@ HookInstance::HookInstance()

 Strings args = {
 std::string(baseNameOf(settings.buildHook.get())),
-std::to_string((uint64_t)verbosity),
+std::to_string(verbosity),
 };

 execv(settings.buildHook.get().c_str(), stringsToCharPtrs(args).data());

@@ -1422,7 +1416,7 @@ void DerivationGoal::started() {
 "building '%s'", worker.store.printStorePath(drvPath), curRound, nrRounds);
 fmt("building '%s'", worker.store.printStorePath(drvPath));
 if (hook) msg += fmt(" on '%s'", machineName);
-act = std::make_unique<Activity>(*logger, Verbosity::Info, ActivityType::Build, msg,
+act = std::make_unique<Activity>(*logger, lvlInfo, actBuild, msg,
 Logger::Fields{worker.store.printStorePath(drvPath), hook ? machineName : "", curRound, nrRounds});
 mcRunningBuilds = std::make_unique<MaintainCount<uint64_t>>(worker.runningBuilds);
 worker.updateProgress();

@@ -1443,7 +1437,7 @@ void DerivationGoal::tryToBuild()

 if (!outputLocks.lockPaths(lockFiles, "", false)) {
 if (!actLock)
-actLock = std::make_unique<Activity>(*logger, Verbosity::Warn, ActivityType::BuildWaiting,
+actLock = std::make_unique<Activity>(*logger, lvlWarn, actBuildWaiting,
 fmt("waiting for lock on %s", yellowtxt(showPaths(lockFiles))));
 worker.waitForAWhile(shared_from_this());
 return;
@@ -1483,20 +1477,6 @@ void DerivationGoal::tryToBuild()
 supported for local builds. */
 bool buildLocally = buildMode != bmNormal || parsedDrv->willBuildLocally();

-auto started = [&]() {
-auto msg = fmt(
-buildMode == bmRepair ? "repairing outputs of '%s'" :
-buildMode == bmCheck ? "checking outputs of '%s'" :
-nrRounds > 1 ? "building '%s' (round %d/%d)" :
-"building '%s'", worker.store.printStorePath(drvPath), curRound, nrRounds);
-fmt("building '%s'", worker.store.printStorePath(drvPath));
-if (hook) msg += fmt(" on '%s'", machineName);
-act = std::make_unique<Activity>(*logger, Verbosity::Info, ActivityType::Build, msg,
-Logger::Fields{worker.store.printStorePath(drvPath), hook ? machineName : "", curRound, nrRounds});
-mcRunningBuilds = std::make_unique<MaintainCount<uint64_t>>(worker.runningBuilds);
-worker.updateProgress();
-};
-
 /* Is the build hook willing to accept this job? */
 if (!buildLocally) {
 switch (tryBuildHook()) {
@@ -1512,7 +1492,7 @@ void DerivationGoal::tryToBuild()
 /* Not now; wait until at least one child finishes or
 the wake-up timeout expires. */
 if (!actLock)
-actLock = std::make_unique<Activity>(*logger, Verbosity::Warn, ActivityType::BuildWaiting,
+actLock = std::make_unique<Activity>(*logger, lvlWarn, actBuildWaiting,
 fmt("waiting for a machine to build '%s'", yellowtxt(worker.store.printStorePath(drvPath))));
 worker.waitForAWhile(shared_from_this());
 outputLocks.unlock();

@@ -1553,7 +1533,7 @@ void DerivationGoal::tryLocalBuild() {
 buildUser->kill();
 } else {
 if (!actLock)
-actLock = std::make_unique<Activity>(*logger, Verbosity::Warn, ActivityType::BuildWaiting,
+actLock = std::make_unique<Activity>(*logger, lvlWarn, actBuildWaiting,
 fmt("waiting for UID to build '%s'", yellowtxt(worker.store.printStorePath(drvPath))));
 worker.waitForAWhile(shared_from_this());
 return;

@@ -1708,7 +1688,7 @@ void DerivationGoal::buildDone()
 registerOutputs();

 if (settings.postBuildHook != "") {
-Activity act(*logger, Verbosity::Info, ActivityType::PostBuildHook,
+Activity act(*logger, lvlInfo, actPostBuildHook,
 fmt("running post-build-hook '%s'", settings.postBuildHook),
 Logger::Fields{worker.store.printStorePath(drvPath)});
 PushActivity pact(act.id);

@@ -1740,7 +1720,7 @@ void DerivationGoal::buildDone()
 }

 void flushLine() {
-act.result(ResultType::PostBuildLogLine, currentLine);
+act.result(resPostBuildLogLine, currentLine);
 currentLine.clear();
 }
@@ -2144,7 +2124,7 @@ void DerivationGoal::startBuilder()
 /* Clean up the chroot directory automatically. */
 autoDelChroot = std::make_shared<AutoDelete>(chrootRootDir);

-printMsg(Verbosity::Chatty, format("setting up chroot environment in '%1%'") % chrootRootDir);
+printMsg(lvlChatty, format("setting up chroot environment in '%1%'") % chrootRootDir);

 if (mkdir(chrootRootDir.c_str(), 0750) == -1)
 throw SysError("cannot create '%1%'", chrootRootDir);

@@ -2253,7 +2233,7 @@ void DerivationGoal::startBuilder()
 }

 if (useChroot && settings.preBuildHook != "" && dynamic_cast<Derivation *>(drv.get())) {
-printMsg(Verbosity::Chatty, format("executing pre-build hook '%1%'")
+printMsg(lvlChatty, format("executing pre-build hook '%1%'")
 % settings.preBuildHook);
 auto args = useChroot ? Strings({worker.store.printStorePath(drvPath), chrootRootDir}) :
 Strings({ worker.store.printStorePath(drvPath) });

@@ -2294,7 +2274,7 @@ void DerivationGoal::startBuilder()
 startDaemon();

 /* Run the builder. */
-printMsg(Verbosity::Chatty, "executing builder '%1%'", drv->builder);
+printMsg(lvlChatty, "executing builder '%1%'", drv->builder);

 /* Create the log file. */
 Path logFile = openLogFile();

@@ -2530,8 +2510,8 @@ void DerivationGoal::initTmpDir() {
 if (passAsFile.find(i.first) == passAsFile.end()) {
 env[i.first] = i.second;
 } else {
-auto hash = hashString(HashType::SHA256, i.first);
-string fn = ".attr-" + hash.to_string(Base::Base32, false);
+auto hash = hashString(htSHA256, i.first);
+string fn = ".attr-" + hash.to_string(Base32, false);
 Path p = tmpDir + "/" + fn;
 writeFile(p, rewriteStrings(i.second, inputRewrites));
 chownToBuilder(p);
@@ -2777,7 +2757,7 @@ struct RestrictedStore : public LocalFSStore
 { throw Error("queryPathFromHashPart"); }

 StorePath addToStore(const string & name, const Path & srcPath,
-FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = HashType::SHA256,
+FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256,
 PathFilter & filter = defaultPathFilter, RepairFlag repair = NoRepair) override
 { throw Error("addToStore"); }

@@ -2790,7 +2770,7 @@ struct RestrictedStore : public LocalFSStore
 }

 StorePath addToStoreFromDump(const string & dump, const string & name,
-FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = HashType::SHA256, RepairFlag repair = NoRepair) override
+FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair) override
 {
 auto path = next->addToStoreFromDump(dump, name, method, hashAlgo, repair);
 goal.addDependency(path);

@@ -3765,7 +3745,7 @@ void DerivationGoal::registerOutputs()
 worker.hashMismatch = true;
 delayedException = std::make_exception_ptr(
 BuildError("hash mismatch in fixed-output derivation '%s':\n wanted: %s\n got: %s",
-worker.store.printStorePath(dest), h.to_string(Base::SRI, true), h2.to_string(Base::SRI, true)));
+worker.store.printStorePath(dest), h.to_string(SRI, true), h2.to_string(SRI, true)));

 Path actualDest = worker.store.Store::toRealPath(dest);

@@ -4209,7 +4189,7 @@ void DerivationGoal::flushLine()
 logTail.push_back(currentLogLine);
 if (logTail.size() > settings.logLines) logTail.pop_front();

-act->result(ResultType::BuildLogLine, currentLogLine);
+act->result(resBuildLogLine, currentLogLine);
 }

 currentLogLine = "";
@@ -4236,7 +4216,7 @@ void DerivationGoal::addHashRewrite(const StorePath & path)
 auto h1 = std::string(((std::string_view) path.to_string()).substr(0, 32));
 auto p = worker.store.makeStorePath(
 "rewrite:" + std::string(drvPath.to_string()) + ":" + std::string(path.to_string()),
-Hash(HashType::SHA256), path.name());
+Hash(htSHA256), path.name());
 auto h2 = std::string(((std::string_view) p.to_string()).substr(0, 32));
 deletePath(worker.store.printStorePath(p));
 inputRewrites[h1] = h2;

@@ -4250,7 +4230,7 @@ void DerivationGoal::done(BuildResult::Status status, std::optional<Error> ex)
 result.status = status;
 if (ex)
 result.errorMsg = ex->what();
-amDone(result.success() ? ExitCode::Success : ExitCode::Failed, ex);
+amDone(result.success() ? ecSuccess : ecFailed, ex);
 if (result.status == BuildResult::TimedOut)
 worker.timedOut = true;
 if (result.status == BuildResult::PermanentFailure)

@@ -4386,7 +4366,7 @@ void SubstitutionGoal::init()

 /* If the path already exists we're done. */
 if (!repair && worker.store.isValidPath(storePath)) {
-amDone(ExitCode::Success);
+amDone(ecSuccess);
 return;
 }

@@ -4411,7 +4391,7 @@ void SubstitutionGoal::tryNext()
 /* Hack: don't indicate failure if there were no substituters.
 In that case the calling derivation should just do a
 build. */
-amDone(substituterFailed ? ExitCode::Failed : ExitCode::NoSubstituters);
+amDone(substituterFailed ? ecFailed : ecNoSubstituters);

 if (substituterFailed) {
 worker.failedSubstitutions++;
@@ -4497,7 +4477,7 @@ void SubstitutionGoal::referencesValid()

 if (nrFailed > 0) {
 debug("some references of path '%s' could not be realised", worker.store.printStorePath(storePath));
-amDone(nrNoSubstituters > 0 || nrIncompleteClosure > 0 ? ExitCode::IncompleteClosure : ExitCode::Failed);
+amDone(nrNoSubstituters > 0 || nrIncompleteClosure > 0 ? ecIncompleteClosure : ecFailed);
 return;
 }

@@ -4535,7 +4515,7 @@ void SubstitutionGoal::tryToRun()
 /* Wake up the worker loop when we're done. */
 Finally updateStats([this]() { outPipe.writeSide = -1; });

-Activity act(*logger, ActivityType::Substitute, Logger::Fields{worker.store.printStorePath(storePath), sub->getUri()});
+Activity act(*logger, actSubstitute, Logger::Fields{worker.store.printStorePath(storePath), sub->getUri()});
 PushActivity pact(act.id);

 copyStorePath(ref<Store>(sub), ref<Store>(worker.store.shared_from_this()),

@@ -4584,7 +4564,7 @@ void SubstitutionGoal::finished()

 worker.markContentsGood(storePath);

-printMsg(Verbosity::Chatty, "substitution of path '%s' succeeded", worker.store.printStorePath(storePath));
+printMsg(lvlChatty, "substitution of path '%s' succeeded", worker.store.printStorePath(storePath));

 maintainRunningSubstitutions.reset();

@@ -4602,7 +4582,7 @@ void SubstitutionGoal::finished()

 worker.updateProgress();

-amDone(ExitCode::Success);
+amDone(ecSuccess);
 }
@@ -4620,9 +4600,9 @@ void SubstitutionGoal::handleEOF(int fd)


 Worker::Worker(LocalStore & store)
-: act(*logger, ActivityType::Realise)
-, actDerivations(*logger, ActivityType::Builds)
-, actSubstitutions(*logger, ActivityType::CopyPaths)
+: act(*logger, actRealise)
+, actDerivations(*logger, actBuilds)
+, actSubstitutions(*logger, actCopyPaths)
 , store(store)
 {
 /* Debugging: prevent recursive workers. */

@@ -4706,7 +4686,7 @@ void Worker::removeGoal(GoalPtr goal)
 topGoals.erase(goal);
 /* If a top-level goal failed, then kill all other goals
 (unless keepGoing was set). */
-if (goal->exitCode == Goal::ExitCode::Failed && !settings.keepGoing)
+if (goal->exitCode == Goal::ecFailed && !settings.keepGoing)
 topGoals.clear();
 }

@@ -4849,7 +4829,7 @@ void Worker::run(const Goals & _topGoals)

 void Worker::waitForInput()
 {
-printMsg(Verbosity::Vomit, "waiting for children");
+printMsg(lvlVomit, "waiting for children");

 /* Process output from the file descriptors attached to the
 children, namely log output and output path creation commands.
@@ -4939,7 +4919,7 @@ void Worker::waitForInput()
 if (errno != EINTR)
 throw SysError("%s: read failed", goal->getName());
 } else {
-printMsg(Verbosity::Vomit, "%1%: read %2% bytes",
+printMsg(lvlVomit, "%1%: read %2% bytes",
 goal->getName(), rd);
 string data((char *) buffer.data(), rd);
 j->lastOutput = after;

@@ -4948,7 +4928,7 @@ void Worker::waitForInput()
 }
 }

-if (goal->exitCode == Goal::ExitCode::Busy &&
+if (goal->exitCode == Goal::ecBusy &&
 0 != settings.maxSilentTime &&
 j->respectTimeouts &&
 after - j->lastOutput >= std::chrono::seconds(settings.maxSilentTime))

@@ -4958,7 +4938,7 @@ void Worker::waitForInput()
 goal->getName(), settings.maxSilentTime));
 }

-else if (goal->exitCode == Goal::ExitCode::Busy &&
+else if (goal->exitCode == Goal::ecBusy &&
 0 != settings.buildTimeout &&
 j->respectTimeouts &&
 after - j->timeStarted >= std::chrono::seconds(settings.buildTimeout))

@@ -5019,7 +4999,7 @@ bool Worker::pathContentsGood(const StorePath & path)
 res = false;
 else {
 HashResult current = hashPath(*info->narHash.type, store.printStorePath(path));
-Hash nullHash(HashType::SHA256);
+Hash nullHash(htSHA256);
 res = info->narHash == nullHash || info->narHash == current.first;
 }
 pathContentsGoodCache.insert_or_assign(path, res);
@@ -5079,7 +5059,7 @@ void LocalStore::buildPaths(const std::vector<StorePathWithOutputs> & drvPaths,
 else
 ex = i->ex;
 }
-if (i->exitCode != Goal::ExitCode::Success) {
+if (i->exitCode != Goal::ecSuccess) {
 DerivationGoal * i2 = dynamic_cast<DerivationGoal *>(i.get());
 if (i2) failed.insert(i2->getDrvPath());
 else failed.insert(dynamic_cast<SubstitutionGoal *>(i.get())->getStorePath());

@@ -5128,7 +5108,7 @@ void LocalStore::ensurePath(const StorePath & path)

 worker.run(goals);

-if (goal->exitCode != Goal::ExitCode::Success) {
+if (goal->exitCode != Goal::ecSuccess) {
 if (goal->ex) {
 goal->ex->status = worker.exitStatus();
 throw *goal->ex;

@@ -5146,7 +5126,7 @@ void LocalStore::repairPath(const StorePath & path)

 worker.run(goals);

-if (goal->exitCode != Goal::ExitCode::Success) {
+if (goal->exitCode != Goal::ecSuccess) {
 /* Since substituting the path didn't work, if we have a valid
 deriver, then rebuild the deriver. */
 auto info = queryPathInfo(path);
@@ -65,7 +65,7 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData)
 if (!hasSuffix(hashedMirror, "/")) hashedMirror += '/';
 auto ht = parseHashTypeOpt(getAttr("outputHashAlgo"));
 auto h = Hash(getAttr("outputHash"), ht);
-fetch(hashedMirror + printHashType(*h.type) + "/" + h.to_string(Base::Base16, false));
+fetch(hashedMirror + printHashType(*h.type) + "/" + h.to_string(Base16, false));
 return;
 } catch (Error & e) {
 debug(e.what());

@@ -126,13 +126,7 @@ struct TunnelLogger : public Logger
 }

 StringSink buf;
-buf << STDERR_START_ACTIVITY
-<< act
-<< (uint64_t) lvl
-<< (uint64_t) type
-<< s
-<< fields
-<< parent;
+buf << STDERR_START_ACTIVITY << act << lvl << type << s << fields << parent;
 enqueueMsg(*buf.s);
 }

@@ -148,10 +142,7 @@ struct TunnelLogger : public Logger
 {
 if (GET_PROTOCOL_MINOR(clientVersion) < 20) return;
 StringSink buf;
-buf << STDERR_RESULT
-<< act
-<< (uint64_t) type
-<< fields;
+buf << STDERR_RESULT << act << type << fields;
 enqueueMsg(*buf.s);
 }
 };
@@ -323,7 +314,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
 logger->startWork();
 auto hash = store->queryPathInfo(path)->narHash;
 logger->stopWork();
-to << hash.to_string(Base::Base16, false);
+to << hash.to_string(Base16, false);
 break;
 }

@@ -572,7 +563,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
 clientSettings.maxBuildJobs = readInt(from);
 clientSettings.maxSilentTime = readInt(from);
 readInt(from); // obsolete useBuildHook
-clientSettings.verboseBuild = Verbosity::Error == (Verbosity) readInt(from);
+clientSettings.verboseBuild = lvlError == (Verbosity) readInt(from);
 readInt(from); // obsolete logType
 readInt(from); // obsolete printBuildTrace
 clientSettings.buildCores = readInt(from);

@@ -655,7 +646,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
 if (GET_PROTOCOL_MINOR(clientVersion) >= 17)
 to << 1;
 to << (info->deriver ? store->printStorePath(*info->deriver) : "")
-<< info->narHash.to_string(Base::Base16, false);
+<< info->narHash.to_string(Base16, false);
 writeStorePaths(*store, to, info->references);
 to << info->registrationTime << info->narSize;
 if (GET_PROTOCOL_MINOR(clientVersion) >= 16) {

@@ -715,7 +706,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
 auto deriver = readString(from);
 if (deriver != "")
 info.deriver = store->parseStorePath(deriver);
-info.narHash = Hash(readString(from), HashType::SHA256);
+info.narHash = Hash(readString(from), htSHA256);
 info.references = readStorePaths<StorePathSet>(*store, from);
 from >> info.registrationTime >> info.narSize >> info.ultimate;
 info.sigs = readStrings<StringSet>(from);

@@ -798,7 +789,7 @@ void processConnection(

 Finally finally([&]() {
 _isInterrupted = false;
-prevLogger->log(Verbosity::Debug, fmt("%d operations", opCount));
+prevLogger->log(lvlDebug, fmt("%d operations", opCount));
 });

 if (GET_PROTOCOL_MINOR(clientVersion) >= 14 && readInt(from)) {
@@ -350,7 +350,7 @@ Hash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOutput
 /* Return a fixed hash for fixed-output derivations. */
 if (drv.isFixedOutput()) {
 DerivationOutputs::const_iterator i = drv.outputs.begin();
-return hashString(HashType::SHA256, "fixed:out:"
+return hashString(htSHA256, "fixed:out:"
 + i->second.hashAlgo + ":"
 + i->second.hash + ":"
 + store.printStorePath(i->second.path));

@@ -366,10 +366,10 @@ Hash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOutput
 h = drvHashes.insert_or_assign(i.first, hashDerivationModulo(store,
 store.readDerivation(i.first), false)).first;
 }
-inputs2.insert_or_assign(h->second.to_string(Base::Base16, false), i.second);
+inputs2.insert_or_assign(h->second.to_string(Base16, false), i.second);
 }

-return hashString(HashType::SHA256, drv.unparse(store, maskOutputs, &inputs2));
+return hashString(htSHA256, drv.unparse(store, maskOutputs, &inputs2));
 }


@@ -452,7 +452,7 @@ void writeDerivation(Sink & out, const Store & store, const BasicDerivation & dr
 std::string hashPlaceholder(const std::string & outputName)
 {
 // FIXME: memoize?
-return "/" + hashString(HashType::SHA256, "nix-output:" + outputName).to_string(Base::Base32, false);
+return "/" + hashString(htSHA256, "nix-output:" + outputName).to_string(Base32, false);
 }
@@ -11,7 +11,7 @@ struct HashAndWriteSink : Sink
 {
 Sink & writeSink;
 HashSink hashSink;
-HashAndWriteSink(Sink & writeSink) : writeSink(writeSink), hashSink(HashType::SHA256)
+HashAndWriteSink(Sink & writeSink) : writeSink(writeSink), hashSink(htSHA256)
 {
 }
 virtual void operator () (const unsigned char * data, size_t len)

@@ -34,7 +34,7 @@ void Store::exportPaths(const StorePathSet & paths, Sink & sink)
 //logger->incExpected(doneLabel, sorted.size());

 for (auto & path : sorted) {
-//Activity act(*logger, Verbosity::Info, format("exporting path '%s'") % path);
+//Activity act(*logger, lvlInfo, format("exporting path '%s'") % path);
 sink << 1;
 exportPath(path, sink);
 //logger->incProgress(doneLabel);

@@ -57,7 +57,7 @@ void Store::exportPath(const StorePath & path, Sink & sink)
 Hash hash = hashAndWriteSink.currentHash();
 if (hash != info->narHash && info->narHash != Hash(*info->narHash.type))
 throw Error("hash of path '%s' has changed from '%s' to '%s'!",
-printStorePath(path), info->narHash.to_string(Base::Base32, true), hash.to_string(Base::Base32, true));
+printStorePath(path), info->narHash.to_string(Base32, true), hash.to_string(Base32, true));

 hashAndWriteSink
 << exportMagic

@@ -86,7 +86,7 @@ StorePaths Store::importPaths(Source & source, std::shared_ptr<FSAccessor> acces

 ValidPathInfo info(parseStorePath(readString(source)));

-//Activity act(*logger, Verbosity::Info, format("importing path '%s'") % info.path);
+//Activity act(*logger, lvlInfo, format("importing path '%s'") % info.path);

 info.references = readStorePaths<StorePathSet>(*this, source);

@@ -94,7 +94,7 @@ StorePaths Store::importPaths(Source & source, std::shared_ptr<FSAccessor> acces
 if (deriver != "")
 info.deriver = parseStorePath(deriver);

-info.narHash = hashString(HashType::SHA256, *tee.source.data);
+info.narHash = hashString(htSHA256, *tee.source.data);
 info.narSize = tee.source.data->size();

 // Ignore optional legacy signature.
@@ -89,7 +89,7 @@ struct curlFileTransfer : public FileTransfer
 Callback<FileTransferResult> && callback)
 : fileTransfer(fileTransfer)
 , request(request)
-, act(*logger, Verbosity::Talkative, ActivityType::Download,
+, act(*logger, lvlTalkative, actFileTransfer,
 fmt(request.data ? "uploading '%s'" : "downloading '%s'", request.uri),
 {request.uri}, request.parentAct)
 , callback(std::move(callback))

@@ -174,7 +174,7 @@ struct curlFileTransfer : public FileTransfer
 {
 size_t realSize = size * nmemb;
 std::string line((char *) contents, realSize);
-printMsg(Verbosity::Vomit, format("got header for '%s': %s") % request.uri % trim(line));
+printMsg(lvlVomit, format("got header for '%s': %s") % request.uri % trim(line));
 if (line.compare(0, 5, "HTTP/") == 0) { // new response starts
 result.etag = "";
 auto ss = tokenizeString<vector<string>>(line, " ");

@@ -257,7 +257,7 @@ struct curlFileTransfer : public FileTransfer

 curl_easy_reset(req);

-if (verbosity >= Verbosity::Vomit) {
+if (verbosity >= lvlVomit) {
 curl_easy_setopt(req, CURLOPT_VERBOSE, 1);
 curl_easy_setopt(req, CURLOPT_DEBUGFUNCTION, TransferItem::debugCallback);
 }
@@ -78,7 +78,7 @@ void LocalStore::syncWithGC()

 void LocalStore::addIndirectRoot(const Path & path)
 {
-string hash = hashString(HashType::SHA1, path).to_string(Base::Base32, false);
+string hash = hashString(htSHA1, path).to_string(Base32, false);
 Path realRoot = canonPath((format("%1%/%2%/auto/%3%")
 % stateDir % gcRootsDir % hash).str());
 makeSymlink(realRoot, path);

@@ -639,7 +639,7 @@ void LocalStore::tryToDelete(GCState & state, const Path & path)
 auto realPath = realStoreDir + "/" + std::string(baseNameOf(path));
 if (realPath == linksDir || realPath == trashDir) return;

-//Activity act(*logger, Verbosity::Debug, format("considering whether to delete '%1%'") % path);
+//Activity act(*logger, lvlDebug, format("considering whether to delete '%1%'") % path);

 auto storePath = maybeParseStorePath(path);

@@ -704,7 +704,7 @@ void LocalStore::removeUnusedLinks(const GCState & state)
 continue;
 }

-printMsg(Verbosity::Talkative, format("deleting unused link '%1%'") % path);
+printMsg(lvlTalkative, format("deleting unused link '%1%'") % path);

 if (unlink(path.c_str()) == -1)
 throw SysError("deleting '%1%'", path);

@@ -139,7 +139,7 @@ struct LegacySSHStore : public Store
 << cmdAddToStoreNar
 << printStorePath(info.path)
 << (info.deriver ? printStorePath(*info.deriver) : "")
-<< info.narHash.to_string(Base::Base16, false);
+<< info.narHash.to_string(Base16, false);
 writeStorePaths(*this, conn->to, info.references);
 conn->to
 << info.registrationTime
@@ -584,7 +584,7 @@ uint64_t LocalStore::addValidPath(State & state,

 state.stmtRegisterValidPath.use()
 (printStorePath(info.path))
-(info.narHash.to_string(Base::Base16, true))
+(info.narHash.to_string(Base16, true))
 (info.registrationTime == 0 ? time(0) : info.registrationTime)
 (info.deriver ? printStorePath(*info.deriver) : "", (bool) info.deriver)
 (info.narSize, info.narSize != 0)

@@ -684,7 +684,7 @@ void LocalStore::updatePathInfo(State & state, const ValidPathInfo & info)
 {
 state.stmtUpdatePathInfo.use()
 (info.narSize, info.narSize != 0)
-(info.narHash.to_string(Base::Base16, true))
+(info.narHash.to_string(Base16, true))
 (info.ultimate ? 1 : 0, info.ultimate)
 (concatStringsSep(" ", info.sigs), !info.sigs.empty())
 (info.ca, !info.ca.empty())

@@ -895,7 +895,7 @@ void LocalStore::registerValidPaths(const ValidPathInfos & infos)
 StorePathSet paths;

 for (auto & i : infos) {
-assert(i.narHash.type == HashType::SHA256);
+assert(i.narHash.type == htSHA256);
 if (isValidPath_(*state, i.path))
 updatePathInfo(*state, i);
 else

@@ -992,9 +992,9 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
 of the NAR. */
 std::unique_ptr<AbstractHashSink> hashSink;
 if (info.ca == "" || !info.references.count(info.path))
-hashSink = std::make_unique<HashSink>(HashType::SHA256);
+hashSink = std::make_unique<HashSink>(htSHA256);
 else
-hashSink = std::make_unique<HashModuloSink>(HashType::SHA256, std::string(info.path.hashPart()));
+hashSink = std::make_unique<HashModuloSink>(htSHA256, std::string(info.path.hashPart()));

 LambdaSource wrapperSource([&](unsigned char * data, size_t len) -> size_t {
 size_t n = source.read(data, len);

@@ -1008,7 +1008,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,

 if (hashResult.first != info.narHash)
 throw Error("hash mismatch importing path '%s';\n wanted: %s\n got: %s",
-printStorePath(info.path), info.narHash.to_string(Base::Base32, true), hashResult.first.to_string(Base::Base32, true));
+printStorePath(info.path), info.narHash.to_string(Base32, true), hashResult.first.to_string(Base32, true));

 if (hashResult.second != info.narSize)
 throw Error("size mismatch importing path '%s';\n wanted: %s\n got: %s",
@@ -1067,10 +1067,10 @@ StorePath LocalStore::addToStoreFromDump(const string & dump, const string & nam
 sha256); otherwise, compute it here. */
 HashResult hash;
 if (method == FileIngestionMethod::Recursive) {
-hash.first = hashAlgo == HashType::SHA256 ? h : hashString(HashType::SHA256, dump);
+hash.first = hashAlgo == htSHA256 ? h : hashString(htSHA256, dump);
 hash.second = dump.size();
 } else
-hash = hashPath(HashType::SHA256, realPath);
+hash = hashPath(htSHA256, realPath);

 optimisePath(realPath); // FIXME: combine with hashPath()

@@ -1109,7 +1109,7 @@ StorePath LocalStore::addToStore(const string & name, const Path & _srcPath,
 StorePath LocalStore::addTextToStore(const string & name, const string & s,
 const StorePathSet & references, RepairFlag repair)
 {
-auto hash = hashString(HashType::SHA256, s);
+auto hash = hashString(htSHA256, s);
 auto dstPath = makeTextPath(name, hash, references);

 addTempRoot(dstPath);

@@ -1133,7 +1133,7 @@ StorePath LocalStore::addTextToStore(const string & name, const string & s,

 StringSink sink;
 dumpString(s, sink);
-auto narHash = hashString(HashType::SHA256, *sink.s);
+auto narHash = hashString(htSHA256, *sink.s);

 optimisePath(realPath);

@@ -1141,7 +1141,7 @@ StorePath LocalStore::addTextToStore(const string & name, const string & s,
 info.narHash = narHash;
 info.narSize = sink.s->size();
 info.references = references;
-info.ca = "text:" + hash.to_string(Base::Base32, true);
+info.ca = "text:" + hash.to_string(Base32, true);
 registerValidPath(info);
 }
@@ -1219,9 +1219,9 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
 printInfo("checking link hashes...");

 for (auto & link : readDirectory(linksDir)) {
-printMsg(Verbosity::Talkative, "checking contents of '%s'", link.name);
+printMsg(lvlTalkative, "checking contents of '%s'", link.name);
 Path linkPath = linksDir + "/" + link.name;
-string hash = hashPath(HashType::SHA256, linkPath).first.to_string(Base::Base32, false);
+string hash = hashPath(htSHA256, linkPath).first.to_string(Base32, false);
 if (hash != link.name) {
 logError({
 .name = "Invalid hash",

@@ -1242,14 +1242,14 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)

 printInfo("checking store hashes...");

-Hash nullHash(HashType::SHA256);
+Hash nullHash(htSHA256);

 for (auto & i : validPaths) {
 try {
 auto info = std::const_pointer_cast<ValidPathInfo>(std::shared_ptr<const ValidPathInfo>(queryPathInfo(i)));

 /* Check the content hash (optionally - slow). */
-printMsg(Verbosity::Talkative, "checking contents of '%s'", printStorePath(i));
+printMsg(lvlTalkative, "checking contents of '%s'", printStorePath(i));

 std::unique_ptr<AbstractHashSink> hashSink;
 if (info->ca == "" || !info->references.count(info->path))

@@ -1264,7 +1264,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
 logError({
 .name = "Invalid hash - path modified",
 .hint = hintfmt("path '%s' was modified! expected hash '%s', got '%s'",
-printStorePath(i), info->narHash.to_string(Base::Base32, true), current.first.to_string(Base::Base32, true))
+printStorePath(i), info->narHash.to_string(Base32, true), current.first.to_string(Base32, true))
 });
 if (repair) repairPath(i); else errors = true;
 } else {

@@ -155,7 +155,7 @@ public:
 true) or simply the contents of a regular file (if recursive ==
 false). */
 StorePath addToStoreFromDump(const string & dump, const string & name,
-FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = HashType::SHA256, RepairFlag repair = NoRepair) override;
+FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair) override;

 StorePath addTextToStore(const string & name, const string & s,
 const StorePathSet & references, RepairFlag repair) override;
@@ -112,7 +112,7 @@ void Store::queryMissing(const std::vector<StorePathWithOutputs> & targets,
 StorePathSet & willBuild_, StorePathSet & willSubstitute_, StorePathSet & unknown_,
 unsigned long long & downloadSize_, unsigned long long & narSize_)
 {
-Activity act(*logger, Verbosity::Debug, ActivityType::Unknown, "querying info about missing paths");
+Activity act(*logger, lvlDebug, actUnknown, "querying info about missing paths");

 downloadSize_ = narSize_ = 0;

@@ -230,9 +230,9 @@ public:
 (std::string(info->path.name()))
 (narInfo ? narInfo->url : "", narInfo != 0)
 (narInfo ? narInfo->compression : "", narInfo != 0)
-(narInfo && narInfo->fileHash ? narInfo->fileHash.to_string(Base::Base32, true) : "", narInfo && narInfo->fileHash)
+(narInfo && narInfo->fileHash ? narInfo->fileHash.to_string(Base32, true) : "", narInfo && narInfo->fileHash)
 (narInfo ? narInfo->fileSize : 0, narInfo != 0 && narInfo->fileSize)
-(info->narHash.to_string(Base::Base32, true))
+(info->narHash.to_string(Base32, true))
 (info->narSize)
 (concatStringsSep(" ", info->shortRefs()))
 (info->deriver ? std::string(info->deriver->to_string()) : "", (bool) info->deriver)

@@ -86,11 +86,11 @@ std::string NarInfo::to_string(const Store & store) const
 res += "URL: " + url + "\n";
 assert(compression != "");
 res += "Compression: " + compression + "\n";
-assert(fileHash.type == HashType::SHA256);
-res += "FileHash: " + fileHash.to_string(Base::Base32, true) + "\n";
+assert(fileHash.type == htSHA256);
+res += "FileHash: " + fileHash.to_string(Base32, true) + "\n";
 res += "FileSize: " + std::to_string(fileSize) + "\n";
-assert(narHash.type == HashType::SHA256);
-res += "NarHash: " + narHash.to_string(Base::Base32, true) + "\n";
+assert(narHash.type == htSHA256);
+res += "NarHash: " + narHash.to_string(Base32, true) + "\n";
 res += "NarSize: " + std::to_string(narSize) + "\n";

 res += "References: " + concatStringsSep(" ", shortRefs()) + "\n";
@@ -57,7 +57,7 @@ LocalStore::InodeHash LocalStore::loadInodeHash()
 }
 if (errno) throw SysError("reading directory '%1%'", linksDir);

-printMsg(Verbosity::Talkative, format("loaded %1% hash inodes") % inodeHash.size());
+printMsg(lvlTalkative, format("loaded %1% hash inodes") % inodeHash.size());

 return inodeHash;
 }

@@ -152,11 +152,11 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
 Also note that if `path' is a symlink, then we're hashing the
 contents of the symlink (i.e. the result of readlink()), not
 the contents of the target (which may not even exist). */
-Hash hash = hashPath(HashType::SHA256, path).first;
-debug(format("'%1%' has hash '%2%'") % path % hash.to_string(Base::Base32, true));
+Hash hash = hashPath(htSHA256, path).first;
+debug(format("'%1%' has hash '%2%'") % path % hash.to_string(Base32, true));

 /* Check if this is a known hash. */
-Path linkPath = linksDir + "/" + hash.to_string(Base::Base32, false);
+Path linkPath = linksDir + "/" + hash.to_string(Base32, false);

 retry:
 if (!pathExists(linkPath)) {

@@ -205,7 +205,7 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
 goto retry;
 }

-printMsg(Verbosity::Talkative, format("linking '%1%' to '%2%'") % path % linkPath);
+printMsg(lvlTalkative, format("linking '%1%' to '%2%'") % path % linkPath);

 /* Make the containing directory writable, but only if it's not
 the store itself (we don't want or need to mess with its

@@ -255,13 +255,13 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
 stats.blocksFreed += st.st_blocks;

 if (act)
-act->result(ResultType::FileLinked, st.st_size, st.st_blocks);
+act->result(resFileLinked, st.st_size, st.st_blocks);
 }


 void LocalStore::optimiseStore(OptimiseStats & stats)
 {
-Activity act(*logger, ActivityType::OptimiseStore);
+Activity act(*logger, actOptimiseStore);

 auto paths = queryAllValidPaths();
 InodeHash inodeHash = loadInodeHash();

@@ -274,7 +274,7 @@ void LocalStore::optimiseStore(OptimiseStats & stats)
 addTempRoot(i);
 if (!isValidPath(i)) continue; /* path was GC'ed, probably */
 {
-Activity act(*logger, Verbosity::Talkative, ActivityType::Unknown, fmt("optimising path '%s'", printStorePath(i)));
+Activity act(*logger, lvlTalkative, actUnknown, fmt("optimising path '%s'", printStorePath(i)));
 optimisePath_(&act, stats, realStoreDir + "/" + std::string(i.to_string()), inodeHash);
 }
 done++;
@@ -31,7 +31,7 @@ StorePath::StorePath(std::string_view _baseName)
 }

 StorePath::StorePath(const Hash & hash, std::string_view _name)
-: baseName((hash.to_string(Base::Base32, false) + "-").append(std::string(_name)))
+: baseName((hash.to_string(Base32, false) + "-").append(std::string(_name)))
 {
 checkName(baseName, name());
 }

@@ -54,7 +54,7 @@ struct RefScanSink : Sink

 string tail;

-RefScanSink() : hashSink(HashType::SHA256) { }
+RefScanSink() : hashSink(htSHA256) { }

 void operator () (const unsigned char * data, size_t len);
 };

@@ -96,7 +96,7 @@ PathSet scanForReferences(const string & path,
 string s = string(baseName, 0, pos);
 assert(s.size() == refLength);
 assert(backMap.find(s) == backMap.end());
-// parseHash(HashType::SHA256, s);
+// parseHash(htSHA256, s);
 sink.hashes.insert(s);
 backMap[s] = i;
 }
@@ -177,11 +177,11 @@ void RemoteStore::setOptions(Connection & conn)
 << settings.keepFailed
 << settings.keepGoing
 << settings.tryFallback
-<< (uint64_t) verbosity
+<< verbosity
 << settings.maxBuildJobs
 << settings.maxSilentTime
 << true
-<< (uint64_t) (settings.verboseBuild ? Verbosity::Error : Verbosity::Vomit)
+<< (settings.verboseBuild ? lvlError : lvlVomit)
 << 0 // obsolete log type
 << 0 /* obsolete print build trace */
 << settings.buildCores

@@ -375,7 +375,7 @@ void RemoteStore::queryPathInfoUncached(const StorePath & path,
 info = std::make_shared<ValidPathInfo>(StorePath(path));
 auto deriver = readString(conn->from);
 if (deriver != "") info->deriver = parseStorePath(deriver);
-info->narHash = Hash(readString(conn->from), HashType::SHA256);
+info->narHash = Hash(readString(conn->from), htSHA256);
 info->references = readStorePaths<StorePathSet>(*this, conn->from);
 conn->from >> info->registrationTime >> info->narSize;
 if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 16) {

@@ -462,7 +462,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source,
 conn->to << wopAddToStoreNar
 << printStorePath(info.path)
 << (info.deriver ? printStorePath(*info.deriver) : "")
-<< info.narHash.to_string(Base::Base16, false);
+<< info.narHash.to_string(Base16, false);
 writeStorePaths(*this, conn->to, info.references);
 conn->to << info.registrationTime << info.narSize
 << info.ultimate << info.sigs << info.ca

@@ -486,7 +486,7 @@ StorePath RemoteStore::addToStore(const string & name, const Path & _srcPath,
 conn->to
 << wopAddToStore
 << name
-<< ((hashAlgo == HashType::SHA256 && method == FileIngestionMethod::Recursive) ? 0 : 1) /* backwards compatibility hack */
+<< ((hashAlgo == htSHA256 && method == FileIngestionMethod::Recursive) ? 0 : 1) /* backwards compatibility hack */
 << (method == FileIngestionMethod::Recursive ? 1 : 0)
 << printHashType(hashAlgo);
@ -63,7 +63,7 @@ public:
|
||||||
std::shared_ptr<FSAccessor> accessor) override;
|
std::shared_ptr<FSAccessor> accessor) override;
|
||||||
|
|
||||||
StorePath addToStore(const string & name, const Path & srcPath,
|
StorePath addToStore(const string & name, const Path & srcPath,
|
||||||
FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = HashType::SHA256,
|
FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256,
|
||||||
PathFilter & filter = defaultPathFilter, RepairFlag repair = NoRepair) override;
|
PathFilter & filter = defaultPathFilter, RepairFlag repair = NoRepair) override;
|
||||||
|
|
||||||
StorePath addTextToStore(const string & name, const string & s,
|
StorePath addTextToStore(const string & name, const string & s,
|
||||||
|
|
|
@ -70,9 +70,9 @@ static void initAWS()
|
||||||
shared.cc), so don't let aws-sdk-cpp override it. */
|
shared.cc), so don't let aws-sdk-cpp override it. */
|
||||||
options.cryptoOptions.initAndCleanupOpenSSL = false;
|
options.cryptoOptions.initAndCleanupOpenSSL = false;
|
||||||
|
|
||||||
if (verbosity >= Verbosity::Debug) {
|
if (verbosity >= lvlDebug) {
|
||||||
options.loggingOptions.logLevel =
|
options.loggingOptions.logLevel =
|
||||||
verbosity == Verbosity::Debug
|
verbosity == lvlDebug
|
||||||
? Aws::Utils::Logging::LogLevel::Debug
|
? Aws::Utils::Logging::LogLevel::Debug
|
||||||
: Aws::Utils::Logging::LogLevel::Trace;
|
: Aws::Utils::Logging::LogLevel::Trace;
|
||||||
options.loggingOptions.logger_create_fn = [options]() {
|
options.loggingOptions.logger_create_fn = [options]() {
|
||||||
|
|
|
@ -58,7 +58,7 @@ std::unique_ptr<SSHMaster::Connection> SSHMaster::startCommand(const std::string
|
||||||
addCommonSSHOpts(args);
|
addCommonSSHOpts(args);
|
||||||
if (socketPath != "")
|
if (socketPath != "")
|
||||||
args.insert(args.end(), {"-S", socketPath});
|
args.insert(args.end(), {"-S", socketPath});
|
||||||
if (verbosity >= Verbosity::Chatty)
|
if (verbosity >= lvlChatty)
|
||||||
args.push_back("-v");
|
args.push_back("-v");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -110,7 +110,7 @@ Path SSHMaster::startMaster()
|
||||||
, "-o", "LocalCommand=echo started"
|
, "-o", "LocalCommand=echo started"
|
||||||
, "-o", "PermitLocalCommand=yes"
|
, "-o", "PermitLocalCommand=yes"
|
||||||
};
|
};
|
||||||
if (verbosity >= Verbosity::Chatty)
|
if (verbosity >= lvlChatty)
|
||||||
args.push_back("-v");
|
args.push_back("-v");
|
||||||
addCommonSSHOpts(args);
|
addCommonSSHOpts(args);
|
||||||
execvp(args.begin()->c_str(), stringsToCharPtrs(args).data());
|
execvp(args.begin()->c_str(), stringsToCharPtrs(args).data());
|
||||||
|
|
|
@ -143,8 +143,8 @@ StorePath Store::makeStorePath(const string & type,
|
||||||
const Hash & hash, std::string_view name) const
|
const Hash & hash, std::string_view name) const
|
||||||
{
|
{
|
||||||
/* e.g., "source:sha256:1abc...:/nix/store:foo.tar.gz" */
|
/* e.g., "source:sha256:1abc...:/nix/store:foo.tar.gz" */
|
||||||
string s = type + ":" + hash.to_string(Base::Base16, true) + ":" + storeDir + ":" + std::string(name);
|
string s = type + ":" + hash.to_string(Base16, true) + ":" + storeDir + ":" + std::string(name);
|
||||||
auto h = compressHash(hashString(HashType::SHA256, s), 20);
|
auto h = compressHash(hashString(htSHA256, s), 20);
|
||||||
return StorePath(h, name);
|
return StorePath(h, name);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -179,15 +179,15 @@ StorePath Store::makeFixedOutputPath(
|
||||||
const StorePathSet & references,
|
const StorePathSet & references,
|
||||||
bool hasSelfReference) const
|
bool hasSelfReference) const
|
||||||
{
|
{
|
||||||
if (hash.type == HashType::SHA256 && recursive == FileIngestionMethod::Recursive) {
|
if (hash.type == htSHA256 && recursive == FileIngestionMethod::Recursive) {
|
||||||
return makeStorePath(makeType(*this, "source", references, hasSelfReference), hash, name);
|
return makeStorePath(makeType(*this, "source", references, hasSelfReference), hash, name);
|
||||||
} else {
|
} else {
|
||||||
assert(references.empty());
|
assert(references.empty());
|
||||||
return makeStorePath("output:out",
|
return makeStorePath("output:out",
|
||||||
hashString(HashType::SHA256,
|
hashString(htSHA256,
|
||||||
"fixed:out:"
|
"fixed:out:"
|
||||||
+ (recursive == FileIngestionMethod::Recursive ? (string) "r:" : "")
|
+ (recursive == FileIngestionMethod::Recursive ? (string) "r:" : "")
|
||||||
+ hash.to_string(Base::Base16, true) + ":"),
|
+ hash.to_string(Base16, true) + ":"),
|
||||||
name);
|
name);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -196,7 +196,7 @@ StorePath Store::makeFixedOutputPath(
|
||||||
StorePath Store::makeTextPath(std::string_view name, const Hash & hash,
|
StorePath Store::makeTextPath(std::string_view name, const Hash & hash,
|
||||||
const StorePathSet & references) const
|
const StorePathSet & references) const
|
||||||
{
|
{
|
||||||
assert(hash.type == HashType::SHA256);
|
assert(hash.type == htSHA256);
|
||||||
/* Stuff the references (if any) into the type. This is a bit
|
/* Stuff the references (if any) into the type. This is a bit
|
||||||
hacky, but we can't put them in `s' since that would be
|
hacky, but we can't put them in `s' since that would be
|
||||||
ambiguous. */
|
ambiguous. */
|
||||||
|
@ -217,7 +217,7 @@ std::pair<StorePath, Hash> Store::computeStorePathForPath(std::string_view name,
|
||||||
StorePath Store::computeStorePathForText(const string & name, const string & s,
|
StorePath Store::computeStorePathForText(const string & name, const string & s,
|
||||||
const StorePathSet & references) const
|
const StorePathSet & references) const
|
||||||
{
|
{
|
||||||
return makeTextPath(name, hashString(HashType::SHA256, s), references);
|
return makeTextPath(name, hashString(htSHA256, s), references);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@ -430,7 +430,7 @@ string Store::makeValidityRegistration(const StorePathSet & paths,
|
||||||
auto info = queryPathInfo(i);
|
auto info = queryPathInfo(i);
|
||||||
|
|
||||||
if (showHash) {
|
if (showHash) {
|
||||||
s += info->narHash.to_string(Base::Base16, false) + "\n";
|
s += info->narHash.to_string(Base16, false) + "\n";
|
||||||
s += (format("%1%\n") % info->narSize).str();
|
s += (format("%1%\n") % info->narSize).str();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -505,7 +505,7 @@ void Store::pathInfoToJSON(JSONPlaceholder & jsonOut, const StorePathSet & store
|
||||||
if (!narInfo->url.empty())
|
if (!narInfo->url.empty())
|
||||||
jsonPath.attr("url", narInfo->url);
|
jsonPath.attr("url", narInfo->url);
|
||||||
if (narInfo->fileHash)
|
if (narInfo->fileHash)
|
||||||
jsonPath.attr("downloadHash", narInfo->fileHash.to_string(Base::Base32, true));
|
jsonPath.attr("downloadHash", narInfo->fileHash.to_string(Base32, true));
|
||||||
if (narInfo->fileSize)
|
if (narInfo->fileSize)
|
||||||
jsonPath.attr("downloadSize", narInfo->fileSize);
|
jsonPath.attr("downloadSize", narInfo->fileSize);
|
||||||
if (showClosureSize)
|
if (showClosureSize)
|
||||||
|
@ -568,7 +568,7 @@ void copyStorePath(ref<Store> srcStore, ref<Store> dstStore,
|
||||||
auto srcUri = srcStore->getUri();
|
auto srcUri = srcStore->getUri();
|
||||||
auto dstUri = dstStore->getUri();
|
auto dstUri = dstStore->getUri();
|
||||||
|
|
||||||
Activity act(*logger, Verbosity::Info, ActivityType::CopyPath,
|
Activity act(*logger, lvlInfo, actCopyPath,
|
||||||
srcUri == "local" || srcUri == "daemon"
|
srcUri == "local" || srcUri == "daemon"
|
||||||
? fmt("copying path '%s' to '%s'", srcStore->printStorePath(storePath), dstUri)
|
? fmt("copying path '%s' to '%s'", srcStore->printStorePath(storePath), dstUri)
|
||||||
: dstUri == "local" || dstUri == "daemon"
|
: dstUri == "local" || dstUri == "daemon"
|
||||||
|
@ -585,7 +585,7 @@ void copyStorePath(ref<Store> srcStore, ref<Store> dstStore,
|
||||||
StringSink sink;
|
StringSink sink;
|
||||||
srcStore->narFromPath({storePath}, sink);
|
srcStore->narFromPath({storePath}, sink);
|
||||||
auto info2 = make_ref<ValidPathInfo>(*info);
|
auto info2 = make_ref<ValidPathInfo>(*info);
|
||||||
info2->narHash = hashString(HashType::SHA256, *sink.s);
|
info2->narHash = hashString(htSHA256, *sink.s);
|
||||||
if (!info->narSize) info2->narSize = sink.s->size();
|
if (!info->narSize) info2->narSize = sink.s->size();
|
||||||
if (info->ultimate) info2->ultimate = false;
|
if (info->ultimate) info2->ultimate = false;
|
||||||
info = info2;
|
info = info2;
|
||||||
|
@ -627,7 +627,7 @@ void copyPaths(ref<Store> srcStore, ref<Store> dstStore, const StorePathSet & st
|
||||||
|
|
||||||
if (missing.empty()) return;
|
if (missing.empty()) return;
|
||||||
|
|
||||||
Activity act(*logger, Verbosity::Info, ActivityType::CopyPaths, fmt("copying %d paths", missing.size()));
|
Activity act(*logger, lvlInfo, actCopyPaths, fmt("copying %d paths", missing.size()));
|
||||||
|
|
||||||
std::atomic<size_t> nrDone{0};
|
std::atomic<size_t> nrDone{0};
|
||||||
std::atomic<size_t> nrFailed{0};
|
std::atomic<size_t> nrFailed{0};
|
||||||
|
@ -653,7 +653,7 @@ void copyPaths(ref<Store> srcStore, ref<Store> dstStore, const StorePathSet & st
|
||||||
auto info = srcStore->queryPathInfo(srcStore->parseStorePath(storePath));
|
auto info = srcStore->queryPathInfo(srcStore->parseStorePath(storePath));
|
||||||
|
|
||||||
bytesExpected += info->narSize;
|
bytesExpected += info->narSize;
|
||||||
act.setExpected(ActivityType::CopyPath, bytesExpected);
|
act.setExpected(actCopyPath, bytesExpected);
|
||||||
|
|
||||||
return srcStore->printStorePathSet(info->references);
|
return srcStore->printStorePathSet(info->references);
|
||||||
},
|
},
|
||||||
|
@ -672,7 +672,7 @@ void copyPaths(ref<Store> srcStore, ref<Store> dstStore, const StorePathSet & st
|
||||||
nrFailed++;
|
nrFailed++;
|
||||||
if (!settings.keepGoing)
|
if (!settings.keepGoing)
|
||||||
throw e;
|
throw e;
|
||||||
logger->log(Verbosity::Error, fmt("could not copy %s: %s", storePathS, e.what()));
|
logger->log(lvlError, fmt("could not copy %s: %s", storePathS, e.what()));
|
||||||
showProgress();
|
showProgress();
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
@ -703,7 +703,7 @@ std::optional<ValidPathInfo> decodeValidPathInfo(const Store & store, std::istre
|
||||||
if (hashGiven) {
|
if (hashGiven) {
|
||||||
string s;
|
string s;
|
||||||
getline(str, s);
|
getline(str, s);
|
||||||
info.narHash = Hash(s, HashType::SHA256);
|
info.narHash = Hash(s, htSHA256);
|
||||||
getline(str, s);
|
getline(str, s);
|
||||||
if (!string2Int(s, info.narSize)) throw Error("number expected");
|
if (!string2Int(s, info.narSize)) throw Error("number expected");
|
||||||
}
|
}
|
||||||
|
@ -746,7 +746,7 @@ std::string ValidPathInfo::fingerprint(const Store & store) const
|
||||||
store.printStorePath(path));
|
store.printStorePath(path));
|
||||||
return
|
return
|
||||||
"1;" + store.printStorePath(path) + ";"
|
"1;" + store.printStorePath(path) + ";"
|
||||||
+ narHash.to_string(Base::Base32, true) + ";"
|
+ narHash.to_string(Base32, true) + ";"
|
||||||
+ std::to_string(narSize) + ";"
|
+ std::to_string(narSize) + ";"
|
||||||
+ concatStringsSep(",", store.printStorePathSet(references));
|
+ concatStringsSep(",", store.printStorePathSet(references));
|
||||||
}
|
}
|
||||||
|
@ -826,7 +826,7 @@ std::string makeFixedOutputCA(FileIngestionMethod recursive, const Hash & hash)
|
||||||
{
|
{
|
||||||
return "fixed:"
|
return "fixed:"
|
||||||
+ (recursive == FileIngestionMethod::Recursive ? (std::string) "r:" : "")
|
+ (recursive == FileIngestionMethod::Recursive ? (std::string) "r:" : "")
|
||||||
+ hash.to_string(Base::Base32, true);
|
+ hash.to_string(Base32, true);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -360,7 +360,7 @@ public:
|
||||||
path and the cryptographic hash of the contents of srcPath. */
|
path and the cryptographic hash of the contents of srcPath. */
|
||||||
std::pair<StorePath, Hash> computeStorePathForPath(std::string_view name,
|
std::pair<StorePath, Hash> computeStorePathForPath(std::string_view name,
|
||||||
const Path & srcPath, FileIngestionMethod method = FileIngestionMethod::Recursive,
|
const Path & srcPath, FileIngestionMethod method = FileIngestionMethod::Recursive,
|
||||||
HashType hashAlgo = HashType::SHA256, PathFilter & filter = defaultPathFilter) const;
|
HashType hashAlgo = htSHA256, PathFilter & filter = defaultPathFilter) const;
|
||||||
|
|
||||||
/* Preparatory part of addTextToStore().
|
/* Preparatory part of addTextToStore().
|
||||||
|
|
||||||
|
@ -454,12 +454,12 @@ public:
|
||||||
The function object `filter' can be used to exclude files (see
|
The function object `filter' can be used to exclude files (see
|
||||||
libutil/archive.hh). */
|
libutil/archive.hh). */
|
||||||
virtual StorePath addToStore(const string & name, const Path & srcPath,
|
virtual StorePath addToStore(const string & name, const Path & srcPath,
|
||||||
FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = HashType::SHA256,
|
FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256,
|
||||||
PathFilter & filter = defaultPathFilter, RepairFlag repair = NoRepair) = 0;
|
PathFilter & filter = defaultPathFilter, RepairFlag repair = NoRepair) = 0;
|
||||||
|
|
||||||
// FIXME: remove?
|
// FIXME: remove?
|
||||||
virtual StorePath addToStoreFromDump(const string & dump, const string & name,
|
virtual StorePath addToStoreFromDump(const string & dump, const string & name,
|
||||||
FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = HashType::SHA256, RepairFlag repair = NoRepair)
|
FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair)
|
||||||
{
|
{
|
||||||
throw Error("addToStoreFromDump() is not supported by this store");
|
throw Error("addToStoreFromDump() is not supported by this store");
|
||||||
}
|
}
|
||||||
|
@ -553,7 +553,7 @@ public:
|
||||||
each path is included. */
|
each path is included. */
|
||||||
void pathInfoToJSON(JSONPlaceholder & jsonOut, const StorePathSet & storePaths,
|
void pathInfoToJSON(JSONPlaceholder & jsonOut, const StorePathSet & storePaths,
|
||||||
bool includeImpureInfo, bool showClosureSize,
|
bool includeImpureInfo, bool showClosureSize,
|
||||||
Base hashBase = Base::Base32,
|
Base hashBase = Base32,
|
||||||
AllowInvalidFlag allowInvalid = DisallowInvalid);
|
AllowInvalidFlag allowInvalid = DisallowInvalid);
|
||||||
|
|
||||||
/* Return the size of the closure of the specified path, that is,
|
/* Return the size of the closure of the specified path, that is,
|
||||||
|
|
|
@ -10,7 +10,7 @@ namespace nix {
|
||||||
|
|
||||||
MakeError(UsageError, Error);
|
MakeError(UsageError, Error);
|
||||||
|
|
||||||
enum struct HashType : char;
|
enum HashType : char;
|
||||||
|
|
||||||
class Args
|
class Args
|
||||||
{
|
{
|
||||||
|
|
|
@ -314,7 +314,7 @@ struct XzCompressionSink : CompressionSink
|
||||||
ret = lzma_stream_encoder_mt(&strm, &mt_options);
|
ret = lzma_stream_encoder_mt(&strm, &mt_options);
|
||||||
done = true;
|
done = true;
|
||||||
#else
|
#else
|
||||||
printMsg(Verbosity::Error, "warning: parallel XZ compression requested but not supported, falling back to single-threaded compression");
|
printMsg(lvlError, "warning: parallel XZ compression requested but not supported, falling back to single-threaded compression");
|
||||||
#endif
|
#endif
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -110,50 +110,50 @@ std::ostream& operator<<(std::ostream &out, const ErrorInfo &einfo)
|
||||||
|
|
||||||
string levelString;
|
string levelString;
|
||||||
switch (einfo.level) {
|
switch (einfo.level) {
|
||||||
case Verbosity::Error: {
|
case Verbosity::lvlError: {
|
||||||
levelString = ANSI_RED;
|
levelString = ANSI_RED;
|
||||||
levelString += "error:";
|
levelString += "error:";
|
||||||
levelString += ANSI_NORMAL;
|
levelString += ANSI_NORMAL;
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
case Verbosity::Warn: {
|
case Verbosity::lvlWarn: {
|
||||||
levelString = ANSI_YELLOW;
|
levelString = ANSI_YELLOW;
|
||||||
levelString += "warning:";
|
levelString += "warning:";
|
||||||
levelString += ANSI_NORMAL;
|
levelString += ANSI_NORMAL;
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
case Verbosity::Info: {
|
case Verbosity::lvlInfo: {
|
||||||
levelString = ANSI_GREEN;
|
levelString = ANSI_GREEN;
|
||||||
levelString += "info:";
|
levelString += "info:";
|
||||||
levelString += ANSI_NORMAL;
|
levelString += ANSI_NORMAL;
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
case Verbosity::Talkative: {
|
case Verbosity::lvlTalkative: {
|
||||||
levelString = ANSI_GREEN;
|
levelString = ANSI_GREEN;
|
||||||
levelString += "talk:";
|
levelString += "talk:";
|
||||||
levelString += ANSI_NORMAL;
|
levelString += ANSI_NORMAL;
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
case Verbosity::Chatty: {
|
case Verbosity::lvlChatty: {
|
||||||
levelString = ANSI_GREEN;
|
levelString = ANSI_GREEN;
|
||||||
levelString += "chat:";
|
levelString += "chat:";
|
||||||
levelString += ANSI_NORMAL;
|
levelString += ANSI_NORMAL;
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
case Verbosity::Vomit: {
|
case Verbosity::lvlVomit: {
|
||||||
levelString = ANSI_GREEN;
|
levelString = ANSI_GREEN;
|
||||||
levelString += "vomit:";
|
levelString += "vomit:";
|
||||||
levelString += ANSI_NORMAL;
|
levelString += ANSI_NORMAL;
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
case Verbosity::Debug: {
|
case Verbosity::lvlDebug: {
|
||||||
levelString = ANSI_YELLOW;
|
levelString = ANSI_YELLOW;
|
||||||
levelString += "debug:";
|
levelString += "debug:";
|
||||||
levelString += ANSI_NORMAL;
|
levelString += ANSI_NORMAL;
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
default: {
|
default: {
|
||||||
levelString = fmt("invalid error level: %d", (uint8_t)einfo.level);
|
levelString = fmt("invalid error level: %1%", einfo.level);
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -40,15 +40,15 @@ See the error-demo.cc program for usage examples.
|
||||||
|
|
||||||
*/
|
*/
|
||||||
|
|
||||||
enum struct Verbosity {
|
typedef enum {
|
||||||
Error = 0,
|
lvlError = 0,
|
||||||
Warn,
|
lvlWarn,
|
||||||
Info,
|
lvlInfo,
|
||||||
Talkative,
|
lvlTalkative,
|
||||||
Chatty,
|
lvlChatty,
|
||||||
Debug,
|
lvlDebug,
|
||||||
Vomit,
|
lvlVomit
|
||||||
};
|
} Verbosity;
|
||||||
|
|
||||||
// ErrPos indicates the location of an error in a nix file.
|
// ErrPos indicates the location of an error in a nix file.
|
||||||
struct ErrPos {
|
struct ErrPos {
|
||||||
|
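Aside (not part of the commit): the hunk above reverts the scoped enum struct Verbosity back to an unscoped typedef enum, which is why call sites throughout this diff change from Verbosity::Error to lvlError. A minimal standalone sketch of the language difference, with illustrative names only:

// Sketch only: contrasts a scoped enum with the unscoped typedef enum restored above.
#include <iostream>

enum struct ScopedLevel { Error = 0, Warn };           // scoped: enumerators need qualification, no implicit int conversion
typedef enum { lvlError = 0, lvlWarn } UnscopedLevel;  // unscoped: enumerators are visible directly

int main()
{
    ScopedLevel s = ScopedLevel::Error;                // must be written qualified
    std::cout << static_cast<int>(s) << "\n";          // explicit cast needed to print as a number

    UnscopedLevel u = lvlWarn;                         // no qualification needed
    std::cout << u << "\n";                            // converts to int implicitly
    return 0;
}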
@@ -113,7 +113,7 @@ public:

 template<typename... Args>
 BaseError(unsigned int status, const Args & ... args)
-: err { .level = Verbosity::Error,
+: err { .level = lvlError,
 .hint = hintfmt(args...)
 }
 , status(status)
@@ -121,13 +121,13 @@ public:

 template<typename... Args>
 BaseError(const std::string & fs, const Args & ... args)
-: err { .level = Verbosity::Error,
+: err { .level = lvlError,
 .hint = hintfmt(fs, args...)
 }
 { }

 BaseError(hintformat hint)
-: err { .level = Verbosity::Error,
+: err { .level = lvlError,
 .hint = hint
 }
 { }
@@ -21,10 +21,10 @@ void Hash::init()
 {
 if (!type) abort();
 switch (*type) {
-case HashType::MD5: hashSize = md5HashSize; break;
-case HashType::SHA1: hashSize = sha1HashSize; break;
-case HashType::SHA256: hashSize = sha256HashSize; break;
-case HashType::SHA512: hashSize = sha512HashSize; break;
+case htMD5: hashSize = md5HashSize; break;
+case htSHA1: hashSize = sha1HashSize; break;
+case htSHA256: hashSize = sha256HashSize; break;
+case htSHA512: hashSize = sha512HashSize; break;
 }
 assert(hashSize <= maxHashSize);
 memset(hash, 0, maxHashSize);
@@ -101,7 +101,7 @@ static string printHash32(const Hash & hash)

 string printHash16or32(const Hash & hash)
 {
-return hash.to_string(hash.type == HashType::MD5 ? Base::Base16 : Base::Base32, false);
+return hash.to_string(hash.type == htMD5 ? Base16 : Base32, false);
 }

@@ -115,19 +115,19 @@ HashType assertInitHashType(const Hash & h) {
 std::string Hash::to_string(Base base, bool includeType) const
 {
 std::string s;
-if (base == Base::SRI || includeType) {
+if (base == SRI || includeType) {
 s += printHashType(assertInitHashType(*this));
-s += base == Base::SRI ? '-' : ':';
+s += base == SRI ? '-' : ':';
 }
 switch (base) {
-case Base::Base16:
+case Base16:
 s += printHash16(*this);
 break;
-case Base::Base32:
+case Base32:
 s += printHash32(*this);
 break;
-case Base::Base64:
-case Base::SRI:
+case Base64:
+case SRI:
 s += base64Encode(std::string((const char *) hash, hashSize));
 break;
 }
@@ -241,29 +241,29 @@ union Ctx

 static void start(HashType ht, Ctx & ctx)
 {
-if (ht == HashType::MD5) MD5_Init(&ctx.md5);
-else if (ht == HashType::SHA1) SHA1_Init(&ctx.sha1);
-else if (ht == HashType::SHA256) SHA256_Init(&ctx.sha256);
-else if (ht == HashType::SHA512) SHA512_Init(&ctx.sha512);
+if (ht == htMD5) MD5_Init(&ctx.md5);
+else if (ht == htSHA1) SHA1_Init(&ctx.sha1);
+else if (ht == htSHA256) SHA256_Init(&ctx.sha256);
+else if (ht == htSHA512) SHA512_Init(&ctx.sha512);
 }


 static void update(HashType ht, Ctx & ctx,
 const unsigned char * bytes, size_t len)
 {
-if (ht == HashType::MD5) MD5_Update(&ctx.md5, bytes, len);
-else if (ht == HashType::SHA1) SHA1_Update(&ctx.sha1, bytes, len);
-else if (ht == HashType::SHA256) SHA256_Update(&ctx.sha256, bytes, len);
-else if (ht == HashType::SHA512) SHA512_Update(&ctx.sha512, bytes, len);
+if (ht == htMD5) MD5_Update(&ctx.md5, bytes, len);
+else if (ht == htSHA1) SHA1_Update(&ctx.sha1, bytes, len);
+else if (ht == htSHA256) SHA256_Update(&ctx.sha256, bytes, len);
+else if (ht == htSHA512) SHA512_Update(&ctx.sha512, bytes, len);
 }


 static void finish(HashType ht, Ctx & ctx, unsigned char * hash)
 {
-if (ht == HashType::MD5) MD5_Final(hash, &ctx.md5);
-else if (ht == HashType::SHA1) SHA1_Final(hash, &ctx.sha1);
-else if (ht == HashType::SHA256) SHA256_Final(hash, &ctx.sha256);
-else if (ht == HashType::SHA512) SHA512_Final(hash, &ctx.sha512);
+if (ht == htMD5) MD5_Final(hash, &ctx.md5);
+else if (ht == htSHA1) SHA1_Final(hash, &ctx.sha1);
+else if (ht == htSHA256) SHA256_Final(hash, &ctx.sha256);
+else if (ht == htSHA512) SHA512_Final(hash, &ctx.sha512);
 }


@@ -344,10 +344,10 @@ Hash compressHash(const Hash & hash, unsigned int newSize)

 std::optional<HashType> parseHashTypeOpt(const string & s)
 {
-if (s == "md5") return HashType::MD5;
-else if (s == "sha1") return HashType::SHA1;
-else if (s == "sha256") return HashType::SHA256;
-else if (s == "sha512") return HashType::SHA512;
+if (s == "md5") return htMD5;
+else if (s == "sha1") return htSHA1;
+else if (s == "sha256") return htSHA256;
+else if (s == "sha512") return htSHA512;
 else return std::optional<HashType> {};
 }

@@ -362,14 +362,15 @@ HashType parseHashType(const string & s)

 string printHashType(HashType ht)
 {
-string ret;
 switch (ht) {
-case HashType::MD5: ret = "md5"; break;
-case HashType::SHA1: ret = "sha1"; break;
-case HashType::SHA256: ret = "sha256"; break;
-case HashType::SHA512: ret = "sha512"; break;
+case htMD5: return "md5"; break;
+case htSHA1: return "sha1"; break;
+case htSHA256: return "sha256"; break;
+case htSHA512: return "sha512"; break;
 }
-return ret;
+// illegal hash type enum value internally, as opposed to external input
+// which should be validated with nice error message.
+abort();
 }

 }
@@ -10,12 +10,7 @@ namespace nix {
 MakeError(BadHash, Error);


-enum struct HashType : char {
-MD5,
-SHA1,
-SHA256,
-SHA512,
-};
+enum HashType : char { htMD5, htSHA1, htSHA256, htSHA512 };


 const int md5HashSize = 16;
@@ -25,12 +20,7 @@ const int sha512HashSize = 64;

 extern const string base32Chars;

-enum struct Base {
-Base64,
-Base32,
-Base16,
-SRI,
-};
+enum Base : int { Base64, Base32, Base16, SRI };


 struct Hash
@@ -97,14 +87,14 @@ struct Hash

 std::string gitRev() const
 {
-assert(type == HashType::SHA1);
-return to_string(Base::Base16, false);
+assert(type == htSHA1);
+return to_string(Base16, false);
 }

 std::string gitShortRev() const
 {
-assert(type == HashType::SHA1);
-return std::string(to_string(Base::Base16, false), 0, 7);
+assert(type == htSHA1);
+return std::string(to_string(Base16, false), 0, 7);
 }
 };

@@ -22,7 +22,7 @@ Logger * logger = makeSimpleLogger(true);

 void Logger::warn(const std::string & msg)
 {
-log(Verbosity::Warn, ANSI_YELLOW "warning:" ANSI_NORMAL " " + msg);
+log(lvlWarn, ANSI_YELLOW "warning:" ANSI_NORMAL " " + msg);
 }

 void Logger::writeToStdout(std::string_view s)
@@ -57,10 +57,10 @@ public:
 if (systemd) {
 char c;
 switch (lvl) {
-case Verbosity::Error: c = '3'; break;
-case Verbosity::Warn: c = '4'; break;
-case Verbosity::Info: c = '5'; break;
-case Verbosity::Talkative: case Verbosity::Chatty: c = '6'; break;
+case lvlError: c = '3'; break;
+case lvlWarn: c = '4'; break;
+case lvlInfo: c = '5'; break;
+case lvlTalkative: case lvlChatty: c = '6'; break;
 default: c = '7';
 }
 prefix = std::string("<") + c + ">";
@@ -87,18 +87,18 @@ public:

 void result(ActivityId act, ResultType type, const Fields & fields) override
 {
-if (type == ResultType::BuildLogLine && printBuildLogs) {
+if (type == resBuildLogLine && printBuildLogs) {
 auto lastLine = fields[0].s;
 printError(lastLine);
 }
-else if (type == ResultType::PostBuildLogLine && printBuildLogs) {
+else if (type == resPostBuildLogLine && printBuildLogs) {
 auto lastLine = fields[0].s;
 printError("post-build-hook: " + lastLine);
 }
 }
 };

-Verbosity verbosity = Verbosity::Info;
+Verbosity verbosity = lvlInfo;

 void warnOnce(bool & haveWarned, const FormatOrString & fs)
 {
@@ -158,7 +158,7 @@ struct JSONLogger : Logger {

 void write(const nlohmann::json & json)
 {
-prevLogger.log(Verbosity::Error, "@nix " + json.dump());
+prevLogger.log(lvlError, "@nix " + json.dump());
 }

 void log(Verbosity lvl, const FormatOrString & fs) override
@@ -246,7 +246,7 @@ bool handleJSONLogMessage(const std::string & msg,

 if (action == "start") {
 auto type = (ActivityType) json["type"];
-if (trusted || type == ActivityType::Download)
+if (trusted || type == actFileTransfer)
 activities.emplace(std::piecewise_construct,
 std::forward_as_tuple(json["id"]),
 std::forward_as_tuple(*logger, (Verbosity) json["level"], type,
@@ -264,7 +264,7 @@ bool handleJSONLogMessage(const std::string & msg,

 else if (action == "setPhase") {
 std::string phase = json["phase"];
-act.result(ResultType::SetPhase, phase);
+act.result(resSetPhase, phase);
 }

 else if (action == "msg") {
@@ -5,32 +5,32 @@

 namespace nix {

-enum struct ActivityType {
-Unknown = 0,
-CopyPath = 100,
-Download = 101,
-Realise = 102,
-CopyPaths = 103,
-Builds = 104,
-Build = 105,
-OptimiseStore = 106,
-VerifyPaths = 107,
-Substitute = 108,
-QueryPathInfo = 109,
-PostBuildHook = 110,
-BuildWaiting = 111,
-};
+typedef enum {
+actUnknown = 0,
+actCopyPath = 100,
+actFileTransfer = 101,
+actRealise = 102,
+actCopyPaths = 103,
+actBuilds = 104,
+actBuild = 105,
+actOptimiseStore = 106,
+actVerifyPaths = 107,
+actSubstitute = 108,
+actQueryPathInfo = 109,
+actPostBuildHook = 110,
+actBuildWaiting = 111,
+} ActivityType;

-enum struct ResultType {
-FileLinked = 100,
-BuildLogLine = 101,
-UntrustedPath = 102,
-CorruptedPath = 103,
-SetPhase = 104,
-Progress = 105,
-SetExpected = 106,
-PostBuildLogLine = 107,
-};
+typedef enum {
+resFileLinked = 100,
+resBuildLogLine = 101,
+resUntrustedPath = 102,
+resCorruptedPath = 103,
+resSetPhase = 104,
+resProgress = 105,
+resSetExpected = 106,
+resPostBuildLogLine = 107,
+} ResultType;

 typedef uint64_t ActivityId;

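Aside (not part of the commit): reverting to unscoped enums also restores implicit conversion to integers, which is why casts such as << (uint64_t) verbosity and (uint64_t)type2 disappear in other hunks of this diff. A small standalone sketch with illustrative names, not Lix code:

// Sketch only: an unscoped enum passes straight into integer-typed sinks without a cast.
#include <cstdint>
#include <iostream>

typedef enum { actUnknownSketch = 0, actCopyPathSketch = 100 } ActivitySketch; // illustrative names

static void writeField(uint64_t v) { std::cout << v << "\n"; }

int main()
{
    ActivitySketch t = actCopyPathSketch;
    writeField(t);          // implicit conversion to uint64_t, no cast needed
    std::cout << t << "\n"; // likewise when streaming; a scoped enum would require static_cast
    return 0;
}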
@ -64,7 +64,7 @@ public:
|
||||||
|
|
||||||
void log(const FormatOrString & fs)
|
void log(const FormatOrString & fs)
|
||||||
{
|
{
|
||||||
log(Verbosity::Info, fs);
|
log(lvlInfo, fs);
|
||||||
}
|
}
|
||||||
|
|
||||||
virtual void logEI(const ErrorInfo &ei) = 0;
|
virtual void logEI(const ErrorInfo &ei) = 0;
|
||||||
|
@ -109,17 +109,17 @@ struct Activity
|
||||||
|
|
||||||
Activity(Logger & logger, ActivityType type,
|
Activity(Logger & logger, ActivityType type,
|
||||||
const Logger::Fields & fields = {}, ActivityId parent = getCurActivity())
|
const Logger::Fields & fields = {}, ActivityId parent = getCurActivity())
|
||||||
: Activity(logger, Verbosity::Error, type, "", fields, parent) { };
|
: Activity(logger, lvlError, type, "", fields, parent) { };
|
||||||
|
|
||||||
Activity(const Activity & act) = delete;
|
Activity(const Activity & act) = delete;
|
||||||
|
|
||||||
~Activity();
|
~Activity();
|
||||||
|
|
||||||
void progress(uint64_t done = 0, uint64_t expected = 0, uint64_t running = 0, uint64_t failed = 0) const
|
void progress(uint64_t done = 0, uint64_t expected = 0, uint64_t running = 0, uint64_t failed = 0) const
|
||||||
{ result(ResultType::Progress, done, expected, running, failed); }
|
{ result(resProgress, done, expected, running, failed); }
|
||||||
|
|
||||||
void setExpected(ActivityType type2, uint64_t expected) const
|
void setExpected(ActivityType type2, uint64_t expected) const
|
||||||
{ result(ResultType::SetExpected, (uint64_t)type2, expected); }
|
{ result(resSetExpected, type2, expected); }
|
||||||
|
|
||||||
template<typename... Args>
|
template<typename... Args>
|
||||||
void result(ResultType type, const Args & ... args) const
|
void result(ResultType type, const Args & ... args) const
|
||||||
|
@ -167,8 +167,8 @@ extern Verbosity verbosity; /* suppress msgs > this */
|
||||||
} \
|
} \
|
||||||
} while (0)
|
} while (0)
|
||||||
|
|
||||||
#define logError(errorInfo...) logErrorInfo(Verbosity::Error, errorInfo)
|
#define logError(errorInfo...) logErrorInfo(lvlError, errorInfo)
|
||||||
#define logWarning(errorInfo...) logErrorInfo(Verbosity::Warn, errorInfo)
|
#define logWarning(errorInfo...) logErrorInfo(lvlWarn, errorInfo)
|
||||||
|
|
||||||
/* Print a string message if the current log level is at least the specified
|
/* Print a string message if the current log level is at least the specified
|
||||||
level. Note that this has to be implemented as a macro to ensure that the
|
level. Note that this has to be implemented as a macro to ensure that the
|
||||||
|
@ -180,13 +180,13 @@ extern Verbosity verbosity; /* suppress msgs > this */
|
||||||
} \
|
} \
|
||||||
} while (0)
|
} while (0)
|
||||||
|
|
||||||
#define printError(args...) printMsg(Verbosity::Error, args)
|
#define printError(args...) printMsg(lvlError, args)
|
||||||
#define printInfo(args...) printMsg(Verbosity::Info, args)
|
#define printInfo(args...) printMsg(lvlInfo, args)
|
||||||
#define printTalkative(args...) printMsg(Verbosity::Talkative, args)
|
#define printTalkative(args...) printMsg(lvlTalkative, args)
|
||||||
#define debug(args...) printMsg(Verbosity::Debug, args)
|
#define debug(args...) printMsg(lvlDebug, args)
|
||||||
#define vomit(args...) printMsg(Verbosity::Vomit, args)
|
#define vomit(args...) printMsg(lvlVomit, args)
|
||||||
|
|
||||||
/* if verbosity >= Verbosity::Warn, print a message with a yellow 'warning:' prefix. */
|
/* if verbosity >= lvlWarn, print a message with a yellow 'warning:' prefix. */
|
||||||
template<typename... Args>
|
template<typename... Args>
|
||||||
inline void warn(const std::string & fs, const Args & ... args)
|
inline void warn(const std::string & fs, const Args & ... args)
|
||||||
{
|
{
|
||||||
|
|
|
@ -10,28 +10,28 @@ namespace nix {
|
||||||
TEST(hashString, testKnownMD5Hashes1) {
|
TEST(hashString, testKnownMD5Hashes1) {
|
||||||
// values taken from: https://tools.ietf.org/html/rfc1321
|
// values taken from: https://tools.ietf.org/html/rfc1321
|
||||||
auto s1 = "";
|
auto s1 = "";
|
||||||
auto hash = hashString(HashType::MD5, s1);
|
auto hash = hashString(HashType::htMD5, s1);
|
||||||
ASSERT_EQ(hash.to_string(Base::Base16, true), "md5:d41d8cd98f00b204e9800998ecf8427e");
|
ASSERT_EQ(hash.to_string(Base::Base16, true), "md5:d41d8cd98f00b204e9800998ecf8427e");
|
||||||
}
|
}
|
||||||
|
|
||||||
TEST(hashString, testKnownMD5Hashes2) {
|
TEST(hashString, testKnownMD5Hashes2) {
|
||||||
// values taken from: https://tools.ietf.org/html/rfc1321
|
// values taken from: https://tools.ietf.org/html/rfc1321
|
||||||
auto s2 = "abc";
|
auto s2 = "abc";
|
||||||
auto hash = hashString(HashType::MD5, s2);
|
auto hash = hashString(HashType::htMD5, s2);
|
||||||
ASSERT_EQ(hash.to_string(Base::Base16, true), "md5:900150983cd24fb0d6963f7d28e17f72");
|
ASSERT_EQ(hash.to_string(Base::Base16, true), "md5:900150983cd24fb0d6963f7d28e17f72");
|
||||||
}
|
}
|
||||||
|
|
||||||
TEST(hashString, testKnownSHA1Hashes1) {
|
TEST(hashString, testKnownSHA1Hashes1) {
|
||||||
// values taken from: https://tools.ietf.org/html/rfc3174
|
// values taken from: https://tools.ietf.org/html/rfc3174
|
||||||
auto s = "abc";
|
auto s = "abc";
|
||||||
auto hash = hashString(HashType::SHA1, s);
|
auto hash = hashString(HashType::htSHA1, s);
|
||||||
ASSERT_EQ(hash.to_string(Base::Base16, true),"sha1:a9993e364706816aba3e25717850c26c9cd0d89d");
|
ASSERT_EQ(hash.to_string(Base::Base16, true),"sha1:a9993e364706816aba3e25717850c26c9cd0d89d");
|
||||||
}
|
}
|
||||||
|
|
||||||
TEST(hashString, testKnownSHA1Hashes2) {
|
TEST(hashString, testKnownSHA1Hashes2) {
|
||||||
// values taken from: https://tools.ietf.org/html/rfc3174
|
// values taken from: https://tools.ietf.org/html/rfc3174
|
||||||
auto s = "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq";
|
auto s = "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq";
|
||||||
auto hash = hashString(HashType::SHA1, s);
|
auto hash = hashString(HashType::htSHA1, s);
|
||||||
ASSERT_EQ(hash.to_string(Base::Base16, true),"sha1:84983e441c3bd26ebaae4aa1f95129e5e54670f1");
|
ASSERT_EQ(hash.to_string(Base::Base16, true),"sha1:84983e441c3bd26ebaae4aa1f95129e5e54670f1");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -39,7 +39,7 @@ namespace nix {
|
||||||
// values taken from: https://tools.ietf.org/html/rfc4634
|
// values taken from: https://tools.ietf.org/html/rfc4634
|
||||||
auto s = "abc";
|
auto s = "abc";
|
||||||
|
|
||||||
auto hash = hashString(HashType::SHA256, s);
|
auto hash = hashString(HashType::htSHA256, s);
|
||||||
ASSERT_EQ(hash.to_string(Base::Base16, true),
|
ASSERT_EQ(hash.to_string(Base::Base16, true),
|
||||||
"sha256:ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad");
|
"sha256:ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad");
|
||||||
}
|
}
|
||||||
|
@ -47,7 +47,7 @@ namespace nix {
|
||||||
TEST(hashString, testKnownSHA256Hashes2) {
|
TEST(hashString, testKnownSHA256Hashes2) {
|
||||||
// values taken from: https://tools.ietf.org/html/rfc4634
|
// values taken from: https://tools.ietf.org/html/rfc4634
|
||||||
auto s = "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq";
|
auto s = "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq";
|
||||||
auto hash = hashString(HashType::SHA256, s);
|
auto hash = hashString(HashType::htSHA256, s);
|
||||||
ASSERT_EQ(hash.to_string(Base::Base16, true),
|
ASSERT_EQ(hash.to_string(Base::Base16, true),
|
||||||
"sha256:248d6a61d20638b8e5c026930c3e6039a33ce45964ff2167f6ecedd419db06c1");
|
"sha256:248d6a61d20638b8e5c026930c3e6039a33ce45964ff2167f6ecedd419db06c1");
|
||||||
}
|
}
|
||||||
|
@ -55,7 +55,7 @@ namespace nix {
|
||||||
TEST(hashString, testKnownSHA512Hashes1) {
|
TEST(hashString, testKnownSHA512Hashes1) {
|
||||||
// values taken from: https://tools.ietf.org/html/rfc4634
|
// values taken from: https://tools.ietf.org/html/rfc4634
|
||||||
auto s = "abc";
|
auto s = "abc";
|
||||||
auto hash = hashString(HashType::SHA512, s);
|
auto hash = hashString(HashType::htSHA512, s);
|
||||||
ASSERT_EQ(hash.to_string(Base::Base16, true),
|
ASSERT_EQ(hash.to_string(Base::Base16, true),
|
||||||
"sha512:ddaf35a193617abacc417349ae20413112e6fa4e89a9"
|
"sha512:ddaf35a193617abacc417349ae20413112e6fa4e89a9"
|
||||||
"7ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd"
|
"7ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd"
|
||||||
|
@ -66,7 +66,7 @@ namespace nix {
|
||||||
// values taken from: https://tools.ietf.org/html/rfc4634
|
// values taken from: https://tools.ietf.org/html/rfc4634
|
||||||
auto s = "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu";
|
auto s = "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu";
|
||||||
|
|
||||||
auto hash = hashString(HashType::SHA512, s);
|
auto hash = hashString(HashType::htSHA512, s);
|
||||||
ASSERT_EQ(hash.to_string(Base::Base16, true),
|
ASSERT_EQ(hash.to_string(Base::Base16, true),
|
||||||
"sha512:8e959b75dae313da8cf4f72814fc143f8f7779c6eb9f7fa1"
|
"sha512:8e959b75dae313da8cf4f72814fc143f8f7779c6eb9f7fa1"
|
||||||
"7299aeadb6889018501d289e4900f7e4331b99dec4b5433a"
|
"7299aeadb6889018501d289e4900f7e4331b99dec4b5433a"
|
||||||
|
|
|
@ -68,7 +68,7 @@ namespace nix {
|
||||||
TEST(logEI, loggingErrorOnInfoLevel) {
|
TEST(logEI, loggingErrorOnInfoLevel) {
|
||||||
testing::internal::CaptureStderr();
|
testing::internal::CaptureStderr();
|
||||||
|
|
||||||
logger->logEI({ .level = Verbosity::Info,
|
logger->logEI({ .level = lvlInfo,
|
||||||
.name = "Info name",
|
.name = "Info name",
|
||||||
.description = "Info description",
|
.description = "Info description",
|
||||||
});
|
});
|
||||||
|
@ -78,11 +78,11 @@ namespace nix {
|
||||||
}
|
}
|
||||||
|
|
||||||
TEST(logEI, loggingErrorOnTalkativeLevel) {
|
TEST(logEI, loggingErrorOnTalkativeLevel) {
|
||||||
verbosity = Verbosity::Talkative;
|
verbosity = lvlTalkative;
|
||||||
|
|
||||||
testing::internal::CaptureStderr();
|
testing::internal::CaptureStderr();
|
||||||
|
|
||||||
logger->logEI({ .level = Verbosity::Talkative,
|
logger->logEI({ .level = lvlTalkative,
|
||||||
.name = "Talkative name",
|
.name = "Talkative name",
|
||||||
.description = "Talkative description",
|
.description = "Talkative description",
|
||||||
});
|
});
|
||||||
|
@ -92,11 +92,11 @@ namespace nix {
|
||||||
}
|
}
|
||||||
|
|
||||||
TEST(logEI, loggingErrorOnChattyLevel) {
|
TEST(logEI, loggingErrorOnChattyLevel) {
|
||||||
verbosity = Verbosity::Chatty;
|
verbosity = lvlChatty;
|
||||||
|
|
||||||
testing::internal::CaptureStderr();
|
testing::internal::CaptureStderr();
|
||||||
|
|
||||||
logger->logEI({ .level = Verbosity::Chatty,
|
logger->logEI({ .level = lvlChatty,
|
||||||
.name = "Chatty name",
|
.name = "Chatty name",
|
||||||
.description = "Talkative description",
|
.description = "Talkative description",
|
||||||
});
|
});
|
||||||
|
@ -106,11 +106,11 @@ namespace nix {
|
||||||
}
|
}
|
||||||
|
|
||||||
TEST(logEI, loggingErrorOnDebugLevel) {
|
TEST(logEI, loggingErrorOnDebugLevel) {
|
||||||
verbosity = Verbosity::Debug;
|
verbosity = lvlDebug;
|
||||||
|
|
||||||
testing::internal::CaptureStderr();
|
testing::internal::CaptureStderr();
|
||||||
|
|
||||||
logger->logEI({ .level = Verbosity::Debug,
|
logger->logEI({ .level = lvlDebug,
|
||||||
.name = "Debug name",
|
.name = "Debug name",
|
||||||
.description = "Debug description",
|
.description = "Debug description",
|
||||||
});
|
});
|
||||||
|
@ -120,11 +120,11 @@ namespace nix {
|
||||||
}
|
}
|
||||||
|
|
||||||
TEST(logEI, loggingErrorOnVomitLevel) {
|
TEST(logEI, loggingErrorOnVomitLevel) {
|
||||||
verbosity = Verbosity::Vomit;
|
verbosity = lvlVomit;
|
||||||
|
|
||||||
testing::internal::CaptureStderr();
|
testing::internal::CaptureStderr();
|
||||||
|
|
||||||
logger->logEI({ .level = Verbosity::Vomit,
|
logger->logEI({ .level = lvlVomit,
|
||||||
.name = "Vomit name",
|
.name = "Vomit name",
|
||||||
.description = "Vomit description",
|
.description = "Vomit description",
|
||||||
});
|
});
|
||||||
|
|
|
@ -442,7 +442,7 @@ void deletePath(const Path & path)
|
||||||
|
|
||||||
void deletePath(const Path & path, unsigned long long & bytesFreed)
|
void deletePath(const Path & path, unsigned long long & bytesFreed)
|
||||||
{
|
{
|
||||||
//Activity act(*logger, Verbosity::Debug, format("recursively deleting path '%1%'") % path);
|
//Activity act(*logger, lvlDebug, format("recursively deleting path '%1%'") % path);
|
||||||
bytesFreed = 0;
|
bytesFreed = 0;
|
||||||
_deletePath(path, bytesFreed);
|
_deletePath(path, bytesFreed);
|
||||||
}
|
}
|
||||||
|
@ -1433,7 +1433,7 @@ string base64Decode(std::string_view s)
|
||||||
|
|
||||||
char digit = decode[(unsigned char) c];
|
char digit = decode[(unsigned char) c];
|
||||||
if (digit == -1)
|
if (digit == -1)
|
||||||
throw Error("invalid character in Base::Base64 string");
|
throw Error("invalid character in Base64 string");
|
||||||
|
|
||||||
bits += 6;
|
bits += 6;
|
||||||
d = d << 6 | digit;
|
d = d << 6 | digit;
|
||||||
|
|
|
@ -22,7 +22,7 @@ static int _main(int argc, char ** argv)
|
||||||
printVersion("nix-copy-closure");
|
printVersion("nix-copy-closure");
|
||||||
else if (*arg == "--gzip" || *arg == "--bzip2" || *arg == "--xz") {
|
else if (*arg == "--gzip" || *arg == "--bzip2" || *arg == "--xz") {
|
||||||
if (*arg != "--gzip")
|
if (*arg != "--gzip")
|
||||||
printMsg(Verbosity::Error, format("Warning: '%1%' is not implemented, falling back to gzip") % *arg);
|
printMsg(lvlError, format("Warning: '%1%' is not implemented, falling back to gzip") % *arg);
|
||||||
gzip = true;
|
gzip = true;
|
||||||
} else if (*arg == "--from")
|
} else if (*arg == "--from")
|
||||||
toMode = false;
|
toMode = false;
|
||||||
|
@ -31,7 +31,7 @@ static int _main(int argc, char ** argv)
|
||||||
else if (*arg == "--include-outputs")
|
else if (*arg == "--include-outputs")
|
||||||
includeOutputs = true;
|
includeOutputs = true;
|
||||||
else if (*arg == "--show-progress")
|
else if (*arg == "--show-progress")
|
||||||
printMsg(Verbosity::Error, "Warning: '--show-progress' is not implemented");
|
printMsg(lvlError, "Warning: '--show-progress' is not implemented");
|
||||||
else if (*arg == "--dry-run")
|
else if (*arg == "--dry-run")
|
||||||
dryRun = true;
|
dryRun = true;
|
||||||
else if (*arg == "--use-substitutes" || *arg == "-s")
|
else if (*arg == "--use-substitutes" || *arg == "-s")
|
||||||
|
|
|
@ -976,7 +976,7 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs)
|
||||||
try {
|
try {
|
||||||
paths.insert(globals.state->store->parseStorePath(i.queryOutPath()));
|
paths.insert(globals.state->store->parseStorePath(i.queryOutPath()));
|
||||||
} catch (AssertionError & e) {
|
} catch (AssertionError & e) {
|
||||||
printMsg(Verbosity::Talkative, "skipping derivation named '%s' which gives an assertion failure", i.queryName());
|
printMsg(lvlTalkative, "skipping derivation named '%s' which gives an assertion failure", i.queryName());
|
||||||
i.setFailed();
|
i.setFailed();
|
||||||
}
|
}
|
||||||
validPaths = globals.state->store->queryValidPaths(paths);
|
validPaths = globals.state->store->queryValidPaths(paths);
|
||||||
|
@ -1002,7 +1002,7 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs)
|
||||||
try {
|
try {
|
||||||
if (i.hasFailed()) continue;
|
if (i.hasFailed()) continue;
|
||||||
|
|
||||||
//Activity act(*logger, Verbosity::Debug, format("outputting query result '%1%'") % i.attrPath);
|
//Activity act(*logger, lvlDebug, format("outputting query result '%1%'") % i.attrPath);
|
||||||
|
|
||||||
if (globals.prebuiltOnly &&
|
if (globals.prebuiltOnly &&
|
||||||
!validPaths.count(globals.state->store->parseStorePath(i.queryOutPath())) &&
|
!validPaths.count(globals.state->store->parseStorePath(i.queryOutPath())) &&
|
||||||
|
@ -1183,7 +1183,7 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs)
|
||||||
cout.flush();
|
cout.flush();
|
||||||
|
|
||||||
} catch (AssertionError & e) {
|
} catch (AssertionError & e) {
|
||||||
printMsg(Verbosity::Talkative, "skipping derivation named '%1%' which gives an assertion failure", i.queryName());
|
printMsg(lvlTalkative, "skipping derivation named '%1%' which gives an assertion failure", i.queryName());
|
||||||
} catch (Error & e) {
|
} catch (Error & e) {
|
||||||
e.addPrefix(fmt("while querying the derivation named '%1%':\n", i.queryName()));
|
e.addPrefix(fmt("while querying the derivation named '%1%':\n", i.queryName()));
|
||||||
throw;
|
throw;
|
||||||
|
|
|
@ -51,7 +51,7 @@ string resolveMirrorUri(EvalState & state, string uri)
|
||||||
static int _main(int argc, char * * argv)
|
static int _main(int argc, char * * argv)
|
||||||
{
|
{
|
||||||
{
|
{
|
||||||
HashType ht = HashType::SHA256;
|
HashType ht = htSHA256;
|
||||||
std::vector<string> args;
|
std::vector<string> args;
|
||||||
bool printPath = getEnv("PRINT_PATH") == "1";
|
bool printPath = getEnv("PRINT_PATH") == "1";
|
||||||
bool fromExpr = false;
|
bool fromExpr = false;
|
||||||
|
|
|
@ -372,8 +372,8 @@ static void opQuery(Strings opFlags, Strings opArgs)
|
||||||
for (auto & j : maybeUseOutputs(store->followLinksToStorePath(i), useOutput, forceRealise)) {
|
for (auto & j : maybeUseOutputs(store->followLinksToStorePath(i), useOutput, forceRealise)) {
|
||||||
auto info = store->queryPathInfo(j);
|
auto info = store->queryPathInfo(j);
|
||||||
if (query == qHash) {
|
if (query == qHash) {
|
||||||
assert(info->narHash.type == HashType::SHA256);
|
assert(info->narHash.type == htSHA256);
|
||||||
cout << fmt("%s\n", info->narHash.to_string(Base::Base32, true));
|
cout << fmt("%s\n", info->narHash.to_string(Base32, true));
|
||||||
} else if (query == qSize)
|
} else if (query == qSize)
|
||||||
cout << fmt("%d\n", info->narSize);
|
cout << fmt("%d\n", info->narSize);
|
||||||
}
|
}
|
||||||
|
@ -502,7 +502,7 @@ static void registerValidity(bool reregister, bool hashGiven, bool canonicalise)
|
||||||
if (canonicalise)
|
if (canonicalise)
|
||||||
canonicalisePathMetaData(store->printStorePath(info->path), -1);
|
canonicalisePathMetaData(store->printStorePath(info->path), -1);
|
||||||
if (!hashGiven) {
|
if (!hashGiven) {
|
||||||
HashResult hash = hashPath(HashType::SHA256, store->printStorePath(info->path));
|
HashResult hash = hashPath(htSHA256, store->printStorePath(info->path));
|
||||||
info->narHash = hash.first;
|
info->narHash = hash.first;
|
||||||
info->narSize = hash.second;
|
info->narSize = hash.second;
|
||||||
}
|
}
|
||||||
|
@ -723,7 +723,7 @@ static void opVerifyPath(Strings opFlags, Strings opArgs)
|
||||||
|
|
||||||
for (auto & i : opArgs) {
|
for (auto & i : opArgs) {
|
||||||
auto path = store->followLinksToStorePath(i);
|
auto path = store->followLinksToStorePath(i);
|
||||||
printMsg(Verbosity::Talkative, "checking path '%s'...", store->printStorePath(path));
|
printMsg(lvlTalkative, "checking path '%s'...", store->printStorePath(path));
|
||||||
auto info = store->queryPathInfo(path);
|
auto info = store->queryPathInfo(path);
|
||||||
HashSink sink(*info->narHash.type);
|
HashSink sink(*info->narHash.type);
|
||||||
store->narFromPath(path, sink);
|
store->narFromPath(path, sink);
|
||||||
|
@ -734,8 +734,8 @@ static void opVerifyPath(Strings opFlags, Strings opArgs)
|
||||||
.hint = hintfmt(
|
.hint = hintfmt(
|
||||||
"path '%s' was modified! expected hash '%s', got '%s'",
|
"path '%s' was modified! expected hash '%s', got '%s'",
|
||||||
store->printStorePath(path),
|
store->printStorePath(path),
|
||||||
info->narHash.to_string(Base::Base32, true),
|
info->narHash.to_string(Base32, true),
|
||||||
current.first.to_string(Base::Base32, true))
|
current.first.to_string(Base32, true))
|
||||||
});
|
});
|
||||||
status = 1;
|
status = 1;
|
||||||
}
|
}
|
||||||
|
@ -789,7 +789,7 @@ static void opServe(Strings opFlags, Strings opArgs)
|
||||||
auto getBuildSettings = [&]() {
|
auto getBuildSettings = [&]() {
|
||||||
// FIXME: changing options here doesn't work if we're
|
// FIXME: changing options here doesn't work if we're
|
||||||
// building through the daemon.
|
// building through the daemon.
|
||||||
verbosity = Verbosity::Error;
|
verbosity = lvlError;
|
||||||
settings.keepLog = false;
|
settings.keepLog = false;
|
||||||
settings.useSubstitutes = false;
|
settings.useSubstitutes = false;
|
||||||
settings.maxSilentTime = readInt(in);
|
settings.maxSilentTime = readInt(in);
|
||||||
|
@ -864,7 +864,7 @@ static void opServe(Strings opFlags, Strings opArgs)
|
||||||
out << info->narSize // downloadSize
|
out << info->narSize // downloadSize
|
||||||
<< info->narSize;
|
<< info->narSize;
|
||||||
if (GET_PROTOCOL_MINOR(clientVersion) >= 4)
|
if (GET_PROTOCOL_MINOR(clientVersion) >= 4)
|
||||||
out << (info->narHash ? info->narHash.to_string(Base::Base32, true) : "") << info->ca << info->sigs;
|
out << (info->narHash ? info->narHash.to_string(Base32, true) : "") << info->ca << info->sigs;
|
||||||
} catch (InvalidPath &) {
|
} catch (InvalidPath &) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -948,7 +948,7 @@ static void opServe(Strings opFlags, Strings opArgs)
|
||||||
auto deriver = readString(in);
|
auto deriver = readString(in);
|
||||||
if (deriver != "")
|
if (deriver != "")
|
||||||
info.deriver = store->parseStorePath(deriver);
|
info.deriver = store->parseStorePath(deriver);
|
||||||
info.narHash = Hash(readString(in), HashType::SHA256);
|
info.narHash = Hash(readString(in), htSHA256);
|
||||||
info.references = readStorePaths<StorePathSet>(*store, in);
|
info.references = readStorePaths<StorePathSet>(*store, in);
|
||||||
in >> info.registrationTime >> info.narSize >> info.ultimate;
|
in >> info.registrationTime >> info.narSize >> info.ultimate;
|
||||||
info.sigs = readStrings<StringSet>(in);
|
info.sigs = readStrings<StringSet>(in);
|
||||||
|
|
|
@ -43,7 +43,7 @@ struct CmdAddToStore : MixDryRun, StoreCommand
|
||||||
StringSink sink;
|
StringSink sink;
|
||||||
dumpPath(path, sink);
|
dumpPath(path, sink);
|
||||||
|
|
||||||
auto narHash = hashString(HashType::SHA256, *sink.s);
|
auto narHash = hashString(htSHA256, *sink.s);
|
||||||
|
|
||||||
ValidPathInfo info(store->makeFixedOutputPath(FileIngestionMethod::Recursive, narHash, *namePart));
|
ValidPathInfo info(store->makeFixedOutputPath(FileIngestionMethod::Recursive, narHash, *namePart));
|
||||||
info.narHash = narHash;
|
info.narHash = narHash;
|
||||||
|
|
|
@ -10,18 +10,18 @@ using namespace nix;
|
||||||
struct CmdHash : Command
|
struct CmdHash : Command
|
||||||
{
|
{
|
||||||
FileIngestionMethod mode;
|
FileIngestionMethod mode;
|
||||||
Base base = Base::SRI;
|
Base base = SRI;
|
||||||
bool truncate = false;
|
bool truncate = false;
|
||||||
HashType ht = HashType::SHA256;
|
HashType ht = htSHA256;
|
||||||
std::vector<std::string> paths;
|
std::vector<std::string> paths;
|
||||||
std::optional<std::string> modulus;
|
std::optional<std::string> modulus;
|
||||||
|
|
||||||
CmdHash(FileIngestionMethod mode) : mode(mode)
|
CmdHash(FileIngestionMethod mode) : mode(mode)
|
||||||
{
|
{
|
||||||
mkFlag(0, "sri", "print hash in Base::SRI format", &base, Base::SRI);
|
mkFlag(0, "sri", "print hash in SRI format", &base, SRI);
|
||||||
mkFlag(0, "base64", "print hash in base-64", &base, Base::Base64);
|
mkFlag(0, "base64", "print hash in base-64", &base, Base64);
|
||||||
mkFlag(0, "base32", "print hash in base-32 (Nix-specific)", &base, Base::Base32);
|
mkFlag(0, "base32", "print hash in base-32 (Nix-specific)", &base, Base32);
|
||||||
mkFlag(0, "base16", "print hash in base-16", &base, Base::Base16);
|
mkFlag(0, "base16", "print hash in base-16", &base, Base16);
|
||||||
addFlag(Flag::mkHashTypeFlag("type", &ht));
|
addFlag(Flag::mkHashTypeFlag("type", &ht));
|
||||||
#if 0
|
#if 0
|
||||||
mkFlag()
|
mkFlag()
|
||||||
|
@ -68,7 +68,7 @@ struct CmdHash : Command
|
||||||
|
|
||||||
Hash h = hashSink->finish().first;
|
Hash h = hashSink->finish().first;
|
||||||
if (truncate && h.hashSize > 20) h = compressHash(h, 20);
|
if (truncate && h.hashSize > 20) h = compressHash(h, 20);
|
||||||
logger->stdout(h.to_string(base, base == Base::SRI));
|
logger->stdout(h.to_string(base, base == SRI));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
@@ -91,10 +91,10 @@ struct CmdToBase : Command
     std::string description() override
     {
         return fmt("convert a hash to %s representation",
-            base == Base::Base16 ? "base-16" :
-            base == Base::Base32 ? "base-32" :
-            base == Base::Base64 ? "base-64" :
-            "Base::SRI");
+            base == Base16 ? "base-16" :
+            base == Base32 ? "base-32" :
+            base == Base64 ? "base-64" :
+            "SRI");
     }

     Category category() override { return catUtility; }

@@ -102,19 +102,19 @@ struct CmdToBase : Command
     void run() override
     {
         for (auto s : args)
-            logger->stdout(Hash(s, ht).to_string(base, base == Base::SRI));
+            logger->stdout(Hash(s, ht).to_string(base, base == SRI));
     }
 };

-static RegisterCommand r3("to-base16", [](){ return make_ref<CmdToBase>(Base::Base16); });
-static RegisterCommand r4("to-base32", [](){ return make_ref<CmdToBase>(Base::Base32); });
-static RegisterCommand r5("to-base64", [](){ return make_ref<CmdToBase>(Base::Base64); });
-static RegisterCommand r6("to-sri", [](){ return make_ref<CmdToBase>(Base::SRI); });
+static RegisterCommand r3("to-base16", [](){ return make_ref<CmdToBase>(Base16); });
+static RegisterCommand r4("to-base32", [](){ return make_ref<CmdToBase>(Base32); });
+static RegisterCommand r5("to-base64", [](){ return make_ref<CmdToBase>(Base64); });
+static RegisterCommand r6("to-sri", [](){ return make_ref<CmdToBase>(SRI); });

 /* Legacy nix-hash command. */
 static int compatNixHash(int argc, char * * argv)
 {
-    HashType ht = HashType::MD5;
+    HashType ht = htMD5;
     bool flat = false;
     bool base32 = false;
     bool truncate = false;

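The `to-base16`/`to-base32`/`to-base64`/`to-sri` commands registered above are thin wrappers around parsing a hash and re-encoding it with the plain `Base` enumerators. A small, hypothetical standalone sketch of that conversion, assuming the `Hash(std::string, HashType)` constructor and the `Hash::to_string(Base, bool)` signature used throughout this diff:

    #include <string>
    #include "hash.hh"   // assumed libutil header providing Hash, HashType and Base

    using namespace nix;

    // Re-encode a base-16 SHA-256 hash in Nix's base-32 alphabet, roughly what
    // `nix to-base32 --type sha256 <hash>` does after this revert.
    std::string toBase32(const std::string & hexHash)
    {
        Hash h(hexHash, htSHA256);          // parse the textual hash
        return h.to_string(Base32, false);  // false: omit the "sha256:" type prefix
    }
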
@@ -145,14 +145,14 @@ static int compatNixHash(int argc, char * * argv)
     if (op == opHash) {
         CmdHash cmd(flat ? FileIngestionMethod::Flat : FileIngestionMethod::Recursive);
         cmd.ht = ht;
-        cmd.base = base32 ? Base::Base32 : Base::Base16;
+        cmd.base = base32 ? Base32 : Base16;
         cmd.truncate = truncate;
         cmd.paths = ss;
         cmd.run();
     }

     else {
-        CmdToBase cmd(op == opTo32 ? Base::Base32 : Base::Base16);
+        CmdToBase cmd(op == opTo32 ? Base32 : Base16);
         cmd.args = ss;
         cmd.ht = ht;
         cmd.run();

@@ -279,7 +279,7 @@ Buildables build(ref<Store> store, RealiseMode mode,
     }

     if (mode == DryRun)
-        printMissing(store, pathsToBuild, Verbosity::Error);
+        printMissing(store, pathsToBuild, lvlError);
     else if (mode == Build)
         store->buildPaths(pathsToBuild);

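`printMissing` takes its threshold as a plain `Verbosity` value again (`lvlError` here, `lvlWarn`, `lvlInfo` and `lvlTalkative` elsewhere in this commit). A sketch of the unscoped enum these constants presumably come from in libutil's logging header; only the four levels named in this diff are attested, the remaining enumerators and exact ordering are assumptions:

    // Sketch of the plain Verbosity levels restored by this revert; lower is more severe.
    typedef enum {
        lvlError = 0,
        lvlWarn,
        lvlNotice,      // assumed
        lvlInfo,
        lvlTalkative,
        lvlChatty,      // assumed
        lvlDebug,       // assumed
        lvlVomit        // assumed
    } Verbosity;
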
@@ -163,7 +163,7 @@ void mainWrapped(int argc, char * * argv)
         if (legacy) return legacy(argc, argv);
     }

-    verbosity = Verbosity::Warn;
+    verbosity = lvlWarn;
     settings.verboseBuild = false;

     setLogFormat("bar");

@@ -72,7 +72,7 @@ struct CmdMakeContentAddressable : StorePathsCommand, MixJSON

     *sink.s = rewriteStrings(*sink.s, rewrites);

-    HashModuloSink hashModuloSink(HashType::SHA256, oldHashPart);
+    HashModuloSink hashModuloSink(htSHA256, oldHashPart);
     hashModuloSink((unsigned char *) sink.s->data(), sink.s->size());

     auto narHash = hashModuloSink.finish().first;

@@ -91,7 +91,7 @@ struct CmdPathInfo : StorePathsCommand, MixJSON
         store->pathInfoToJSON(jsonRoot,
             // FIXME: preserve order?
             StorePathSet(storePaths.begin(), storePaths.end()),
-            true, showClosureSize, Base::SRI, AllowInvalid);
+            true, showClosureSize, SRI, AllowInvalid);
     }

     else {

@@ -211,12 +211,12 @@ void NixRepl::mainLoop(const std::vector<std::string> & files)
             // input without clearing the input so far.
             continue;
         } else {
-            printMsg(Verbosity::Error, error + "%1%%2%", (settings.showTrace ? e.prefix() : ""), e.msg());
+            printMsg(lvlError, error + "%1%%2%", (settings.showTrace ? e.prefix() : ""), e.msg());
         }
     } catch (Error & e) {
-        printMsg(Verbosity::Error, error + "%1%%2%", (settings.showTrace ? e.prefix() : ""), e.msg());
+        printMsg(lvlError, error + "%1%%2%", (settings.showTrace ? e.prefix() : ""), e.msg());
     } catch (Interrupted & e) {
-        printMsg(Verbosity::Error, error + "%1%%2%", (settings.showTrace ? e.prefix() : ""), e.msg());
+        printMsg(lvlError, error + "%1%%2%", (settings.showTrace ? e.prefix() : ""), e.msg());
     }

     // We handled the current input fully, so we should clear it

@@ -47,7 +47,7 @@ struct CmdCopySigs : StorePathsCommand
     //logger->setExpected(doneLabel, storePaths.size());

     auto doPath = [&](const Path & storePathS) {
-        //Activity act(*logger, Verbosity::Info, format("getting signatures for '%s'") % storePath);
+        //Activity act(*logger, lvlInfo, format("getting signatures for '%s'") % storePath);

         checkInterrupt();

@@ -76,12 +76,12 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand
     }

     {
-        Activity act(*logger, Verbosity::Info, ActivityType::Unknown, fmt("downloading '%s'...", store->printStorePath(storePath)));
+        Activity act(*logger, lvlInfo, actUnknown, fmt("downloading '%s'...", store->printStorePath(storePath)));
         store->ensurePath(storePath);
     }

     {
-        Activity act(*logger, Verbosity::Info, ActivityType::Unknown, fmt("verifying that '%s' works...", store->printStorePath(storePath)));
+        Activity act(*logger, lvlInfo, actUnknown, fmt("verifying that '%s' works...", store->printStorePath(storePath)));
         auto program = store->printStorePath(storePath) + "/bin/nix-env";
         auto s = runProgram(program, false, {"--version"});
         if (s.find("Nix") == std::string::npos)

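Both `Activity` constructions above revert from `Verbosity::Info`/`ActivityType::Unknown` to the plain `lvlInfo`/`actUnknown` constants. For reference, a hedged sketch of the unscoped activity-type enum the reverted calls presumably rely on; only `actUnknown` and `actVerifyPaths` (used further down) are attested in this diff, and the numeric values are assumptions:

    // Sketch of the plain ActivityType constants restored by the revert. The
    // Activity object is an RAII handle: the activity is reported to the logger
    // while it is alive and ends when it goes out of scope.
    typedef enum {
        actUnknown = 0,
        actVerifyPaths = 107,   // value assumed
        // further activity kinds elided
    } ActivityType;
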
@@ -91,7 +91,7 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand
     stopProgressBar();

     {
-        Activity act(*logger, Verbosity::Info, ActivityType::Unknown,
+        Activity act(*logger, lvlInfo, actUnknown,
             fmt("installing '%s' into profile '%s'...", store->printStorePath(storePath), profileDir));
         runProgram(settings.nixBinDir + "/nix-env", false,
             {"--profile", profileDir, "-i", store->printStorePath(storePath), "--no-sandbox"});

@@ -142,7 +142,7 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand
     /* Return the store path of the latest stable Nix. */
     StorePath getLatestNix(ref<Store> store)
     {
-        Activity act(*logger, Verbosity::Info, ActivityType::Unknown, "querying latest Nix version");
+        Activity act(*logger, lvlInfo, actUnknown, "querying latest Nix version");

         // FIXME: use nixos.org?
         auto req = FileTransferRequest(storePathsUrl);

@@ -59,7 +59,7 @@ struct CmdVerify : StorePathsCommand

     auto publicKeys = getDefaultPublicKeys();

-    Activity act(*logger, ActivityType::VerifyPaths);
+    Activity act(*logger, actVerifyPaths);

     std::atomic<size_t> done{0};
     std::atomic<size_t> untrusted{0};

@@ -77,7 +77,7 @@ struct CmdVerify : StorePathsCommand
     try {
         checkInterrupt();

-        Activity act2(*logger, Verbosity::Info, ActivityType::Unknown, fmt("checking '%s'", storePath));
+        Activity act2(*logger, lvlInfo, actUnknown, fmt("checking '%s'", storePath));

         MaintainCount<std::atomic<size_t>> mcActive(active);
         update();

@@ -98,14 +98,14 @@ struct CmdVerify : StorePathsCommand

     if (hash.first != info->narHash) {
         corrupted++;
-        act2.result(ResultType::CorruptedPath, store->printStorePath(info->path));
+        act2.result(resCorruptedPath, store->printStorePath(info->path));
         logError({
             .name = "Hash error - path modified",
             .hint = hintfmt(
                 "path '%s' was modified! expected hash '%s', got '%s'",
                 store->printStorePath(info->path),
-                info->narHash.to_string(Base::Base32, true),
-                hash.first.to_string(Base::Base32, true))
+                info->narHash.to_string(Base32, true),
+                hash.first.to_string(Base32, true))
         });
     }
 }

@@ -153,12 +153,13 @@ struct CmdVerify : StorePathsCommand

     if (!good) {
         untrusted++;
-        act2.result(ResultType::UntrustedPath, store->printStorePath(info->path));
+        act2.result(resUntrustedPath, store->printStorePath(info->path));
         logError({
             .name = "Untrusted path",
             .hint = hintfmt("path '%s' is untrusted",
                 store->printStorePath(info->path))
         });

     }

 }

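The two `act2.result(...)` calls in the hunks above likewise return to the plain `res*` constants. A hedged sketch of the result-type enum they presumably refer to; only `resCorruptedPath` and `resUntrustedPath` are attested in this commit, and the numeric values are assumptions:

    // Sketch of the plain result-type constants restored by the revert; they are
    // emitted via Activity::result() so the progress bar can react to per-path outcomes.
    typedef enum {
        resUntrustedPath = 102,   // value assumed
        resCorruptedPath = 103,   // value assumed
        // further result kinds elided
    } ResultType;
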