From b2748c6e99239ff6803ba0da76c362790c8be192 Mon Sep 17 00:00:00 2001 From: Lucas Franceschino Date: Mon, 25 May 2020 19:07:38 +0200 Subject: [PATCH 01/57] Make `functionArgs` primitive accept primops --- src/libexpr/primops.cc | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 0a4236da4..5875457ac 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -1346,6 +1346,10 @@ static void prim_catAttrs(EvalState & state, const Pos & pos, Value * * args, Va static void prim_functionArgs(EvalState & state, const Pos & pos, Value * * args, Value & v) { state.forceValue(*args[0], pos); + if (args[0]->type == tPrimOpApp || args[0]->type == tPrimOp) { + state.mkAttrs(v, 0); + return; + } if (args[0]->type != tLambda) throw TypeError(format("'functionArgs' requires a function, at %1%") % pos); From 2f2ae993dc6d35e9c0e66e893e5d615116d42917 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 4 Aug 2020 19:02:05 +0000 Subject: [PATCH 02/57] WIP systematize more of the worker protocol This refactor should *not* change the wire protocol. --- src/libstore/build.cc | 4 +- src/libstore/daemon.cc | 34 ++++----- src/libstore/derivations.cc | 4 +- src/libstore/export-import.cc | 4 +- src/libstore/legacy-ssh-store.cc | 14 ++-- src/libstore/remote-store.cc | 116 +++++++++++++------------------ src/libstore/store-api.cc | 16 ++--- src/libstore/worker-protocol.hh | 59 ++++++++++------ src/nix-store/nix-store.cc | 16 ++--- 9 files changed, 132 insertions(+), 135 deletions(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 1f40dc42a..dc636c33f 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -1864,11 +1864,11 @@ HookReply DerivationGoal::tryBuildHook() /* Tell the hook all the inputs that have to be copied to the remote system. */ - writeStorePaths(worker.store, hook->sink, inputPaths); + write(worker.store, hook->sink, inputPaths); /* Tell the hooks the missing outputs that have to be copied back from the remote system. 
*/ - writeStorePaths(worker.store, hook->sink, missingPaths); + write(worker.store, hook->sink, missingPaths); hook->sink = FdSink(); hook->toHook.writeSide = -1; diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc index 9f7be6e1a..f92d384e5 100644 --- a/src/libstore/daemon.cc +++ b/src/libstore/daemon.cc @@ -256,11 +256,11 @@ static void performOp(TunnelLogger * logger, ref store, } case wopQueryValidPaths: { - auto paths = readStorePaths(*store, from); + auto paths = read(*store, from, Proxy {}); logger->startWork(); auto res = store->queryValidPaths(paths); logger->stopWork(); - writeStorePaths(*store, to, res); + write(*store, to, res); break; } @@ -276,11 +276,11 @@ static void performOp(TunnelLogger * logger, ref store, } case wopQuerySubstitutablePaths: { - auto paths = readStorePaths(*store, from); + auto paths = read(*store, from, Proxy {}); logger->startWork(); auto res = store->querySubstitutablePaths(paths); logger->stopWork(); - writeStorePaths(*store, to, res); + write(*store, to, res); break; } @@ -309,7 +309,7 @@ static void performOp(TunnelLogger * logger, ref store, paths = store->queryValidDerivers(path); else paths = store->queryDerivationOutputs(path); logger->stopWork(); - writeStorePaths(*store, to, paths); + write(*store, to, paths); break; } @@ -397,7 +397,7 @@ static void performOp(TunnelLogger * logger, ref store, case wopAddTextToStore: { string suffix = readString(from); string s = readString(from); - auto refs = readStorePaths(*store, from); + auto refs = read(*store, from, Proxy {}); logger->startWork(); auto path = store->addTextToStore(suffix, s, refs, NoRepair); logger->stopWork(); @@ -518,7 +518,7 @@ static void performOp(TunnelLogger * logger, ref store, case wopCollectGarbage: { GCOptions options; options.action = (GCOptions::GCAction) readInt(from); - options.pathsToDelete = readStorePaths(*store, from); + options.pathsToDelete = read(*store, from, Proxy {}); from >> options.ignoreLiveness >> options.maxFreed; // obsolete fields readInt(from); @@ -587,7 +587,7 @@ static void performOp(TunnelLogger * logger, ref store, else { to << 1 << (i->second.deriver ? store->printStorePath(*i->second.deriver) : ""); - writeStorePaths(*store, to, i->second.references); + write(*store, to, i->second.references); to << i->second.downloadSize << i->second.narSize; } @@ -598,11 +598,11 @@ static void performOp(TunnelLogger * logger, ref store, SubstitutablePathInfos infos; StorePathCAMap pathsMap = {}; if (GET_PROTOCOL_MINOR(clientVersion) < 22) { - auto paths = readStorePaths(*store, from); + auto paths = read(*store, from, Proxy {}); for (auto & path : paths) pathsMap.emplace(path, std::nullopt); } else - pathsMap = readStorePathCAMap(*store, from); + pathsMap = read(*store, from, Proxy {}); logger->startWork(); store->querySubstitutablePathInfos(pathsMap, infos); logger->stopWork(); @@ -610,7 +610,7 @@ static void performOp(TunnelLogger * logger, ref store, for (auto & i : infos) { to << store->printStorePath(i.first) << (i.second.deriver ? 
store->printStorePath(*i.second.deriver) : ""); - writeStorePaths(*store, to, i.second.references); + write(*store, to, i.second.references); to << i.second.downloadSize << i.second.narSize; } break; @@ -620,7 +620,7 @@ static void performOp(TunnelLogger * logger, ref store, logger->startWork(); auto paths = store->queryAllValidPaths(); logger->stopWork(); - writeStorePaths(*store, to, paths); + write(*store, to, paths); break; } @@ -639,7 +639,7 @@ static void performOp(TunnelLogger * logger, ref store, to << 1; to << (info->deriver ? store->printStorePath(*info->deriver) : "") << info->narHash->to_string(Base16, false); - writeStorePaths(*store, to, info->references); + write(*store, to, info->references); to << info->registrationTime << info->narSize; if (GET_PROTOCOL_MINOR(clientVersion) >= 16) { to << info->ultimate @@ -699,7 +699,7 @@ static void performOp(TunnelLogger * logger, ref store, if (deriver != "") info.deriver = store->parseStorePath(deriver); info.narHash = Hash(readString(from), htSHA256); - info.references = readStorePaths(*store, from); + info.references = read(*store, from, Proxy {}); from >> info.registrationTime >> info.narSize >> info.ultimate; info.sigs = readStrings(from); info.ca = parseContentAddressOpt(readString(from)); @@ -799,9 +799,9 @@ static void performOp(TunnelLogger * logger, ref store, uint64_t downloadSize, narSize; store->queryMissing(targets, willBuild, willSubstitute, unknown, downloadSize, narSize); logger->stopWork(); - writeStorePaths(*store, to, willBuild); - writeStorePaths(*store, to, willSubstitute); - writeStorePaths(*store, to, unknown); + write(*store, to, willBuild); + write(*store, to, willSubstitute); + write(*store, to, unknown); to << downloadSize << narSize; break; } diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc index 7d0a5abeb..5972b5ad2 100644 --- a/src/libstore/derivations.cc +++ b/src/libstore/derivations.cc @@ -477,7 +477,7 @@ Source & readDerivation(Source & in, const Store & store, BasicDerivation & drv, drv.outputs.emplace(std::move(name), std::move(output)); } - drv.inputSrcs = readStorePaths(store, in); + drv.inputSrcs = read(store, in, Proxy {}); in >> drv.platform >> drv.builder; drv.args = readStrings(in); @@ -505,7 +505,7 @@ void writeDerivation(Sink & out, const Store & store, const BasicDerivation & dr out << "" << ""; } } - writeStorePaths(store, out, drv.inputSrcs); + write(store, out, drv.inputSrcs); out << drv.platform << drv.builder << drv.args; out << drv.env.size(); for (auto & i : drv.env) diff --git a/src/libstore/export-import.cc b/src/libstore/export-import.cc index a0fc22264..c20c56156 100644 --- a/src/libstore/export-import.cc +++ b/src/libstore/export-import.cc @@ -45,7 +45,7 @@ void Store::exportPath(const StorePath & path, Sink & sink) teeSink << exportMagic << printStorePath(path); - writeStorePaths(*this, teeSink, info->references); + write(*this, teeSink, info->references); teeSink << (info->deriver ? 
printStorePath(*info->deriver) : "") << 0; @@ -73,7 +73,7 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs) //Activity act(*logger, lvlInfo, format("importing path '%s'") % info.path); - info.references = readStorePaths(*this, source); + info.references = read(*this, source, Proxy {}); auto deriver = readString(source); if (deriver != "") diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index 5d7566121..412e1950b 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -107,7 +107,7 @@ struct LegacySSHStore : public Store auto deriver = readString(conn->from); if (deriver != "") info->deriver = parseStorePath(deriver); - info->references = readStorePaths(*this, conn->from); + info->references = read(*this, conn->from, Proxy {}); readLongLong(conn->from); // download size info->narSize = readLongLong(conn->from); @@ -139,7 +139,7 @@ struct LegacySSHStore : public Store << printStorePath(info.path) << (info.deriver ? printStorePath(*info.deriver) : "") << info.narHash->to_string(Base16, false); - writeStorePaths(*this, conn->to, info.references); + write(*this, conn->to, info.references); conn->to << info.registrationTime << info.narSize @@ -168,7 +168,7 @@ struct LegacySSHStore : public Store conn->to << exportMagic << printStorePath(info.path); - writeStorePaths(*this, conn->to, info.references); + write(*this, conn->to, info.references); conn->to << (info.deriver ? printStorePath(*info.deriver) : "") << 0 @@ -251,10 +251,10 @@ struct LegacySSHStore : public Store conn->to << cmdQueryClosure << includeOutputs; - writeStorePaths(*this, conn->to, paths); + write(*this, conn->to, paths); conn->to.flush(); - for (auto & i : readStorePaths(*this, conn->from)) + for (auto & i : read(*this, conn->from, Proxy {})) out.insert(i); } @@ -267,10 +267,10 @@ struct LegacySSHStore : public Store << cmdQueryValidPaths << false // lock << maybeSubstitute; - writeStorePaths(*this, conn->to, paths); + write(*this, conn->to, paths); conn->to.flush(); - return readStorePaths(*this, conn->from); + return read(*this, conn->from, Proxy {}); } void connect() override diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index 1200ab200..de50b3e2e 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -23,66 +23,44 @@ namespace nix { -template<> StorePathSet readStorePaths(const Store & store, Source & from) -{ - StorePathSet paths; - for (auto & i : readStrings(from)) - paths.insert(store.parseStorePath(i)); - return paths; -} - - -void writeStorePaths(const Store & store, Sink & out, const StorePathSet & paths) +void write(const Store & store, Sink & out, const StorePathSet & paths) { out << paths.size(); for (auto & i : paths) out << store.printStorePath(i); } + +std::string read(const Store & store, Source & from, Proxy _) +{ + return readString(from); +} + +void write(const Store & store, Sink & out, const std::string & str) +{ + out << str; +} + + StorePath read(const Store & store, Source & from, Proxy _) { - auto path = readString(from); - return store.parseStorePath(path); + return store.parseStorePath(readString(from)); } -StorePathCAMap readStorePathCAMap(const Store & store, Source & from) -{ - StorePathCAMap paths; - auto count = readNum(from); - while (count--) - paths.insert_or_assign(store.parseStorePath(readString(from)), parseContentAddressOpt(readString(from))); - return paths; -} - -void writeStorePathCAMap(const Store & store, Sink & out, const StorePathCAMap & 
paths) -{ - out << paths.size(); - for (auto & i : paths) { - out << store.printStorePath(i.first); - out << renderContentAddress(i.second); - } -} - -std::map readOutputPathMap(const Store & store, Source & from) -{ - std::map pathMap; - auto rawInput = readStrings(from); - if (rawInput.size() % 2) - throw Error("got an odd number of elements from the daemon when trying to read a output path map"); - auto curInput = rawInput.begin(); - while (curInput != rawInput.end()) { - auto thisKey = *curInput++; - auto thisValue = *curInput++; - pathMap.emplace(thisKey, store.parseStorePath(thisValue)); - } - return pathMap; -} - - void write(const Store & store, Sink & out, const StorePath & storePath) { - auto path = store.printStorePath(storePath); - out << path; + out << store.printStorePath(storePath); +} + + +ContentAddress read(const Store & store, Source & from, Proxy _) +{ + return parseContentAddress(readString(from)); +} + +void write(const Store & store, Sink & out, const ContentAddress & ca) +{ + out << renderContentAddress(ca); } @@ -319,9 +297,9 @@ StorePathSet RemoteStore::queryValidPaths(const StorePathSet & paths, Substitute return res; } else { conn->to << wopQueryValidPaths; - writeStorePaths(*this, conn->to, paths); + write(*this, conn->to, paths); conn.processStderr(); - return readStorePaths(*this, conn->from); + return read(*this, conn->from, Proxy {}); } } @@ -331,7 +309,7 @@ StorePathSet RemoteStore::queryAllValidPaths() auto conn(getConnection()); conn->to << wopQueryAllValidPaths; conn.processStderr(); - return readStorePaths(*this, conn->from); + return read(*this, conn->from, Proxy {}); } @@ -348,9 +326,9 @@ StorePathSet RemoteStore::querySubstitutablePaths(const StorePathSet & paths) return res; } else { conn->to << wopQuerySubstitutablePaths; - writeStorePaths(*this, conn->to, paths); + write(*this, conn->to, paths); conn.processStderr(); - return readStorePaths(*this, conn->from); + return read(*this, conn->from, Proxy {}); } } @@ -372,7 +350,7 @@ void RemoteStore::querySubstitutablePathInfos(const StorePathCAMap & pathsMap, S auto deriver = readString(conn->from); if (deriver != "") info.deriver = parseStorePath(deriver); - info.references = readStorePaths(*this, conn->from); + info.references = read(*this, conn->from, Proxy {}); info.downloadSize = readLongLong(conn->from); info.narSize = readLongLong(conn->from); infos.insert_or_assign(i.first, std::move(info)); @@ -385,9 +363,9 @@ void RemoteStore::querySubstitutablePathInfos(const StorePathCAMap & pathsMap, S StorePathSet paths; for (auto & path : pathsMap) paths.insert(path.first); - writeStorePaths(*this, conn->to, paths); + write(*this, conn->to, paths); } else - writeStorePathCAMap(*this, conn->to, pathsMap); + write(*this, conn->to, pathsMap); conn.processStderr(); size_t count = readNum(conn->from); for (size_t n = 0; n < count; n++) { @@ -395,7 +373,7 @@ void RemoteStore::querySubstitutablePathInfos(const StorePathCAMap & pathsMap, S auto deriver = readString(conn->from); if (deriver != "") info.deriver = parseStorePath(deriver); - info.references = readStorePaths(*this, conn->from); + info.references = read(*this, conn->from, Proxy {}); info.downloadSize = readLongLong(conn->from); info.narSize = readLongLong(conn->from); } @@ -428,7 +406,7 @@ void RemoteStore::queryPathInfoUncached(const StorePath & path, auto deriver = readString(conn->from); if (deriver != "") info->deriver = parseStorePath(deriver); info->narHash = Hash(readString(conn->from), htSHA256); - info->references = readStorePaths(*this, 
conn->from); + info->references = read(*this, conn->from, Proxy {}); conn->from >> info->registrationTime >> info->narSize; if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 16) { conn->from >> info->ultimate; @@ -447,7 +425,7 @@ void RemoteStore::queryReferrers(const StorePath & path, auto conn(getConnection()); conn->to << wopQueryReferrers << printStorePath(path); conn.processStderr(); - for (auto & i : readStorePaths(*this, conn->from)) + for (auto & i : read(*this, conn->from, Proxy {})) referrers.insert(i); } @@ -457,7 +435,7 @@ StorePathSet RemoteStore::queryValidDerivers(const StorePath & path) auto conn(getConnection()); conn->to << wopQueryValidDerivers << printStorePath(path); conn.processStderr(); - return readStorePaths(*this, conn->from); + return read(*this, conn->from, Proxy {}); } @@ -469,7 +447,7 @@ StorePathSet RemoteStore::queryDerivationOutputs(const StorePath & path) } conn->to << wopQueryDerivationOutputs << printStorePath(path); conn.processStderr(); - return readStorePaths(*this, conn->from); + return read(*this, conn->from, Proxy {}); } @@ -508,7 +486,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source, sink << exportMagic << printStorePath(info.path); - writeStorePaths(*this, sink, info.references); + write(*this, sink, info.references); sink << (info.deriver ? printStorePath(*info.deriver) : "") << 0 // == no legacy signature @@ -518,7 +496,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source, conn.processStderr(0, source2.get()); - auto importedPaths = readStorePaths(*this, conn->from); + auto importedPaths = read(*this, conn->from, Proxy {}); assert(importedPaths.size() <= 1); } @@ -527,7 +505,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source, << printStorePath(info.path) << (info.deriver ? 
printStorePath(*info.deriver) : "") << info.narHash->to_string(Base16, false); - writeStorePaths(*this, conn->to, info.references); + write(*this, conn->to, info.references); conn->to << info.registrationTime << info.narSize << info.ultimate << info.sigs << renderContentAddress(info.ca) << repair << !checkSigs; @@ -660,7 +638,7 @@ StorePath RemoteStore::addTextToStore(const string & name, const string & s, auto conn(getConnection()); conn->to << wopAddTextToStore << name << s; - writeStorePaths(*this, conn->to, references); + write(*this, conn->to, references); conn.processStderr(); return parseStorePath(readString(conn->from)); @@ -762,7 +740,7 @@ void RemoteStore::collectGarbage(const GCOptions & options, GCResults & results) conn->to << wopCollectGarbage << options.action; - writeStorePaths(*this, conn->to, options.pathsToDelete); + write(*this, conn->to, options.pathsToDelete); conn->to << options.ignoreLiveness << options.maxFreed /* removed options */ @@ -824,9 +802,9 @@ void RemoteStore::queryMissing(const std::vector & targets ss.push_back(p.to_string(*this)); conn->to << ss; conn.processStderr(); - willBuild = readStorePaths(*this, conn->from); - willSubstitute = readStorePaths(*this, conn->from); - unknown = readStorePaths(*this, conn->from); + willBuild = read(*this, conn->from, Proxy {}); + willSubstitute = read(*this, conn->from, Proxy {}); + unknown = read(*this, conn->from, Proxy {}); conn->from >> downloadSize >> narSize; return; } diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 4c68709ef..e894d2b85 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -362,14 +362,14 @@ bool Store::PathInfoCacheValue::isKnownNow() } OutputPathMap Store::queryDerivationOutputMapAssumeTotal(const StorePath & path) { - auto resp = queryDerivationOutputMap(path); - OutputPathMap result; - for (auto & [outName, optOutPath] : resp) { - if (!optOutPath) - throw Error("output '%s' has no store path mapped to it", outName); - result.insert_or_assign(outName, *optOutPath); - } - return result; + auto resp = queryDerivationOutputMap(path); + OutputPathMap result; + for (auto & [outName, optOutPath] : resp) { + if (!optOutPath) + throw Error("output '%s' has no store path mapped to it", outName); + result.insert_or_assign(outName, *optOutPath); + } + return result; } StorePathSet Store::queryDerivationOutputs(const StorePath & path) diff --git a/src/libstore/worker-protocol.hh b/src/libstore/worker-protocol.hh index ad5854c85..08eec9b48 100644 --- a/src/libstore/worker-protocol.hh +++ b/src/libstore/worker-protocol.hh @@ -66,33 +66,50 @@ typedef enum { class Store; struct Source; -template T readStorePaths(const Store & store, Source & from); - -void writeStorePaths(const Store & store, Sink & out, const StorePathSet & paths); - /* To guide overloading */ template struct Proxy {}; template -std::map read(const Store & store, Source & from, Proxy> _) +std::set read(const Store & store, Source & from, Proxy> _) { - std::map resMap; - auto size = (size_t)readInt(from); + std::set resSet; + auto size = readNum(from); while (size--) { - auto thisKey = readString(from); - resMap.insert_or_assign(std::move(thisKey), read(store, from, Proxy {})); + resSet.insert(read(store, from, Proxy {})); + } + return resSet; +} + +template +void write(const Store & store, Sink & out, const std::set & resSet) +{ + out << resSet.size(); + for (auto & key : resSet) { + write(store, out, key); + } +} + +template +std::map read(const Store & store, Source & from, Proxy> _) +{ + 
std::map resMap; + auto size = readNum(from); + while (size--) { + resMap.insert_or_assign( + read(store, from, Proxy {}), + read(store, from, Proxy {})); } return resMap; } -template -void write(const Store & store, Sink & out, const std::map & resMap) +template +void write(const Store & store, Sink & out, const std::map & resMap) { out << resMap.size(); - for (auto & i : resMap) { - out << i.first; - write(store, out, i.second); + for (auto & [key, value] : resMap) { + write(store, out, key); + write(store, out, value); } } @@ -106,26 +123,28 @@ std::optional read(const Store & store, Source & from, Proxy case 1: return read(store, from, Proxy {}); default: - throw Error("got an invalid tag bit for std::optional: %#04x", tag); + throw Error("got an invalid tag bit for std::optional: %#04x", (size_t)tag); } } template void write(const Store & store, Sink & out, const std::optional & optVal) { - out << (optVal ? 1 : 0); + out << (uint64_t) (optVal ? 1 : 0); if (optVal) write(store, out, *optVal); } +std::string read(const Store & store, Source & from, Proxy _); + +void write(const Store & store, Sink & out, const std::string & str); + StorePath read(const Store & store, Source & from, Proxy _); void write(const Store & store, Sink & out, const StorePath & storePath); -StorePathCAMap readStorePathCAMap(const Store & store, Source & from); +ContentAddress read(const Store & store, Source & from, Proxy _); -void writeStorePathCAMap(const Store & store, Sink & out, const StorePathCAMap & paths); - -void writeOutputPathMap(const Store & store, Sink & out, const OutputPathMap & paths); +void write(const Store & store, Sink & out, const ContentAddress & ca); } diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index 7b26970ef..a1fb921ef 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -815,7 +815,7 @@ static void opServe(Strings opFlags, Strings opArgs) case cmdQueryValidPaths: { bool lock = readInt(in); bool substitute = readInt(in); - auto paths = readStorePaths(*store, in); + auto paths = read(*store, in, Proxy {}); if (lock && writeAllowed) for (auto & path : paths) store->addTempRoot(path); @@ -845,19 +845,19 @@ static void opServe(Strings opFlags, Strings opArgs) } } - writeStorePaths(*store, out, store->queryValidPaths(paths)); + write(*store, out, store->queryValidPaths(paths)); break; } case cmdQueryPathInfos: { - auto paths = readStorePaths(*store, in); + auto paths = read(*store, in, Proxy {}); // !!! Maybe we want a queryPathInfos? for (auto & i : paths) { try { auto info = store->queryPathInfo(i); out << store->printStorePath(info->path) << (info->deriver ? store->printStorePath(*info->deriver) : ""); - writeStorePaths(*store, out, info->references); + write(*store, out, info->references); // !!! Maybe we want compression? 
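/* [Editorial sketch, not part of the patch] PATCH 02 replaces the dedicated
   readStorePaths/writeStorePaths helpers with overloaded read/write functions
   that take an empty Proxy<T> tag argument, because C++ cannot overload on
   return type alone.  A minimal illustration of the calling convention,
   assuming a Store `store`, a Sink `out` and a Source `in` are available:

       StorePathSet paths = store.queryAllValidPaths();
       write(store, out, paths);                                // StorePathSet overload
       auto decoded = read(store, in, Proxy<StorePathSet> {});  // tag selects the reader

   The wire format itself is unchanged; only the C++-side entry points move. */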
out << info->narSize // downloadSize << info->narSize; @@ -885,7 +885,7 @@ static void opServe(Strings opFlags, Strings opArgs) case cmdExportPaths: { readInt(in); // obsolete - store->exportPaths(readStorePaths(*store, in), out); + store->exportPaths(read(*store, in, Proxy {}), out); break; } @@ -934,9 +934,9 @@ static void opServe(Strings opFlags, Strings opArgs) case cmdQueryClosure: { bool includeOutputs = readInt(in); StorePathSet closure; - store->computeFSClosure(readStorePaths(*store, in), + store->computeFSClosure(read(*store, in, Proxy {}), closure, false, includeOutputs); - writeStorePaths(*store, out, closure); + write(*store, out, closure); break; } @@ -949,7 +949,7 @@ static void opServe(Strings opFlags, Strings opArgs) if (deriver != "") info.deriver = store->parseStorePath(deriver); info.narHash = Hash(readString(in), htSHA256); - info.references = readStorePaths(*store, in); + info.references = read(*store, in, Proxy {}); in >> info.registrationTime >> info.narSize >> info.ultimate; info.sigs = readStrings(in); info.ca = parseContentAddressOpt(readString(in)); From 1bab8a321f246c1c202268851b0e706ff5030d2e Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 4 Aug 2020 21:56:42 +0000 Subject: [PATCH 03/57] Remove unneeded definition Template instantiations will cover this case fine. --- src/libstore/remote-store.cc | 8 -------- 1 file changed, 8 deletions(-) diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index de50b3e2e..5de8f95a7 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -23,14 +23,6 @@ namespace nix { -void write(const Store & store, Sink & out, const StorePathSet & paths) -{ - out << paths.size(); - for (auto & i : paths) - out << store.printStorePath(i); -} - - std::string read(const Store & store, Source & from, Proxy _) { return readString(from); From 3d8240c32eedd0809450be52e4ac7625ffdad9aa Mon Sep 17 00:00:00 2001 From: Carlo Nucera Date: Thu, 6 Aug 2020 16:04:18 -0400 Subject: [PATCH 04/57] Remove leftover commented code --- src/libstore/worker-protocol.hh | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/libstore/worker-protocol.hh b/src/libstore/worker-protocol.hh index b4c260e26..3bb27ab22 100644 --- a/src/libstore/worker-protocol.hh +++ b/src/libstore/worker-protocol.hh @@ -136,7 +136,6 @@ void write(const Store & store, Sink & out, const std::map & resMap) { out << resMap.size(); for (auto & i : resMap) { - // out << i.first; write(store, out, i.first); write(store, out, i.second); } @@ -150,7 +149,6 @@ std::optional read(const Store & store, Source & from, Phantom {}); return read(store, from, Phantom {}); default: throw Error("got an invalid tag bit for std::optional: %#04x", (size_t)tag); From 9ab07e99f527d1fa3adfa02839da477a1528d64b Mon Sep 17 00:00:00 2001 From: Carlo Nucera Date: Thu, 6 Aug 2020 18:04:13 -0400 Subject: [PATCH 05/57] Use template structs instead of phantoms --- src/libstore/build.cc | 4 +- src/libstore/daemon.cc | 36 +++---- src/libstore/derivations.cc | 4 +- src/libstore/export-import.cc | 4 +- src/libstore/legacy-ssh-store.cc | 14 +-- src/libstore/remote-store.cc | 61 ++++++------ src/libstore/worker-protocol.hh | 165 +++++++++++++++---------------- src/nix-store/nix-store.cc | 16 +-- 8 files changed, 148 insertions(+), 156 deletions(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 8340828e7..e1c360338 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -1864,11 +1864,11 @@ HookReply DerivationGoal::tryBuildHook() /* Tell the hook all 
the inputs that have to be copied to the remote system. */ - nix::worker_proto::write(worker.store, hook->sink, inputPaths); + WorkerProto::write(worker.store, hook->sink, inputPaths); /* Tell the hooks the missing outputs that have to be copied back from the remote system. */ - nix::worker_proto::write(worker.store, hook->sink, missingPaths); + WorkerProto::write(worker.store, hook->sink, missingPaths); hook->sink = FdSink(); hook->toHook.writeSide = -1; diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc index 6d734abdc..148bd5cc9 100644 --- a/src/libstore/daemon.cc +++ b/src/libstore/daemon.cc @@ -256,11 +256,11 @@ static void performOp(TunnelLogger * logger, ref store, } case wopQueryValidPaths: { - auto paths = nix::worker_proto::read(*store, from, Phantom {}); + auto paths = WorkerProto::read(*store, from); logger->startWork(); auto res = store->queryValidPaths(paths); logger->stopWork(); - nix::worker_proto::write(*store, to, res); + WorkerProto::write(*store, to, res); break; } @@ -276,11 +276,11 @@ static void performOp(TunnelLogger * logger, ref store, } case wopQuerySubstitutablePaths: { - auto paths = nix::worker_proto::read(*store, from, Phantom {}); + auto paths = WorkerProto::read(*store, from); logger->startWork(); auto res = store->querySubstitutablePaths(paths); logger->stopWork(); - nix::worker_proto::write(*store, to, res); + WorkerProto::write(*store, to, res); break; } @@ -309,7 +309,7 @@ static void performOp(TunnelLogger * logger, ref store, paths = store->queryValidDerivers(path); else paths = store->queryDerivationOutputs(path); logger->stopWork(); - nix::worker_proto::write(*store, to, paths); + WorkerProto::write(*store, to, paths); break; } @@ -327,7 +327,7 @@ static void performOp(TunnelLogger * logger, ref store, logger->startWork(); auto outputs = store->queryDerivationOutputMap(path); logger->stopWork(); - nix::worker_proto::write(*store, to, outputs); + WorkerProto>>::write(*store, to, outputs); break; } @@ -397,7 +397,7 @@ static void performOp(TunnelLogger * logger, ref store, case wopAddTextToStore: { string suffix = readString(from); string s = readString(from); - auto refs = nix::worker_proto::read(*store, from, Phantom {}); + auto refs = WorkerProto::read(*store, from); logger->startWork(); auto path = store->addTextToStore(suffix, s, refs, NoRepair); logger->stopWork(); @@ -518,7 +518,7 @@ static void performOp(TunnelLogger * logger, ref store, case wopCollectGarbage: { GCOptions options; options.action = (GCOptions::GCAction) readInt(from); - options.pathsToDelete = nix::worker_proto::read(*store, from, Phantom {}); + options.pathsToDelete = WorkerProto::read(*store, from); from >> options.ignoreLiveness >> options.maxFreed; // obsolete fields readInt(from); @@ -587,7 +587,7 @@ static void performOp(TunnelLogger * logger, ref store, else { to << 1 << (i->second.deriver ? 
store->printStorePath(*i->second.deriver) : ""); - nix::worker_proto::write(*store, to, i->second.references); + WorkerProto::write(*store, to, i->second.references); to << i->second.downloadSize << i->second.narSize; } @@ -598,11 +598,11 @@ static void performOp(TunnelLogger * logger, ref store, SubstitutablePathInfos infos; StorePathCAMap pathsMap = {}; if (GET_PROTOCOL_MINOR(clientVersion) < 22) { - auto paths = nix::worker_proto::read(*store, from, Phantom {}); + auto paths = WorkerProto::read(*store, from); for (auto & path : paths) pathsMap.emplace(path, std::nullopt); } else - pathsMap = nix::worker_proto::read(*store, from, Phantom {}); + pathsMap = WorkerProto::read(*store, from); logger->startWork(); store->querySubstitutablePathInfos(pathsMap, infos); logger->stopWork(); @@ -610,7 +610,7 @@ static void performOp(TunnelLogger * logger, ref store, for (auto & i : infos) { to << store->printStorePath(i.first) << (i.second.deriver ? store->printStorePath(*i.second.deriver) : ""); - nix::worker_proto::write(*store, to, i.second.references); + WorkerProto::write(*store, to, i.second.references); to << i.second.downloadSize << i.second.narSize; } break; @@ -620,7 +620,7 @@ static void performOp(TunnelLogger * logger, ref store, logger->startWork(); auto paths = store->queryAllValidPaths(); logger->stopWork(); - nix::worker_proto::write(*store, to, paths); + WorkerProto::write(*store, to, paths); break; } @@ -639,7 +639,7 @@ static void performOp(TunnelLogger * logger, ref store, to << 1; to << (info->deriver ? store->printStorePath(*info->deriver) : "") << info->narHash->to_string(Base16, false); - nix::worker_proto::write(*store, to, info->references); + WorkerProto::write(*store, to, info->references); to << info->registrationTime << info->narSize; if (GET_PROTOCOL_MINOR(clientVersion) >= 16) { to << info->ultimate @@ -699,7 +699,7 @@ static void performOp(TunnelLogger * logger, ref store, if (deriver != "") info.deriver = store->parseStorePath(deriver); info.narHash = Hash::parseAny(readString(from), htSHA256); - info.references = nix::worker_proto::read(*store, from, Phantom {}); + info.references = WorkerProto::read(*store, from); from >> info.registrationTime >> info.narSize >> info.ultimate; info.sigs = readStrings(from); info.ca = parseContentAddressOpt(readString(from)); @@ -799,9 +799,9 @@ static void performOp(TunnelLogger * logger, ref store, uint64_t downloadSize, narSize; store->queryMissing(targets, willBuild, willSubstitute, unknown, downloadSize, narSize); logger->stopWork(); - nix::worker_proto::write(*store, to, willBuild); - nix::worker_proto::write(*store, to, willSubstitute); - nix::worker_proto::write(*store, to, unknown); + WorkerProto::write(*store, to, willBuild); + WorkerProto::write(*store, to, willSubstitute); + WorkerProto::write(*store, to, unknown); to << downloadSize << narSize; break; } diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc index b7aef9d65..bf2758ae5 100644 --- a/src/libstore/derivations.cc +++ b/src/libstore/derivations.cc @@ -605,7 +605,7 @@ Source & readDerivation(Source & in, const Store & store, BasicDerivation & drv, drv.outputs.emplace(std::move(name), std::move(output)); } - drv.inputSrcs = nix::worker_proto::read(store, in, Phantom {}); + drv.inputSrcs = WorkerProto::read(store, in); in >> drv.platform >> drv.builder; drv.args = readStrings(in); @@ -640,7 +640,7 @@ void writeDerivation(Sink & out, const Store & store, const BasicDerivation & dr }, }, i.second.output); } - nix::worker_proto::write(store, out, 
drv.inputSrcs); + WorkerProto::write(store, out, drv.inputSrcs); out << drv.platform << drv.builder << drv.args; out << drv.env.size(); for (auto & i : drv.env) diff --git a/src/libstore/export-import.cc b/src/libstore/export-import.cc index 7551294f7..e2e52b0af 100644 --- a/src/libstore/export-import.cc +++ b/src/libstore/export-import.cc @@ -45,7 +45,7 @@ void Store::exportPath(const StorePath & path, Sink & sink) teeSink << exportMagic << printStorePath(path); - nix::worker_proto::write(*this, teeSink, info->references); + WorkerProto::write(*this, teeSink, info->references); teeSink << (info->deriver ? printStorePath(*info->deriver) : "") << 0; @@ -73,7 +73,7 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs) //Activity act(*logger, lvlInfo, format("importing path '%s'") % info.path); - info.references = nix::worker_proto::read(*this, source, Phantom {}); + info.references = WorkerProto::read(*this, source); auto deriver = readString(source); if (deriver != "") diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index 0951610d3..ee07b7156 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -107,7 +107,7 @@ struct LegacySSHStore : public Store auto deriver = readString(conn->from); if (deriver != "") info->deriver = parseStorePath(deriver); - info->references = nix::worker_proto::read(*this, conn->from, Phantom {}); + info->references = WorkerProto::read(*this, conn->from); readLongLong(conn->from); // download size info->narSize = readLongLong(conn->from); @@ -139,7 +139,7 @@ struct LegacySSHStore : public Store << printStorePath(info.path) << (info.deriver ? printStorePath(*info.deriver) : "") << info.narHash->to_string(Base16, false); - nix::worker_proto::write(*this, conn->to, info.references); + WorkerProto::write(*this, conn->to, info.references); conn->to << info.registrationTime << info.narSize @@ -168,7 +168,7 @@ struct LegacySSHStore : public Store conn->to << exportMagic << printStorePath(info.path); - nix::worker_proto::write(*this, conn->to, info.references); + WorkerProto::write(*this, conn->to, info.references); conn->to << (info.deriver ? 
printStorePath(*info.deriver) : "") << 0 @@ -251,10 +251,10 @@ struct LegacySSHStore : public Store conn->to << cmdQueryClosure << includeOutputs; - nix::worker_proto::write(*this, conn->to, paths); + WorkerProto::write(*this, conn->to, paths); conn->to.flush(); - for (auto & i : nix::worker_proto::read(*this, conn->from, Phantom {})) + for (auto & i : WorkerProto::read(*this, conn->from)) out.insert(i); } @@ -267,10 +267,10 @@ struct LegacySSHStore : public Store << cmdQueryValidPaths << false // lock << maybeSubstitute; - nix::worker_proto::write(*this, conn->to, paths); + WorkerProto::write(*this, conn->to, paths); conn->to.flush(); - return nix::worker_proto::read(*this, conn->from, Phantom {}); + return WorkerProto::read(*this, conn->from); } void connect() override diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index ac776b95a..48450eb67 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -22,40 +22,36 @@ namespace nix { -namespace worker_proto { - -std::string read(const Store & store, Source & from, Phantom _) +std::string WorkerProto::read(const Store & store, Source & from) { return readString(from); } -void write(const Store & store, Sink & out, const std::string & str) +void WorkerProto::write(const Store & store, Sink & out, const std::string & str) { out << str; } -StorePath read(const Store & store, Source & from, Phantom _) +StorePath WorkerProto::read(const Store & store, Source & from) { return store.parseStorePath(readString(from)); } -void write(const Store & store, Sink & out, const StorePath & storePath) +void WorkerProto::write(const Store & store, Sink & out, const StorePath & storePath) { out << store.printStorePath(storePath); } -ContentAddress read(const Store & store, Source & from, Phantom _) +ContentAddress WorkerProto::read(const Store & store, Source & from) { return parseContentAddress(readString(from)); } -void write(const Store & store, Sink & out, const ContentAddress & ca) +void WorkerProto::write(const Store & store, Sink & out, const ContentAddress & ca) { out << renderContentAddress(ca); } -} - /* TODO: Separate these store impls into different files, give them better names */ RemoteStore::RemoteStore(const Params & params) @@ -290,9 +286,9 @@ StorePathSet RemoteStore::queryValidPaths(const StorePathSet & paths, Substitute return res; } else { conn->to << wopQueryValidPaths; - nix::worker_proto::write(*this, conn->to, paths); + WorkerProto::write(*this, conn->to, paths); conn.processStderr(); - return worker_proto::read(*this, conn->from, Phantom {}); + return WorkerProto::read(*this, conn->from); } } @@ -302,7 +298,7 @@ StorePathSet RemoteStore::queryAllValidPaths() auto conn(getConnection()); conn->to << wopQueryAllValidPaths; conn.processStderr(); - return nix::worker_proto::read(*this, conn->from, Phantom {}); + return WorkerProto::read(*this, conn->from); } @@ -319,9 +315,9 @@ StorePathSet RemoteStore::querySubstitutablePaths(const StorePathSet & paths) return res; } else { conn->to << wopQuerySubstitutablePaths; - nix::worker_proto::write(*this, conn->to, paths); + WorkerProto::write(*this, conn->to, paths); conn.processStderr(); - return worker_proto::read(*this, conn->from, Phantom {}); + return WorkerProto::read(*this, conn->from); } } @@ -343,7 +339,7 @@ void RemoteStore::querySubstitutablePathInfos(const StorePathCAMap & pathsMap, S auto deriver = readString(conn->from); if (deriver != "") info.deriver = parseStorePath(deriver); - info.references = worker_proto::read(*this, conn->from, 
Phantom {}); + info.references = WorkerProto::read(*this, conn->from); info.downloadSize = readLongLong(conn->from); info.narSize = readLongLong(conn->from); infos.insert_or_assign(i.first, std::move(info)); @@ -356,9 +352,9 @@ void RemoteStore::querySubstitutablePathInfos(const StorePathCAMap & pathsMap, S StorePathSet paths; for (auto & path : pathsMap) paths.insert(path.first); - worker_proto::write(*this, conn->to, paths); + WorkerProto::write(*this, conn->to, paths); } else - worker_proto::write(*this, conn->to, pathsMap); + WorkerProto::write(*this, conn->to, pathsMap); conn.processStderr(); size_t count = readNum(conn->from); for (size_t n = 0; n < count; n++) { @@ -366,7 +362,7 @@ void RemoteStore::querySubstitutablePathInfos(const StorePathCAMap & pathsMap, S auto deriver = readString(conn->from); if (deriver != "") info.deriver = parseStorePath(deriver); - info.references = worker_proto::read(*this, conn->from, Phantom {}); + info.references = WorkerProto::read(*this, conn->from); info.downloadSize = readLongLong(conn->from); info.narSize = readLongLong(conn->from); } @@ -399,7 +395,7 @@ void RemoteStore::queryPathInfoUncached(const StorePath & path, auto deriver = readString(conn->from); if (deriver != "") info->deriver = parseStorePath(deriver); info->narHash = Hash::parseAny(readString(conn->from), htSHA256); - info->references = worker_proto::read(*this, conn->from, Phantom {}); + info->references = WorkerProto::read(*this, conn->from); conn->from >> info->registrationTime >> info->narSize; if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 16) { conn->from >> info->ultimate; @@ -418,7 +414,7 @@ void RemoteStore::queryReferrers(const StorePath & path, auto conn(getConnection()); conn->to << wopQueryReferrers << printStorePath(path); conn.processStderr(); - for (auto & i : worker_proto::read(*this, conn->from, Phantom {})) + for (auto & i : WorkerProto::read(*this, conn->from)) referrers.insert(i); } @@ -428,7 +424,7 @@ StorePathSet RemoteStore::queryValidDerivers(const StorePath & path) auto conn(getConnection()); conn->to << wopQueryValidDerivers << printStorePath(path); conn.processStderr(); - return worker_proto::read(*this, conn->from, Phantom {}); + return WorkerProto::read(*this, conn->from); } @@ -440,7 +436,7 @@ StorePathSet RemoteStore::queryDerivationOutputs(const StorePath & path) } conn->to << wopQueryDerivationOutputs << printStorePath(path); conn.processStderr(); - return worker_proto::read(*this, conn->from, Phantom {}); + return WorkerProto::read(*this, conn->from); } @@ -449,8 +445,7 @@ std::map> RemoteStore::queryDerivationOutp auto conn(getConnection()); conn->to << wopQueryDerivationOutputMap << printStorePath(path); conn.processStderr(); - return worker_proto::read(*this, conn->from, Phantom>> {}); - + return WorkerProto>>::read(*this, conn->from); } std::optional RemoteStore::queryPathFromHashPart(const std::string & hashPart) @@ -479,7 +474,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source, sink << exportMagic << printStorePath(info.path); - worker_proto::write(*this, sink, info.references); + WorkerProto::write(*this, sink, info.references); sink << (info.deriver ? 
printStorePath(*info.deriver) : "") << 0 // == no legacy signature @@ -489,7 +484,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source, conn.processStderr(0, source2.get()); - auto importedPaths = worker_proto::read(*this, conn->from, Phantom {}); + auto importedPaths = WorkerProto::read(*this, conn->from); assert(importedPaths.size() <= 1); } @@ -498,7 +493,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source, << printStorePath(info.path) << (info.deriver ? printStorePath(*info.deriver) : "") << info.narHash->to_string(Base16, false); - worker_proto::write(*this, conn->to, info.references); + WorkerProto::write(*this, conn->to, info.references); conn->to << info.registrationTime << info.narSize << info.ultimate << info.sigs << renderContentAddress(info.ca) << repair << !checkSigs; @@ -631,7 +626,7 @@ StorePath RemoteStore::addTextToStore(const string & name, const string & s, auto conn(getConnection()); conn->to << wopAddTextToStore << name << s; - worker_proto::write(*this, conn->to, references); + WorkerProto::write(*this, conn->to, references); conn.processStderr(); return parseStorePath(readString(conn->from)); @@ -733,7 +728,7 @@ void RemoteStore::collectGarbage(const GCOptions & options, GCResults & results) conn->to << wopCollectGarbage << options.action; - worker_proto::write(*this, conn->to, options.pathsToDelete); + WorkerProto::write(*this, conn->to, options.pathsToDelete); conn->to << options.ignoreLiveness << options.maxFreed /* removed options */ @@ -795,9 +790,9 @@ void RemoteStore::queryMissing(const std::vector & targets ss.push_back(p.to_string(*this)); conn->to << ss; conn.processStderr(); - willBuild = worker_proto::read(*this, conn->from, Phantom {}); - willSubstitute = worker_proto::read(*this, conn->from, Phantom {}); - unknown = worker_proto::read(*this, conn->from, Phantom {}); + willBuild = WorkerProto::read(*this, conn->from); + willSubstitute = WorkerProto::read(*this, conn->from); + unknown = WorkerProto::read(*this, conn->from); conn->from >> downloadSize >> narSize; return; } diff --git a/src/libstore/worker-protocol.hh b/src/libstore/worker-protocol.hh index 3bb27ab22..60543d626 100644 --- a/src/libstore/worker-protocol.hh +++ b/src/libstore/worker-protocol.hh @@ -66,105 +66,102 @@ typedef enum { class Store; struct Source; -/* To guide overloading */ template -struct Phantom {}; +struct WorkerProto { + static T read(const Store & store, Source & from); + static void write(const Store & store, Sink & out, const T & t); +}; +template<> +struct WorkerProto { + static std::string read(const Store & store, Source & from); + static void write(const Store & store, Sink & out, const std::string & t); +}; -namespace worker_proto { -/* FIXME maybe move more stuff inside here */ +template<> +struct WorkerProto { + static StorePath read(const Store & store, Source & from); + static void write(const Store & store, Sink & out, const StorePath & t); +}; -std::string read(const Store & store, Source & from, Phantom _); -void write(const Store & store, Sink & out, const std::string & str); - -StorePath read(const Store & store, Source & from, Phantom _); -void write(const Store & store, Sink & out, const StorePath & storePath); - -ContentAddress read(const Store & store, Source & from, Phantom _); -void write(const Store & store, Sink & out, const ContentAddress & ca); +template<> +struct WorkerProto { + static ContentAddress read(const Store & store, Source & from); + static void write(const Store & store, Sink & out, 
const ContentAddress & t); +}; template -std::set read(const Store & store, Source & from, Phantom> _); -template -void write(const Store & store, Sink & out, const std::set & resSet); +struct WorkerProto> { + + static std::set read(const Store & store, Source & from) + { + std::set resSet; + auto size = readNum(from); + while (size--) { + resSet.insert(WorkerProto::read(store, from)); + } + return resSet; + } + + static void write(const Store & store, Sink & out, const std::set & resSet) + { + out << resSet.size(); + for (auto & key : resSet) { + WorkerProto::write(store, out, key); + } + } + +}; template -std::map read(const Store & store, Source & from, Phantom> _); -template -void write(const Store & store, Sink & out, const std::map & resMap); +struct WorkerProto> { -template -std::optional read(const Store & store, Source & from, Phantom> _); -template -void write(const Store & store, Sink & out, const std::optional & optVal); - -template -std::set read(const Store & store, Source & from, Phantom> _) -{ - std::set resSet; - auto size = readNum(from); - while (size--) { - resSet.insert(read(store, from, Phantom {})); + static std::map read(const Store & store, Source & from) + { + std::map resMap; + auto size = readNum(from); + while (size--) { + resMap.insert_or_assign( + WorkerProto::read(store, from), + WorkerProto::read(store, from)); + } + return resMap; } - return resSet; -} + + static void write(const Store & store, Sink & out, const std::map & resMap) + { + out << resMap.size(); + for (auto & i : resMap) { + WorkerProto::write(store, out, i.first); + WorkerProto::write(store, out, i.second); + } + } + +}; template -void write(const Store & store, Sink & out, const std::set & resSet) -{ - out << resSet.size(); - for (auto & key : resSet) { - write(store, out, key); +struct WorkerProto> { + + static std::optional read(const Store & store, Source & from) + { + auto tag = readNum(from); + switch (tag) { + case 0: + return std::nullopt; + case 1: + return WorkerProto::read(store, from); + default: + throw Error("got an invalid tag bit for std::optional: %#04x", (size_t)tag); + } } -} -template -std::map read(const Store & store, Source & from, Phantom> _) -{ - std::map resMap; - auto size = readNum(from); - while (size--) { - resMap.insert_or_assign( - read(store, from, Phantom {}), - read(store, from, Phantom {})); + static void write(const Store & store, Sink & out, const std::optional & optVal) + { + out << (uint64_t) (optVal ? 1 : 0); + if (optVal) + WorkerProto::write(store, out, *optVal); } - return resMap; -} - -template -void write(const Store & store, Sink & out, const std::map & resMap) -{ - out << resMap.size(); - for (auto & i : resMap) { - write(store, out, i.first); - write(store, out, i.second); - } -} - -template -std::optional read(const Store & store, Source & from, Phantom> _) -{ - auto tag = readNum(from); - switch (tag) { - case 0: - return std::nullopt; - case 1: - return read(store, from, Phantom {}); - default: - throw Error("got an invalid tag bit for std::optional: %#04x", (size_t)tag); - } -} - -template -void write(const Store & store, Sink & out, const std::optional & optVal) -{ - out << (uint64_t) (optVal ? 
1 : 0); - if (optVal) - nix::worker_proto::write(store, out, *optVal); -} - - -} +}; } diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index a0007f1c4..e59009d11 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -815,7 +815,7 @@ static void opServe(Strings opFlags, Strings opArgs) case cmdQueryValidPaths: { bool lock = readInt(in); bool substitute = readInt(in); - auto paths = nix::worker_proto::read(*store, in, Phantom {}); + auto paths = WorkerProto::read(*store, in); if (lock && writeAllowed) for (auto & path : paths) store->addTempRoot(path); @@ -845,19 +845,19 @@ static void opServe(Strings opFlags, Strings opArgs) } } - nix::worker_proto::write(*store, out, store->queryValidPaths(paths)); + WorkerProto::write(*store, out, store->queryValidPaths(paths)); break; } case cmdQueryPathInfos: { - auto paths = nix::worker_proto::read(*store, in, Phantom {}); + auto paths = WorkerProto::read(*store, in); // !!! Maybe we want a queryPathInfos? for (auto & i : paths) { try { auto info = store->queryPathInfo(i); out << store->printStorePath(info->path) << (info->deriver ? store->printStorePath(*info->deriver) : ""); - nix::worker_proto::write(*store, out, info->references); + WorkerProto::write(*store, out, info->references); // !!! Maybe we want compression? out << info->narSize // downloadSize << info->narSize; @@ -885,7 +885,7 @@ static void opServe(Strings opFlags, Strings opArgs) case cmdExportPaths: { readInt(in); // obsolete - store->exportPaths(nix::worker_proto::read(*store, in, Phantom {}), out); + store->exportPaths(WorkerProto::read(*store, in), out); break; } @@ -934,9 +934,9 @@ static void opServe(Strings opFlags, Strings opArgs) case cmdQueryClosure: { bool includeOutputs = readInt(in); StorePathSet closure; - store->computeFSClosure(nix::worker_proto::read(*store, in, Phantom {}), + store->computeFSClosure(WorkerProto::read(*store, in), closure, false, includeOutputs); - nix::worker_proto::write(*store, out, closure); + WorkerProto::write(*store, out, closure); break; } @@ -949,7 +949,7 @@ static void opServe(Strings opFlags, Strings opArgs) if (deriver != "") info.deriver = store->parseStorePath(deriver); info.narHash = Hash::parseAny(readString(in), htSHA256); - info.references = nix::worker_proto::read(*store, in, Phantom {}); + info.references = WorkerProto::read(*store, in); in >> info.registrationTime >> info.narSize >> info.ultimate; info.sigs = readStrings(in); info.ca = parseContentAddressOpt(readString(in)); From 46f9dd56da7d2af82148c47e40108f3c11ffe4d7 Mon Sep 17 00:00:00 2001 From: Carlo Nucera Date: Thu, 6 Aug 2020 19:30:05 -0400 Subject: [PATCH 06/57] Fix bug due to non-deterministic arg eval order --- src/libstore/worker-protocol.hh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/libstore/worker-protocol.hh b/src/libstore/worker-protocol.hh index 60543d626..1e8fd027c 100644 --- a/src/libstore/worker-protocol.hh +++ b/src/libstore/worker-protocol.hh @@ -121,9 +121,9 @@ struct WorkerProto> { std::map resMap; auto size = readNum(from); while (size--) { - resMap.insert_or_assign( - WorkerProto::read(store, from), - WorkerProto::read(store, from)); + auto k = WorkerProto::read(store, from); + auto v = WorkerProto::read(store, from); + resMap.insert_or_assign(std::move(k), std::move(v)); } return resMap; } From 59979e705352abb1624d3427c2c7145ed43b1b84 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 24 Aug 2020 18:10:58 +0000 Subject: [PATCH 07/57] Fix bad debug format string --- 
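[Editorial note, not part of any patch] The fix in PATCH 06 above ("Fix bug due
to non-deterministic arg eval order") matters because C++ leaves the evaluation
order of function arguments unspecified: passing the two reads straight to
insert_or_assign lets a compiler consume the value bytes before the key bytes
from the wire.  A minimal before/after sketch, assuming the WorkerProto<K> and
WorkerProto<V> specializations from PATCH 05:

    // Buggy: argument evaluation order is unspecified, so the key and the
    // value may be read from the stream in the wrong order.
    resMap.insert_or_assign(
        WorkerProto<K>::read(store, from),
        WorkerProto<V>::read(store, from));

    // Fixed: sequence the reads explicitly so the key is always read first.
    auto k = WorkerProto<K>::read(store, from);
    auto v = WorkerProto<V>::read(store, from);
    resMap.insert_or_assign(std::move(k), std::move(v));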
src/libstore/build.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index ba28e78c8..6baaa31d9 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -3850,7 +3850,7 @@ void DerivationGoal::registerOutputs() something like that. */ canonicalisePathMetaData(actualPath, buildUser ? buildUser->getUID() : -1, inodesSeen); - debug("scanning for references for output %1 in temp location '%1%'", outputName, actualPath); + debug("scanning for references for output '%s' in temp location '%s'", outputName, actualPath); /* Pass blank Sink as we are not ready to hash data at this stage. */ NullSink blank; From e0b0e18905835fdc8ccbbf1c0f5d016d9f466187 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sun, 23 Aug 2020 15:27:30 +0000 Subject: [PATCH 08/57] Add constructor for BasicDerivation -> Derivation --- src/libstore/derivations.hh | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/libstore/derivations.hh b/src/libstore/derivations.hh index b09480e1e..2ea4178c0 100644 --- a/src/libstore/derivations.hh +++ b/src/libstore/derivations.hh @@ -100,7 +100,7 @@ struct BasicDerivation StringPairs env; std::string name; - BasicDerivation() { } + BasicDerivation() = default; virtual ~BasicDerivation() { }; bool isBuiltin() const; @@ -127,7 +127,8 @@ struct Derivation : BasicDerivation std::string unparse(const Store & store, bool maskOutputs, std::map * actualInputs = nullptr) const; - Derivation() { } + Derivation() = default; + Derivation(BasicDerivation && bd) : BasicDerivation(std::move(bd)) { } }; From 8eb73a87245acf9d93dc401831b629981864fa58 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sat, 22 Aug 2020 20:44:47 +0000 Subject: [PATCH 09/57] CA derivations that depend on other CA derivations MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Théophane Hufschmitt --- src/libstore/build.cc | 51 +++++++++++++++++++++++++++++++---- src/libstore/derivations.cc | 53 +++++++++++++++++++++++++++++++++++++ src/libstore/derivations.hh | 18 +++++++++++++ src/libstore/local-store.cc | 27 +++++++++++++++++-- tests/content-addressed.nix | 48 ++++++++++++++++++++++++--------- tests/content-addressed.sh | 20 +++++++------- 6 files changed, 189 insertions(+), 28 deletions(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 6baaa31d9..f5256bf87 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -984,6 +984,8 @@ private: void tryLocalBuild(); void buildDone(); + void resolvedFinished(); + /* Is the build hook willing to perform the build? */ HookReply tryBuildHook(); @@ -1451,8 +1453,39 @@ void DerivationGoal::inputsRealised() /* Determine the full set of input paths. */ /* First, the input derivations. */ - if (useDerivation) - for (auto & [depDrvPath, wantedDepOutputs] : dynamic_cast(drv.get())->inputDrvs) { + if (useDerivation) { + auto & fullDrv = *dynamic_cast(drv.get()); + + if (!fullDrv.inputDrvs.empty() && fullDrv.type() == DerivationType::CAFloating) { + /* We are be able to resolve this derivation based on the + now-known results of dependencies. If so, we become a stub goal + aliasing that resolved derivation goal */ + Derivation drvResolved { fullDrv.resolve(worker.store) }; + + auto pathResolved = writeDerivation(worker.store, drvResolved); + /* Add to memotable to speed up downstream goal's queries with the + original derivation. 
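Downstream goals that later call queryPartialDerivationOutputMap on the
original drvPath can then pick up the resolved path from drvPathResolutions
instead of resolving the derivation again.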
*/ + drvPathResolutions.lock()->insert_or_assign(drvPath, pathResolved); + + auto msg = fmt("Resolved derivation: '%s' -> '%s'", + worker.store.printStorePath(drvPath), + worker.store.printStorePath(pathResolved)); + act = std::make_unique(*logger, lvlInfo, actBuildWaiting, msg, + Logger::Fields { + worker.store.printStorePath(drvPath), + worker.store.printStorePath(pathResolved), + }); + + auto resolvedGoal = worker.makeDerivationGoal( + pathResolved, wantedOutputs, + buildMode == bmRepair ? bmRepair : bmNormal); + addWaitee(resolvedGoal); + + state = &DerivationGoal::resolvedFinished; + return; + } + + for (auto & [depDrvPath, wantedDepOutputs] : fullDrv.inputDrvs) { /* Add the relevant output closures of the input derivation `i' as input paths. Only add the closures of output paths that are specified as inputs. */ @@ -1472,6 +1505,7 @@ void DerivationGoal::inputsRealised() worker.store.printStorePath(drvPath), j, worker.store.printStorePath(drvPath)); } } + } /* Second, the input sources. */ worker.store.computeFSClosure(drv->inputSrcs, inputPaths); @@ -1893,6 +1927,9 @@ void DerivationGoal::buildDone() done(BuildResult::Built); } +void DerivationGoal::resolvedFinished() { + done(BuildResult::Built); +} HookReply DerivationGoal::tryBuildHook() { @@ -2065,7 +2102,7 @@ void linkOrCopy(const Path & from, const Path & to) file (e.g. 32000 of ext3), which is quite possible after a 'nix-store --optimise'. FIXME: actually, why don't we just bind-mount in this case? - + It can also fail with EPERM in BeegFS v7 and earlier versions which don't allow hard-links to other directories */ if (errno != EMLINK && errno != EPERM) @@ -4248,10 +4285,14 @@ void DerivationGoal::registerOutputs() { ValidPathInfos infos2; for (auto & [outputName, newInfo] : infos) { - /* FIXME: we will want to track this mapping in the DB whether or - not we have a drv file. */ if (useDerivation) worker.store.linkDeriverToPath(drvPath, outputName, newInfo.path); + else { + /* Once a floating CA derivations reaches this point, it must + already be resolved, drvPath the basic derivation path, and + a file existsing at that path for sake of the DB's foreign key. */ + assert(drv->type() != DerivationType::CAFloating); + } infos2.push_back(newInfo); } worker.store.registerValidPaths(infos2); diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc index 34541227b..d96d4083d 100644 --- a/src/libstore/derivations.cc +++ b/src/libstore/derivations.cc @@ -672,4 +672,57 @@ std::string downstreamPlaceholder(const Store & store, const StorePath & drvPath return "/" + hashString(htSHA256, clearText).to_string(Base32, false); } + +// N.B. 
Outputs are left unchanged +static void rewriteDerivation(Store & store, BasicDerivation & drv, const StringMap & rewrites) { + + debug("Rewriting the derivation"); + + for (auto &rewrite: rewrites) { + debug("rewriting %s as %s", rewrite.first, rewrite.second); + } + + drv.builder = rewriteStrings(drv.builder, rewrites); + for (auto & arg: drv.args) { + arg = rewriteStrings(arg, rewrites); + } + + StringPairs newEnv; + for (auto & envVar: drv.env) { + auto envName = rewriteStrings(envVar.first, rewrites); + auto envValue = rewriteStrings(envVar.second, rewrites); + newEnv.emplace(envName, envValue); + } + drv.env = newEnv; +} + + +Sync drvPathResolutions; + +BasicDerivation Derivation::resolve(Store & store) { + BasicDerivation resolved { *this }; + + // Input paths that we'll want to rewrite in the derivation + StringMap inputRewrites; + + for (auto & input : inputDrvs) { + auto inputDrvOutputs = store.queryPartialDerivationOutputMap(input.first); + StringSet newOutputNames; + for (auto & outputName : input.second) { + auto actualPathOpt = inputDrvOutputs.at(outputName); + if (!actualPathOpt) + throw Error("input drv '%s' wasn't yet built", store.printStorePath(input.first)); + auto actualPath = *actualPathOpt; + inputRewrites.emplace( + downstreamPlaceholder(store, input.first, outputName), + store.printStorePath(actualPath)); + resolved.inputSrcs.insert(std::move(actualPath)); + } + } + + rewriteDerivation(store, resolved, inputRewrites); + + return resolved; +} + } diff --git a/src/libstore/derivations.hh b/src/libstore/derivations.hh index 2ea4178c0..0bb565e8a 100644 --- a/src/libstore/derivations.hh +++ b/src/libstore/derivations.hh @@ -4,6 +4,7 @@ #include "types.hh" #include "hash.hh" #include "content-address.hh" +#include "sync.hh" #include #include @@ -127,6 +128,13 @@ struct Derivation : BasicDerivation std::string unparse(const Store & store, bool maskOutputs, std::map * actualInputs = nullptr) const; + /* Return the underlying basic derivation but with + + 1. input drv outputs moved to input sources. + + 2. placeholders replaced with realized input store paths. */ + BasicDerivation resolve(Store & store); + Derivation() = default; Derivation(BasicDerivation && bd) : BasicDerivation(std::move(bd)) { } }; @@ -187,6 +195,16 @@ typedef std::map DrvHashes; extern DrvHashes drvHashes; // FIXME: global, not thread-safe +/* Memoisation of `readDerivation(..).resove()`. */ +typedef std::map< + StorePath, + std::optional +> DrvPathResolutions; + +// FIXME: global, though at least thread-safe. +// FIXME: arguably overlaps with hashDerivationModulo memo table. 
+extern Sync drvPathResolutions; + bool wantOutput(const string & output, const std::set & wanted); struct Source; diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 0086bb13e..e51d127b3 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -803,13 +803,36 @@ StorePathSet LocalStore::queryValidDerivers(const StorePath & path) } -std::map> LocalStore::queryPartialDerivationOutputMap(const StorePath & path) +std::map> LocalStore::queryPartialDerivationOutputMap(const StorePath & path_) { + auto path = path_; std::map> outputs; - BasicDerivation drv = readDerivation(path); + Derivation drv = readDerivation(path); for (auto & [outName, _] : drv.outputs) { outputs.insert_or_assign(outName, std::nullopt); } + bool haveCached = false; + { + auto resolutions = drvPathResolutions.lock(); + auto resolvedPathOptIter = resolutions->find(path); + if (resolvedPathOptIter != resolutions->end()) { + auto & [_, resolvedPathOpt] = *resolvedPathOptIter; + if (resolvedPathOpt) + path = *resolvedPathOpt; + haveCached = true; + } + } + /* can't just use else-if instead of `!haveCached` because we need to unlock + `drvPathResolutions` before it is locked in `Derivation::resolve`. */ + if (!haveCached && drv.type() == DerivationType::CAFloating) { + /* Resolve drv and use that path instead. */ + auto pathResolved = writeDerivation(*this, drv.resolve(*this)); + /* Store in memo table. */ + /* FIXME: memo logic should not be local-store specific, should have + wrapper-method instead. */ + drvPathResolutions.lock()->insert_or_assign(path, pathResolved); + path = std::move(pathResolved); + } return retrySQLite>>([&]() { auto state(_state.lock()); diff --git a/tests/content-addressed.nix b/tests/content-addressed.nix index 586e4cba6..a46c21164 100644 --- a/tests/content-addressed.nix +++ b/tests/content-addressed.nix @@ -4,16 +4,40 @@ with import ./config.nix; # A simple content-addressed derivation. 
# The derivation can be arbitrarily modified by passing a different `seed`, # but the output will always be the same -mkDerivation { - name = "simple-content-addressed"; - buildCommand = '' - set -x - echo "Building a CA derivation" - echo "The seed is ${toString seed}" - mkdir -p $out - echo "Hello World" > $out/hello - ''; - __contentAddressed = true; - outputHashMode = "recursive"; - outputHashAlgo = "sha256"; +rec { + root = mkDerivation { + name = "simple-content-addressed"; + buildCommand = '' + set -x + echo "Building a CA derivation" + echo "The seed is ${toString seed}" + mkdir -p $out + echo "Hello World" > $out/hello + ''; + __contentAddressed = true; + outputHashMode = "recursive"; + outputHashAlgo = "sha256"; + }; + dependent = mkDerivation { + name = "dependent"; + buildCommand = '' + echo "building a dependent derivation" + mkdir -p $out + echo ${root}/hello > $out/dep + ''; + __contentAddressed = true; + outputHashMode = "recursive"; + outputHashAlgo = "sha256"; + }; + transitivelyDependent = mkDerivation { + name = "transitively-dependent"; + buildCommand = '' + echo "building transitively-dependent" + cat ${dependent}/dep + echo ${dependent} > $out + ''; + __contentAddressed = true; + outputHashMode = "recursive"; + outputHashAlgo = "sha256"; + }; } diff --git a/tests/content-addressed.sh b/tests/content-addressed.sh index 2968f3a8c..522310585 100644 --- a/tests/content-addressed.sh +++ b/tests/content-addressed.sh @@ -2,15 +2,17 @@ source common.sh -clearStore -clearCache - -export REMOTE_STORE=file://$cacheDir - -drv=$(nix-instantiate --experimental-features ca-derivations ./content-addressed.nix --arg seed 1) +drv=$(nix-instantiate --experimental-features ca-derivations ./content-addressed.nix -A root --arg seed 1) nix --experimental-features 'nix-command ca-derivations' show-derivation --derivation "$drv" --arg seed 1 -out1=$(nix-build --experimental-features ca-derivations ./content-addressed.nix --arg seed 1 --no-out-link) -out2=$(nix-build --experimental-features ca-derivations ./content-addressed.nix --arg seed 2 --no-out-link) +testDerivation () { + local derivationPath=$1 + local commonArgs=("--experimental-features" "ca-derivations" "./content-addressed.nix" "-A" "$derivationPath" "--no-out-link") + local out1=$(nix-build "${commonArgs[@]}" --arg seed 1) + local out2=$(nix-build "${commonArgs[@]}" --arg seed 2) + test $out1 == $out2 +} -test $out1 == $out2 +testDerivation root +testDerivation dependent +testDerivation transitivelyDependent From 421ed527c70201c722cbefc10576ae77e383ba8e Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 28 Aug 2020 17:22:57 -0400 Subject: [PATCH 10/57] Update src/libstore/build.cc Thanks for catching, @regnat. --- src/libstore/build.cc | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index f5256bf87..1249668c4 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -1477,8 +1477,7 @@ void DerivationGoal::inputsRealised() }); auto resolvedGoal = worker.makeDerivationGoal( - pathResolved, wantedOutputs, - buildMode == bmRepair ? 
bmRepair : bmNormal); + pathResolved, wantedOutputs, buildMode); addWaitee(resolvedGoal); state = &DerivationGoal::resolvedFinished; From 4db0010a9374e357de3db3c0cf1cb1b490a14727 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 28 Aug 2020 22:03:54 +0000 Subject: [PATCH 11/57] Test CA derivation input caching --- tests/content-addressed.sh | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/tests/content-addressed.sh b/tests/content-addressed.sh index 522310585..5997a432f 100644 --- a/tests/content-addressed.sh +++ b/tests/content-addressed.sh @@ -9,10 +9,13 @@ testDerivation () { local derivationPath=$1 local commonArgs=("--experimental-features" "ca-derivations" "./content-addressed.nix" "-A" "$derivationPath" "--no-out-link") local out1=$(nix-build "${commonArgs[@]}" --arg seed 1) - local out2=$(nix-build "${commonArgs[@]}" --arg seed 2) + local out2=$(nix-build "${commonArgs[@]}" --arg seed 2 "${extraArgs[@]}") test $out1 == $out2 } testDerivation root +# The seed only changes the root derivation, and not it's output, so the +# dependent derivations should only need to be built once. +extaArgs=(-j0) testDerivation dependent testDerivation transitivelyDependent From aad4abcc9c27d5c1a2349e40f51f076387e0f844 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 4 Sep 2020 01:17:38 +0000 Subject: [PATCH 12/57] Fix floating CA tests We will sometimes try to query the outputs of derivations we can't resolve. That's fine; it just means we don't know what those outputs are yet. --- src/libstore/build.cc | 4 +++- src/libstore/derivations.cc | 4 ++-- src/libstore/derivations.hh | 2 +- src/libstore/local-store.cc | 9 +++++++-- tests/content-addressed.sh | 2 +- 5 files changed, 14 insertions(+), 7 deletions(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 1249668c4..1f77b8ea8 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -1460,7 +1460,9 @@ void DerivationGoal::inputsRealised() /* We are be able to resolve this derivation based on the now-known results of dependencies. 
If so, we become a stub goal aliasing that resolved derivation goal */ - Derivation drvResolved { fullDrv.resolve(worker.store) }; + std::optional attempt = fullDrv.tryResolve(worker.store); + assert(attempt); + Derivation drvResolved { *std::move(attempt) }; auto pathResolved = writeDerivation(worker.store, drvResolved); /* Add to memotable to speed up downstream goal's queries with the diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc index ce57a5bb0..695265860 100644 --- a/src/libstore/derivations.cc +++ b/src/libstore/derivations.cc @@ -671,7 +671,7 @@ static void rewriteDerivation(Store & store, BasicDerivation & drv, const String Sync drvPathResolutions; -BasicDerivation Derivation::resolve(Store & store) { +std::optional Derivation::tryResolve(Store & store) { BasicDerivation resolved { *this }; // Input paths that we'll want to rewrite in the derivation @@ -683,7 +683,7 @@ BasicDerivation Derivation::resolve(Store & store) { for (auto & outputName : input.second) { auto actualPathOpt = inputDrvOutputs.at(outputName); if (!actualPathOpt) - throw Error("input drv '%s' wasn't yet built", store.printStorePath(input.first)); + return std::nullopt; auto actualPath = *actualPathOpt; inputRewrites.emplace( downstreamPlaceholder(store, input.first, outputName), diff --git a/src/libstore/derivations.hh b/src/libstore/derivations.hh index e4d85aa05..eaffbf452 100644 --- a/src/libstore/derivations.hh +++ b/src/libstore/derivations.hh @@ -133,7 +133,7 @@ struct Derivation : BasicDerivation 1. input drv outputs moved to input sources. 2. placeholders replaced with realized input store paths. */ - BasicDerivation resolve(Store & store); + std::optional tryResolve(Store & store); Derivation() = default; Derivation(BasicDerivation && bd) : BasicDerivation(std::move(bd)) { } diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index e51d127b3..f490188ce 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -825,8 +825,13 @@ std::map> LocalStore::queryPartialDerivati /* can't just use else-if instead of `!haveCached` because we need to unlock `drvPathResolutions` before it is locked in `Derivation::resolve`. */ if (!haveCached && drv.type() == DerivationType::CAFloating) { - /* Resolve drv and use that path instead. */ - auto pathResolved = writeDerivation(*this, drv.resolve(*this)); + /* Try resolve drv and use that path instead. */ + auto attempt = drv.tryResolve(*this); + if (!attempt) + /* If we cannot resolve the derivation, we cannot have any path + assigned so we return the map of all std::nullopts. */ + return outputs; + auto pathResolved = writeDerivation(*this, *std::move(attempt)); /* Store in memo table. */ /* FIXME: memo logic should not be local-store specific, should have wrapper-method instead. 
*/ diff --git a/tests/content-addressed.sh b/tests/content-addressed.sh index b2e94fe1e..34334b22d 100644 --- a/tests/content-addressed.sh +++ b/tests/content-addressed.sh @@ -9,7 +9,7 @@ testDerivation () { local derivationPath=$1 local commonArgs=("--experimental-features" "ca-derivations" "./content-addressed.nix" "-A" "$derivationPath" "--no-out-link") local out1 out2 - out1=$(set -e; nix-build "${commonArgs[@]}" --arg seed 1) + out1=$(nix-build "${commonArgs[@]}" --arg seed 1) out2=$(nix-build "${commonArgs[@]}" --arg seed 2 "${secondSeedArgs[@]}") test "$out1" == "$out2" } From 98dfd7531d6df6abc925a446f390c4a5bbb9a51d Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 4 Sep 2020 18:33:58 +0000 Subject: [PATCH 13/57] Fix querying outputs for CA derivations some more If we resolve using the known path of a derivation whose output we didn't have, we previously blew up. Now we just fail gracefully, returning the map of all outputs unknown. --- src/libstore/derivations.cc | 4 ++-- src/libstore/derivations.hh | 4 +++- src/libstore/local-store.cc | 20 ++++++++++++++++---- 3 files changed, 21 insertions(+), 7 deletions(-) diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc index 695265860..afac00fc4 100644 --- a/src/libstore/derivations.cc +++ b/src/libstore/derivations.cc @@ -69,7 +69,7 @@ bool BasicDerivation::isBuiltin() const StorePath writeDerivation(Store & store, - const Derivation & drv, RepairFlag repair) + const Derivation & drv, RepairFlag repair, bool readOnly) { auto references = drv.inputSrcs; for (auto & i : drv.inputDrvs) @@ -79,7 +79,7 @@ StorePath writeDerivation(Store & store, held during a garbage collection). */ auto suffix = std::string(drv.name) + drvExtension; auto contents = drv.unparse(store, false); - return settings.readOnlyMode + return readOnly || settings.readOnlyMode ? store.computeStorePathForText(suffix, contents, references) : store.addTextToStore(suffix, contents, references, repair); } diff --git a/src/libstore/derivations.hh b/src/libstore/derivations.hh index 8aa496143..716862127 100644 --- a/src/libstore/derivations.hh +++ b/src/libstore/derivations.hh @@ -146,7 +146,9 @@ enum RepairFlag : bool { NoRepair = false, Repair = true }; /* Write a derivation to the Nix store, and return its path. */ StorePath writeDerivation(Store & store, - const Derivation & drv, RepairFlag repair = NoRepair); + const Derivation & drv, + RepairFlag repair = NoRepair, + bool readOnly = false); /* Read a derivation from a file. */ Derivation parseDerivation(const Store & store, std::string && s, std::string_view name); diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index f490188ce..0755cfa91 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -728,7 +728,7 @@ uint64_t LocalStore::queryValidPathId(State & state, const StorePath & path) { auto use(state.stmtQueryPathInfo.use()(printStorePath(path))); if (!use.next()) - throw Error("path '%s' is not valid", printStorePath(path)); + throw InvalidPath("path '%s' is not valid", printStorePath(path)); return use.getInt(0); } @@ -831,7 +831,8 @@ std::map> LocalStore::queryPartialDerivati /* If we cannot resolve the derivation, we cannot have any path assigned so we return the map of all std::nullopts. */ return outputs; - auto pathResolved = writeDerivation(*this, *std::move(attempt)); + /* Just compute store path */ + auto pathResolved = writeDerivation(*this, *std::move(attempt), NoRepair, true); /* Store in memo table. 
*/ /* FIXME: memo logic should not be local-store specific, should have wrapper-method instead. */ @@ -841,8 +842,19 @@ std::map> LocalStore::queryPartialDerivati return retrySQLite>>([&]() { auto state(_state.lock()); - auto useQueryDerivationOutputs(state->stmtQueryDerivationOutputs.use() - (queryValidPathId(*state, path))); + uint64_t drvId; + try { + drvId = queryValidPathId(*state, path); + } catch (InvalidPath &) { + /* FIXME? if the derivation doesn't exist, we cannot have a mapping + for it. */ + return outputs; + } + + auto useQueryDerivationOutputs { + state->stmtQueryDerivationOutputs.use() + (drvId) + }; while (useQueryDerivationOutputs.next()) outputs.insert_or_assign( From a303c0b6dc71b1e0d6a57986c3f7a9b61361cd92 Mon Sep 17 00:00:00 2001 From: Greg Hale Date: Wed, 17 Jun 2020 15:08:59 -0400 Subject: [PATCH 14/57] Fetch commits from github/gitlab using Auth header `nix flake info` calls the github 'commits' API, which requires authorization when the repository is private. Currently this request fails with a 404. This commit adds an authorization header when calling the 'commits' API. It also changes the way that the 'tarball' API authenticates, moving the user's token from a query parameter into the Authorization header. The query parameter method is recently deprecated and will be disallowed in November 2020. Using them today triggers a warning email. --- src/libexpr/common-eval-args.cc | 2 +- src/libexpr/parser.y | 2 +- src/libexpr/primops/fetchTree.cc | 4 +- src/libfetchers/fetchers.hh | 2 + src/libfetchers/github.cc | 76 +++++++++++++++++++++++--------- src/libfetchers/registry.cc | 2 +- src/libfetchers/tarball.cc | 9 ++-- src/libstore/filetransfer.cc | 3 ++ src/libstore/filetransfer.hh | 4 ++ src/libstore/globals.hh | 3 ++ src/libutil/types.hh | 2 + src/nix-channel/nix-channel.cc | 6 +-- 12 files changed, 84 insertions(+), 31 deletions(-) diff --git a/src/libexpr/common-eval-args.cc b/src/libexpr/common-eval-args.cc index 10c1a6975..d71aa22f1 100644 --- a/src/libexpr/common-eval-args.cc +++ b/src/libexpr/common-eval-args.cc @@ -76,7 +76,7 @@ Path lookupFileArg(EvalState & state, string s) if (isUri(s)) { return state.store->toRealPath( fetchers::downloadTarball( - state.store, resolveUri(s), "source", false).first.storePath); + state.store, resolveUri(s), Headers {}, "source", false).first.storePath); } else if (s.size() > 2 && s.at(0) == '<' && s.at(s.size() - 1) == '>') { Path p = s.substr(1, s.size() - 2); return state.findFile(p); diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index 24b21f7da..28e31f46b 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -719,7 +719,7 @@ std::pair EvalState::resolveSearchPathElem(const SearchPathEl if (isUri(elem.second)) { try { res = { true, store->toRealPath(fetchers::downloadTarball( - store, resolveUri(elem.second), "source", false).first.storePath) }; + store, resolveUri(elem.second), Headers {}, "source", false).first.storePath) }; } catch (FileTransferError & e) { logWarning({ .name = "Entry download", diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc index 06e8304b8..3001957b4 100644 --- a/src/libexpr/primops/fetchTree.cc +++ b/src/libexpr/primops/fetchTree.cc @@ -201,8 +201,8 @@ static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v, auto storePath = unpack - ? fetchers::downloadTarball(state.store, *url, name, (bool) expectedHash).first.storePath - : fetchers::downloadFile(state.store, *url, name, (bool) expectedHash).storePath; + ? 
fetchers::downloadTarball(state.store, *url, Headers {}, name, (bool) expectedHash).first.storePath + : fetchers::downloadFile(state.store, *url, Headers{}, name, (bool) expectedHash).storePath; auto path = state.store->toRealPath(storePath); diff --git a/src/libfetchers/fetchers.hh b/src/libfetchers/fetchers.hh index 89b1e6e7d..62807e53b 100644 --- a/src/libfetchers/fetchers.hh +++ b/src/libfetchers/fetchers.hh @@ -118,12 +118,14 @@ struct DownloadFileResult DownloadFileResult downloadFile( ref store, const std::string & url, + const Headers & headers, const std::string & name, bool immutable); std::pair downloadTarball( ref store, const std::string & url, + const Headers & headers, const std::string & name, bool immutable); diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc index 1cc0c5e2e..d8d0351b9 100644 --- a/src/libfetchers/github.cc +++ b/src/libfetchers/github.cc @@ -3,11 +3,24 @@ #include "fetchers.hh" #include "globals.hh" #include "store-api.hh" +#include "types.hh" #include namespace nix::fetchers { +struct DownloadUrl +{ + std::string url; + std::optional> access_token_header; + + DownloadUrl(const std::string & url) + : url(url) { } + + DownloadUrl(const std::string & url, const std::pair & access_token_header) + : url(url), access_token_header(access_token_header) { } +}; + // A github or gitlab url const static std::string urlRegexS = "[a-zA-Z0-9.]*"; // FIXME: check std::regex urlRegex(urlRegexS, std::regex::ECMAScript); @@ -16,6 +29,8 @@ struct GitArchiveInputScheme : InputScheme { virtual std::string type() = 0; + virtual std::pair accessHeaderFromToken(const std::string & token) const = 0; + std::optional inputFromURL(const ParsedURL & url) override { if (url.scheme != type()) return {}; @@ -131,7 +146,7 @@ struct GitArchiveInputScheme : InputScheme virtual Hash getRevFromRef(nix::ref store, const Input & input) const = 0; - virtual std::string getDownloadUrl(const Input & input) const = 0; + virtual DownloadUrl getDownloadUrl(const Input & input) const = 0; std::pair fetch(ref store, const Input & _input) override { @@ -160,7 +175,12 @@ struct GitArchiveInputScheme : InputScheme auto url = getDownloadUrl(input); - auto [tree, lastModified] = downloadTarball(store, url, "source", true); + Headers headers; + if (url.access_token_header) { + headers.push_back(*url.access_token_header); + } + + auto [tree, lastModified] = downloadTarball(store, url.url, headers, "source", true); input.attrs.insert_or_assign("lastModified", lastModified); @@ -182,11 +202,8 @@ struct GitHubInputScheme : GitArchiveInputScheme { std::string type() override { return "github"; } - void addAccessToken(std::string & url) const - { - std::string accessToken = settings.githubAccessToken.get(); - if (accessToken != "") - url += "?access_token=" + accessToken; + std::pair accessHeaderFromToken(const std::string & token) const { + return std::pair("Authorization", fmt("token %s", token)); } Hash getRevFromRef(nix::ref store, const Input & input) const override @@ -195,18 +212,21 @@ struct GitHubInputScheme : GitArchiveInputScheme auto url = fmt("https://api.%s/repos/%s/%s/commits/%s", // FIXME: check host_url, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), *input.getRef()); - addAccessToken(url); + Headers headers; + std::string accessToken = settings.githubAccessToken.get(); + if (accessToken != "") + headers.push_back(accessHeaderFromToken(accessToken)); auto json = nlohmann::json::parse( readFile( store->toRealPath( - downloadFile(store, url, "source", 
false).storePath))); + downloadFile(store, url, headers, "source", false).storePath))); auto rev = Hash::parseAny(std::string { json["sha"] }, htSHA1); debug("HEAD revision for '%s' is %s", url, rev.gitRev()); return rev; } - std::string getDownloadUrl(const Input & input) const override + DownloadUrl getDownloadUrl(const Input & input) const override { // FIXME: use regular /archive URLs instead? api.github.com // might have stricter rate limits. @@ -215,9 +235,13 @@ struct GitHubInputScheme : GitArchiveInputScheme host_url, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), input.getRev()->to_string(Base16, false)); - addAccessToken(url); - - return url; + std::string accessToken = settings.githubAccessToken.get(); + if (accessToken != "") { + auto auth_header = accessHeaderFromToken(accessToken); + return DownloadUrl(url, auth_header); + } else { + return DownloadUrl(url); + } } void clone(const Input & input, const Path & destDir) override @@ -234,21 +258,31 @@ struct GitLabInputScheme : GitArchiveInputScheme { std::string type() override { return "gitlab"; } + std::pair accessHeaderFromToken(const std::string & token) const { + return std::pair("Authorization", fmt("Bearer %s", token)); + } + Hash getRevFromRef(nix::ref store, const Input & input) const override { auto host_url = maybeGetStrAttr(input.attrs, "url").value_or("gitlab.com"); auto url = fmt("https://%s/api/v4/projects/%s%%2F%s/repository/commits?ref_name=%s", host_url, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), *input.getRef()); + + Headers headers; + std::string accessToken = settings.gitlabAccessToken.get(); + if (accessToken != "") + headers.push_back(accessHeaderFromToken(accessToken)); + auto json = nlohmann::json::parse( readFile( store->toRealPath( - downloadFile(store, url, "source", false).storePath))); + downloadFile(store, url, headers, "source", false).storePath))); auto rev = Hash::parseAny(std::string(json[0]["id"]), htSHA1); debug("HEAD revision for '%s' is %s", url, rev.gitRev()); return rev; } - std::string getDownloadUrl(const Input & input) const override + DownloadUrl getDownloadUrl(const Input & input) const override { // FIXME: This endpoint has a rate limit threshold of 5 requests per minute auto host_url = maybeGetStrAttr(input.attrs, "url").value_or("gitlab.com"); @@ -256,12 +290,14 @@ struct GitLabInputScheme : GitArchiveInputScheme host_url, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), input.getRev()->to_string(Base16, false)); - /* # FIXME: add privat token auth (`curl --header "PRIVATE-TOKEN: "`) - std::string accessToken = settings.githubAccessToken.get(); - if (accessToken != "") - url += "?access_token=" + accessToken;*/ + std::string accessToken = settings.gitlabAccessToken.get(); + if (accessToken != "") { + auto auth_header = accessHeaderFromToken(accessToken); + return DownloadUrl(url, auth_header); + } else { + return DownloadUrl(url); + } - return url; } void clone(const Input & input, const Path & destDir) override diff --git a/src/libfetchers/registry.cc b/src/libfetchers/registry.cc index 4367ee810..551e7684a 100644 --- a/src/libfetchers/registry.cc +++ b/src/libfetchers/registry.cc @@ -145,7 +145,7 @@ static std::shared_ptr getGlobalRegistry(ref store) auto path = settings.flakeRegistry.get(); if (!hasPrefix(path, "/")) { - auto storePath = downloadFile(store, path, "flake-registry.json", false).storePath; + auto storePath = downloadFile(store, path, Headers {}, "flake-registry.json", false).storePath; if (auto store2 = 
store.dynamic_pointer_cast()) store2->addPermRoot(storePath, getCacheDir() + "/nix/flake-registry.json"); path = store->toRealPath(storePath); diff --git a/src/libfetchers/tarball.cc b/src/libfetchers/tarball.cc index a2d16365e..cf6d6e3d2 100644 --- a/src/libfetchers/tarball.cc +++ b/src/libfetchers/tarball.cc @@ -5,12 +5,14 @@ #include "store-api.hh" #include "archive.hh" #include "tarfile.hh" +#include "types.hh" namespace nix::fetchers { DownloadFileResult downloadFile( ref store, const std::string & url, + const Headers & headers, const std::string & name, bool immutable) { @@ -36,7 +38,7 @@ DownloadFileResult downloadFile( if (cached && !cached->expired) return useCached(); - FileTransferRequest request(url); + FileTransferRequest request(url, headers); if (cached) request.expectedETag = getStrAttr(cached->infoAttrs, "etag"); FileTransferResult res; @@ -110,6 +112,7 @@ DownloadFileResult downloadFile( std::pair downloadTarball( ref store, const std::string & url, + const Headers & headers, const std::string & name, bool immutable) { @@ -127,7 +130,7 @@ std::pair downloadTarball( getIntAttr(cached->infoAttrs, "lastModified") }; - auto res = downloadFile(store, url, name, immutable); + auto res = downloadFile(store, url, headers, name, immutable); std::optional unpackedStorePath; time_t lastModified; @@ -222,7 +225,7 @@ struct TarballInputScheme : InputScheme std::pair fetch(ref store, const Input & input) override { - auto tree = downloadTarball(store, getStrAttr(input.attrs, "url"), "source", false).first; + auto tree = downloadTarball(store, getStrAttr(input.attrs, "url"), Headers {}, "source", false).first; return {std::move(tree), input}; } }; diff --git a/src/libstore/filetransfer.cc b/src/libstore/filetransfer.cc index 4149f8155..13ed429fa 100644 --- a/src/libstore/filetransfer.cc +++ b/src/libstore/filetransfer.cc @@ -112,6 +112,9 @@ struct curlFileTransfer : public FileTransfer requestHeaders = curl_slist_append(requestHeaders, ("If-None-Match: " + request.expectedETag).c_str()); if (!request.mimeType.empty()) requestHeaders = curl_slist_append(requestHeaders, ("Content-Type: " + request.mimeType).c_str()); + for (auto it = request.headers.begin(); it != request.headers.end(); ++it){ + requestHeaders = curl_slist_append(requestHeaders, fmt("%s: %s", it->first, it->second).c_str()); + } } ~TransferItem() diff --git a/src/libstore/filetransfer.hh b/src/libstore/filetransfer.hh index 0d608c8d8..7e302ff39 100644 --- a/src/libstore/filetransfer.hh +++ b/src/libstore/filetransfer.hh @@ -51,6 +51,7 @@ extern FileTransferSettings fileTransferSettings; struct FileTransferRequest { std::string uri; + Headers headers; std::string expectedETag; bool verifyTLS = true; bool head = false; @@ -65,6 +66,9 @@ struct FileTransferRequest FileTransferRequest(const std::string & uri) : uri(uri), parentAct(getCurActivity()) { } + FileTransferRequest(const std::string & uri, Headers headers) + : uri(uri), headers(headers) { } + std::string verb() { return data ? 
"upload" : "download"; diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index 02721285a..b2e7610ee 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -863,6 +863,9 @@ public: Setting githubAccessToken{this, "", "github-access-token", "GitHub access token to get access to GitHub data through the GitHub API for `github:<..>` flakes."}; + Setting gitlabAccessToken{this, "", "gitlab-access-token", + "GitLab access token to get access to GitLab data through the GitLab API for gitlab:<..> flakes."}; + Setting experimentalFeatures{this, {}, "experimental-features", "Experimental Nix features to enable."}; diff --git a/src/libutil/types.hh b/src/libutil/types.hh index 3af485fa0..55d02bcf9 100644 --- a/src/libutil/types.hh +++ b/src/libutil/types.hh @@ -24,6 +24,8 @@ typedef string Path; typedef list Paths; typedef set PathSet; +typedef vector> Headers; + /* Helper class to run code at startup. */ template struct OnStartup diff --git a/src/nix-channel/nix-channel.cc b/src/nix-channel/nix-channel.cc index 3ccf620c9..94d33a75c 100755 --- a/src/nix-channel/nix-channel.cc +++ b/src/nix-channel/nix-channel.cc @@ -87,7 +87,7 @@ static void update(const StringSet & channelNames) // We want to download the url to a file to see if it's a tarball while also checking if we // got redirected in the process, so that we can grab the various parts of a nix channel // definition from a consistent location if the redirect changes mid-download. - auto result = fetchers::downloadFile(store, url, std::string(baseNameOf(url)), false); + auto result = fetchers::downloadFile(store, url, Headers {}, std::string(baseNameOf(url)), false); auto filename = store->toRealPath(result.storePath); url = result.effectiveUrl; @@ -112,9 +112,9 @@ static void update(const StringSet & channelNames) if (!unpacked) { // Download the channel tarball. 
try { - filename = store->toRealPath(fetchers::downloadFile(store, url + "/nixexprs.tar.xz", "nixexprs.tar.xz", false).storePath); + filename = store->toRealPath(fetchers::downloadFile(store, url + "/nixexprs.tar.xz", Headers {}, "nixexprs.tar.xz", false).storePath); } catch (FileTransferError & e) { - filename = store->toRealPath(fetchers::downloadFile(store, url + "/nixexprs.tar.bz2", "nixexprs.tar.bz2", false).storePath); + filename = store->toRealPath(fetchers::downloadFile(store, url + "/nixexprs.tar.bz2", Headers {}, "nixexprs.tar.bz2", false).storePath); } } From 7fdbb377ba800728a47095008cec11be7d970330 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 9 Sep 2020 14:55:43 -0400 Subject: [PATCH 15/57] Start to fix floating CA + remote building --- src/libstore/build.cc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index ee12f8e67..32980f264 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -4613,7 +4613,7 @@ void DerivationGoal::flushLine() std::map> DerivationGoal::queryPartialDerivationOutputMap() { - if (drv->type() != DerivationType::CAFloating) { + if (!useDerivation || drv->type() != DerivationType::CAFloating) { std::map> res; for (auto & [name, output] : drv->outputs) res.insert_or_assign(name, output.path(worker.store, drv->name, name)); @@ -4625,7 +4625,7 @@ std::map> DerivationGoal::queryPartialDeri OutputPathMap DerivationGoal::queryDerivationOutputMap() { - if (drv->type() != DerivationType::CAFloating) { + if (!useDerivation || drv->type() != DerivationType::CAFloating) { OutputPathMap res; for (auto & [name, output] : drv->outputsAndOptPaths(worker.store)) res.insert_or_assign(name, *output.second); From 2741fffa350ec59d29ade24dd93007d535a61bde Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 9 Sep 2020 19:13:21 +0000 Subject: [PATCH 16/57] Ensure resolved CA derivations are written so we can link outputs to deriver and thus properly cache. --- src/libstore/build.cc | 33 ++++++++++++++++++++------------- src/libstore/derivations.hh | 1 + 2 files changed, 21 insertions(+), 13 deletions(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 32980f264..87c50f0e6 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -4298,11 +4298,13 @@ void DerivationGoal::registerOutputs() /* Register each output path as valid, and register the sets of paths referenced by each of them. If there are cycles in the outputs, this will fail. */ - ValidPathInfos infos2; - for (auto & [outputName, newInfo] : infos) { - infos2.push_back(newInfo); + { + ValidPathInfos infos2; + for (auto & [outputName, newInfo] : infos) { + infos2.push_back(newInfo); + } + worker.store.registerValidPaths(infos2); } - worker.store.registerValidPaths(infos2); /* In case of a fixed-output derivation hash mismatch, throw an exception now that we have registered the output as valid. */ @@ -4314,16 +4316,21 @@ void DerivationGoal::registerOutputs() means it's safe to link the derivation to the output hash. We must do that for floating CA derivations, which otherwise couldn't be cached, but it's fine to do in all cases. */ - for (auto & [outputName, newInfo] : infos) { - if (useDerivation) - worker.store.linkDeriverToPath(drvPath, outputName, newInfo.path); - else { - /* Once a floating CA derivations reaches this point, it must - already be resolved, drvPath the basic derivation path, and - a file existsing at that path for sake of the DB's foreign key. 
*/ - assert(drv->type() != DerivationType::CAFloating); - } + bool isCaFloating = drv->type() == DerivationType::CAFloating; + + auto drvPath2 = drvPath; + if (!useDerivation && isCaFloating) { + /* Once a floating CA derivations reaches this point, it + must already be resolved, so we don't bother trying to + downcast drv to get would would just be an empty + inputDrvs field. */ + Derivation drv2 { *drv }; + drvPath2 = writeDerivation(worker.store, drv2); } + + if (useDerivation || isCaFloating) + for (auto & [outputName, newInfo] : infos) + worker.store.linkDeriverToPath(drvPath, outputName, newInfo.path); } diff --git a/src/libstore/derivations.hh b/src/libstore/derivations.hh index adbf8c094..74601134e 100644 --- a/src/libstore/derivations.hh +++ b/src/libstore/derivations.hh @@ -136,6 +136,7 @@ struct Derivation : BasicDerivation std::optional tryResolve(Store & store); Derivation() = default; + Derivation(const BasicDerivation & bd) : BasicDerivation(bd) { } Derivation(BasicDerivation && bd) : BasicDerivation(std::move(bd)) { } }; From 993229cdaf2e2347a204c876ecd660fc94048101 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sat, 22 Aug 2020 20:44:47 +0000 Subject: [PATCH 17/57] Deduplicate basic derivation goals too See comments for security concerns. Also optimize goal creation by not traversing map twice. --- src/libstore/build.cc | 90 +++++++++++++++++++++++++++------------ src/libstore/daemon.cc | 14 ++++++ src/libstore/store-api.hh | 36 ++++++++++++++-- 3 files changed, 110 insertions(+), 30 deletions(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 07c5bceb2..8b206e819 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -296,9 +296,21 @@ public: ~Worker(); /* Make a goal (with caching). */ - GoalPtr makeDerivationGoal(const StorePath & drvPath, const StringSet & wantedOutputs, BuildMode buildMode = bmNormal); - std::shared_ptr makeBasicDerivationGoal(const StorePath & drvPath, - const BasicDerivation & drv, BuildMode buildMode = bmNormal); + + /* derivation goal */ +private: + std::shared_ptr makeDerivationGoalCommon( + const StorePath & drvPath, const StringSet & wantedOutputs, + std::function()> mkDrvGoal); +public: + std::shared_ptr makeDerivationGoal( + const StorePath & drvPath, + const StringSet & wantedOutputs, BuildMode buildMode = bmNormal); + std::shared_ptr makeBasicDerivationGoal( + const StorePath & drvPath, const BasicDerivation & drv, + const StringSet & wantedOutputs, BuildMode buildMode = bmNormal); + + /* substitution goal */ GoalPtr makeSubstitutionGoal(const StorePath & storePath, RepairFlag repair = NoRepair, std::optional ca = std::nullopt); /* Remove a dead goal. */ @@ -949,10 +961,12 @@ private: friend struct RestrictedStore; public: - DerivationGoal(const StorePath & drvPath, const StringSet & wantedOutputs, - Worker & worker, BuildMode buildMode = bmNormal); + DerivationGoal(const StorePath & drvPath, + const StringSet & wantedOutputs, Worker & worker, + BuildMode buildMode = bmNormal); DerivationGoal(const StorePath & drvPath, const BasicDerivation & drv, - Worker & worker, BuildMode buildMode = bmNormal); + const StringSet & wantedOutputs, Worker & worker, + BuildMode buildMode = bmNormal); ~DerivationGoal(); /* Whether we need to perform hash rewriting if there are valid output paths. 
*/ @@ -1085,8 +1099,8 @@ private: const Path DerivationGoal::homeDir = "/homeless-shelter"; -DerivationGoal::DerivationGoal(const StorePath & drvPath, const StringSet & wantedOutputs, - Worker & worker, BuildMode buildMode) +DerivationGoal::DerivationGoal(const StorePath & drvPath, + const StringSet & wantedOutputs, Worker & worker, BuildMode buildMode) : Goal(worker) , useDerivation(true) , drvPath(drvPath) @@ -1094,7 +1108,9 @@ DerivationGoal::DerivationGoal(const StorePath & drvPath, const StringSet & want , buildMode(buildMode) { state = &DerivationGoal::getDerivation; - name = fmt("building of '%s'", worker.store.printStorePath(this->drvPath)); + name = fmt( + "building of '%s' from .drv file", + StorePathWithOutputs { drvPath, wantedOutputs }.to_string(worker.store)); trace("created"); mcExpectedBuilds = std::make_unique>(worker.expectedBuilds); @@ -1103,15 +1119,18 @@ DerivationGoal::DerivationGoal(const StorePath & drvPath, const StringSet & want DerivationGoal::DerivationGoal(const StorePath & drvPath, const BasicDerivation & drv, - Worker & worker, BuildMode buildMode) + const StringSet & wantedOutputs, Worker & worker, BuildMode buildMode) : Goal(worker) , useDerivation(false) , drvPath(drvPath) + , wantedOutputs(wantedOutputs) , buildMode(buildMode) { this->drv = std::make_unique(BasicDerivation(drv)); state = &DerivationGoal::haveDerivation; - name = fmt("building of %s", StorePathWithOutputs { drvPath, drv.outputNames() }.to_string(worker.store)); + name = fmt( + "building of '%s' from in-memory derivation", + StorePathWithOutputs { drvPath, drv.outputNames() }.to_string(worker.store)); trace("created"); mcExpectedBuilds = std::make_unique>(worker.expectedBuilds); @@ -5060,35 +5079,52 @@ Worker::~Worker() } -GoalPtr Worker::makeDerivationGoal(const StorePath & path, - const StringSet & wantedOutputs, BuildMode buildMode) +std::shared_ptr Worker::makeDerivationGoalCommon( + const StorePath & drvPath, + const StringSet & wantedOutputs, + std::function()> mkDrvGoal) { - GoalPtr goal = derivationGoals[path].lock(); // FIXME - if (!goal) { - goal = std::make_shared(path, wantedOutputs, *this, buildMode); - derivationGoals.insert_or_assign(path, goal); + WeakGoalPtr & abstract_goal_weak = derivationGoals[drvPath]; + GoalPtr abstract_goal = abstract_goal_weak.lock(); // FIXME + std::shared_ptr goal; + if (!abstract_goal) { + goal = mkDrvGoal(); + abstract_goal_weak = goal; wakeUp(goal); - } else - (dynamic_cast(goal.get()))->addWantedOutputs(wantedOutputs); + } else { + goal = std::dynamic_pointer_cast(abstract_goal); + assert(goal); + goal->addWantedOutputs(wantedOutputs); + } return goal; } -std::shared_ptr Worker::makeBasicDerivationGoal(const StorePath & drvPath, - const BasicDerivation & drv, BuildMode buildMode) +std::shared_ptr Worker::makeDerivationGoal(const StorePath & drvPath, + const StringSet & wantedOutputs, BuildMode buildMode) { - auto goal = std::make_shared(drvPath, drv, *this, buildMode); - wakeUp(goal); - return goal; + return makeDerivationGoalCommon(drvPath, wantedOutputs, [&]() { + return std::make_shared(drvPath, wantedOutputs, *this, buildMode); + }); +} + + +std::shared_ptr Worker::makeBasicDerivationGoal(const StorePath & drvPath, + const BasicDerivation & drv, const StringSet & wantedOutputs, BuildMode buildMode) +{ + return makeDerivationGoalCommon(drvPath, wantedOutputs, [&]() { + return std::make_shared(drvPath, drv, wantedOutputs, *this, buildMode); + }); } GoalPtr Worker::makeSubstitutionGoal(const StorePath & path, RepairFlag repair, std::optional 
ca) { - GoalPtr goal = substitutionGoals[path].lock(); // FIXME + WeakGoalPtr & goal_weak = substitutionGoals[path]; + GoalPtr goal = goal_weak.lock(); // FIXME if (!goal) { goal = std::make_shared(path, *this, repair, ca); - substitutionGoals.insert_or_assign(path, goal); + goal_weak = goal; wakeUp(goal); } return goal; @@ -5519,7 +5555,7 @@ BuildResult LocalStore::buildDerivation(const StorePath & drvPath, const BasicDe BuildMode buildMode) { Worker worker(*this); - auto goal = worker.makeBasicDerivationGoal(drvPath, drv, buildMode); + auto goal = worker.makeBasicDerivationGoal(drvPath, drv, {}, buildMode); BuildResult result; diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc index 83f8968b0..ec3391a6d 100644 --- a/src/libstore/daemon.cc +++ b/src/libstore/daemon.cc @@ -546,6 +546,20 @@ static void performOp(TunnelLogger * logger, ref store, are in fact content-addressed if we don't trust them. */ assert(derivationIsCA(drv.type()) || trusted); + /* Recompute the derivation path when we cannot trust the original. */ + if (!trusted) { + /* Recomputing the derivation path for input-address derivations + makes it harder to audit them after the fact, since we need the + original not-necessarily-resolved derivation to verify the drv + derivation as adequate claim to the input-addressed output + paths. */ + assert(derivationIsCA(drv.type())); + + Derivation drv2; + static_cast(drv2) = drv; + drvPath = writeDerivation(*store, Derivation { drv2 }); + } + auto res = store->buildDerivation(drvPath, drv, buildMode); logger->stopWork(); to << res.status << res.errorMsg; diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index 591140874..3ccee4f75 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -479,8 +479,38 @@ public: BuildMode buildMode = bmNormal); /* Build a single non-materialized derivation (i.e. not from an - on-disk .drv file). Note that ‘drvPath’ is only used for - informational purposes. */ + on-disk .drv file). + + ‘drvPath’ is used to deduplicate worker goals so it is imperative that + is correct. That said, it doesn't literally need to be store path that + would be calculated from writing this derivation to the store: it is OK + if it instead is that of a Derivation which would resolve to this (by + taking the outputs of it's input derivations and adding them as input + sources) such that the build time referenceable-paths are the same. + + In the input-addressed case, we usually *do* use an "original" + unresolved derivations's path, as that is what will be used in the + `buildPaths` case. Also, the input-addressed output paths are verified + only by that contents of that specific unresolved derivation, so it is + nice to keep that information around so if the original derivation is + ever obtained later, it can be verified whether the trusted user in fact + used the proper output path. + + In the content-addressed case, we want to always use the + resolved drv path calculated from the provided derivation. This serves + two purposes: + + - It keeps the operation trustless, by ruling out a maliciously + invalid drv path corresponding to a non-resolution-equivalent + derivation. + + - For the floating case in particular, it ensures that the derivation + to output mapping respects the resolution equivalence relation, so + one cannot choose different resolution-equivalent derivations to + subvert dependency coherence (i.e. 
the property that one doesn't end + up with multiple different versions of dependencies without + explicitly choosing to allow it). + */ virtual BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv, BuildMode buildMode = bmNormal) = 0; @@ -517,7 +547,7 @@ public: - The collector isn't running, or it's just started but hasn't acquired the GC lock yet. In that case we get and release the lock right away, then exit. The collector scans the - permanent root and sees our's. + permanent root and sees ours. In either case the permanent root is seen by the collector. */ virtual void syncWithGC() { }; From 9fbc31a65bab50cd60a882517b3c8030485ce096 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sat, 15 Aug 2020 16:41:28 +0000 Subject: [PATCH 18/57] Get rid of Hash::dummy from BinaryCacheStore --- src/libstore/binary-cache-store.cc | 102 +++++++++++++++++------------ src/libstore/binary-cache-store.hh | 7 ++ src/libstore/store-api.hh | 4 +- 3 files changed, 69 insertions(+), 44 deletions(-) diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc index ebc0bd6a4..6679eb16f 100644 --- a/src/libstore/binary-cache-store.cc +++ b/src/libstore/binary-cache-store.cc @@ -142,17 +142,10 @@ struct FileSource : FdSource } }; -void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource, - RepairFlag repair, CheckSigsFlag checkSigs) +StorePath BinaryCacheStore::addToStoreCommon( + Source & narSource, RepairFlag repair, CheckSigsFlag checkSigs, + std::function mkInfo) { - assert(info.narSize); - - if (!repair && isValidPath(info.path)) { - // FIXME: copyNAR -> null sink - narSource.drain(); - return; - } - auto [fdTemp, fnTemp] = createTempFile(); AutoDelete autoDelete(fnTemp); @@ -162,13 +155,15 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource /* Read the NAR simultaneously into a CompressionSink+FileSink (to write the compressed NAR to disk), into a HashSink (to get the NAR hash), and into a NarAccessor (to get the NAR listing). */ - HashSink fileHashSink(htSHA256); + HashSink fileHashSink { htSHA256 }; std::shared_ptr narAccessor; + HashSink narHashSink { htSHA256 }; { FdSink fileSink(fdTemp.get()); - TeeSink teeSink(fileSink, fileHashSink); - auto compressionSink = makeCompressionSink(compression, teeSink); - TeeSource teeSource(narSource, *compressionSink); + TeeSink teeSinkCompressed { fileSink, fileHashSink }; + auto compressionSink = makeCompressionSink(compression, teeSinkCompressed); + TeeSink teeSinkUncompressed { *compressionSink, narHashSink }; + TeeSource teeSource { narSource, teeSinkUncompressed }; narAccessor = makeNarAccessor(teeSource); compressionSink->finish(); fileSink.flush(); @@ -176,9 +171,10 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource auto now2 = std::chrono::steady_clock::now(); + auto info = mkInfo(narHashSink.finish()); auto narInfo = make_ref(info); - narInfo->narSize = info.narSize; - narInfo->narHash = info.narHash; + narInfo->narSize = info.narSize; // FIXME needed? + narInfo->narHash = info.narHash; // FIXME needed? 
narInfo->compression = compression; auto [fileHash, fileSize] = fileHashSink.finish(); narInfo->fileHash = fileHash; @@ -300,6 +296,41 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource writeNarInfo(narInfo); stats.narInfoWrite++; + + return narInfo->path; +} + +void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource, + RepairFlag repair, CheckSigsFlag checkSigs) +{ + if (!repair && isValidPath(info.path)) { + // FIXME: copyNAR -> null sink + narSource.drain(); + return; + } + + (void) addToStoreCommon(narSource, repair, checkSigs, {[&](HashResult nar) { + /* FIXME reinstate these, once we can correctly do hash modulo sink as + needed. */ + // assert(info.narHash == nar.first); + // assert(info.narSize == nar.second); + return info; + }}); +} + +StorePath BinaryCacheStore::addToStoreFromDump(Source & dump, const string & name, + FileIngestionMethod method, HashType hashAlgo, RepairFlag repair) +{ + if (method != FileIngestionMethod::Recursive || hashAlgo != htSHA256) + unsupported("addToStoreFromDump"); + return addToStoreCommon(dump, repair, CheckSigs, [&](HashResult nar) { + ValidPathInfo info { + makeFixedOutputPath(method, nar.first, name), + nar.first, + }; + info.narSize = nar.second; + return info; + }); } bool BinaryCacheStore::isValidPathUncached(const StorePath & storePath) @@ -367,11 +398,9 @@ void BinaryCacheStore::queryPathInfoUncached(const StorePath & storePath, StorePath BinaryCacheStore::addToStore(const string & name, const Path & srcPath, FileIngestionMethod method, HashType hashAlgo, PathFilter & filter, RepairFlag repair) { - // FIXME: some cut&paste from LocalStore::addToStore(). - - /* Read the whole path into memory. This is not a very scalable - method for very large paths, but `copyPath' is mainly used for - small files. */ + /* FIXME: Make BinaryCacheStore::addToStoreCommon support + non-recursive+sha256 so we can just use the default + implementation of this method in terms of addToStoreFromDump. 
*/ StringSink sink; std::optional h; if (method == FileIngestionMethod::Recursive) { @@ -383,34 +412,25 @@ StorePath BinaryCacheStore::addToStore(const string & name, const Path & srcPath h = hashString(hashAlgo, s); } - ValidPathInfo info { - makeFixedOutputPath(method, *h, name), - Hash::dummy, // Will be fixed in addToStore, which recomputes nar hash - }; - auto source = StringSource { *sink.s }; - addToStore(info, source, repair, CheckSigs); - - return std::move(info.path); + return addToStoreFromDump(source, name, FileIngestionMethod::Recursive, htSHA256, repair); } StorePath BinaryCacheStore::addTextToStore(const string & name, const string & s, const StorePathSet & references, RepairFlag repair) { - ValidPathInfo info { - computeStorePathForText(name, s, references), - Hash::dummy, // Will be fixed in addToStore, which recomputes nar hash - }; - info.references = references; + auto path = computeStorePathForText(name, s, references); - if (repair || !isValidPath(info.path)) { - StringSink sink; - dumpString(s, sink); - auto source = StringSource { *sink.s }; - addToStore(info, source, repair, CheckSigs); - } + if (!repair && isValidPath(path)) + return path; - return std::move(info.path); + auto source = StringSource { s }; + return addToStoreCommon(source, repair, CheckSigs, [&](HashResult nar) { + ValidPathInfo info { path, nar.first }; + info.narSize = nar.second; + info.references = references; + return info; + }); } ref BinaryCacheStore::getFSAccessor() diff --git a/src/libstore/binary-cache-store.hh b/src/libstore/binary-cache-store.hh index 4b779cdd4..ce69ad3b4 100644 --- a/src/libstore/binary-cache-store.hh +++ b/src/libstore/binary-cache-store.hh @@ -72,6 +72,10 @@ private: void writeNarInfo(ref narInfo); + StorePath addToStoreCommon( + Source & narSource, RepairFlag repair, CheckSigsFlag checkSigs, + std::function mkInfo); + public: bool isValidPathUncached(const StorePath & path) override; @@ -85,6 +89,9 @@ public: void addToStore(const ValidPathInfo & info, Source & narSource, RepairFlag repair, CheckSigsFlag checkSigs) override; + StorePath addToStoreFromDump(Source & dump, const string & name, + FileIngestionMethod method, HashType hashAlgo, RepairFlag repair) override; + StorePath addToStore(const string & name, const Path & srcPath, FileIngestionMethod method, HashType hashAlgo, PathFilter & filter, RepairFlag repair) override; diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index 591140874..85a84d080 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -454,9 +454,7 @@ public: // FIXME: remove? virtual StorePath addToStoreFromDump(Source & dump, const string & name, FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair) - { - throw Error("addToStoreFromDump() is not supported by this store"); - } + { unsupported("addToStoreFromDump"); } /* Like addToStore, but the contents written to the output path is a regular file containing the given string. 
*/ From 412b3a54fb02cdf49cb084a925bd14c24e14aea8 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 23 Sep 2020 10:36:55 -0400 Subject: [PATCH 19/57] Clarify FIXME in `BinaryCacheStore::addToStoreCommon` Co-authored-by: Robert Hensing --- src/libstore/binary-cache-store.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc index 6679eb16f..817661869 100644 --- a/src/libstore/binary-cache-store.cc +++ b/src/libstore/binary-cache-store.cc @@ -311,7 +311,7 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource (void) addToStoreCommon(narSource, repair, checkSigs, {[&](HashResult nar) { /* FIXME reinstate these, once we can correctly do hash modulo sink as - needed. */ + needed. We need to throw here in case we uploaded a corrupted store path. */ // assert(info.narHash == nar.first); // assert(info.narSize == nar.second); return info; From 3f226f71c185b2fbaaabb01bd0f3ba3cd4a39612 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 23 Sep 2020 14:40:41 +0000 Subject: [PATCH 20/57] Return more info from `BinaryCacheStore::addToStoreCommon` We don't need it yet, but we could/should in the future, and it's a cost-free change since we already have the reference. I like it. Co-authored-by: Robert Hensing --- src/libstore/binary-cache-store.cc | 8 ++++---- src/libstore/binary-cache-store.hh | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc index 817661869..f7a52a296 100644 --- a/src/libstore/binary-cache-store.cc +++ b/src/libstore/binary-cache-store.cc @@ -142,7 +142,7 @@ struct FileSource : FdSource } }; -StorePath BinaryCacheStore::addToStoreCommon( +ref BinaryCacheStore::addToStoreCommon( Source & narSource, RepairFlag repair, CheckSigsFlag checkSigs, std::function mkInfo) { @@ -297,7 +297,7 @@ StorePath BinaryCacheStore::addToStoreCommon( stats.narInfoWrite++; - return narInfo->path; + return narInfo; } void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource, @@ -330,7 +330,7 @@ StorePath BinaryCacheStore::addToStoreFromDump(Source & dump, const string & nam }; info.narSize = nar.second; return info; - }); + })->path; } bool BinaryCacheStore::isValidPathUncached(const StorePath & storePath) @@ -430,7 +430,7 @@ StorePath BinaryCacheStore::addTextToStore(const string & name, const string & s info.narSize = nar.second; info.references = references; return info; - }); + })->path; } ref BinaryCacheStore::getFSAccessor() diff --git a/src/libstore/binary-cache-store.hh b/src/libstore/binary-cache-store.hh index ce69ad3b4..5224d7ec8 100644 --- a/src/libstore/binary-cache-store.hh +++ b/src/libstore/binary-cache-store.hh @@ -72,7 +72,7 @@ private: void writeNarInfo(ref narInfo); - StorePath addToStoreCommon( + ref addToStoreCommon( Source & narSource, RepairFlag repair, CheckSigsFlag checkSigs, std::function mkInfo); From bd5f3dbe118d569ffb201ce14394572ac5fc412c Mon Sep 17 00:00:00 2001 From: Kevin Quick Date: Thu, 24 Sep 2020 12:30:03 -0700 Subject: [PATCH 21/57] Fixes fall-through to report correct description of hash-file command. 
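A minimal standalone sketch of the bug class being fixed (illustrative only; the enum and the strings below are made-up stand-ins, not the actual FileIngestionMethod cases in hash.cc): without a `break`, a C++ switch falls through, so the description of the second case silently overwrites the first.

    #include <iostream>
    #include <string>

    int main()
    {
        enum class Mode { Flat, Recursive };
        Mode mode = Mode::Flat;
        std::string d;
        switch (mode) {
        case Mode::Flat:
            d = "print hash of a regular file";
            // no `break` here: control falls through into the next case
        case Mode::Recursive:
            d = "print hash of the NAR serialisation of a path";
            break;
        }
        // Prints the Recursive description even though `mode` is Flat,
        // which is exactly the misreporting the one-line fix below avoids.
        std::cout << d << "\n";
        return 0;
    }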
--- src/nix/hash.cc | 1 + 1 file changed, 1 insertion(+) diff --git a/src/nix/hash.cc b/src/nix/hash.cc index 0eca4f8ea..494f00a20 100644 --- a/src/nix/hash.cc +++ b/src/nix/hash.cc @@ -44,6 +44,7 @@ struct CmdHash : Command switch (mode) { case FileIngestionMethod::Flat: d = "print cryptographic hash of a regular file"; + break; case FileIngestionMethod::Recursive: d = "print cryptographic hash of the NAR serialisation of a path"; }; From ed218e1d6cf755fc3011c0954eb7031f95d3d732 Mon Sep 17 00:00:00 2001 From: Alexander Bantyev Date: Fri, 25 Sep 2020 00:07:42 +0300 Subject: [PATCH 22/57] Fix max-jobs option After 0ed946aa616bbf7ffe7f90d3309abdd27d875b10, max-jobs setting (-j/--max-jobs) stopped working. The reason was that nrLocalBuilds (which compared to maxBuildJobs to figure out whether the limit is reached or not) is not incremented yet when tryBuild is started; So, the solution is to move the check to tryLocalBuild. Closes https://github.com/nixos/nix/issues/3763 --- src/libstore/build.cc | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index db7dbc17e..3fc24b221 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -1612,6 +1612,13 @@ void DerivationGoal::tryToBuild() actLock.reset(); + state = &DerivationGoal::tryLocalBuild; + worker.wakeUp(shared_from_this()); +} + +void DerivationGoal::tryLocalBuild() { + bool buildLocally = buildMode != bmNormal || parsedDrv->willBuildLocally(worker.store); + /* Make sure that we are allowed to start a build. If this derivation prefers to be done locally, do it even if maxBuildJobs is 0. */ @@ -1622,12 +1629,6 @@ void DerivationGoal::tryToBuild() return; } - state = &DerivationGoal::tryLocalBuild; - worker.wakeUp(shared_from_this()); -} - -void DerivationGoal::tryLocalBuild() { - /* If `build-users-group' is not empty, then we have to build as one of the members of that group. */ if (settings.buildUsersGroup != "" && getuid() == 0) { From 4d863a9fcb9460a9e4978466e03d2982d32e39f0 Mon Sep 17 00:00:00 2001 From: Paul Opiyo <59094545+paulopiyo777@users.noreply.github.com> Date: Thu, 24 Sep 2020 18:05:47 -0500 Subject: [PATCH 23/57] Remove redundant value checks std::optional had redundant checks for whether it had a value. 
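For illustration only (a self-contained sketch, not code from this repository): once `emplace()` has been called on a `std::optional`, the optional is guaranteed to hold a value, so plain dereferencing with `*` is safe and `value()`'s extra has-value check adds nothing.

    #include <cassert>
    #include <optional>
    #include <string>

    int main()
    {
        std::optional<std::string> fetched;
        if (!fetched)
            fetched.emplace("tree");
        // The emplace() above guarantees the optional is engaged, so *fetched
        // needs no further check; value() would only repeat the check and pull
        // in exception machinery for a case that cannot happen here.
        assert(*fetched == "tree");
        return 0;
    }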
An object is emplaced either way so it can be dereferenced without repeating a value check --- src/libexpr/flake/flake.cc | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/libexpr/flake/flake.cc b/src/libexpr/flake/flake.cc index 460eea5ea..760ed1a6e 100644 --- a/src/libexpr/flake/flake.cc +++ b/src/libexpr/flake/flake.cc @@ -48,17 +48,17 @@ static std::tuple fetchOrSubstituteTree( resolvedRef = originalRef.resolve(state.store); auto fetchedResolved = lookupInFlakeCache(flakeCache, originalRef); if (!fetchedResolved) fetchedResolved.emplace(resolvedRef.fetchTree(state.store)); - flakeCache.push_back({resolvedRef, fetchedResolved.value()}); - fetched.emplace(fetchedResolved.value()); + flakeCache.push_back({resolvedRef, *fetchedResolved}); + fetched.emplace(*fetchedResolved); } else { throw Error("'%s' is an indirect flake reference, but registry lookups are not allowed", originalRef); } } - flakeCache.push_back({originalRef, fetched.value()}); + flakeCache.push_back({originalRef, *fetched}); } - auto [tree, lockedRef] = fetched.value(); + auto [tree, lockedRef] = *fetched; debug("got tree '%s' from '%s'", state.store->printStorePath(tree.storePath), lockedRef); From 83fec38fc93922192ada7c0409fec76578ef8dfb Mon Sep 17 00:00:00 2001 From: Kevin Quick Date: Thu, 24 Sep 2020 22:41:24 -0700 Subject: [PATCH 24/57] Update document generation for empty json object values. --- doc/manual/generate-options.nix | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/doc/manual/generate-options.nix b/doc/manual/generate-options.nix index 7afe279c3..3c31a4eec 100644 --- a/doc/manual/generate-options.nix +++ b/doc/manual/generate-options.nix @@ -13,7 +13,12 @@ concatStrings (map then "*empty*" else if isBool option.value then (if option.value then "`true`" else "`false`") - else "`" + toString option.value + "`") + "\n\n" + else + # n.b. a StringMap value type is specified as a string, but + # this shows the value type. The empty stringmap is "null" in + # JSON, but that converts to "{ }" here. + (if isAttrs option.value then "`\"\"`" + else "`" + toString option.value + "`")) + "\n\n" + (if option.aliases != [] then " **Deprecated alias:** " + (concatStringsSep ", " (map (s: "`${s}`") option.aliases)) + "\n\n" else "") From a439e9488df6c13d0e44dd4816df98487d69f4c6 Mon Sep 17 00:00:00 2001 From: Kevin Quick Date: Thu, 24 Sep 2020 22:42:59 -0700 Subject: [PATCH 25/57] Support StringMap configuration settings. Allows Configuration values that are space-separated key=value pairs. 
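The diff below adds the actual `BaseSetting<StringMap>` specialisation; as a rough standalone sketch of the parsing behaviour it implements (whitespace-separated tokens, split on the first `=`, tokens without `=` silently ignored; the helper name here is made up for illustration):

```c++
// Illustrative sketch only -- the real code uses Nix's tokenizeString and
// lives in BaseSetting<StringMap>::set below.
#include <map>
#include <sstream>
#include <string>

std::map<std::string, std::string> parseKeyValuePairs(const std::string & str)
{
    std::map<std::string, std::string> result;
    std::istringstream in(str);
    std::string token;
    while (in >> token) {                 // whitespace-separated "key=value" tokens
        auto eq = token.find('=');
        if (eq != std::string::npos)      // tokens without '=' are ignored
            result.emplace(token.substr(0, eq), token.substr(eq + 1));
    }
    return result;
}

// parseKeyValuePairs("github.com=abc123 gitlab.com=xyz") yields
// { {"github.com", "abc123"}, {"gitlab.com", "xyz"} }.
```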
--- src/libutil/config.cc | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/src/libutil/config.cc b/src/libutil/config.cc index 309d23b40..b14feb10d 100644 --- a/src/libutil/config.cc +++ b/src/libutil/config.cc @@ -268,6 +268,26 @@ template<> std::string BaseSetting::to_string() const return concatStringsSep(" ", value); } +template<> void BaseSetting::set(const std::string & str) +{ + auto kvpairs = tokenizeString(str); + for (auto & s : kvpairs) + { + auto eq = s.find_first_of('='); + if (std::string::npos != eq) + value.emplace(std::string(s, 0, eq), std::string(s, eq + 1)); + // else ignored + } +} + +template<> std::string BaseSetting::to_string() const +{ + Strings kvstrs; + std::transform(value.begin(), value.end(), back_inserter(kvstrs), + [&](auto kvpair){ return kvpair.first + "=" + kvpair.second; }); + return concatStringsSep(" ", kvstrs); +} + template class BaseSetting; template class BaseSetting; template class BaseSetting; @@ -278,6 +298,7 @@ template class BaseSetting; template class BaseSetting; template class BaseSetting; template class BaseSetting; +template class BaseSetting; void PathSetting::set(const std::string & str) { From c2f48cfcee501dd15690245d481d154444456f66 Mon Sep 17 00:00:00 2001 From: Kevin Quick Date: Thu, 24 Sep 2020 22:46:03 -0700 Subject: [PATCH 26/57] Complete conversion of "url" to "host" with associated variable renaming. Completes the change begun in commit 56f1e0d to consistently use the "host" attribute for "github" and "gitlab" inputs instead of a "url" attribute. --- src/libfetchers/github.cc | 34 +++++++++++++++++++--------------- 1 file changed, 19 insertions(+), 15 deletions(-) diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc index d8d0351b9..eb758bc5f 100644 --- a/src/libfetchers/github.cc +++ b/src/libfetchers/github.cc @@ -65,9 +65,9 @@ struct GitArchiveInputScheme : InputScheme throw BadURL("URL '%s' contains multiple branch/tag names", url.url); ref = value; } - else if (name == "url") { + else if (name == "host") { if (!std::regex_match(value, urlRegex)) - throw BadURL("URL '%s' contains an invalid instance url", url.url); + throw BadURL("URL '%s' contains an invalid instance host", url.url); host_url = value; } // FIXME: barf on unsupported attributes @@ -82,7 +82,7 @@ struct GitArchiveInputScheme : InputScheme input.attrs.insert_or_assign("repo", path[1]); if (rev) input.attrs.insert_or_assign("rev", rev->gitRev()); if (ref) input.attrs.insert_or_assign("ref", *ref); - if (host_url) input.attrs.insert_or_assign("url", *host_url); + if (host_url) input.attrs.insert_or_assign("host", *host_url); return input; } @@ -92,7 +92,7 @@ struct GitArchiveInputScheme : InputScheme if (maybeGetStrAttr(attrs, "type") != type()) return {}; for (auto & [name, value] : attrs) - if (name != "type" && name != "owner" && name != "repo" && name != "ref" && name != "rev" && name != "narHash" && name != "lastModified") + if (name != "type" && name != "owner" && name != "repo" && name != "ref" && name != "rev" && name != "narHash" && name != "lastModified" && name != "host") throw Error("unsupported input attribute '%s'", name); getStrAttr(attrs, "owner"); @@ -208,9 +208,9 @@ struct GitHubInputScheme : GitArchiveInputScheme Hash getRevFromRef(nix::ref store, const Input & input) const override { - auto host_url = maybeGetStrAttr(input.attrs, "url").value_or("github.com"); + auto host = maybeGetStrAttr(input.attrs, "host").value_or("github.com"); auto url = fmt("https://api.%s/repos/%s/%s/commits/%s", // FIXME: 
check - host_url, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), *input.getRef()); + host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), *input.getRef()); Headers headers; std::string accessToken = settings.githubAccessToken.get(); @@ -230,9 +230,9 @@ struct GitHubInputScheme : GitArchiveInputScheme { // FIXME: use regular /archive URLs instead? api.github.com // might have stricter rate limits. - auto host_url = maybeGetStrAttr(input.attrs, "url").value_or("github.com"); + auto host = maybeGetStrAttr(input.attrs, "host").value_or("github.com"); auto url = fmt("https://api.%s/repos/%s/%s/tarball/%s", // FIXME: check if this is correct for self hosted instances - host_url, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), + host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), input.getRev()->to_string(Base16, false)); std::string accessToken = settings.githubAccessToken.get(); @@ -246,9 +246,9 @@ struct GitHubInputScheme : GitArchiveInputScheme void clone(const Input & input, const Path & destDir) override { - auto host_url = maybeGetStrAttr(input.attrs, "url").value_or("github.com"); + auto host = maybeGetStrAttr(input.attrs, "host").value_or("github.com"); Input::fromURL(fmt("git+ssh://git@%s/%s/%s.git", - host_url, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"))) + host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"))) .applyOverrides(input.getRef().value_or("HEAD"), input.getRev()) .clone(destDir); } @@ -284,10 +284,14 @@ struct GitLabInputScheme : GitArchiveInputScheme DownloadUrl getDownloadUrl(const Input & input) const override { - // FIXME: This endpoint has a rate limit threshold of 5 requests per minute - auto host_url = maybeGetStrAttr(input.attrs, "url").value_or("gitlab.com"); + // This endpoint has a rate limit threshold that may be + // server-specific and vary based whether the user is + // authenticated via an accessToken or not, but the usual rate + // is 10 reqs/sec/ip-addr. See + // https://docs.gitlab.com/ee/user/gitlab_com/index.html#gitlabcom-specific-rate-limits + auto host = maybeGetStrAttr(input.attrs, "host").value_or("gitlab.com"); auto url = fmt("https://%s/api/v4/projects/%s%%2F%s/repository/archive.tar.gz?sha=%s", - host_url, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), + host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), input.getRev()->to_string(Base16, false)); std::string accessToken = settings.gitlabAccessToken.get(); @@ -302,10 +306,10 @@ struct GitLabInputScheme : GitArchiveInputScheme void clone(const Input & input, const Path & destDir) override { - auto host_url = maybeGetStrAttr(input.attrs, "url").value_or("gitlab.com"); + auto host = maybeGetStrAttr(input.attrs, "host").value_or("gitlab.com"); // FIXME: get username somewhere Input::fromURL(fmt("git+ssh://git@%s/%s/%s.git", - host_url, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"))) + host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"))) .applyOverrides(input.getRef().value_or("HEAD"), input.getRev()) .clone(destDir); } From 8fba2a8b54283ea1cf56ae75faf4ced5f3e8e4a1 Mon Sep 17 00:00:00 2001 From: Kevin Quick Date: Thu, 24 Sep 2020 22:49:44 -0700 Subject: [PATCH 27/57] Update to use access-tokens configuration for github/gitlab access. 
This change provides support for using access tokens with other instances of GitHub and GitLab beyond just github.com and gitlab.com (especially company-specific or foundation-specific instances). This change also provides the ability to specify the type of access token being used, where different types may have different handling, based on the forge type. --- src/libfetchers/github.cc | 100 ++++++++++++++++++++++---------------- src/libstore/globals.hh | 50 ++++++++++++++++++- 2 files changed, 107 insertions(+), 43 deletions(-) diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc index eb758bc5f..0e0655367 100644 --- a/src/libfetchers/github.cc +++ b/src/libfetchers/github.cc @@ -5,6 +5,7 @@ #include "store-api.hh" #include "types.hh" +#include #include namespace nix::fetchers { @@ -12,13 +13,10 @@ namespace nix::fetchers { struct DownloadUrl { std::string url; - std::optional> access_token_header; + Headers headers; - DownloadUrl(const std::string & url) - : url(url) { } - - DownloadUrl(const std::string & url, const std::pair & access_token_header) - : url(url), access_token_header(access_token_header) { } + DownloadUrl(const std::string & url, const Headers & headers) + : url(url), headers(headers) { } }; // A github or gitlab url @@ -29,7 +27,7 @@ struct GitArchiveInputScheme : InputScheme { virtual std::string type() = 0; - virtual std::pair accessHeaderFromToken(const std::string & token) const = 0; + virtual std::optional > accessHeaderFromToken(const std::string & token) const = 0; std::optional inputFromURL(const ParsedURL & url) override { @@ -144,6 +142,27 @@ struct GitArchiveInputScheme : InputScheme return input; } + std::optional getAccessToken(const std::string &host) const { + auto tokens = settings.accessTokens.get(); + auto pat = tokens.find(host); + if (pat == tokens.end()) + return std::nullopt; + return pat->second; + } + + Headers makeHeadersWithAuthTokens(const std::string & host) const { + Headers headers; + auto accessToken = getAccessToken(host); + if (accessToken) { + auto hdr = accessHeaderFromToken(*accessToken); + if (hdr) + headers.push_back(*hdr); + else + warn("Unrecognized access token for host '%s'", host); + } + return headers; + } + virtual Hash getRevFromRef(nix::ref store, const Input & input) const = 0; virtual DownloadUrl getDownloadUrl(const Input & input) const = 0; @@ -175,12 +194,7 @@ struct GitArchiveInputScheme : InputScheme auto url = getDownloadUrl(input); - Headers headers; - if (url.access_token_header) { - headers.push_back(*url.access_token_header); - } - - auto [tree, lastModified] = downloadTarball(store, url.url, headers, "source", true); + auto [tree, lastModified] = downloadTarball(store, url.url, url.headers, "source", true); input.attrs.insert_or_assign("lastModified", lastModified); @@ -202,7 +216,13 @@ struct GitHubInputScheme : GitArchiveInputScheme { std::string type() override { return "github"; } - std::pair accessHeaderFromToken(const std::string & token) const { + std::optional > accessHeaderFromToken(const std::string & token) const { + // Github supports PAT/OAuth2 tokens and HTTP Basic + // Authentication. The former simply specifies the token, the + // latter can use the token as the password. Only the first + // is used here. 
See + // https://developer.github.com/v3/#authentication and + // https://docs.github.com/en/developers/apps/authorizing-oath-apps return std::pair("Authorization", fmt("token %s", token)); } @@ -212,10 +232,7 @@ struct GitHubInputScheme : GitArchiveInputScheme auto url = fmt("https://api.%s/repos/%s/%s/commits/%s", // FIXME: check host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), *input.getRef()); - Headers headers; - std::string accessToken = settings.githubAccessToken.get(); - if (accessToken != "") - headers.push_back(accessHeaderFromToken(accessToken)); + Headers headers = makeHeadersWithAuthTokens(host); auto json = nlohmann::json::parse( readFile( @@ -235,13 +252,8 @@ struct GitHubInputScheme : GitArchiveInputScheme host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), input.getRev()->to_string(Base16, false)); - std::string accessToken = settings.githubAccessToken.get(); - if (accessToken != "") { - auto auth_header = accessHeaderFromToken(accessToken); - return DownloadUrl(url, auth_header); - } else { - return DownloadUrl(url); - } + Headers headers = makeHeadersWithAuthTokens(host); + return DownloadUrl(url, headers); } void clone(const Input & input, const Path & destDir) override @@ -258,20 +270,32 @@ struct GitLabInputScheme : GitArchiveInputScheme { std::string type() override { return "gitlab"; } - std::pair accessHeaderFromToken(const std::string & token) const { - return std::pair("Authorization", fmt("Bearer %s", token)); + std::optional > accessHeaderFromToken(const std::string & token) const { + // Gitlab supports 4 kinds of authorization, two of which are + // relevant here: OAuth2 and PAT (Private Access Token). The + // user can indicate which token is used by specifying the + // token as :, where type is "OAuth2" or "PAT". + // If the is unrecognized, this will fall back to + // treating this simply has :. See + // https://docs.gitlab.com/12.10/ee/api/README.html#authentication + auto fldsplit = token.find_first_of(':'); + // n.b. C++20 would allow: if (token.starts_with("OAuth2:")) ... 
+ if ("OAuth2" == token.substr(0, fldsplit)) + return std::make_pair("Authorization", fmt("Bearer %s", token.substr(fldsplit+1))); + if ("PAT" == token.substr(0, fldsplit)) + return std::make_pair("Private-token", token.substr(fldsplit+1)); + warn("Unrecognized GitLab token type %s", token.substr(0, fldsplit)); + return std::nullopt; } Hash getRevFromRef(nix::ref store, const Input & input) const override { - auto host_url = maybeGetStrAttr(input.attrs, "url").value_or("gitlab.com"); + auto host = maybeGetStrAttr(input.attrs, "host").value_or("gitlab.com"); + // See rate limiting note below auto url = fmt("https://%s/api/v4/projects/%s%%2F%s/repository/commits?ref_name=%s", - host_url, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), *input.getRef()); + host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), *input.getRef()); - Headers headers; - std::string accessToken = settings.gitlabAccessToken.get(); - if (accessToken != "") - headers.push_back(accessHeaderFromToken(accessToken)); + Headers headers = makeHeadersWithAuthTokens(host); auto json = nlohmann::json::parse( readFile( @@ -294,14 +318,8 @@ struct GitLabInputScheme : GitArchiveInputScheme host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), input.getRev()->to_string(Base16, false)); - std::string accessToken = settings.gitlabAccessToken.get(); - if (accessToken != "") { - auto auth_header = accessHeaderFromToken(accessToken); - return DownloadUrl(url, auth_header); - } else { - return DownloadUrl(url); - } - + Headers headers = makeHeadersWithAuthTokens(host); + return DownloadUrl(url, headers); } void clone(const Input & input, const Path & destDir) override diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index b2e7610ee..646422399 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -863,8 +863,54 @@ public: Setting githubAccessToken{this, "", "github-access-token", "GitHub access token to get access to GitHub data through the GitHub API for `github:<..>` flakes."}; - Setting gitlabAccessToken{this, "", "gitlab-access-token", - "GitLab access token to get access to GitLab data through the GitLab API for gitlab:<..> flakes."}; + Setting accessTokens{this, {}, "access-tokens", + R"( + Access tokens used to access protected GitHub, GitLab, or + other locations requiring token-based authentication. + + Access tokens are specified as a string made up of + space-separated `host=token` values. The specific token + used is selected by matching the `host` portion against the + "host" specification of the input. The actual use of the + `token` value is determined by the type of resource being + accessed: + + * Github: the token value is the OAUTH-TOKEN string obtained + as the Personal Access Token from the Github server (see + https://docs.github.com/en/developers/apps/authorizing-oath-apps). + + * Gitlab: the token value is either the OAuth2 token or the + Personal Access Token (these are different types tokens + for gitlab, see + https://docs.gitlab.com/12.10/ee/api/README.html#authentication). + The `token` value should be `type:tokenstring` where + `type` is either `OAuth2` or `PAT` to indicate which type + of token is being specified. 
+ + Example `~/.config/nix/nix.conf`: + + ``` + personal-access-tokens = "github.com=23ac...b289 gitlab.mycompany.com=PAT:A123Bp_Cd..EfG gitlab.com=OAuth2:1jklw3jk" + ``` + + Example `~/code/flake.nix`: + + ```nix + input.foo = { + type="gitlab"; + host="gitlab.mycompany.com"; + owner="mycompany"; + repo="pro"; + }; + ``` + + This example specifies three tokens, one each for accessing + github.com, gitlab.mycompany.com, and sourceforge.net. + + The `input.foo` uses the "gitlab" fetcher, which might + requires specifying the token type along with the token + value. + )"}; Setting experimentalFeatures{this, {}, "experimental-features", "Experimental Nix features to enable."}; From 7b2ae472ff05a39cd635ac10dbbce3cd17b60c93 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 25 Sep 2020 10:27:40 +0200 Subject: [PATCH 28/57] expectArg(): Respect the 'optional' flag --- src/libutil/args.hh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libutil/args.hh b/src/libutil/args.hh index 3c1f87f7e..f41242e17 100644 --- a/src/libutil/args.hh +++ b/src/libutil/args.hh @@ -192,7 +192,7 @@ public: { expectArgs({ .label = label, - .optional = true, + .optional = optional, .handler = {dest} }); } From ef2a14be190f7162e85e9bdd44dd45bd9ddfe391 Mon Sep 17 00:00:00 2001 From: Kevin Quick Date: Fri, 25 Sep 2020 08:08:27 -0700 Subject: [PATCH 29/57] Fix reference to older name for access-tokens config value. --- src/libstore/globals.hh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index 646422399..959ebe360 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -890,7 +890,7 @@ public: Example `~/.config/nix/nix.conf`: ``` - personal-access-tokens = "github.com=23ac...b289 gitlab.mycompany.com=PAT:A123Bp_Cd..EfG gitlab.com=OAuth2:1jklw3jk" + access-tokens = "github.com=23ac...b289 gitlab.mycompany.com=PAT:A123Bp_Cd..EfG gitlab.com=OAuth2:1jklw3jk" ``` Example `~/code/flake.nix`: From 5a35cc29bffc88b88f883dfcdd1bb251eab53ecd Mon Sep 17 00:00:00 2001 From: Kevin Quick Date: Fri, 25 Sep 2020 08:09:56 -0700 Subject: [PATCH 30/57] Re-add support for github-access-token, but mark as deprecated. 
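The fallback reintroduced in the diff below can be summarised by this simplified sketch (the setting names and function signature are illustrative, not the exact Nix API): the per-host `access-tokens` map wins, and the deprecated single GitHub token is only consulted for github.com, with a warning.

```c++
// Illustrative only: prefer the new access-tokens map, fall back to the
// deprecated github-access-token setting for github.com and warn about it.
#include <iostream>
#include <map>
#include <optional>
#include <string>

std::optional<std::string> lookupToken(
    const std::map<std::string, std::string> & accessTokens,
    const std::string & deprecatedGithubToken,
    const std::string & host)
{
    if (auto it = accessTokens.find(host); it != accessTokens.end())
        return it->second;
    if (host == "github.com" && !deprecatedGithubToken.empty()) {
        std::cerr << "warning: using deprecated 'github-access-token' config value; "
                     "please use 'access-tokens' instead\n";
        return deprecatedGithubToken;
    }
    return std::nullopt;
}
```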
--- src/libfetchers/github.cc | 10 ++++++++++ src/libstore/globals.hh | 2 +- 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc index 0e0655367..443644639 100644 --- a/src/libfetchers/github.cc +++ b/src/libfetchers/github.cc @@ -146,7 +146,17 @@ struct GitArchiveInputScheme : InputScheme auto tokens = settings.accessTokens.get(); auto pat = tokens.find(host); if (pat == tokens.end()) + { + if ("github.com" == host) + { + auto oldcfg = settings.githubAccessToken.get(); + if (!oldcfg.empty()) { + warn("using deprecated 'github-access-token' config value; please use 'access-tokens' instead"); + return oldcfg; + } + } return std::nullopt; + } return pat->second; } diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index 959ebe360..bd36ffc17 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -861,7 +861,7 @@ public: )"}; Setting githubAccessToken{this, "", "github-access-token", - "GitHub access token to get access to GitHub data through the GitHub API for `github:<..>` flakes."}; + "GitHub access token to get access to GitHub data through the GitHub API for `github:<..>` flakes (deprecated, please use 'access-tokens' instead)."}; Setting accessTokens{this, {}, "access-tokens", R"( From cb186f1e7536c9448455bfbf8dec16ad6600e88e Mon Sep 17 00:00:00 2001 From: Kevin Quick Date: Fri, 25 Sep 2020 09:36:18 -0700 Subject: [PATCH 31/57] Use "?dir=..." portion of "registry add" local path specification. The registry targets generally follow a URL formatting schema with support for a query parameter of "?dir=subpath" to specify a sub-path location below the URL root. Alternatively, an absolute path can be specified. This specification mode accepts the query parameter but ignores/drops it. It would probably be better to either (a) disallow the query parameter for the path form, or (b) recognize the query parameter and add to the path. This patch implements (b) for consistency, and to make it easier for tooling that might switch between a remote git reference and a local path reference. See also issue #4050. 
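As a concrete illustration of option (b), here is a simplified sketch of the new behaviour (made-up helper name and paths; the real code uses `decodeQuery` and `canonPath`): the `dir` query value is appended to the local path before canonicalisation instead of being dropped.

```c++
// Simplified sketch: "/home/alice/repo?dir=sub/flake" now resolves to
// "/home/alice/repo/sub/flake"; without a "dir" parameter the path is unchanged.
#include <iostream>
#include <map>
#include <string>

std::string resolveLocalFlakePath(
    std::string path, const std::map<std::string, std::string> & query)
{
    auto dir = query.find("dir");
    if (dir != query.end() && !dir->second.empty())
        path += "/" + dir->second;
    return path; // the real implementation returns canonPath(path)
}

int main()
{
    std::cout << resolveLocalFlakePath("/home/alice/repo", {{"dir", "sub/flake"}})
              << std::endl; // /home/alice/repo/sub/flake
}
```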
--- src/libexpr/flake/flakeref.cc | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/libexpr/flake/flakeref.cc b/src/libexpr/flake/flakeref.cc index d5c2ffe66..762e27e1f 100644 --- a/src/libexpr/flake/flakeref.cc +++ b/src/libexpr/flake/flakeref.cc @@ -157,7 +157,8 @@ std::pair parseFlakeRefWithFragment( } else { if (!hasPrefix(path, "/")) throw BadURL("flake reference '%s' is not an absolute path", url); - path = canonPath(path); + auto query = decodeQuery(match[2]); + path = canonPath(path + "/" + get(query, "dir").value_or("")); } fetchers::Attrs attrs; From 5db83dd771b92bcacb0cd4dea7d4e06f767769ca Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sat, 26 Sep 2020 03:21:36 +0000 Subject: [PATCH 32/57] BinaryCacheStore::addTextToStore include CA field --- src/libstore/binary-cache-store.cc | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc index f7a52a296..a3a73fe27 100644 --- a/src/libstore/binary-cache-store.cc +++ b/src/libstore/binary-cache-store.cc @@ -419,7 +419,8 @@ StorePath BinaryCacheStore::addToStore(const string & name, const Path & srcPath StorePath BinaryCacheStore::addTextToStore(const string & name, const string & s, const StorePathSet & references, RepairFlag repair) { - auto path = computeStorePathForText(name, s, references); + auto textHash = hashString(htSHA256, s); + auto path = makeTextPath(name, textHash, references); if (!repair && isValidPath(path)) return path; @@ -428,6 +429,7 @@ StorePath BinaryCacheStore::addTextToStore(const string & name, const string & s return addToStoreCommon(source, repair, CheckSigs, [&](HashResult nar) { ValidPathInfo info { path, nar.first }; info.narSize = nar.second; + info.ca = TextHash { textHash }; info.references = references; return info; })->path; From 1832436526307ac92baec0146b89e9a5cf3aca35 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sat, 26 Sep 2020 04:56:29 +0000 Subject: [PATCH 33/57] Fix up BinaryCacheStore::addToStore taking a path --- src/libstore/binary-cache-store.cc | 27 +++++++++++++++++++-------- 1 file changed, 19 insertions(+), 8 deletions(-) diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc index a3a73fe27..b34b10fd1 100644 --- a/src/libstore/binary-cache-store.cc +++ b/src/libstore/binary-cache-store.cc @@ -401,19 +401,30 @@ StorePath BinaryCacheStore::addToStore(const string & name, const Path & srcPath /* FIXME: Make BinaryCacheStore::addToStoreCommon support non-recursive+sha256 so we can just use the default implementation of this method in terms of addToStoreFromDump. 
*/ - StringSink sink; - std::optional h; + + HashSink sink { hashAlgo }; if (method == FileIngestionMethod::Recursive) { dumpPath(srcPath, sink, filter); - h = hashString(hashAlgo, *sink.s); } else { - auto s = readFile(srcPath); - dumpString(s, sink); - h = hashString(hashAlgo, s); + readFile(srcPath, sink); } + auto h = sink.finish().first; - auto source = StringSource { *sink.s }; - return addToStoreFromDump(source, name, FileIngestionMethod::Recursive, htSHA256, repair); + auto source = sinkToSource([&](Sink & sink) { + dumpPath(srcPath, sink, filter); + }); + return addToStoreCommon(*source, repair, CheckSigs, [&](HashResult nar) { + ValidPathInfo info { + makeFixedOutputPath(method, h, name), + nar.first, + }; + info.narSize = nar.second; + info.ca = FixedOutputHash { + .method = method, + .hash = h, + }; + return info; + })->path; } StorePath BinaryCacheStore::addTextToStore(const string & name, const string & s, From 8b4a542d1767e0df7b3c0902b766f34352cb0958 Mon Sep 17 00:00:00 2001 From: Mateusz Piotrowski <0mp@FreeBSD.org> Date: Sat, 26 Sep 2020 13:33:04 +0200 Subject: [PATCH 34/57] Fix a typo (#4073) --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 3cf4e44fa..11fe5f932 100644 --- a/README.md +++ b/README.md @@ -9,7 +9,7 @@ for more details. ## Installation -On Linux and macOS the easiest way to Install Nix is to run the following shell command +On Linux and macOS the easiest way to install Nix is to run the following shell command (as a user other than root): ```console From 25fffdda865e51f68e72e0ca1775800b60391820 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sat, 26 Sep 2020 10:17:30 -0400 Subject: [PATCH 35/57] Remove redundant nar hash and size setting Co-authored-by: Robert Hensing --- src/libstore/binary-cache-store.cc | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc index b34b10fd1..59d02cab5 100644 --- a/src/libstore/binary-cache-store.cc +++ b/src/libstore/binary-cache-store.cc @@ -173,8 +173,6 @@ ref BinaryCacheStore::addToStoreCommon( auto info = mkInfo(narHashSink.finish()); auto narInfo = make_ref(info); - narInfo->narSize = info.narSize; // FIXME needed? - narInfo->narHash = info.narHash; // FIXME needed? narInfo->compression = compression; auto [fileHash, fileSize] = fileHashSink.finish(); narInfo->fileHash = fileHash; From a76fb07314d3c5dea06ac2c1a36f8af1e76c2dde Mon Sep 17 00:00:00 2001 From: Maximilian Bosch Date: Sat, 26 Sep 2020 17:38:11 +0200 Subject: [PATCH 36/57] libmain/progress-bar: don't trim whitespace on the left When running `nix build -L` it can be fairly hard to read the output if the build program intentionally renders whitespace on the left. A typical example is `g++` displaying compilation errors. 
With this patch, the whitespace on the left is retained to make the log more readable: ``` foo> no configure script, doing nothing foo> building foo> foobar.cc: In function 'int main()': foo> foobar.cc:5:5: error: 'wrong_func' was not declared in this scope foo> 5 | wrong_func(1); foo> | ^~~~~~~~~~ error: --- Error ------------------------------------------------------------------------------------- nix error: --- Error --- nix-daemon builder for '/nix/store/i1q76cw6cyh91raaqg5p5isd1l2x6rx2-foo-1.0.drv' failed with exit code 1 ``` --- src/libmain/progress-bar.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libmain/progress-bar.cc b/src/libmain/progress-bar.cc index be3c06a38..07b45b3b5 100644 --- a/src/libmain/progress-bar.cc +++ b/src/libmain/progress-bar.cc @@ -256,7 +256,7 @@ public: } else if (type == resBuildLogLine || type == resPostBuildLogLine) { - auto lastLine = trim(getS(fields, 0)); + auto lastLine = chomp(getS(fields, 0)); if (!lastLine.empty()) { auto i = state->its.find(act); assert(i != state->its.end()); From 5885b0cfd878b4b60556c5b03bbe52244d04191a Mon Sep 17 00:00:00 2001 From: Kevin Quick Date: Sun, 27 Sep 2020 13:04:06 -0700 Subject: [PATCH 37/57] Miscellaneous spelling fixes in comments. (#4071) --- src/libexpr/flake/flake.cc | 2 +- src/libfetchers/fetchers.hh | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/libexpr/flake/flake.cc b/src/libexpr/flake/flake.cc index 760ed1a6e..b4ede542c 100644 --- a/src/libexpr/flake/flake.cc +++ b/src/libexpr/flake/flake.cc @@ -247,7 +247,7 @@ Flake getFlake(EvalState & state, const FlakeRef & originalRef, bool allowLookup } /* Compute an in-memory lock file for the specified top-level flake, - and optionally write it to file, it the flake is writable. */ + and optionally write it to file, if the flake is writable. */ LockedFlake lockFlake( EvalState & state, const FlakeRef & topRef, diff --git a/src/libfetchers/fetchers.hh b/src/libfetchers/fetchers.hh index 89b1e6e7d..191e91978 100644 --- a/src/libfetchers/fetchers.hh +++ b/src/libfetchers/fetchers.hh @@ -73,7 +73,7 @@ public: StorePath computeStorePath(Store & store) const; - // Convience functions for common attributes. + // Convenience functions for common attributes. std::string getType() const; std::optional getNarHash() const; std::optional getRef() const; From 3655875483306fa893ec2b01295151819a00ccaa Mon Sep 17 00:00:00 2001 From: Maximilian Bosch Date: Sun, 27 Sep 2020 22:35:03 +0200 Subject: [PATCH 38/57] doc/manual: update hacking docs (#4078) * By default, build artifacts should be installed into `outputs/` rather than `inst/`[1]. * Add instructions on how to run unit-tests. 
[1] 733d2e9402807e54d503c3113e854bfddb3d44e0 --- doc/manual/src/hacking.md | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/doc/manual/src/hacking.md b/doc/manual/src/hacking.md index 5bd884ce8..2a1e55e5b 100644 --- a/doc/manual/src/hacking.md +++ b/doc/manual/src/hacking.md @@ -39,17 +39,17 @@ To build Nix itself in this shell: ```console [nix-shell]$ ./bootstrap.sh -[nix-shell]$ ./configure $configureFlags --prefix=$(pwd)/inst +[nix-shell]$ ./configure $configureFlags --prefix=$(pwd)/outputs/out [nix-shell]$ make -j $NIX_BUILD_CORES ``` -To install it in `$(pwd)/inst` and test it: +To install it in `$(pwd)/outputs` and test it: ```console [nix-shell]$ make install -[nix-shell]$ make installcheck -[nix-shell]$ ./inst/bin/nix --version -nix (Nix) 2.4 +[nix-shell]$ make installcheck -j $NIX_BUILD_CORES +[nix-shell]$ ./outputs/out/bin/nix --version +nix (Nix) 3.0 ``` To run a functional test: @@ -58,6 +58,12 @@ To run a functional test: make tests/test-name-should-auto-complete.sh.test ``` +To run the unit-tests for C++ code: + +``` +make check +``` + If you have a flakes-enabled Nix you can replace: ```console From 095a91f55a89d856e51ff099686e2bbe6fb9a384 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 28 Sep 2020 05:37:07 +0000 Subject: [PATCH 39/57] Bump cachix/install-nix-action from v10 to v11 Bumps [cachix/install-nix-action](https://github.com/cachix/install-nix-action) from v10 to v11. - [Release notes](https://github.com/cachix/install-nix-action/releases) - [Commits](https://github.com/cachix/install-nix-action/compare/v10...95a8068e317b8def9482980abe762f36c77ccc99) Signed-off-by: dependabot[bot] --- .github/workflows/test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 1f504a8ea..2f5c548d9 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -12,7 +12,7 @@ jobs: - uses: actions/checkout@v2 with: fetch-depth: 0 - - uses: cachix/install-nix-action@v10 + - uses: cachix/install-nix-action@v11 with: skip_adding_nixpkgs_channel: true #- run: nix flake check From ed66d010659a710646f5f1015c97a58614a713b3 Mon Sep 17 00:00:00 2001 From: Mateusz Piotrowski <0mp@FreeBSD.org> Date: Mon, 28 Sep 2020 15:23:21 +0200 Subject: [PATCH 40/57] Fix tar invocation on FreeBSD tar(1) on FreeBSD does not use standard output or input when the -f flag is not provided. Instead, it defaults to /dev/sa0 on FreeBSD. Make this tar invocation a bit more robust and explicitly tell tar(1) to use standard output. This is one of the issues discovered while porting Nix to FreeBSD. 
It has been tested and committed locally to FreeBSD ports: https://svnweb.freebsd.org/ports/head/sysutils/nix/Makefile?revision=550026&view=markup#l108 --- tests/tarball.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/tarball.sh b/tests/tarball.sh index 88a1a07a0..fe65a22e4 100644 --- a/tests/tarball.sh +++ b/tests/tarball.sh @@ -17,7 +17,7 @@ test_tarball() { local compressor="$2" tarball=$TEST_ROOT/tarball.tar$ext - (cd $TEST_ROOT && tar c tarball) | $compressor > $tarball + (cd $TEST_ROOT && tar cf - tarball) | $compressor > $tarball nix-env -f file://$tarball -qa --out-path | grep -q dependencies From 6c31297d80b71ab9c2c1084fae22f726f7d89daa Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 28 Sep 2020 11:32:58 -0400 Subject: [PATCH 41/57] Update src/libstore/binary-cache-store.cc Co-authored-by: Eelco Dolstra --- src/libstore/binary-cache-store.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc index 59d02cab5..f6224d6a0 100644 --- a/src/libstore/binary-cache-store.cc +++ b/src/libstore/binary-cache-store.cc @@ -307,7 +307,7 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource return; } - (void) addToStoreCommon(narSource, repair, checkSigs, {[&](HashResult nar) { + addToStoreCommon(narSource, repair, checkSigs, {[&](HashResult nar) { /* FIXME reinstate these, once we can correctly do hash modulo sink as needed. We need to throw here in case we uploaded a corrupted store path. */ // assert(info.narHash == nar.first); From 80e335bb5837d7bfbf1f14d4f3d39525013c4f4d Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 28 Sep 2020 15:43:56 +0000 Subject: [PATCH 42/57] Use `drvPath2` and give it a better name --- src/libstore/build.cc | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 0a6f2ccb5..0499273a4 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -4315,19 +4315,19 @@ void DerivationGoal::registerOutputs() but it's fine to do in all cases. */ bool isCaFloating = drv->type() == DerivationType::CAFloating; - auto drvPath2 = drvPath; + auto drvPathResolved = drvPath; if (!useDerivation && isCaFloating) { /* Once a floating CA derivations reaches this point, it must already be resolved, so we don't bother trying to downcast drv to get would would just be an empty inputDrvs field. 
*/ Derivation drv2 { *drv }; - drvPath2 = writeDerivation(worker.store, drv2); + drvPathResolved = writeDerivation(worker.store, drv2); } if (useDerivation || isCaFloating) for (auto & [outputName, newInfo] : infos) - worker.store.linkDeriverToPath(drvPath, outputName, newInfo.path); + worker.store.linkDeriverToPath(drvPathResolved, outputName, newInfo.path); } From c89fa3f644eec0652e97c11d9246f9580e5929fe Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Domen=20Ko=C5=BEar?= Date: Mon, 28 Sep 2020 21:08:14 +0300 Subject: [PATCH 43/57] Update .github/workflows/test.yml --- .github/workflows/test.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 2f5c548d9..adc56e223 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -13,7 +13,6 @@ jobs: with: fetch-depth: 0 - uses: cachix/install-nix-action@v11 - with: skip_adding_nixpkgs_channel: true #- run: nix flake check - run: nix-build -A checks.$(if [[ `uname` = Linux ]]; then echo x86_64-linux; else echo x86_64-darwin; fi) From f1428484be248c7ba3f96379d8bf256b8df1a706 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Domen=20Ko=C5=BEar?= Date: Mon, 28 Sep 2020 21:08:24 +0300 Subject: [PATCH 44/57] Update .github/workflows/test.yml --- .github/workflows/test.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index adc56e223..829111b67 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -13,6 +13,5 @@ jobs: with: fetch-depth: 0 - uses: cachix/install-nix-action@v11 - skip_adding_nixpkgs_channel: true #- run: nix flake check - run: nix-build -A checks.$(if [[ `uname` = Linux ]]; then echo x86_64-linux; else echo x86_64-darwin; fi) From 00135e13f49e9b20e3ef03f2516e7cc277c40ca9 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 28 Sep 2020 18:19:10 +0000 Subject: [PATCH 45/57] Clarify comment a bit --- src/libstore/derivations.hh | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/libstore/derivations.hh b/src/libstore/derivations.hh index 74601134e..d48266774 100644 --- a/src/libstore/derivations.hh +++ b/src/libstore/derivations.hh @@ -128,11 +128,12 @@ struct Derivation : BasicDerivation std::string unparse(const Store & store, bool maskOutputs, std::map * actualInputs = nullptr) const; - /* Return the underlying basic derivation but with + /* Return the underlying basic derivation but with these changes: - 1. input drv outputs moved to input sources. + 1. Input drvs are emptied, but the outputs of them that were used are + added directly to input sources. - 2. placeholders replaced with realized input store paths. */ + 2. Input placeholders are replaced with realized input store paths. 
*/ std::optional tryResolve(Store & store); Derivation() = default; From de86abbf3f9f0d18f7506b1d50072597786332ea Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 29 Sep 2020 12:55:06 +0200 Subject: [PATCH 46/57] Cleanup --- src/libfetchers/github.cc | 14 ++++---------- 1 file changed, 4 insertions(+), 10 deletions(-) diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc index 7bf155c69..3734c4278 100644 --- a/src/libfetchers/github.cc +++ b/src/libfetchers/github.cc @@ -14,12 +14,6 @@ struct DownloadUrl { std::string url; std::optional> access_token_header; - - DownloadUrl(const std::string & url) - : url(url) { } - - DownloadUrl(const std::string & url, const std::pair & access_token_header) - : url(url), access_token_header(access_token_header) { } }; // A github or gitlab host @@ -239,9 +233,9 @@ struct GitHubInputScheme : GitArchiveInputScheme std::string accessToken = settings.githubAccessToken.get(); if (accessToken != "") { auto auth_header = accessHeaderFromToken(accessToken); - return DownloadUrl(url, auth_header); + return DownloadUrl { url, auth_header }; } else { - return DownloadUrl(url); + return DownloadUrl { url }; } } @@ -294,9 +288,9 @@ struct GitLabInputScheme : GitArchiveInputScheme std::string accessToken = settings.gitlabAccessToken.get(); if (accessToken != "") { auto auth_header = accessHeaderFromToken(accessToken); - return DownloadUrl(url, auth_header); + return DownloadUrl { url, auth_header }; } else { - return DownloadUrl(url); + return DownloadUrl { url }; } } From 5999978a053b3ec16f448c40b54a1a62ceb82c90 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 29 Sep 2020 13:05:19 +0200 Subject: [PATCH 47/57] Make Headers an optional argument --- src/libexpr/common-eval-args.cc | 2 +- src/libexpr/parser.y | 2 +- src/libexpr/primops/fetchTree.cc | 4 ++-- src/libfetchers/fetchers.hh | 8 ++++---- src/libfetchers/github.cc | 6 +++--- src/libfetchers/registry.cc | 2 +- src/libfetchers/tarball.cc | 15 ++++++++------- src/libstore/filetransfer.hh | 3 --- src/nix-channel/nix-channel.cc | 6 +++--- 9 files changed, 23 insertions(+), 25 deletions(-) diff --git a/src/libexpr/common-eval-args.cc b/src/libexpr/common-eval-args.cc index d71aa22f1..10c1a6975 100644 --- a/src/libexpr/common-eval-args.cc +++ b/src/libexpr/common-eval-args.cc @@ -76,7 +76,7 @@ Path lookupFileArg(EvalState & state, string s) if (isUri(s)) { return state.store->toRealPath( fetchers::downloadTarball( - state.store, resolveUri(s), Headers {}, "source", false).first.storePath); + state.store, resolveUri(s), "source", false).first.storePath); } else if (s.size() > 2 && s.at(0) == '<' && s.at(s.size() - 1) == '>') { Path p = s.substr(1, s.size() - 2); return state.findFile(p); diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index e879280c0..a4c84c526 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -718,7 +718,7 @@ std::pair EvalState::resolveSearchPathElem(const SearchPathEl if (isUri(elem.second)) { try { res = { true, store->toRealPath(fetchers::downloadTarball( - store, resolveUri(elem.second), Headers {}, "source", false).first.storePath) }; + store, resolveUri(elem.second), "source", false).first.storePath) }; } catch (FileTransferError & e) { logWarning({ .name = "Entry download", diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc index 3001957b4..06e8304b8 100644 --- a/src/libexpr/primops/fetchTree.cc +++ b/src/libexpr/primops/fetchTree.cc @@ -201,8 +201,8 @@ static void fetch(EvalState & state, const Pos & pos, Value * 
* args, Value & v, auto storePath = unpack - ? fetchers::downloadTarball(state.store, *url, Headers {}, name, (bool) expectedHash).first.storePath - : fetchers::downloadFile(state.store, *url, Headers{}, name, (bool) expectedHash).storePath; + ? fetchers::downloadTarball(state.store, *url, name, (bool) expectedHash).first.storePath + : fetchers::downloadFile(state.store, *url, name, (bool) expectedHash).storePath; auto path = state.store->toRealPath(storePath); diff --git a/src/libfetchers/fetchers.hh b/src/libfetchers/fetchers.hh index 0adc2c9f5..36d44f6e1 100644 --- a/src/libfetchers/fetchers.hh +++ b/src/libfetchers/fetchers.hh @@ -118,15 +118,15 @@ struct DownloadFileResult DownloadFileResult downloadFile( ref store, const std::string & url, - const Headers & headers, const std::string & name, - bool immutable); + bool immutable, + const Headers & headers = {}); std::pair downloadTarball( ref store, const std::string & url, - const Headers & headers, const std::string & name, - bool immutable); + bool immutable, + const Headers & headers = {}); } diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc index 3734c4278..ec99481e1 100644 --- a/src/libfetchers/github.cc +++ b/src/libfetchers/github.cc @@ -175,7 +175,7 @@ struct GitArchiveInputScheme : InputScheme headers.push_back(*url.access_token_header); } - auto [tree, lastModified] = downloadTarball(store, url.url, headers, "source", true); + auto [tree, lastModified] = downloadTarball(store, url.url, "source", true, headers); input.attrs.insert_or_assign("lastModified", lastModified); @@ -215,7 +215,7 @@ struct GitHubInputScheme : GitArchiveInputScheme auto json = nlohmann::json::parse( readFile( store->toRealPath( - downloadFile(store, url, headers, "source", false).storePath))); + downloadFile(store, url, "source", false, headers).storePath))); auto rev = Hash::parseAny(std::string { json["sha"] }, htSHA1); debug("HEAD revision for '%s' is %s", url, rev.gitRev()); return rev; @@ -271,7 +271,7 @@ struct GitLabInputScheme : GitArchiveInputScheme auto json = nlohmann::json::parse( readFile( store->toRealPath( - downloadFile(store, url, headers, "source", false).storePath))); + downloadFile(store, url, "source", false, headers).storePath))); auto rev = Hash::parseAny(std::string(json[0]["id"]), htSHA1); debug("HEAD revision for '%s' is %s", url, rev.gitRev()); return rev; diff --git a/src/libfetchers/registry.cc b/src/libfetchers/registry.cc index 551e7684a..4367ee810 100644 --- a/src/libfetchers/registry.cc +++ b/src/libfetchers/registry.cc @@ -145,7 +145,7 @@ static std::shared_ptr getGlobalRegistry(ref store) auto path = settings.flakeRegistry.get(); if (!hasPrefix(path, "/")) { - auto storePath = downloadFile(store, path, Headers {}, "flake-registry.json", false).storePath; + auto storePath = downloadFile(store, path, "flake-registry.json", false).storePath; if (auto store2 = store.dynamic_pointer_cast()) store2->addPermRoot(storePath, getCacheDir() + "/nix/flake-registry.json"); path = store->toRealPath(storePath); diff --git a/src/libfetchers/tarball.cc b/src/libfetchers/tarball.cc index cf6d6e3d2..ca49482a9 100644 --- a/src/libfetchers/tarball.cc +++ b/src/libfetchers/tarball.cc @@ -12,9 +12,9 @@ namespace nix::fetchers { DownloadFileResult downloadFile( ref store, const std::string & url, - const Headers & headers, const std::string & name, - bool immutable) + bool immutable, + const Headers & headers) { // FIXME: check store @@ -38,7 +38,8 @@ DownloadFileResult downloadFile( if (cached && !cached->expired) return 
useCached(); - FileTransferRequest request(url, headers); + FileTransferRequest request(url); + request.headers = headers; if (cached) request.expectedETag = getStrAttr(cached->infoAttrs, "etag"); FileTransferResult res; @@ -112,9 +113,9 @@ DownloadFileResult downloadFile( std::pair downloadTarball( ref store, const std::string & url, - const Headers & headers, const std::string & name, - bool immutable) + bool immutable, + const Headers & headers) { Attrs inAttrs({ {"type", "tarball"}, @@ -130,7 +131,7 @@ std::pair downloadTarball( getIntAttr(cached->infoAttrs, "lastModified") }; - auto res = downloadFile(store, url, headers, name, immutable); + auto res = downloadFile(store, url, name, immutable, headers); std::optional unpackedStorePath; time_t lastModified; @@ -225,7 +226,7 @@ struct TarballInputScheme : InputScheme std::pair fetch(ref store, const Input & input) override { - auto tree = downloadTarball(store, getStrAttr(input.attrs, "url"), Headers {}, "source", false).first; + auto tree = downloadTarball(store, getStrAttr(input.attrs, "url"), "source", false).first; return {std::move(tree), input}; } }; diff --git a/src/libstore/filetransfer.hh b/src/libstore/filetransfer.hh index 7e302ff39..c89c51a21 100644 --- a/src/libstore/filetransfer.hh +++ b/src/libstore/filetransfer.hh @@ -66,9 +66,6 @@ struct FileTransferRequest FileTransferRequest(const std::string & uri) : uri(uri), parentAct(getCurActivity()) { } - FileTransferRequest(const std::string & uri, Headers headers) - : uri(uri), headers(headers) { } - std::string verb() { return data ? "upload" : "download"; diff --git a/src/nix-channel/nix-channel.cc b/src/nix-channel/nix-channel.cc index 760bbea86..e48f7af9a 100755 --- a/src/nix-channel/nix-channel.cc +++ b/src/nix-channel/nix-channel.cc @@ -94,7 +94,7 @@ static void update(const StringSet & channelNames) // We want to download the url to a file to see if it's a tarball while also checking if we // got redirected in the process, so that we can grab the various parts of a nix channel // definition from a consistent location if the redirect changes mid-download. - auto result = fetchers::downloadFile(store, url, Headers {}, std::string(baseNameOf(url)), false); + auto result = fetchers::downloadFile(store, url, std::string(baseNameOf(url)), false); auto filename = store->toRealPath(result.storePath); url = result.effectiveUrl; @@ -119,9 +119,9 @@ static void update(const StringSet & channelNames) if (!unpacked) { // Download the channel tarball. try { - filename = store->toRealPath(fetchers::downloadFile(store, url + "/nixexprs.tar.xz", Headers {}, "nixexprs.tar.xz", false).storePath); + filename = store->toRealPath(fetchers::downloadFile(store, url + "/nixexprs.tar.xz", "nixexprs.tar.xz", false).storePath); } catch (FileTransferError & e) { - filename = store->toRealPath(fetchers::downloadFile(store, url + "/nixexprs.tar.bz2", Headers {}, "nixexprs.tar.bz2", false).storePath); + filename = store->toRealPath(fetchers::downloadFile(store, url + "/nixexprs.tar.bz2", "nixexprs.tar.bz2", false).storePath); } } From 64e9b3c83b7cf7f3c7348426666ccca2ca395d28 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 29 Sep 2020 23:33:16 +0200 Subject: [PATCH 48/57] nix registry list: Show 'dir' attribute Issue #4050. 
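A rough sketch of what the new `toURLString(extraQuery)` helper amounts to (simplified; the real code merges the attributes into `ParsedURL::query` before serialising): extra attributes such as `dir` are rendered as query parameters so `nix registry list` can display them.

```c++
// Simplified illustration of appending extra query attributes to a URL string.
#include <iostream>
#include <map>
#include <string>

std::string withExtraQuery(
    std::string url, const std::map<std::string, std::string> & extraQuery)
{
    char sep = url.find('?') == std::string::npos ? '?' : '&';
    for (const auto & [name, value] : extraQuery) {
        url += sep + name + "=" + value;
        sep = '&';
    }
    return url;
}

int main()
{
    std::cout << withExtraQuery("github:NixOS/nixpkgs", {{"dir", "lib"}})
              << std::endl; // github:NixOS/nixpkgs?dir=lib
}
```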
--- src/libexpr/flake/flakeref.cc | 6 +++--- src/libfetchers/fetchers.cc | 8 ++++++++ src/libfetchers/fetchers.hh | 2 ++ src/nix/registry.cc | 4 ++-- 4 files changed, 15 insertions(+), 5 deletions(-) diff --git a/src/libexpr/flake/flakeref.cc b/src/libexpr/flake/flakeref.cc index d5c2ffe66..87b202643 100644 --- a/src/libexpr/flake/flakeref.cc +++ b/src/libexpr/flake/flakeref.cc @@ -16,10 +16,10 @@ const static std::string subDirRegex = subDirElemRegex + "(?:/" + subDirElemRege std::string FlakeRef::to_string() const { - auto url = input.toURL(); + std::map extraQuery; if (subdir != "") - url.query.insert_or_assign("dir", subdir); - return url.to_string(); + extraQuery.insert_or_assign("dir", subdir); + return input.toURLString(extraQuery); } fetchers::Attrs FlakeRef::toAttrs() const diff --git a/src/libfetchers/fetchers.cc b/src/libfetchers/fetchers.cc index eaa635595..49851f7bc 100644 --- a/src/libfetchers/fetchers.cc +++ b/src/libfetchers/fetchers.cc @@ -69,6 +69,14 @@ ParsedURL Input::toURL() const return scheme->toURL(*this); } +std::string Input::toURLString(const std::map & extraQuery) const +{ + auto url = toURL(); + for (auto & attr : extraQuery) + url.query.insert(attr); + return url.to_string(); +} + std::string Input::to_string() const { return toURL().to_string(); diff --git a/src/libfetchers/fetchers.hh b/src/libfetchers/fetchers.hh index 36d44f6e1..cc31a31b9 100644 --- a/src/libfetchers/fetchers.hh +++ b/src/libfetchers/fetchers.hh @@ -39,6 +39,8 @@ public: ParsedURL toURL() const; + std::string toURLString(const std::map & extraQuery = {}) const; + std::string to_string() const; Attrs toAttrs() const; diff --git a/src/nix/registry.cc b/src/nix/registry.cc index 367268683..cb11ec195 100644 --- a/src/nix/registry.cc +++ b/src/nix/registry.cc @@ -31,8 +31,8 @@ struct CmdRegistryList : StoreCommand registry->type == Registry::User ? "user " : registry->type == Registry::System ? "system" : "global", - entry.from.to_string(), - entry.to.to_string()); + entry.from.toURLString(), + entry.to.toURLString(attrsToQuery(entry.extraAttrs))); } } } From 5e7838512e2b8de3c8fe271b8beae5ca9e1efaf9 Mon Sep 17 00:00:00 2001 From: Kevin Quick Date: Tue, 29 Sep 2020 16:20:54 -0700 Subject: [PATCH 49/57] Remove github-access-token in favor of access-token. --- src/libfetchers/github.cc | 10 ---------- src/libstore/globals.hh | 3 --- 2 files changed, 13 deletions(-) diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc index 142b8b87c..8286edf75 100644 --- a/src/libfetchers/github.cc +++ b/src/libfetchers/github.cc @@ -147,17 +147,7 @@ struct GitArchiveInputScheme : InputScheme auto tokens = settings.accessTokens.get(); auto pat = tokens.find(host); if (pat == tokens.end()) - { - if ("github.com" == host) - { - auto oldcfg = settings.githubAccessToken.get(); - if (!oldcfg.empty()) { - warn("using deprecated 'github-access-token' config value; please use 'access-tokens' instead"); - return oldcfg; - } - } return std::nullopt; - } return pat->second; } diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index 3b8ccadf3..0f0c0fe6f 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -859,9 +859,6 @@ public: are loaded as plugins (non-recursively). 
)"}; - Setting githubAccessToken{this, "", "github-access-token", - "GitHub access token to get access to GitHub data through the GitHub API for `github:<..>` flakes (deprecated, please use 'access-tokens' instead)."}; - Setting accessTokens{this, {}, "access-tokens", R"( Access tokens used to access protected GitHub, GitLab, or From 45a0ed82f089158a79c8c25ef844c55e4a74fc35 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 30 Sep 2020 00:39:06 +0000 Subject: [PATCH 50/57] Revert "Use template structs instead of phantoms" This reverts commit 9ab07e99f527d1fa3adfa02839da477a1528d64b. --- src/libstore/build.cc | 4 +- src/libstore/daemon.cc | 38 ++++---- src/libstore/derivations.cc | 4 +- src/libstore/export-import.cc | 4 +- src/libstore/legacy-ssh-store.cc | 14 +-- src/libstore/remote-store.cc | 70 +++++++------- src/libstore/worker-protocol.hh | 151 ++++++++++++++++--------------- src/nix-store/nix-store.cc | 16 ++-- 8 files changed, 153 insertions(+), 148 deletions(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 56d454b6b..928858203 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -1976,7 +1976,7 @@ HookReply DerivationGoal::tryBuildHook() /* Tell the hook all the inputs that have to be copied to the remote system. */ - WorkerProto::write(worker.store, hook->sink, inputPaths); + nix::worker_proto::write(worker.store, hook->sink, inputPaths); /* Tell the hooks the missing outputs that have to be copied back from the remote system. */ @@ -1987,7 +1987,7 @@ HookReply DerivationGoal::tryBuildHook() if (buildMode != bmCheck && status.known->isValid()) continue; missingPaths.insert(status.known->path); } - WorkerProto::write(worker.store, hook->sink, missingPaths); + nix::worker_proto::write(worker.store, hook->sink, missingPaths); } hook->sink = FdSink(); diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc index 98fd2048d..72203d1b2 100644 --- a/src/libstore/daemon.cc +++ b/src/libstore/daemon.cc @@ -247,7 +247,7 @@ static void writeValidPathInfo( { to << (info->deriver ? 
store->printStorePath(*info->deriver) : "") << info->narHash.to_string(Base16, false); - WorkerProto::write(*store, to, info->references); + nix::worker_proto::write(*store, to, info->references); to << info->registrationTime << info->narSize; if (GET_PROTOCOL_MINOR(clientVersion) >= 16) { to << info->ultimate @@ -272,11 +272,11 @@ static void performOp(TunnelLogger * logger, ref store, } case wopQueryValidPaths: { - auto paths = WorkerProto::read(*store, from); + auto paths = nix::worker_proto::read(*store, from, Phantom {}); logger->startWork(); auto res = store->queryValidPaths(paths); logger->stopWork(); - WorkerProto::write(*store, to, res); + nix::worker_proto::write(*store, to, res); break; } @@ -292,11 +292,11 @@ static void performOp(TunnelLogger * logger, ref store, } case wopQuerySubstitutablePaths: { - auto paths = WorkerProto::read(*store, from); + auto paths = nix::worker_proto::read(*store, from, Phantom {}); logger->startWork(); auto res = store->querySubstitutablePaths(paths); logger->stopWork(); - WorkerProto::write(*store, to, res); + nix::worker_proto::write(*store, to, res); break; } @@ -325,7 +325,7 @@ static void performOp(TunnelLogger * logger, ref store, paths = store->queryValidDerivers(path); else paths = store->queryDerivationOutputs(path); logger->stopWork(); - WorkerProto::write(*store, to, paths); + nix::worker_proto::write(*store, to, paths); break; } @@ -343,7 +343,7 @@ static void performOp(TunnelLogger * logger, ref store, logger->startWork(); auto outputs = store->queryPartialDerivationOutputMap(path); logger->stopWork(); - WorkerProto>>::write(*store, to, outputs); + nix::worker_proto::write(*store, to, outputs); break; } @@ -369,7 +369,7 @@ static void performOp(TunnelLogger * logger, ref store, if (GET_PROTOCOL_MINOR(clientVersion) >= 25) { auto name = readString(from); auto camStr = readString(from); - auto refs = WorkerProto::read(*store, from); + auto refs = nix::worker_proto::read(*store, from, Phantom {}); bool repairBool; from >> repairBool; auto repair = RepairFlag{repairBool}; @@ -449,7 +449,7 @@ static void performOp(TunnelLogger * logger, ref store, case wopAddTextToStore: { string suffix = readString(from); string s = readString(from); - auto refs = WorkerProto::read(*store, from); + auto refs = nix::worker_proto::read(*store, from, Phantom {}); logger->startWork(); auto path = store->addTextToStore(suffix, s, refs, NoRepair); logger->stopWork(); @@ -608,7 +608,7 @@ static void performOp(TunnelLogger * logger, ref store, case wopCollectGarbage: { GCOptions options; options.action = (GCOptions::GCAction) readInt(from); - options.pathsToDelete = WorkerProto::read(*store, from); + options.pathsToDelete = nix::worker_proto::read(*store, from, Phantom {}); from >> options.ignoreLiveness >> options.maxFreed; // obsolete fields readInt(from); @@ -677,7 +677,7 @@ static void performOp(TunnelLogger * logger, ref store, else { to << 1 << (i->second.deriver ? 
store->printStorePath(*i->second.deriver) : ""); - WorkerProto::write(*store, to, i->second.references); + nix::worker_proto::write(*store, to, i->second.references); to << i->second.downloadSize << i->second.narSize; } @@ -688,11 +688,11 @@ static void performOp(TunnelLogger * logger, ref store, SubstitutablePathInfos infos; StorePathCAMap pathsMap = {}; if (GET_PROTOCOL_MINOR(clientVersion) < 22) { - auto paths = WorkerProto::read(*store, from); + auto paths = nix::worker_proto::read(*store, from, Phantom {}); for (auto & path : paths) pathsMap.emplace(path, std::nullopt); } else - pathsMap = WorkerProto::read(*store, from); + pathsMap = nix::worker_proto::read(*store, from, Phantom {}); logger->startWork(); store->querySubstitutablePathInfos(pathsMap, infos); logger->stopWork(); @@ -700,7 +700,7 @@ static void performOp(TunnelLogger * logger, ref store, for (auto & i : infos) { to << store->printStorePath(i.first) << (i.second.deriver ? store->printStorePath(*i.second.deriver) : ""); - WorkerProto::write(*store, to, i.second.references); + nix::worker_proto::write(*store, to, i.second.references); to << i.second.downloadSize << i.second.narSize; } break; @@ -710,7 +710,7 @@ static void performOp(TunnelLogger * logger, ref store, logger->startWork(); auto paths = store->queryAllValidPaths(); logger->stopWork(); - WorkerProto::write(*store, to, paths); + nix::worker_proto::write(*store, to, paths); break; } @@ -782,7 +782,7 @@ static void performOp(TunnelLogger * logger, ref store, ValidPathInfo info { path, narHash }; if (deriver != "") info.deriver = store->parseStorePath(deriver); - info.references = WorkerProto::read(*store, from); + info.references = nix::worker_proto::read(*store, from, Phantom {}); from >> info.registrationTime >> info.narSize >> info.ultimate; info.sigs = readStrings(from); info.ca = parseContentAddressOpt(readString(from)); @@ -835,9 +835,9 @@ static void performOp(TunnelLogger * logger, ref store, uint64_t downloadSize, narSize; store->queryMissing(targets, willBuild, willSubstitute, unknown, downloadSize, narSize); logger->stopWork(); - WorkerProto::write(*store, to, willBuild); - WorkerProto::write(*store, to, willSubstitute); - WorkerProto::write(*store, to, unknown); + nix::worker_proto::write(*store, to, willBuild); + nix::worker_proto::write(*store, to, willSubstitute); + nix::worker_proto::write(*store, to, unknown); to << downloadSize << narSize; break; } diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc index faffe01e7..f8e7d773b 100644 --- a/src/libstore/derivations.cc +++ b/src/libstore/derivations.cc @@ -584,7 +584,7 @@ Source & readDerivation(Source & in, const Store & store, BasicDerivation & drv, drv.outputs.emplace(std::move(name), std::move(output)); } - drv.inputSrcs = WorkerProto::read(store, in); + drv.inputSrcs = nix::worker_proto::read(store, in, Phantom {}); in >> drv.platform >> drv.builder; drv.args = readStrings(in); @@ -622,7 +622,7 @@ void writeDerivation(Sink & out, const Store & store, const BasicDerivation & dr }, }, i.second.output); } - WorkerProto::write(store, out, drv.inputSrcs); + nix::worker_proto::write(store, out, drv.inputSrcs); out << drv.platform << drv.builder << drv.args; out << drv.env.size(); for (auto & i : drv.env) diff --git a/src/libstore/export-import.cc b/src/libstore/export-import.cc index b59b34dee..40a6f3c63 100644 --- a/src/libstore/export-import.cc +++ b/src/libstore/export-import.cc @@ -45,7 +45,7 @@ void Store::exportPath(const StorePath & path, Sink & sink) teeSink << exportMagic 
<< printStorePath(path); - WorkerProto::write(*this, teeSink, info->references); + nix::worker_proto::write(*this, teeSink, info->references); teeSink << (info->deriver ? printStorePath(*info->deriver) : "") << 0; @@ -73,7 +73,7 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs) //Activity act(*logger, lvlInfo, format("importing path '%s'") % info.path); - auto references = WorkerProto::read(*this, source); + auto references = nix::worker_proto::read(*this, source, Phantom {}); auto deriver = readString(source); auto narHash = hashString(htSHA256, *saved.s); diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index fdf3f91b9..e056859fd 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -122,7 +122,7 @@ struct LegacySSHStore : public Store, public virtual LegacySSHStoreConfig auto deriver = readString(conn->from); if (deriver != "") info->deriver = parseStorePath(deriver); - info->references = WorkerProto::read(*this, conn->from); + info->references = nix::worker_proto::read(*this, conn->from, Phantom {}); readLongLong(conn->from); // download size info->narSize = readLongLong(conn->from); @@ -156,7 +156,7 @@ struct LegacySSHStore : public Store, public virtual LegacySSHStoreConfig << printStorePath(info.path) << (info.deriver ? printStorePath(*info.deriver) : "") << info.narHash.to_string(Base16, false); - WorkerProto::write(*this, conn->to, info.references); + nix::worker_proto::write(*this, conn->to, info.references); conn->to << info.registrationTime << info.narSize @@ -185,7 +185,7 @@ struct LegacySSHStore : public Store, public virtual LegacySSHStoreConfig conn->to << exportMagic << printStorePath(info.path); - WorkerProto::write(*this, conn->to, info.references); + nix::worker_proto::write(*this, conn->to, info.references); conn->to << (info.deriver ? 
printStorePath(*info.deriver) : "") << 0 @@ -301,10 +301,10 @@ public: conn->to << cmdQueryClosure << includeOutputs; - WorkerProto::write(*this, conn->to, paths); + nix::worker_proto::write(*this, conn->to, paths); conn->to.flush(); - for (auto & i : WorkerProto::read(*this, conn->from)) + for (auto & i : nix::worker_proto::read(*this, conn->from, Phantom {})) out.insert(i); } @@ -317,10 +317,10 @@ public: << cmdQueryValidPaths << false // lock << maybeSubstitute; - WorkerProto::write(*this, conn->to, paths); + nix::worker_proto::write(*this, conn->to, paths); conn->to.flush(); - return WorkerProto::read(*this, conn->from); + return nix::worker_proto::read(*this, conn->from, Phantom {}); } void connect() override diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index 273466137..43853db4e 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -24,61 +24,65 @@ namespace nix { -std::string WorkerProto::read(const Store & store, Source & from) +namespace worker_proto { + +std::string read(const Store & store, Source & from, Phantom _) { return readString(from); } -void WorkerProto::write(const Store & store, Sink & out, const std::string & str) +void write(const Store & store, Sink & out, const std::string & str) { out << str; } -StorePath WorkerProto::read(const Store & store, Source & from) +StorePath read(const Store & store, Source & from, Phantom _) { return store.parseStorePath(readString(from)); } -void WorkerProto::write(const Store & store, Sink & out, const StorePath & storePath) +void write(const Store & store, Sink & out, const StorePath & storePath) { out << store.printStorePath(storePath); } -ContentAddress WorkerProto::read(const Store & store, Source & from) +ContentAddress read(const Store & store, Source & from, Phantom _) { return parseContentAddress(readString(from)); } -void WorkerProto::write(const Store & store, Sink & out, const ContentAddress & ca) +void write(const Store & store, Sink & out, const ContentAddress & ca) { out << renderContentAddress(ca); } -std::optional WorkerProto>::read(const Store & store, Source & from) +std::optional read(const Store & store, Source & from, Phantom> _) { auto s = readString(from); return s == "" ? std::optional {} : store.parseStorePath(s); } -void WorkerProto>::write(const Store & store, Sink & out, const std::optional & storePathOpt) +void write(const Store & store, Sink & out, const std::optional & storePathOpt) { out << (storePathOpt ? store.printStorePath(*storePathOpt) : ""); } -std::optional WorkerProto>::read(const Store & store, Source & from) +std::optional read(const Store & store, Source & from, Phantom> _) { return parseContentAddressOpt(readString(from)); } -void WorkerProto>::write(const Store & store, Sink & out, const std::optional & caOpt) +void write(const Store & store, Sink & out, const std::optional & caOpt) { out << (caOpt ? 
renderContentAddress(*caOpt) : ""); } +} + /* TODO: Separate these store impls into different files, give them better names */ RemoteStore::RemoteStore(const Params & params) @@ -325,9 +329,9 @@ StorePathSet RemoteStore::queryValidPaths(const StorePathSet & paths, Substitute return res; } else { conn->to << wopQueryValidPaths; - WorkerProto::write(*this, conn->to, paths); + nix::worker_proto::write(*this, conn->to, paths); conn.processStderr(); - return WorkerProto::read(*this, conn->from); + return worker_proto::read(*this, conn->from, Phantom {}); } } @@ -337,7 +341,7 @@ StorePathSet RemoteStore::queryAllValidPaths() auto conn(getConnection()); conn->to << wopQueryAllValidPaths; conn.processStderr(); - return WorkerProto::read(*this, conn->from); + return nix::worker_proto::read(*this, conn->from, Phantom {}); } @@ -354,9 +358,9 @@ StorePathSet RemoteStore::querySubstitutablePaths(const StorePathSet & paths) return res; } else { conn->to << wopQuerySubstitutablePaths; - WorkerProto::write(*this, conn->to, paths); + nix::worker_proto::write(*this, conn->to, paths); conn.processStderr(); - return WorkerProto::read(*this, conn->from); + return worker_proto::read(*this, conn->from, Phantom {}); } } @@ -378,7 +382,7 @@ void RemoteStore::querySubstitutablePathInfos(const StorePathCAMap & pathsMap, S auto deriver = readString(conn->from); if (deriver != "") info.deriver = parseStorePath(deriver); - info.references = WorkerProto::read(*this, conn->from); + info.references = worker_proto::read(*this, conn->from, Phantom {}); info.downloadSize = readLongLong(conn->from); info.narSize = readLongLong(conn->from); infos.insert_or_assign(i.first, std::move(info)); @@ -391,9 +395,9 @@ void RemoteStore::querySubstitutablePathInfos(const StorePathCAMap & pathsMap, S StorePathSet paths; for (auto & path : pathsMap) paths.insert(path.first); - WorkerProto::write(*this, conn->to, paths); + worker_proto::write(*this, conn->to, paths); } else - WorkerProto::write(*this, conn->to, pathsMap); + worker_proto::write(*this, conn->to, pathsMap); conn.processStderr(); size_t count = readNum(conn->from); for (size_t n = 0; n < count; n++) { @@ -401,7 +405,7 @@ void RemoteStore::querySubstitutablePathInfos(const StorePathCAMap & pathsMap, S auto deriver = readString(conn->from); if (deriver != "") info.deriver = parseStorePath(deriver); - info.references = WorkerProto::read(*this, conn->from); + info.references = worker_proto::read(*this, conn->from, Phantom {}); info.downloadSize = readLongLong(conn->from); info.narSize = readLongLong(conn->from); } @@ -416,7 +420,7 @@ ref RemoteStore::readValidPathInfo(ConnectionHandle & conn, auto narHash = Hash::parseAny(readString(conn->from), htSHA256); auto info = make_ref(path, narHash); if (deriver != "") info->deriver = parseStorePath(deriver); - info->references = WorkerProto::read(*this, conn->from); + info->references = worker_proto::read(*this, conn->from, Phantom {}); conn->from >> info->registrationTime >> info->narSize; if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 16) { conn->from >> info->ultimate; @@ -460,7 +464,7 @@ void RemoteStore::queryReferrers(const StorePath & path, auto conn(getConnection()); conn->to << wopQueryReferrers << printStorePath(path); conn.processStderr(); - for (auto & i : WorkerProto::read(*this, conn->from)) + for (auto & i : worker_proto::read(*this, conn->from, Phantom {})) referrers.insert(i); } @@ -470,7 +474,7 @@ StorePathSet RemoteStore::queryValidDerivers(const StorePath & path) auto conn(getConnection()); conn->to << 
wopQueryValidDerivers << printStorePath(path); conn.processStderr(); - return WorkerProto::read(*this, conn->from); + return worker_proto::read(*this, conn->from, Phantom {}); } @@ -482,7 +486,7 @@ StorePathSet RemoteStore::queryDerivationOutputs(const StorePath & path) } conn->to << wopQueryDerivationOutputs << printStorePath(path); conn.processStderr(); - return WorkerProto::read(*this, conn->from); + return worker_proto::read(*this, conn->from, Phantom {}); } @@ -492,7 +496,7 @@ std::map> RemoteStore::queryPartialDerivat auto conn(getConnection()); conn->to << wopQueryDerivationOutputMap << printStorePath(path); conn.processStderr(); - return WorkerProto>>::read(*this, conn->from); + return worker_proto::read(*this, conn->from, Phantom>> {}); } else { // Fallback for old daemon versions. // For floating-CA derivations (and their co-dependencies) this is an @@ -537,7 +541,7 @@ ref RemoteStore::addCAToStore( << wopAddToStore << name << renderContentAddressMethod(caMethod); - WorkerProto::write(*this, conn->to, references); + worker_proto::write(*this, conn->to, references); conn->to << repair; conn.withFramedSink([&](Sink & sink) { @@ -554,7 +558,7 @@ ref RemoteStore::addCAToStore( [&](TextHashMethod thm) -> void { std::string s = dump.drain(); conn->to << wopAddTextToStore << name << s; - WorkerProto::write(*this, conn->to, references); + worker_proto::write(*this, conn->to, references); conn.processStderr(); }, [&](FixedOutputHashMethod fohm) -> void { @@ -623,7 +627,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source, sink << exportMagic << printStorePath(info.path); - WorkerProto::write(*this, sink, info.references); + worker_proto::write(*this, sink, info.references); sink << (info.deriver ? printStorePath(*info.deriver) : "") << 0 // == no legacy signature @@ -633,7 +637,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source, conn.processStderr(0, source2.get()); - auto importedPaths = WorkerProto::read(*this, conn->from); + auto importedPaths = worker_proto::read(*this, conn->from, Phantom {}); assert(importedPaths.size() <= 1); } @@ -642,7 +646,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source, << printStorePath(info.path) << (info.deriver ? 
printStorePath(*info.deriver) : "") << info.narHash.to_string(Base16, false); - WorkerProto::write(*this, conn->to, info.references); + worker_proto::write(*this, conn->to, info.references); conn->to << info.registrationTime << info.narSize << info.ultimate << info.sigs << renderContentAddress(info.ca) << repair << !checkSigs; @@ -764,7 +768,7 @@ void RemoteStore::collectGarbage(const GCOptions & options, GCResults & results) conn->to << wopCollectGarbage << options.action; - WorkerProto::write(*this, conn->to, options.pathsToDelete); + worker_proto::write(*this, conn->to, options.pathsToDelete); conn->to << options.ignoreLiveness << options.maxFreed /* removed options */ @@ -826,9 +830,9 @@ void RemoteStore::queryMissing(const std::vector & targets ss.push_back(p.to_string(*this)); conn->to << ss; conn.processStderr(); - willBuild = WorkerProto::read(*this, conn->from); - willSubstitute = WorkerProto::read(*this, conn->from); - unknown = WorkerProto::read(*this, conn->from); + willBuild = worker_proto::read(*this, conn->from, Phantom {}); + willSubstitute = worker_proto::read(*this, conn->from, Phantom {}); + unknown = worker_proto::read(*this, conn->from, Phantom {}); conn->from >> downloadSize >> narSize; return; } diff --git a/src/libstore/worker-protocol.hh b/src/libstore/worker-protocol.hh index bb87cf3ec..fd6c2b2cf 100644 --- a/src/libstore/worker-protocol.hh +++ b/src/libstore/worker-protocol.hh @@ -66,96 +66,95 @@ typedef enum { class Store; struct Source; +/* To guide overloading */ template -struct WorkerProto { - static T read(const Store & store, Source & from); - static void write(const Store & store, Sink & out, const T & t); -}; +struct Phantom {}; -#define MAKE_WORKER_PROTO(T) \ - template<> \ - struct WorkerProto< T > { \ - static T read(const Store & store, Source & from); \ - static void write(const Store & store, Sink & out, const T & t); \ - } -MAKE_WORKER_PROTO(std::string); -MAKE_WORKER_PROTO(StorePath); -MAKE_WORKER_PROTO(ContentAddress); +namespace worker_proto { +/* FIXME maybe move more stuff inside here */ + +#define MAKE_WORKER_PROTO(TEMPLATE, T) \ + TEMPLATE T read(const Store & store, Source & from, Phantom< T > _); \ + TEMPLATE void write(const Store & store, Sink & out, const T & str) + +MAKE_WORKER_PROTO(, std::string); +MAKE_WORKER_PROTO(, StorePath); +MAKE_WORKER_PROTO(, ContentAddress); + +MAKE_WORKER_PROTO(template, std::set); +MAKE_WORKER_PROTO(template, std::optional); + +#define X_ template +#define Y_ std::map +MAKE_WORKER_PROTO(X_, Y_); +#undef X_ +#undef Y_ template -struct WorkerProto> { - - static std::set read(const Store & store, Source & from) - { - std::set resSet; - auto size = readNum(from); - while (size--) { - resSet.insert(WorkerProto::read(store, from)); - } - return resSet; +std::set read(const Store & store, Source & from, Phantom> _) +{ + std::set resSet; + auto size = readNum(from); + while (size--) { + resSet.insert(read(store, from, Phantom {})); } + return resSet; +} - static void write(const Store & store, Sink & out, const std::set & resSet) - { - out << resSet.size(); - for (auto & key : resSet) { - WorkerProto::write(store, out, key); - } +template +void write(const Store & store, Sink & out, const std::set & resSet) +{ + out << resSet.size(); + for (auto & key : resSet) { + write(store, out, key); } - -}; +} template -struct WorkerProto> { - - static std::map read(const Store & store, Source & from) - { - std::map resMap; - auto size = readNum(from); - while (size--) { - auto k = WorkerProto::read(store, from); - auto v = 
WorkerProto::read(store, from); - resMap.insert_or_assign(std::move(k), std::move(v)); - } - return resMap; +std::map read(const Store & store, Source & from, Phantom> _) +{ + std::map resMap; + auto size = readNum(from); + while (size--) { + auto k = read(store, from, Phantom {}); + auto v = read(store, from, Phantom {}); + resMap.insert_or_assign(std::move(k), std::move(v)); } + return resMap; +} - static void write(const Store & store, Sink & out, const std::map & resMap) - { - out << resMap.size(); - for (auto & i : resMap) { - WorkerProto::write(store, out, i.first); - WorkerProto::write(store, out, i.second); - } +template +void write(const Store & store, Sink & out, const std::map & resMap) +{ + out << resMap.size(); + for (auto & i : resMap) { + write(store, out, i.first); + write(store, out, i.second); } - -}; +} template -struct WorkerProto> { - - static std::optional read(const Store & store, Source & from) - { - auto tag = readNum(from); - switch (tag) { - case 0: - return std::nullopt; - case 1: - return WorkerProto::read(store, from); - default: - throw Error("got an invalid tag bit for std::optional: %#04x", (size_t)tag); - } +std::optional read(const Store & store, Source & from, Phantom> _) +{ + auto tag = readNum(from); + switch (tag) { + case 0: + return std::nullopt; + case 1: + return read(store, from, Phantom {}); + default: + throw Error("got an invalid tag bit for std::optional: %#04x", (size_t)tag); } +} - static void write(const Store & store, Sink & out, const std::optional & optVal) - { - out << (uint64_t) (optVal ? 1 : 0); - if (optVal) - WorkerProto::write(store, out, *optVal); - } - -}; +template +void write(const Store & store, Sink & out, const std::optional & optVal) +{ + out << (uint64_t) (optVal ? 1 : 0); + if (optVal) + nix::worker_proto::write(store, out, *optVal); +} /* Specialization which uses and empty string for the empty case, taking advantage of the fact these types always serialize to non-empty strings. @@ -163,7 +162,9 @@ struct WorkerProto> { std::optional, where <= is the compatability partial order, T is one of the types below. */ -MAKE_WORKER_PROTO(std::optional); -MAKE_WORKER_PROTO(std::optional); +MAKE_WORKER_PROTO(, std::optional); +MAKE_WORKER_PROTO(, std::optional); + +} } diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index 141dab478..4dcdebe2f 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -822,7 +822,7 @@ static void opServe(Strings opFlags, Strings opArgs) case cmdQueryValidPaths: { bool lock = readInt(in); bool substitute = readInt(in); - auto paths = WorkerProto::read(*store, in); + auto paths = nix::worker_proto::read(*store, in, Phantom {}); if (lock && writeAllowed) for (auto & path : paths) store->addTempRoot(path); @@ -852,19 +852,19 @@ static void opServe(Strings opFlags, Strings opArgs) } } - WorkerProto::write(*store, out, store->queryValidPaths(paths)); + nix::worker_proto::write(*store, out, store->queryValidPaths(paths)); break; } case cmdQueryPathInfos: { - auto paths = WorkerProto::read(*store, in); + auto paths = nix::worker_proto::read(*store, in, Phantom {}); // !!! Maybe we want a queryPathInfos? for (auto & i : paths) { try { auto info = store->queryPathInfo(i); out << store->printStorePath(info->path) << (info->deriver ? store->printStorePath(*info->deriver) : ""); - WorkerProto::write(*store, out, info->references); + nix::worker_proto::write(*store, out, info->references); // !!! Maybe we want compression? 
out << info->narSize // downloadSize << info->narSize; @@ -892,7 +892,7 @@ static void opServe(Strings opFlags, Strings opArgs) case cmdExportPaths: { readInt(in); // obsolete - store->exportPaths(WorkerProto::read(*store, in), out); + store->exportPaths(nix::worker_proto::read(*store, in, Phantom {}), out); break; } @@ -941,9 +941,9 @@ static void opServe(Strings opFlags, Strings opArgs) case cmdQueryClosure: { bool includeOutputs = readInt(in); StorePathSet closure; - store->computeFSClosure(WorkerProto::read(*store, in), + store->computeFSClosure(nix::worker_proto::read(*store, in, Phantom {}), closure, false, includeOutputs); - WorkerProto::write(*store, out, closure); + nix::worker_proto::write(*store, out, closure); break; } @@ -958,7 +958,7 @@ static void opServe(Strings opFlags, Strings opArgs) }; if (deriver != "") info.deriver = store->parseStorePath(deriver); - info.references = WorkerProto::read(*store, in); + info.references = nix::worker_proto::read(*store, in, Phantom {}); in >> info.registrationTime >> info.narSize >> info.ultimate; info.sigs = readStrings(in); info.ca = parseContentAddressOpt(readString(in)); From b7597016529cebdc3c9432a101c1f8d9227713cc Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 30 Sep 2020 00:41:18 +0000 Subject: [PATCH 51/57] nix::worker_proto -> worker_proto --- src/libstore/build.cc | 4 ++-- src/libstore/daemon.cc | 38 ++++++++++++++++---------------- src/libstore/derivations.cc | 4 ++-- src/libstore/export-import.cc | 4 ++-- src/libstore/legacy-ssh-store.cc | 14 ++++++------ src/libstore/remote-store.cc | 6 ++--- src/libstore/worker-protocol.hh | 2 +- src/nix-store/nix-store.cc | 16 +++++++------- 8 files changed, 44 insertions(+), 44 deletions(-) diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 928858203..c688acd58 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -1976,7 +1976,7 @@ HookReply DerivationGoal::tryBuildHook() /* Tell the hook all the inputs that have to be copied to the remote system. */ - nix::worker_proto::write(worker.store, hook->sink, inputPaths); + worker_proto::write(worker.store, hook->sink, inputPaths); /* Tell the hooks the missing outputs that have to be copied back from the remote system. */ @@ -1987,7 +1987,7 @@ HookReply DerivationGoal::tryBuildHook() if (buildMode != bmCheck && status.known->isValid()) continue; missingPaths.insert(status.known->path); } - nix::worker_proto::write(worker.store, hook->sink, missingPaths); + worker_proto::write(worker.store, hook->sink, missingPaths); } hook->sink = FdSink(); diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc index 72203d1b2..0713c4853 100644 --- a/src/libstore/daemon.cc +++ b/src/libstore/daemon.cc @@ -247,7 +247,7 @@ static void writeValidPathInfo( { to << (info->deriver ? 
store->printStorePath(*info->deriver) : "") << info->narHash.to_string(Base16, false); - nix::worker_proto::write(*store, to, info->references); + worker_proto::write(*store, to, info->references); to << info->registrationTime << info->narSize; if (GET_PROTOCOL_MINOR(clientVersion) >= 16) { to << info->ultimate @@ -272,11 +272,11 @@ static void performOp(TunnelLogger * logger, ref store, } case wopQueryValidPaths: { - auto paths = nix::worker_proto::read(*store, from, Phantom {}); + auto paths = worker_proto::read(*store, from, Phantom {}); logger->startWork(); auto res = store->queryValidPaths(paths); logger->stopWork(); - nix::worker_proto::write(*store, to, res); + worker_proto::write(*store, to, res); break; } @@ -292,11 +292,11 @@ static void performOp(TunnelLogger * logger, ref store, } case wopQuerySubstitutablePaths: { - auto paths = nix::worker_proto::read(*store, from, Phantom {}); + auto paths = worker_proto::read(*store, from, Phantom {}); logger->startWork(); auto res = store->querySubstitutablePaths(paths); logger->stopWork(); - nix::worker_proto::write(*store, to, res); + worker_proto::write(*store, to, res); break; } @@ -325,7 +325,7 @@ static void performOp(TunnelLogger * logger, ref store, paths = store->queryValidDerivers(path); else paths = store->queryDerivationOutputs(path); logger->stopWork(); - nix::worker_proto::write(*store, to, paths); + worker_proto::write(*store, to, paths); break; } @@ -343,7 +343,7 @@ static void performOp(TunnelLogger * logger, ref store, logger->startWork(); auto outputs = store->queryPartialDerivationOutputMap(path); logger->stopWork(); - nix::worker_proto::write(*store, to, outputs); + worker_proto::write(*store, to, outputs); break; } @@ -369,7 +369,7 @@ static void performOp(TunnelLogger * logger, ref store, if (GET_PROTOCOL_MINOR(clientVersion) >= 25) { auto name = readString(from); auto camStr = readString(from); - auto refs = nix::worker_proto::read(*store, from, Phantom {}); + auto refs = worker_proto::read(*store, from, Phantom {}); bool repairBool; from >> repairBool; auto repair = RepairFlag{repairBool}; @@ -449,7 +449,7 @@ static void performOp(TunnelLogger * logger, ref store, case wopAddTextToStore: { string suffix = readString(from); string s = readString(from); - auto refs = nix::worker_proto::read(*store, from, Phantom {}); + auto refs = worker_proto::read(*store, from, Phantom {}); logger->startWork(); auto path = store->addTextToStore(suffix, s, refs, NoRepair); logger->stopWork(); @@ -608,7 +608,7 @@ static void performOp(TunnelLogger * logger, ref store, case wopCollectGarbage: { GCOptions options; options.action = (GCOptions::GCAction) readInt(from); - options.pathsToDelete = nix::worker_proto::read(*store, from, Phantom {}); + options.pathsToDelete = worker_proto::read(*store, from, Phantom {}); from >> options.ignoreLiveness >> options.maxFreed; // obsolete fields readInt(from); @@ -677,7 +677,7 @@ static void performOp(TunnelLogger * logger, ref store, else { to << 1 << (i->second.deriver ? 
store->printStorePath(*i->second.deriver) : ""); - nix::worker_proto::write(*store, to, i->second.references); + worker_proto::write(*store, to, i->second.references); to << i->second.downloadSize << i->second.narSize; } @@ -688,11 +688,11 @@ static void performOp(TunnelLogger * logger, ref store, SubstitutablePathInfos infos; StorePathCAMap pathsMap = {}; if (GET_PROTOCOL_MINOR(clientVersion) < 22) { - auto paths = nix::worker_proto::read(*store, from, Phantom {}); + auto paths = worker_proto::read(*store, from, Phantom {}); for (auto & path : paths) pathsMap.emplace(path, std::nullopt); } else - pathsMap = nix::worker_proto::read(*store, from, Phantom {}); + pathsMap = worker_proto::read(*store, from, Phantom {}); logger->startWork(); store->querySubstitutablePathInfos(pathsMap, infos); logger->stopWork(); @@ -700,7 +700,7 @@ static void performOp(TunnelLogger * logger, ref store, for (auto & i : infos) { to << store->printStorePath(i.first) << (i.second.deriver ? store->printStorePath(*i.second.deriver) : ""); - nix::worker_proto::write(*store, to, i.second.references); + worker_proto::write(*store, to, i.second.references); to << i.second.downloadSize << i.second.narSize; } break; @@ -710,7 +710,7 @@ static void performOp(TunnelLogger * logger, ref store, logger->startWork(); auto paths = store->queryAllValidPaths(); logger->stopWork(); - nix::worker_proto::write(*store, to, paths); + worker_proto::write(*store, to, paths); break; } @@ -782,7 +782,7 @@ static void performOp(TunnelLogger * logger, ref store, ValidPathInfo info { path, narHash }; if (deriver != "") info.deriver = store->parseStorePath(deriver); - info.references = nix::worker_proto::read(*store, from, Phantom {}); + info.references = worker_proto::read(*store, from, Phantom {}); from >> info.registrationTime >> info.narSize >> info.ultimate; info.sigs = readStrings(from); info.ca = parseContentAddressOpt(readString(from)); @@ -835,9 +835,9 @@ static void performOp(TunnelLogger * logger, ref store, uint64_t downloadSize, narSize; store->queryMissing(targets, willBuild, willSubstitute, unknown, downloadSize, narSize); logger->stopWork(); - nix::worker_proto::write(*store, to, willBuild); - nix::worker_proto::write(*store, to, willSubstitute); - nix::worker_proto::write(*store, to, unknown); + worker_proto::write(*store, to, willBuild); + worker_proto::write(*store, to, willSubstitute); + worker_proto::write(*store, to, unknown); to << downloadSize << narSize; break; } diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc index f8e7d773b..7ffc94818 100644 --- a/src/libstore/derivations.cc +++ b/src/libstore/derivations.cc @@ -584,7 +584,7 @@ Source & readDerivation(Source & in, const Store & store, BasicDerivation & drv, drv.outputs.emplace(std::move(name), std::move(output)); } - drv.inputSrcs = nix::worker_proto::read(store, in, Phantom {}); + drv.inputSrcs = worker_proto::read(store, in, Phantom {}); in >> drv.platform >> drv.builder; drv.args = readStrings(in); @@ -622,7 +622,7 @@ void writeDerivation(Sink & out, const Store & store, const BasicDerivation & dr }, }, i.second.output); } - nix::worker_proto::write(store, out, drv.inputSrcs); + worker_proto::write(store, out, drv.inputSrcs); out << drv.platform << drv.builder << drv.args; out << drv.env.size(); for (auto & i : drv.env) diff --git a/src/libstore/export-import.cc b/src/libstore/export-import.cc index 40a6f3c63..02c839520 100644 --- a/src/libstore/export-import.cc +++ b/src/libstore/export-import.cc @@ -45,7 +45,7 @@ void 
Store::exportPath(const StorePath & path, Sink & sink) teeSink << exportMagic << printStorePath(path); - nix::worker_proto::write(*this, teeSink, info->references); + worker_proto::write(*this, teeSink, info->references); teeSink << (info->deriver ? printStorePath(*info->deriver) : "") << 0; @@ -73,7 +73,7 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs) //Activity act(*logger, lvlInfo, format("importing path '%s'") % info.path); - auto references = nix::worker_proto::read(*this, source, Phantom {}); + auto references = worker_proto::read(*this, source, Phantom {}); auto deriver = readString(source); auto narHash = hashString(htSHA256, *saved.s); diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index e056859fd..6db44047d 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -122,7 +122,7 @@ struct LegacySSHStore : public Store, public virtual LegacySSHStoreConfig auto deriver = readString(conn->from); if (deriver != "") info->deriver = parseStorePath(deriver); - info->references = nix::worker_proto::read(*this, conn->from, Phantom {}); + info->references = worker_proto::read(*this, conn->from, Phantom {}); readLongLong(conn->from); // download size info->narSize = readLongLong(conn->from); @@ -156,7 +156,7 @@ struct LegacySSHStore : public Store, public virtual LegacySSHStoreConfig << printStorePath(info.path) << (info.deriver ? printStorePath(*info.deriver) : "") << info.narHash.to_string(Base16, false); - nix::worker_proto::write(*this, conn->to, info.references); + worker_proto::write(*this, conn->to, info.references); conn->to << info.registrationTime << info.narSize @@ -185,7 +185,7 @@ struct LegacySSHStore : public Store, public virtual LegacySSHStoreConfig conn->to << exportMagic << printStorePath(info.path); - nix::worker_proto::write(*this, conn->to, info.references); + worker_proto::write(*this, conn->to, info.references); conn->to << (info.deriver ? 
printStorePath(*info.deriver) : "") << 0 @@ -301,10 +301,10 @@ public: conn->to << cmdQueryClosure << includeOutputs; - nix::worker_proto::write(*this, conn->to, paths); + worker_proto::write(*this, conn->to, paths); conn->to.flush(); - for (auto & i : nix::worker_proto::read(*this, conn->from, Phantom {})) + for (auto & i : worker_proto::read(*this, conn->from, Phantom {})) out.insert(i); } @@ -317,10 +317,10 @@ public: << cmdQueryValidPaths << false // lock << maybeSubstitute; - nix::worker_proto::write(*this, conn->to, paths); + worker_proto::write(*this, conn->to, paths); conn->to.flush(); - return nix::worker_proto::read(*this, conn->from, Phantom {}); + return worker_proto::read(*this, conn->from, Phantom {}); } void connect() override diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index 43853db4e..049d4e954 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -329,7 +329,7 @@ StorePathSet RemoteStore::queryValidPaths(const StorePathSet & paths, Substitute return res; } else { conn->to << wopQueryValidPaths; - nix::worker_proto::write(*this, conn->to, paths); + worker_proto::write(*this, conn->to, paths); conn.processStderr(); return worker_proto::read(*this, conn->from, Phantom {}); } @@ -341,7 +341,7 @@ StorePathSet RemoteStore::queryAllValidPaths() auto conn(getConnection()); conn->to << wopQueryAllValidPaths; conn.processStderr(); - return nix::worker_proto::read(*this, conn->from, Phantom {}); + return worker_proto::read(*this, conn->from, Phantom {}); } @@ -358,7 +358,7 @@ StorePathSet RemoteStore::querySubstitutablePaths(const StorePathSet & paths) return res; } else { conn->to << wopQuerySubstitutablePaths; - nix::worker_proto::write(*this, conn->to, paths); + worker_proto::write(*this, conn->to, paths); conn.processStderr(); return worker_proto::read(*this, conn->from, Phantom {}); } diff --git a/src/libstore/worker-protocol.hh b/src/libstore/worker-protocol.hh index fd6c2b2cf..2934c1d67 100644 --- a/src/libstore/worker-protocol.hh +++ b/src/libstore/worker-protocol.hh @@ -153,7 +153,7 @@ void write(const Store & store, Sink & out, const std::optional & optVal) { out << (uint64_t) (optVal ? 1 : 0); if (optVal) - nix::worker_proto::write(store, out, *optVal); + worker_proto::write(store, out, *optVal); } /* Specialization which uses and empty string for the empty case, taking diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index 4dcdebe2f..da91caef1 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -822,7 +822,7 @@ static void opServe(Strings opFlags, Strings opArgs) case cmdQueryValidPaths: { bool lock = readInt(in); bool substitute = readInt(in); - auto paths = nix::worker_proto::read(*store, in, Phantom {}); + auto paths = worker_proto::read(*store, in, Phantom {}); if (lock && writeAllowed) for (auto & path : paths) store->addTempRoot(path); @@ -852,19 +852,19 @@ static void opServe(Strings opFlags, Strings opArgs) } } - nix::worker_proto::write(*store, out, store->queryValidPaths(paths)); + worker_proto::write(*store, out, store->queryValidPaths(paths)); break; } case cmdQueryPathInfos: { - auto paths = nix::worker_proto::read(*store, in, Phantom {}); + auto paths = worker_proto::read(*store, in, Phantom {}); // !!! Maybe we want a queryPathInfos? for (auto & i : paths) { try { auto info = store->queryPathInfo(i); out << store->printStorePath(info->path) << (info->deriver ? 
store->printStorePath(*info->deriver) : ""); - nix::worker_proto::write(*store, out, info->references); + worker_proto::write(*store, out, info->references); // !!! Maybe we want compression? out << info->narSize // downloadSize << info->narSize; @@ -892,7 +892,7 @@ static void opServe(Strings opFlags, Strings opArgs) case cmdExportPaths: { readInt(in); // obsolete - store->exportPaths(nix::worker_proto::read(*store, in, Phantom {}), out); + store->exportPaths(worker_proto::read(*store, in, Phantom {}), out); break; } @@ -941,9 +941,9 @@ static void opServe(Strings opFlags, Strings opArgs) case cmdQueryClosure: { bool includeOutputs = readInt(in); StorePathSet closure; - store->computeFSClosure(nix::worker_proto::read(*store, in, Phantom {}), + store->computeFSClosure(worker_proto::read(*store, in, Phantom {}), closure, false, includeOutputs); - nix::worker_proto::write(*store, out, closure); + worker_proto::write(*store, out, closure); break; } @@ -958,7 +958,7 @@ static void opServe(Strings opFlags, Strings opArgs) }; if (deriver != "") info.deriver = store->parseStorePath(deriver); - info.references = nix::worker_proto::read(*store, in, Phantom {}); + info.references = worker_proto::read(*store, in, Phantom {}); in >> info.registrationTime >> info.narSize >> info.ultimate; info.sigs = readStrings(in); info.ca = parseContentAddressOpt(readString(in)); From 274357eb6acd9a0812872f32dd487354d51f0f13 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 30 Sep 2020 12:09:18 +0200 Subject: [PATCH 52/57] Simplify --- src/libfetchers/github.cc | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc index 8286edf75..42eb346a5 100644 --- a/src/libfetchers/github.cc +++ b/src/libfetchers/github.cc @@ -15,9 +15,6 @@ struct DownloadUrl { std::string url; Headers headers; - - DownloadUrl(const std::string & url, const Headers & headers) - : url(url), headers(headers) { } }; // A github or gitlab host @@ -254,7 +251,7 @@ struct GitHubInputScheme : GitArchiveInputScheme input.getRev()->to_string(Base16, false)); Headers headers = makeHeadersWithAuthTokens(host); - return DownloadUrl(url, headers); + return DownloadUrl { url, headers }; } void clone(const Input & input, const Path & destDir) override @@ -320,7 +317,7 @@ struct GitLabInputScheme : GitArchiveInputScheme input.getRev()->to_string(Base16, false)); Headers headers = makeHeadersWithAuthTokens(host); - return DownloadUrl(url, headers); + return DownloadUrl { url, headers }; } void clone(const Input & input, const Path & destDir) override From 20a1e20d9194527d725898c745d1243d3de16277 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 30 Sep 2020 12:11:22 +0200 Subject: [PATCH 53/57] Style --- src/libfetchers/github.cc | 19 +++++++++++-------- src/libstore/globals.hh | 8 ++++---- 2 files changed, 15 insertions(+), 12 deletions(-) diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc index 42eb346a5..8610fe447 100644 --- a/src/libfetchers/github.cc +++ b/src/libfetchers/github.cc @@ -140,15 +140,16 @@ struct GitArchiveInputScheme : InputScheme return input; } - std::optional getAccessToken(const std::string &host) const { + std::optional getAccessToken(const std::string & host) const + { auto tokens = settings.accessTokens.get(); - auto pat = tokens.find(host); - if (pat == tokens.end()) - return std::nullopt; - return pat->second; + if (auto token = get(tokens, host)) + return *token; + return {}; } - Headers makeHeadersWithAuthTokens(const std::string & 
host) const { + Headers makeHeadersWithAuthTokens(const std::string & host) const + { Headers headers; auto accessToken = getAccessToken(host); if (accessToken) { @@ -214,7 +215,8 @@ struct GitHubInputScheme : GitArchiveInputScheme { std::string type() override { return "github"; } - std::optional > accessHeaderFromToken(const std::string & token) const { + std::optional > accessHeaderFromToken(const std::string & token) const + { // Github supports PAT/OAuth2 tokens and HTTP Basic // Authentication. The former simply specifies the token, the // latter can use the token as the password. Only the first @@ -268,7 +270,8 @@ struct GitLabInputScheme : GitArchiveInputScheme { std::string type() override { return "gitlab"; } - std::optional > accessHeaderFromToken(const std::string & token) const { + std::optional > accessHeaderFromToken(const std::string & token) const + { // Gitlab supports 4 kinds of authorization, two of which are // relevant here: OAuth2 and PAT (Private Access Token). The // user can indicate which token is used by specifying the diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index 0f0c0fe6f..8c63c5b34 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -893,10 +893,10 @@ public: ```nix input.foo = { - type="gitlab"; - host="gitlab.mycompany.com"; - owner="mycompany"; - repo="pro"; + type = "gitlab"; + host = "gitlab.mycompany.com"; + owner = "mycompany"; + repo = "pro"; }; ``` From 924712eef1fe86d349635ba666d413632e62519c Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 30 Sep 2020 17:48:49 +0200 Subject: [PATCH 54/57] Installer: Set a known umask Fixes #1560, #2377. --- scripts/install-nix-from-closure.sh | 2 ++ scripts/install.in | 2 ++ 2 files changed, 4 insertions(+) diff --git a/scripts/install-nix-from-closure.sh b/scripts/install-nix-from-closure.sh index 6efd8af18..14fb91534 100644 --- a/scripts/install-nix-from-closure.sh +++ b/scripts/install-nix-from-closure.sh @@ -2,6 +2,8 @@ set -e +umask 0022 + dest="/nix" self="$(dirname "$0")" nix="@nix@" diff --git a/scripts/install.in b/scripts/install.in index 1d26c4ff0..39fae37e3 100644 --- a/scripts/install.in +++ b/scripts/install.in @@ -10,6 +10,8 @@ oops() { exit 1 } +umask 0022 + tmpDir="$(mktemp -d -t nix-binary-tarball-unpack.XXXXXXXXXX || \ oops "Can't create temporary directory for downloading the Nix binary tarball")" cleanup() { From f3280004e2be3f7533190ddf71ee1ec7c0d7d60d Mon Sep 17 00:00:00 2001 From: DavHau Date: Thu, 1 Oct 2020 11:34:13 +0700 Subject: [PATCH 55/57] add more examples to --help of `nix run` --- src/nix/run.cc | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/src/nix/run.cc b/src/nix/run.cc index cbaba9d90..e6584346e 100644 --- a/src/nix/run.cc +++ b/src/nix/run.cc @@ -167,6 +167,14 @@ struct CmdRun : InstallableCommand, RunCommon "To run Blender:", "nix run blender-bin" }, + Example{ + "To run vim from nixpkgs:", + "nix run nixpkgs#vim" + }, + Example{ + "To run vim from nixpkgs with arguments:", + "nix run nixpkgs#vim -- --help" + }, }; } From d5d196b0a15fdba50363b89a61eb894a49153b08 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20M=C3=B6ller?= Date: Fri, 2 Oct 2020 12:10:31 +0200 Subject: [PATCH 56/57] Fix profile update in nix command --- src/nix/command.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/nix/command.cc b/src/nix/command.cc index 37a4bc785..ba7de9fdd 100644 --- a/src/nix/command.cc +++ b/src/nix/command.cc @@ -152,7 +152,7 @@ void MixProfile::updateProfile(const Buildables & buildables) for (auto & 
output : bfd.outputs) { /* Output path should be known because we just tried to build it. */ - assert(!output.second); + assert(output.second); result.push_back(*output.second); } }, From 88a667e49e10af4a9e2daa51badbed63ad19d817 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 5 Oct 2020 17:53:30 +0200 Subject: [PATCH 57/57] Fix s3:// store Fixes https://github.com/NixOS/nixos-org-configurations/issues/123. --- src/libstore/http-binary-cache-store.cc | 1 + src/libstore/store-api.hh | 1 + src/libutil/url-parts.hh | 2 +- 3 files changed, 3 insertions(+), 1 deletion(-) diff --git a/src/libstore/http-binary-cache-store.cc b/src/libstore/http-binary-cache-store.cc index 86be7c006..3d3d91e5e 100644 --- a/src/libstore/http-binary-cache-store.cc +++ b/src/libstore/http-binary-cache-store.cc @@ -73,6 +73,7 @@ public: if (forceHttp) ret.insert("file"); return ret; } + protected: void maybeDisable() diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index ba202375b..854446987 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -830,6 +830,7 @@ struct StoreFactory std::function (const std::string & scheme, const std::string & uri, const Store::Params & params)> create; std::function ()> getConfig; }; + struct Implementations { static std::vector * registered; diff --git a/src/libutil/url-parts.hh b/src/libutil/url-parts.hh index 64e06cfbc..68be15cb0 100644 --- a/src/libutil/url-parts.hh +++ b/src/libutil/url-parts.hh @@ -7,7 +7,7 @@ namespace nix { // URI stuff. const static std::string pctEncoded = "(?:%[0-9a-fA-F][0-9a-fA-F])"; -const static std::string schemeRegex = "(?:[a-z+.-]+)"; +const static std::string schemeRegex = "(?:[a-z][a-z0-9+.-]*)"; const static std::string ipv6AddressRegex = "(?:\\[[0-9a-fA-F:]+\\])"; const static std::string unreservedRegex = "(?:[a-zA-Z0-9-._~])"; const static std::string subdelimsRegex = "(?:[!$&'\"()*+,;=])";
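
The worker-protocol refactor in patches 50-51 replaces the WorkerProto<T> template structs with free read/write functions whose overloads are selected by an empty Phantom<T> tag argument. Below is a minimal standalone sketch of that pattern, not the actual Nix sources: types are simplified, std::istream/std::ostream stand in for Nix's Source/Sink, and the Store parameter is dropped.

    // Sketch of Phantom-tag dispatched serialization (illustrative only).
    #include <iostream>
    #include <set>
    #include <sstream>
    #include <string>

    // Empty tag type: lets the caller pick the read() overload by type,
    // since C++ cannot select an overload on return type alone.
    template<typename T>
    struct Phantom {};

    // Base case: strings, written as "<length> <bytes>".
    std::string read(std::istream & in, Phantom<std::string>)
    {
        size_t n; in >> n; in.get();        // length, then the separating space
        std::string s(n, '\0');
        in.read(s.data(), n);
        return s;
    }

    void write(std::ostream & out, const std::string & s)
    {
        out << s.size() << ' ' << s;
    }

    // Containers are written once, generically, in terms of the base cases.
    template<typename T>
    std::set<T> read(std::istream & in, Phantom<std::set<T>>)
    {
        std::set<T> res;
        size_t n; in >> n; in.get();
        while (n--) res.insert(read(in, Phantom<T> {}));
        return res;
    }

    template<typename T>
    void write(std::ostream & out, const std::set<T> & s)
    {
        out << s.size() << ' ';
        for (auto & x : s) write(out, x);
    }

    int main()
    {
        std::stringstream wire;
        write(wire, std::set<std::string> { "foo", "bar" });
        for (auto & x : read(wire, Phantom<std::set<std::string>> {}))
            std::cout << x << "\n";
    }

The tag argument exists only to drive overload resolution; it carries no data, which is why reverting from template structs to plain overloads (as patch 50 does) changes nothing on the wire.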