diff --git a/doc/manual/src/expressions/language-constructs.md b/doc/manual/src/expressions/language-constructs.md index cb0169239..1c01f2cc7 100644 --- a/doc/manual/src/expressions/language-constructs.md +++ b/doc/manual/src/expressions/language-constructs.md @@ -284,6 +284,10 @@ The points of interest are: function is called with the `localServer` argument set to `true` but the `db4` argument set to `null`, then the evaluation fails. + Note that `->` is the [logical + implication](https://en.wikipedia.org/wiki/Truth_table#Logical_implication) + Boolean operation. + 2. This is a more subtle condition: if Subversion is built with Apache (`httpServer`) support, then the Expat library (an XML library) used by Subversion should be same as the one used by Apache. This is diff --git a/doc/manual/src/release-notes/rl-2.4.md b/doc/manual/src/release-notes/rl-2.4.md index 0f632f100..8b566fc7b 100644 --- a/doc/manual/src/release-notes/rl-2.4.md +++ b/doc/manual/src/release-notes/rl-2.4.md @@ -276,6 +276,9 @@ more than 2800 commits from 195 contributors since release 2.3. * Plugins can now register `nix` subcommands. +* The `--indirect` flag to `nix-store --add-root` has become a no-op. + `--add-root` will always generate indirect GC roots from now on. + ## Incompatible changes * The `nix` command is now marked as an experimental feature. This diff --git a/doc/manual/src/release-notes/rl-next.md b/doc/manual/src/release-notes/rl-next.md index a516887b1..f20acbd3f 100644 --- a/doc/manual/src/release-notes/rl-next.md +++ b/doc/manual/src/release-notes/rl-next.md @@ -1,8 +1,15 @@ # Release X.Y (202?-??-??) +* The Nix CLI now searches for a `flake.nix` up to the root of the current Git repository or a filesystem boundary, rather than just in the current directory. * The TOML parser used by `builtins.fromTOML` has been replaced by [a more compliant one](https://github.com/ToruNiina/toml11). * Added `:st`/`:show-trace` commands to nix repl, which are used to set or toggle display of error traces. * New builtin function `builtins.zipAttrsWith` with same functionality as `lib.zipAttrsWith` from nixpkgs, but much more efficient. +* New command `nix store copy-log` to copy build logs from one store + to another. +* The `commit-lockfile-summary` option can be set to a non-empty string + to override the commit summary used when committing an updated lockfile. + This may be used in conjunction with the `nixConfig` attribute in + `flake.nix` to better conform to repository conventions. diff --git a/docker.nix b/docker.nix index 28745fc5d..251bd2f46 100644 --- a/docker.nix +++ b/docker.nix @@ -21,6 +21,7 @@ let cacert.out findutils iana-etc + git ]; users = { @@ -200,6 +201,8 @@ let mkdir $out/tmp + mkdir -p $out/var/tmp + mkdir -p $out/etc/nix cat $nixConfContentsPath > $out/etc/nix/nix.conf @@ -235,6 +238,7 @@ pkgs.dockerTools.buildLayeredImageWithNixDb { ''; fakeRootCommands = '' chmod 1777 tmp + chmod 1777 var/tmp ''; config = { diff --git a/flake.nix b/flake.nix index a96aed490..9bfa36fdf 100644 --- a/flake.nix +++ b/flake.nix @@ -299,6 +299,8 @@ propagatedBuildInputs = propagatedDeps; + disallowedReferences = [ boost ]; + preConfigure = '' # Copy libboost_context so we don't get all of Boost in our closure.
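The `->` operator referenced in the manual hunk above is ordinary logical implication: as the linked truth table says, `a -> b` holds in every case except when `a` is true and `b` is false, i.e. it is equivalent to `!a || b`. A minimal standalone illustration (not part of the patch, and using C++ in place of Nix):

```cpp
#include <cassert>

// Logical implication: `a -> b` is false only when `a` is true and `b` is
// false, i.e. it is equivalent to `!a || b`.
constexpr bool implies(bool a, bool b) { return !a || b; }

int main()
{
    assert(implies(false, false));  // antecedent false => implication holds vacuously
    assert(implies(false, true));
    assert(implies(true,  true));
    assert(!implies(true, false));  // the only failing row of the truth table
}
```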
@@ -310,6 +312,13 @@ chmod u+w $out/lib/*.so.* patchelf --set-rpath $out/lib:${currentStdenv.cc.cc.lib}/lib $out/lib/libboost_thread.so.* ''} + ${lib.optionalString currentStdenv.isDarwin '' + for LIB in $out/lib/*.dylib; do + chmod u+w $LIB + install_name_tool -id $LIB $LIB + done + install_name_tool -change ${boost}/lib/libboost_system.dylib $out/lib/libboost_system.dylib $out/lib/libboost_thread.dylib + ''} ''; configureFlags = configureFlags ++ @@ -326,6 +335,12 @@ postInstall = '' mkdir -p $doc/nix-support echo "doc manual $doc/share/doc/nix/manual" >> $doc/nix-support/hydra-build-products + ${lib.optionalString currentStdenv.isDarwin '' + install_name_tool \ + -change ${boost}/lib/libboost_context.dylib \ + $out/lib/libboost_context.dylib \ + $out/lib/libnixutil.dylib + ''} ''; doInstallCheck = true; diff --git a/src/libcmd/command.cc b/src/libcmd/command.cc index 429cd32cc..6d183dfad 100644 --- a/src/libcmd/command.cc +++ b/src/libcmd/command.cc @@ -54,6 +54,36 @@ void StoreCommand::run() run(getStore()); } +CopyCommand::CopyCommand() +{ + addFlag({ + .longName = "from", + .description = "URL of the source Nix store.", + .labels = {"store-uri"}, + .handler = {&srcUri}, + }); + + addFlag({ + .longName = "to", + .description = "URL of the destination Nix store.", + .labels = {"store-uri"}, + .handler = {&dstUri}, + }); +} + +ref CopyCommand::createStore() +{ + return srcUri.empty() ? StoreCommand::createStore() : openStore(srcUri); +} + +ref CopyCommand::getDstStore() +{ + if (srcUri.empty() && dstUri.empty()) + throw UsageError("you must pass '--from' and/or '--to'"); + + return dstUri.empty() ? openStore() : openStore(dstUri); +} + EvalCommand::EvalCommand() { } @@ -73,13 +103,16 @@ ref EvalCommand::getEvalStore() ref EvalCommand::getEvalState() { - if (!evalState) evalState = -#if HAVE_BOEHMGC - std::allocate_shared(traceable_allocator(), -#else - std::make_shared( -#endif - searchPath, getEvalStore(), getStore()); + if (!evalState) + evalState = + #if HAVE_BOEHMGC + std::allocate_shared(traceable_allocator(), + searchPath, getEvalStore(), getStore()) + #else + std::make_shared( + searchPath, getEvalStore(), getStore()) + #endif + ; return ref(evalState); } diff --git a/src/libcmd/command.hh b/src/libcmd/command.hh index 07f398468..bd2a0a7ee 100644 --- a/src/libcmd/command.hh +++ b/src/libcmd/command.hh @@ -43,6 +43,19 @@ private: std::shared_ptr _store; }; +/* A command that copies something between `--from` and `--to` + stores. */ +struct CopyCommand : virtual StoreCommand +{ + std::string srcUri, dstUri; + + CopyCommand(); + + ref createStore() override; + + ref getDstStore(); +}; + struct EvalCommand : virtual StoreCommand, MixEvalArgs { EvalCommand(); diff --git a/src/libcmd/installables.cc b/src/libcmd/installables.cc index 6c20bb7b1..0c2db0399 100644 --- a/src/libcmd/installables.cc +++ b/src/libcmd/installables.cc @@ -345,6 +345,18 @@ Installable::getCursor(EvalState & state) return cursors[0]; } +static StorePath getDeriver( + ref store, + const Installable & i, + const StorePath & drvPath) +{ + auto derivers = store->queryValidDerivers(drvPath); + if (derivers.empty()) + throw Error("'%s' does not have a known deriver", i.what()); + // FIXME: use all derivers? 
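The `CopyCommand` helper introduced above resolves its two stores as follows: the source is `--from` if given (otherwise the default store), the destination is `--to` if given (otherwise the default store), and at least one of the two flags is required. Below is a standalone model of that resolution logic, with plain strings standing in for `openStore()` so the sketch compiles on its own; it is an illustration of the behaviour, not code from the patch.

```cpp
#include <iostream>
#include <stdexcept>
#include <string>

// Mirror of CopyCommand::createStore()/getDstStore(): an empty URI falls
// back to the default store, and passing neither --from nor --to is an error.
struct CopyArgs
{
    std::string srcUri, dstUri;

    std::string srcStore() const
    {
        return srcUri.empty() ? "<default store>" : srcUri;
    }

    std::string dstStore() const
    {
        if (srcUri.empty() && dstUri.empty())
            throw std::invalid_argument("you must pass '--from' and/or '--to'");
        return dstUri.empty() ? "<default store>" : dstUri;
    }
};

int main()
{
    CopyArgs args{"ssh://builder", ""};   // as if only --from were given
    std::cout << args.srcStore() << " -> " << args.dstStore() << "\n";
}
```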
+ return *derivers.begin(); +} + struct InstallableStorePath : Installable { ref store; @@ -353,7 +365,7 @@ struct InstallableStorePath : Installable InstallableStorePath(ref store, StorePath && storePath) : store(store), storePath(std::move(storePath)) { } - std::string what() override { return store->printStorePath(storePath); } + std::string what() const override { return store->printStorePath(storePath); } DerivedPaths toDerivedPaths() override { @@ -374,6 +386,15 @@ struct InstallableStorePath : Installable } } + StorePathSet toDrvPaths(ref store) override + { + if (storePath.isDerivation()) { + return {storePath}; + } else { + return {getDeriver(store, *this, storePath)}; + } + } + std::optional getStorePath() override { return storePath; @@ -402,6 +423,14 @@ DerivedPaths InstallableValue::toDerivedPaths() return res; } +StorePathSet InstallableValue::toDrvPaths(ref store) +{ + StorePathSet res; + for (auto & drv : toDerivations()) + res.insert(drv.drvPath); + return res; +} + struct InstallableAttrPath : InstallableValue { SourceExprCommand & cmd; @@ -412,7 +441,7 @@ struct InstallableAttrPath : InstallableValue : InstallableValue(state), cmd(cmd), v(allocRootValue(v)), attrPath(attrPath) { } - std::string what() override { return attrPath; } + std::string what() const override { return attrPath; } std::pair toValue(EvalState & state) override { @@ -836,11 +865,7 @@ StorePathSet toDerivations( [&](const DerivedPath::Opaque & bo) { if (!useDeriver) throw Error("argument '%s' did not evaluate to a derivation", i->what()); - auto derivers = store->queryValidDerivers(bo.path); - if (derivers.empty()) - throw Error("'%s' does not have a known deriver", i->what()); - // FIXME: use all derivers? - drvPaths.insert(*derivers.begin()); + drvPaths.insert(getDeriver(store, *i, bo.path)); }, [&](const DerivedPath::Built & bfd) { drvPaths.insert(bfd.drvPath); diff --git a/src/libcmd/installables.hh b/src/libcmd/installables.hh index 79931ad3e..ced6b3f10 100644 --- a/src/libcmd/installables.hh +++ b/src/libcmd/installables.hh @@ -33,10 +33,15 @@ struct Installable { virtual ~Installable() { } - virtual std::string what() = 0; + virtual std::string what() const = 0; virtual DerivedPaths toDerivedPaths() = 0; + virtual StorePathSet toDrvPaths(ref store) + { + throw Error("'%s' cannot be converted to a derivation path", what()); + } + DerivedPath toDerivedPath(); UnresolvedApp toApp(EvalState & state); @@ -81,6 +86,8 @@ struct InstallableValue : Installable virtual std::vector toDerivations() = 0; DerivedPaths toDerivedPaths() override; + + StorePathSet toDrvPaths(ref store) override; }; struct InstallableFlake : InstallableValue @@ -99,7 +106,7 @@ struct InstallableFlake : InstallableValue Strings && prefixes, const flake::LockFlags & lockFlags); - std::string what() override { return flakeRef.to_string() + "#" + *attrPaths.begin(); } + std::string what() const override { return flakeRef.to_string() + "#" + *attrPaths.begin(); } std::vector getActualAttrPaths(); diff --git a/src/libexpr/attr-set.hh b/src/libexpr/attr-set.hh index 82c348287..3e4899efc 100644 --- a/src/libexpr/attr-set.hh +++ b/src/libexpr/attr-set.hh @@ -121,6 +121,8 @@ class BindingsBuilder Bindings * bindings; public: + // needed by std::back_inserter + using value_type = Attr; EvalState & state; @@ -134,6 +136,11 @@ public: } void insert(const Attr & attr) + { + push_back(attr); + } + + void push_back(const Attr & attr) { bindings->push_back(attr); } diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 81aa86641..94ffab175 
100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -36,6 +36,19 @@ namespace nix { +static char * allocString(size_t size) +{ + char * t; +#if HAVE_BOEHMGC + t = (char *) GC_MALLOC_ATOMIC(size); +#else + t = malloc(size); +#endif + if (!t) throw std::bad_alloc(); + return t; +} + + static char * dupString(const char * s) { char * t; @@ -412,6 +425,11 @@ EvalState::EvalState( , sDescription(symbols.create("description")) , sSelf(symbols.create("self")) , sEpsilon(symbols.create("")) + , sStartSet(symbols.create("startSet")) + , sOperator(symbols.create("operator")) + , sKey(symbols.create("key")) + , sPath(symbols.create("path")) + , sPrefix(symbols.create("prefix")) , repair(NoRepair) , emptyBindings(0) , store(store) @@ -771,17 +789,28 @@ void Value::mkString(std::string_view s) } +static void copyContextToValue(Value & v, const PathSet & context) +{ + if (!context.empty()) { + size_t n = 0; + v.string.context = (const char * *) + allocBytes((context.size() + 1) * sizeof(char *)); + for (auto & i : context) + v.string.context[n++] = dupString(i.c_str()); + v.string.context[n] = 0; + } +} + void Value::mkString(std::string_view s, const PathSet & context) { mkString(s); - if (!context.empty()) { - size_t n = 0; - string.context = (const char * *) - allocBytes((context.size() + 1) * sizeof(char *)); - for (auto & i : context) - string.context[n++] = dupString(i.c_str()); - string.context[n] = 0; - } + copyContextToValue(*this, context); +} + +void Value::mkStringMove(const char * s, const PathSet & context) +{ + mkString(s); + copyContextToValue(*this, context); } @@ -1660,13 +1689,34 @@ void EvalState::concatLists(Value & v, size_t nrLists, Value * * lists, const Po void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v) { PathSet context; - std::ostringstream s; + std::vector s; + size_t sSize = 0; NixInt n = 0; NixFloat nf = 0; bool first = !forceString; ValueType firstType = nString; + const auto str = [&] { + std::string result; + result.reserve(sSize); + for (const auto & part : s) result += part; + return result; + }; + /* c_str() is not str().c_str() because we want to create a string + Value. allocating a GC'd string directly and moving it into a + Value lets us avoid an allocation and copy. 
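The two helper lambdas introduced here implement a join-once strategy: the parts of the concatenation are collected in a vector together with their total size, and only then written out in a single pass (`str()` into a reserved `std::string`, `c_str()` into a GC-allocated buffer from `allocString`). A standalone sketch of the same pattern, mirroring the `str()` variant so it needs no GC allocator:

```cpp
#include <iostream>
#include <string>
#include <vector>

// Join all parts with exactly one buffer allocation: the total size is known
// up front, so the copy loop never reallocates.
static std::string concatOnce(const std::vector<std::string> & parts)
{
    size_t total = 0;
    for (auto & part : parts) total += part.size();

    std::string result;
    result.reserve(total);              // the equivalent of allocString(total + 1)
    for (auto & part : parts) result += part;
    return result;
}

int main()
{
    std::cout << concatOnce({"/nix/store/", "abc123-", "hello"}) << "\n";
}
```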
*/ + const auto c_str = [&] { + char * result = allocString(sSize + 1); + char * tmp = result; + for (const auto & part : s) { + memcpy(tmp, part.c_str(), part.size()); + tmp += part.size(); + } + *tmp = 0; + return result; + }; + for (auto & [i_pos, i] : *es) { Value vTmp; i->eval(state, env, vTmp); @@ -1696,11 +1746,15 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v) nf += vTmp.fpoint; } else throwEvalError(i_pos, "cannot add %1% to a float", showType(vTmp)); - } else + } else { + if (s.empty()) s.reserve(es->size()); /* skip canonization of first path, which would only be not canonized in the first place if it's coming from a ./${foo} type path */ - s << state.coerceToString(i_pos, vTmp, context, false, firstType == nString, !first); + s.emplace_back( + state.coerceToString(i_pos, vTmp, context, false, firstType == nString, !first)); + sSize += s.back().size(); + } first = false; } @@ -1712,9 +1766,9 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v) else if (firstType == nPath) { if (!context.empty()) throwEvalError(pos, "a string that refers to a store path cannot be appended to a path"); - v.mkPath(canonPath(s.str())); + v.mkPath(canonPath(str())); } else - v.mkString(s.str(), context); + v.mkStringMove(c_str(), context); } diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index 89814785e..82ce9d1b3 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -80,7 +80,8 @@ public: sContentAddressed, sOutputHash, sOutputHashAlgo, sOutputHashMode, sRecurseForDerivations, - sDescription, sSelf, sEpsilon; + sDescription, sSelf, sEpsilon, sStartSet, sOperator, sKey, sPath, + sPrefix; Symbol sDerivationNix; /* If set, force copying files to the Nix store even if they @@ -179,8 +180,8 @@ public: Expr * parseExprFromFile(const Path & path, StaticEnv & staticEnv); /* Parse a Nix expression from the specified string. */ - Expr * parseExprFromString(std::string_view s, const Path & basePath, StaticEnv & staticEnv); - Expr * parseExprFromString(std::string_view s, const Path & basePath); + Expr * parseExprFromString(std::string s, const Path & basePath, StaticEnv & staticEnv); + Expr * parseExprFromString(std::string s, const Path & basePath); Expr * parseStdin(); @@ -308,7 +309,7 @@ private: friend struct ExprAttrs; friend struct ExprLet; - Expr * parse(const char * text, FileOrigin origin, const Path & path, + Expr * parse(char * text, size_t length, FileOrigin origin, const Path & path, const Path & basePath, StaticEnv & staticEnv); public: @@ -399,6 +400,7 @@ private: friend struct ExprSelect; friend void prim_getAttr(EvalState & state, const Pos & pos, Value * * args, Value & v); friend void prim_match(EvalState & state, const Pos & pos, Value * * args, Value & v); + friend void prim_split(EvalState & state, const Pos & pos, Value * * args, Value & v); friend struct Value; }; diff --git a/src/libexpr/flake/flake.cc b/src/libexpr/flake/flake.cc index 190a128d7..0fbe9b960 100644 --- a/src/libexpr/flake/flake.cc +++ b/src/libexpr/flake/flake.cc @@ -633,12 +633,24 @@ LockedFlake lockFlake( newLockFile.write(path); + std::optional commitMessage = std::nullopt; + if (lockFlags.commitLockFile) { + std::string cm; + + cm = settings.commitLockFileSummary.get(); + + if (cm == "") { + cm = fmt("%s: %s", relPath, lockFileExists ? "Update" : "Add"); + } + + cm += "\n\nFlake lock file updates:\n\n"; + cm += filterANSIEscapes(diff, true); + commitMessage = cm; + } + topRef.input.markChangedFile( (topRef.subdir == "" ? 
"" : topRef.subdir + "/") + "flake.lock", - lockFlags.commitLockFile - ? std::optional(fmt("%s: %s\n\nFlake lock file changes:\n\n%s", - relPath, lockFileExists ? "Update" : "Add", filterANSIEscapes(diff, true))) - : std::nullopt); + commitMessage); /* Rewriting the lockfile changed the top-level repo, so we should re-read it. FIXME: we could diff --git a/src/libexpr/flake/flakeref.cc b/src/libexpr/flake/flakeref.cc index c3b74e0fe..930ed9ccd 100644 --- a/src/libexpr/flake/flakeref.cc +++ b/src/libexpr/flake/flakeref.cc @@ -122,6 +122,28 @@ std::pair parseFlakeRefWithFragment( if (isFlake) { + if (!allowMissing && !pathExists(path + "/flake.nix")){ + notice("path '%s' does not contain a 'flake.nix', searching up",path); + + // Save device to detect filesystem boundary + dev_t device = lstat(path).st_dev; + bool found = false; + while (path != "/") { + if (pathExists(path + "/flake.nix")) { + found = true; + break; + } else if (pathExists(path + "/.git")) + throw Error("path '%s' is not part of a flake (neither it nor its parent directories contain a 'flake.nix' file)", path); + else { + if (lstat(path).st_dev != device) + throw Error("unable to find a flake before encountering filesystem boundary at '%s'", path); + } + path = dirOf(path); + } + if (!found) + throw BadURL("could not find a flake.nix file"); + } + if (!S_ISDIR(lstat(path).st_mode)) throw BadURL("path '%s' is not a flake (because it's not a directory)", path); diff --git a/src/libexpr/lexer.l b/src/libexpr/lexer.l index c18877e29..a0e7a1877 100644 --- a/src/libexpr/lexer.l +++ b/src/libexpr/lexer.l @@ -64,29 +64,32 @@ static void adjustLoc(YYLTYPE * loc, const char * s, size_t len) } -// FIXME: optimize -static Expr * unescapeStr(SymbolTable & symbols, const char * s, size_t length) +// we make use of the fact that the parser receives a private copy of the input +// string and can munge around in it. +static Expr * unescapeStr(SymbolTable & symbols, char * s, size_t length) { - string t; - t.reserve(length); + char * result = s; + char * t = s; char c; + // the input string is terminated with *two* NULs, so we can safely take + // *one* character after the one being checked against. while ((c = *s++)) { if (c == '\\') { - assert(*s); c = *s++; - if (c == 'n') t += '\n'; - else if (c == 'r') t += '\r'; - else if (c == 't') t += '\t'; - else t += c; + if (c == 'n') *t = '\n'; + else if (c == 'r') *t = '\r'; + else if (c == 't') *t = '\t'; + else *t = c; } else if (c == '\r') { /* Normalise CR and CR/LF into LF. 
*/ - t += '\n'; + *t = '\n'; if (*s == '\n') s++; /* cr/lf */ } - else t += c; + else *t = c; + t++; } - return new ExprString(symbols.create(t)); + return new ExprString(symbols.create({result, size_t(t - result)})); } @@ -139,7 +142,7 @@ or { return OR_KW; } \/\/ { return UPDATE; } \+\+ { return CONCAT; } -{ID} { yylval->id = strdup(yytext); return ID; } +{ID} { yylval->id = {yytext, (size_t) yyleng}; return ID; } {INT} { errno = 0; try { yylval->n = boost::lexical_cast(yytext); @@ -221,14 +224,14 @@ or { return OR_KW; } {PATH_SEG} { POP_STATE(); PUSH_STATE(INPATH_SLASH); - yylval->path = strdup(yytext); + yylval->path = {yytext, (size_t) yyleng}; return PATH; } {HPATH_START} { POP_STATE(); PUSH_STATE(INPATH_SLASH); - yylval->path = strdup(yytext); + yylval->path = {yytext, (size_t) yyleng}; return HPATH; } @@ -237,7 +240,7 @@ or { return OR_KW; } PUSH_STATE(INPATH_SLASH); else PUSH_STATE(INPATH); - yylval->path = strdup(yytext); + yylval->path = {yytext, (size_t) yyleng}; return PATH; } {HPATH} { @@ -245,7 +248,7 @@ or { return OR_KW; } PUSH_STATE(INPATH_SLASH); else PUSH_STATE(INPATH); - yylval->path = strdup(yytext); + yylval->path = {yytext, (size_t) yyleng}; return HPATH; } @@ -280,8 +283,8 @@ or { return OR_KW; } throw ParseError("path has a trailing slash"); } -{SPATH} { yylval->path = strdup(yytext); return SPATH; } -{URI} { yylval->uri = strdup(yytext); return URI; } +{SPATH} { yylval->path = {yytext, (size_t) yyleng}; return SPATH; } +{URI} { yylval->uri = {yytext, (size_t) yyleng}; return URI; } [ \t\r\n]+ /* eat up whitespace */ \#[^\r\n]* /* single-line comments */ diff --git a/src/libexpr/nixexpr.cc b/src/libexpr/nixexpr.cc index a75357871..640c44c01 100644 --- a/src/libexpr/nixexpr.cc +++ b/src/libexpr/nixexpr.cc @@ -473,7 +473,7 @@ string ExprLambda::showNamePos() const size_t SymbolTable::totalSize() const { size_t n = 0; - for (auto & i : symbols) + for (auto & i : store) n += i.size(); return n; } diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index f8aaea582..a3e713937 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -273,9 +273,16 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParseData * data, const char * err nix::Formal * formal; nix::NixInt n; nix::NixFloat nf; - const char * id; // !!! -> Symbol - char * path; - char * uri; + // using C a struct allows us to avoid having to define the special + // members that using string_view here would implicitly delete. + struct StringToken { + const char * p; + size_t l; + operator std::string_view() const { return {p, l}; } + }; + StringToken id; // !!! 
-> Symbol + StringToken path; + StringToken uri; std::vector * attrNames; std::vector > * string_parts; } @@ -397,7 +404,7 @@ expr_select expr_simple : ID { - if (strcmp($1, "__curPos") == 0) + if (strncmp($1.p, "__curPos", $1.l) == 0) $$ = new ExprPos(CUR_POS); else $$ = new ExprVar(CUR_POS, data->symbols.create($1)); @@ -414,7 +421,7 @@ expr_simple $$ = new ExprConcatStrings(CUR_POS, false, $2); } | SPATH { - string path($1 + 1, strlen($1) - 2); + string path($1.p + 1, $1.l - 2); $$ = new ExprCall(CUR_POS, new ExprVar(data->symbols.create("__findFile")), {new ExprVar(data->symbols.create("__nixPath")), @@ -460,14 +467,14 @@ string_parts_interpolated path_start : PATH { - Path path(absPath($1, data->basePath)); + Path path(absPath({$1.p, $1.l}, data->basePath)); /* add back in the trailing '/' to the first segment */ - if ($1[strlen($1)-1] == '/' && strlen($1) > 1) + if ($1.p[$1.l-1] == '/' && $1.l > 1) path += "/"; $$ = new ExprPath(path); } | HPATH { - Path path(getHome() + string($1 + 1)); + Path path(getHome() + string($1.p + 1, $1.l - 1)); $$ = new ExprPath(path); } ; @@ -543,7 +550,7 @@ attrpath attr : ID { $$ = $1; } - | OR_KW { $$ = "or"; } + | OR_KW { $$ = {"or", 2}; } ; string_attr @@ -589,7 +596,7 @@ formal namespace nix { -Expr * EvalState::parse(const char * text, FileOrigin origin, +Expr * EvalState::parse(char * text, size_t length, FileOrigin origin, const Path & path, const Path & basePath, StaticEnv & staticEnv) { yyscan_t scanner; @@ -609,7 +616,7 @@ Expr * EvalState::parse(const char * text, FileOrigin origin, data.basePath = basePath; yylex_init(&scanner); - yy_scan_string(text, scanner); + yy_scan_buffer(text, length, scanner); int res = yyparse(scanner, &data); yylex_destroy(scanner); @@ -655,26 +662,33 @@ Expr * EvalState::parseExprFromFile(const Path & path) Expr * EvalState::parseExprFromFile(const Path & path, StaticEnv & staticEnv) { - return parse(readFile(path).c_str(), foFile, path, dirOf(path), staticEnv); + auto buffer = readFile(path); + // readFile should have left some extra space for terminators + buffer.append("\0\0", 2); + return parse(buffer.data(), buffer.size(), foFile, path, dirOf(path), staticEnv); } -Expr * EvalState::parseExprFromString(std::string_view s, const Path & basePath, StaticEnv & staticEnv) +Expr * EvalState::parseExprFromString(std::string s, const Path & basePath, StaticEnv & staticEnv) { - return parse(s.data(), foString, "", basePath, staticEnv); + s.append("\0\0", 2); + return parse(s.data(), s.size(), foString, "", basePath, staticEnv); } -Expr * EvalState::parseExprFromString(std::string_view s, const Path & basePath) +Expr * EvalState::parseExprFromString(std::string s, const Path & basePath) { - return parseExprFromString(s, basePath, staticBaseEnv); + return parseExprFromString(std::move(s), basePath, staticBaseEnv); } Expr * EvalState::parseStdin() { //Activity act(*logger, lvlTalkative, format("parsing standard input")); - return parse(drainFD(0).data(), foStdin, "", absPath("."), staticBaseEnv); + auto buffer = drainFD(0); + // drainFD should have left some extra space for terminators + buffer.append("\0\0", 2); + return parse(buffer.data(), buffer.size(), foStdin, "", absPath("."), staticBaseEnv); } diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 1d2a7d5d2..5c476ddd4 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -12,6 +12,8 @@ #include "value-to-xml.hh" #include "primops.hh" +#include + #include #include #include @@ -378,7 +380,7 @@ void prim_exec(EvalState & state, const Pos & 
pos, Value * * args, Value & v) auto output = runProgram(program, true, commandArgs); Expr * parsed; try { - parsed = state.parseExprFromString(output, pos.file); + parsed = state.parseExprFromString(std::move(output), pos.file); } catch (Error & e) { e.addTrace(pos, "While parsing the output from '%1%'", program); throw; @@ -592,16 +594,16 @@ typedef list ValueList; static Bindings::iterator getAttr( EvalState & state, - string funcName, - string attrName, + std::string_view funcName, + Symbol attrSym, Bindings * attrSet, const Pos & pos) { - Bindings::iterator value = attrSet->find(state.symbols.create(attrName)); + Bindings::iterator value = attrSet->find(attrSym); if (value == attrSet->end()) { hintformat errorMsg = hintfmt( "attribute '%s' missing for call to '%s'", - attrName, + attrSym, funcName ); @@ -635,7 +637,7 @@ static void prim_genericClosure(EvalState & state, const Pos & pos, Value * * ar Bindings::iterator startSet = getAttr( state, "genericClosure", - "startSet", + state.sStartSet, args[0]->attrs, pos ); @@ -650,7 +652,7 @@ static void prim_genericClosure(EvalState & state, const Pos & pos, Value * * ar Bindings::iterator op = getAttr( state, "genericClosure", - "operator", + state.sOperator, args[0]->attrs, pos ); @@ -672,7 +674,7 @@ static void prim_genericClosure(EvalState & state, const Pos & pos, Value * * ar state.forceAttrs(*e, pos); Bindings::iterator key = - e->attrs->find(state.symbols.create("key")); + e->attrs->find(state.sKey); if (key == e->attrs->end()) throw EvalError({ .msg = hintfmt("attribute 'key' required"), @@ -1079,10 +1081,10 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * * } else { auto s = state.coerceToString(*i->pos, *i->value, context, true); drv.env.emplace(key, s); - if (i->name == state.sBuilder) drv.builder = s; - else if (i->name == state.sSystem) drv.platform = s; - else if (i->name == state.sOutputHash) outputHash = s; - else if (i->name == state.sOutputHashAlgo) outputHashAlgo = s; + if (i->name == state.sBuilder) drv.builder = std::move(s); + else if (i->name == state.sSystem) drv.platform = std::move(s); + else if (i->name == state.sOutputHash) outputHash = std::move(s); + else if (i->name == state.sOutputHashAlgo) outputHashAlgo = std::move(s); else if (i->name == state.sOutputHashMode) handleHashMode(s); else if (i->name == state.sOutputs) handleOutputs(tokenizeString(s)); @@ -1498,14 +1500,14 @@ static void prim_findFile(EvalState & state, const Pos & pos, Value * * args, Va state.forceAttrs(*v2, pos); string prefix; - Bindings::iterator i = v2->attrs->find(state.symbols.create("prefix")); + Bindings::iterator i = v2->attrs->find(state.sPrefix); if (i != v2->attrs->end()) prefix = state.forceStringNoCtx(*i->value, pos); i = getAttr( state, "findFile", - "path", + state.sPath, v2->attrs, pos ); @@ -2163,7 +2165,10 @@ static void prim_attrValues(EvalState & state, const Pos & pos, Value * * args, v.listElems()[n++] = (Value *) &i; std::sort(v.listElems(), v.listElems() + n, - [](Value * v1, Value * v2) { return (string) ((Attr *) v1)->name < (string) ((Attr *) v2)->name; }); + [](Value * v1, Value * v2) { + std::string_view s1 = ((Attr *) v1)->name, s2 = ((Attr *) v2)->name; + return s1 < s2; + }); for (unsigned int i = 0; i < n; ++i) v.listElems()[i] = ((Attr *) v.listElems()[i])->value; @@ -2184,11 +2189,10 @@ void prim_getAttr(EvalState & state, const Pos & pos, Value * * args, Value & v) { string attr = state.forceStringNoCtx(*args[0], pos); state.forceAttrs(*args[1], pos); - // !!! 
Should we create a symbol here or just do a lookup? Bindings::iterator i = getAttr( state, "getAttr", - attr, + state.symbols.create(attr), args[1]->attrs, pos ); @@ -2268,21 +2272,25 @@ static void prim_removeAttrs(EvalState & state, const Pos & pos, Value * * args, state.forceAttrs(*args[0], pos); state.forceList(*args[1], pos); - /* Get the attribute names to be removed. */ - std::set names; + /* Get the attribute names to be removed. + We keep them as Attrs instead of Symbols so std::set_difference + can be used to remove them from attrs[0]. */ + boost::container::small_vector names; + names.reserve(args[1]->listSize()); for (auto elem : args[1]->listItems()) { state.forceStringNoCtx(*elem, pos); - names.insert(state.symbols.create(elem->string.s)); + names.emplace_back(state.symbols.create(elem->string.s), nullptr); } + std::sort(names.begin(), names.end()); /* Copy all attributes not in that set. Note that we don't need to sort v.attrs because it's a subset of an already sorted vector. */ auto attrs = state.buildBindings(args[0]->attrs->size()); - for (auto & i : *args[0]->attrs) { - if (!names.count(i.name)) - attrs.insert(i); - } + std::set_difference( + args[0]->attrs->begin(), args[0]->attrs->end(), + names.begin(), names.end(), + std::back_inserter(attrs)); v.mkAttrs(attrs.alreadySorted()); } @@ -3520,18 +3528,20 @@ static RegisterPrimOp primop_match({ /* Split a string with a regular expression, and return a list of the non-matching parts interleaved by the lists of the matching groups. */ -static void prim_split(EvalState & state, const Pos & pos, Value * * args, Value & v) +void prim_split(EvalState & state, const Pos & pos, Value * * args, Value & v) { auto re = state.forceStringNoCtx(*args[0], pos); try { - std::regex regex(re, std::regex::extended); + auto regex = state.regexCache->cache.find(re); + if (regex == state.regexCache->cache.end()) + regex = state.regexCache->cache.emplace(re, std::regex(re, std::regex::extended)).first; PathSet context; const std::string str = state.forceString(*args[1], context, pos); - auto begin = std::sregex_iterator(str.begin(), str.end(), regex); + auto begin = std::sregex_iterator(str.begin(), str.end(), regex->second); auto end = std::sregex_iterator(); // Any matches results are surrounded by non-matching results. @@ -3915,9 +3925,12 @@ void EvalState::createBaseEnv() /* Note: we have to initialize the 'derivation' constant *after* building baseEnv/staticBaseEnv because it uses 'builtins'. */ - eval(parse( + char code[] = #include "primops/derivation.nix.gen.hh" - , foFile, sDerivationNix, "/", staticBaseEnv), *vDerivation); + // the parser needs two NUL bytes as terminators; one of them + // is implied by being a C string. + "\0"; + eval(parse(code, sizeof(code), foFile, sDerivationNix, "/", staticBaseEnv), *vDerivation); } diff --git a/src/libexpr/symbol-table.hh b/src/libexpr/symbol-table.hh index 4eb6dac81..a090ebae5 100644 --- a/src/libexpr/symbol-table.hh +++ b/src/libexpr/symbol-table.hh @@ -1,7 +1,8 @@ #pragma once +#include #include -#include +#include #include "types.hh" @@ -70,15 +71,21 @@ public: class SymbolTable { private: - typedef std::unordered_set Symbols; - Symbols symbols; + std::unordered_map symbols; + std::list store; public: Symbol create(std::string_view s) { - // FIXME: avoid allocation if 's' already exists in the symbol table. 
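The replacement `create()` shown just below interns each distinct string once: the characters live in a `std::list<std::string>` (whose elements never move, so pointers into them stay valid) and an `unordered_map` keyed by the string's contents makes repeated lookups cheap. A self-contained model of that scheme, returning a raw pointer where the real code returns a `Symbol`:

```cpp
#include <iostream>
#include <list>
#include <string>
#include <string_view>
#include <unordered_map>

// Toy version of the new SymbolTable: look the string up first; only when it
// is unknown, copy it into the stable backing list and record it in the map.
class ToySymbolTable
{
    std::unordered_map<std::string_view, const std::string *> symbols;
    std::list<std::string> store;

public:
    const std::string * create(std::string_view s)
    {
        auto it = symbols.find(s);
        if (it != symbols.end()) return it->second;
        const std::string & raw = store.emplace_back(s);
        return symbols.emplace(raw, &raw).first->second;
    }
};

int main()
{
    ToySymbolTable t;
    auto a = t.create("startSet");
    auto b = t.create("startSet");
    std::cout << (a == b ? "interned once" : "bug") << "\n";
}
```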
- std::pair res = symbols.emplace(std::string(s)); - return Symbol(&*res.first); + // Most symbols are looked up more than once, so we trade off insertion performance + // for lookup performance. + // TODO: could probably be done more efficiently with transparent Hash and Equals + // on the original implementation using unordered_set + auto it = symbols.find(s); + if (it != symbols.end()) return it->second; + + const string & rawSym = store.emplace_back(s); + return symbols.emplace(rawSym, Symbol(&rawSym)).first->second; } size_t size() const @@ -91,7 +98,7 @@ public: template void dump(T callback) { - for (auto & s : symbols) + for (auto & s : store) callback(s); } }; diff --git a/src/libexpr/value.hh b/src/libexpr/value.hh index 7d007ebdc..1896c7563 100644 --- a/src/libexpr/value.hh +++ b/src/libexpr/value.hh @@ -241,6 +241,8 @@ public: void mkString(std::string_view s, const PathSet & context); + void mkStringMove(const char * s, const PathSet & context); + inline void mkString(const Symbol & s) { mkString(((const std::string &) s).c_str()); diff --git a/src/libfetchers/tarball.cc b/src/libfetchers/tarball.cc index 69d020748..c933475ca 100644 --- a/src/libfetchers/tarball.cc +++ b/src/libfetchers/tarball.cc @@ -67,18 +67,18 @@ DownloadFileResult downloadFile( storePath = std::move(cached->storePath); } else { StringSink sink; - dumpString(*res.data, sink); - auto hash = hashString(htSHA256, *res.data); + dumpString(res.data, sink); + auto hash = hashString(htSHA256, res.data); ValidPathInfo info { store->makeFixedOutputPath(FileIngestionMethod::Flat, hash, name), - hashString(htSHA256, *sink.s), + hashString(htSHA256, sink.s), }; - info.narSize = sink.s->size(); + info.narSize = sink.s.size(); info.ca = FixedOutputHash { .method = FileIngestionMethod::Flat, .hash = hash, }; - auto source = StringSource { *sink.s }; + auto source = StringSource(sink.s); store->addToStore(info, source, NoRepair, NoCheckSigs); storePath = std::move(info.path); } diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc index 13c086a46..6e4458f7a 100644 --- a/src/libstore/binary-cache-store.cc +++ b/src/libstore/binary-cache-store.cc @@ -31,7 +31,7 @@ BinaryCacheStore::BinaryCacheStore(const Params & params) StringSink sink; sink << narVersionMagic1; - narMagic = *sink.s; + narMagic = sink.s; } void BinaryCacheStore::init() @@ -68,7 +68,7 @@ void BinaryCacheStore::upsertFile(const std::string & path, } void BinaryCacheStore::getFile(const std::string & path, - Callback> callback) noexcept + Callback> callback) noexcept { try { callback(getFile(path)); @@ -77,9 +77,9 @@ void BinaryCacheStore::getFile(const std::string & path, void BinaryCacheStore::getFile(const std::string & path, Sink & sink) { - std::promise> promise; + std::promise> promise; getFile(path, - {[&](std::future> result) { + {[&](std::future> result) { try { promise.set_value(result.get()); } catch (...) 
{ @@ -89,15 +89,15 @@ void BinaryCacheStore::getFile(const std::string & path, Sink & sink) sink(*promise.get_future().get()); } -std::shared_ptr BinaryCacheStore::getFile(const std::string & path) +std::optional BinaryCacheStore::getFile(const std::string & path) { StringSink sink; try { getFile(path, sink); } catch (NoSuchBinaryCacheFile &) { - return nullptr; + return std::nullopt; } - return sink.s; + return std::move(sink.s); } std::string BinaryCacheStore::narInfoFileFor(const StorePath & storePath) @@ -367,11 +367,11 @@ void BinaryCacheStore::queryPathInfoUncached(const StorePath & storePath, auto callbackPtr = std::make_shared(std::move(callback)); getFile(narInfoFile, - {[=](std::future> fut) { + {[=](std::future> fut) { try { auto data = fut.get(); - if (!data) return (*callbackPtr)(nullptr); + if (!data) return (*callbackPtr)({}); stats.narInfoRead++; @@ -429,7 +429,7 @@ StorePath BinaryCacheStore::addTextToStore(const string & name, const string & s StringSink sink; dumpString(s, sink); - auto source = StringSource { *sink.s }; + StringSource source(sink.s); return addToStoreCommon(source, repair, CheckSigs, [&](HashResult nar) { ValidPathInfo info { path, nar.first }; info.narSize = nar.second; @@ -446,11 +446,11 @@ void BinaryCacheStore::queryRealisationUncached(const DrvOutput & id, auto callbackPtr = std::make_shared(std::move(callback)); - Callback> newCallback = { - [=](std::future> fut) { + Callback> newCallback = { + [=](std::future> fut) { try { auto data = fut.get(); - if (!data) return (*callbackPtr)(nullptr); + if (!data) return (*callbackPtr)({}); auto realisation = Realisation::fromJSON( nlohmann::json::parse(*data), outputInfoFilePath); @@ -490,7 +490,7 @@ void BinaryCacheStore::addSignatures(const StorePath & storePath, const StringSe writeNarInfo(narInfo); } -std::shared_ptr BinaryCacheStore::getBuildLog(const StorePath & path) +std::optional BinaryCacheStore::getBuildLog(const StorePath & path) { auto drvPath = path; @@ -498,10 +498,10 @@ std::shared_ptr BinaryCacheStore::getBuildLog(const StorePath & pat try { auto info = queryPathInfo(path); // FIXME: add a "Log" field to .narinfo - if (!info->deriver) return nullptr; + if (!info->deriver) return std::nullopt; drvPath = *info->deriver; } catch (InvalidPath &) { - return nullptr; + return std::nullopt; } } @@ -512,4 +512,14 @@ std::shared_ptr BinaryCacheStore::getBuildLog(const StorePath & pat return getFile(logPath); } +void BinaryCacheStore::addBuildLog(const StorePath & drvPath, std::string_view log) +{ + assert(drvPath.isDerivation()); + + upsertFile( + "log/" + std::string(drvPath.to_string()), + (std::string) log, // FIXME: don't copy + "text/plain; charset=utf-8"); +} + } diff --git a/src/libstore/binary-cache-store.hh b/src/libstore/binary-cache-store.hh index 9815af591..7599230d9 100644 --- a/src/libstore/binary-cache-store.hh +++ b/src/libstore/binary-cache-store.hh @@ -51,6 +51,7 @@ public: const std::string & mimeType) = 0; void upsertFile(const std::string & path, + // FIXME: use std::string_view std::string && data, const std::string & mimeType); @@ -62,10 +63,11 @@ public: /* Fetch the specified file and call the specified callback with the result. A subclass may implement this asynchronously. 
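For readers unfamiliar with the callback style used here: the synchronous `getFile(path, sink)` overload earlier in this file bridges to the asynchronous one through a `std::promise`/`std::future` pair, which after this patch carries `std::optional<std::string>` instead of a shared pointer. Below is a reduced, standalone version of that bridge, with `std::function` standing in for Nix's own `Callback` type (an assumption made only to keep the sketch self-contained):

```cpp
#include <functional>
#include <future>
#include <iostream>
#include <optional>
#include <string>

using Callback = std::function<void(std::future<std::optional<std::string>>)>;

// Stand-in for the asynchronous, callback-based getFile().
static void getFileAsync(const std::string & path, Callback callback)
{
    std::promise<std::optional<std::string>> p;
    p.set_value("contents of " + path);   // a real implementation would fetch here
    callback(p.get_future());
}

// Synchronous wrapper: funnel the asynchronous result (or exception)
// through a promise and block on the corresponding future.
static std::optional<std::string> getFileSync(const std::string & path)
{
    std::promise<std::optional<std::string>> promise;
    getFileAsync(path, [&](std::future<std::optional<std::string>> result) {
        try { promise.set_value(result.get()); }
        catch (...) { promise.set_exception(std::current_exception()); }
    });
    return promise.get_future().get();
}

int main()
{
    std::cout << getFileSync("nix-cache-info").value_or("<missing>") << "\n";
}
```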
*/ - virtual void getFile(const std::string & path, - Callback> callback) noexcept; + virtual void getFile( + const std::string & path, + Callback> callback) noexcept; - std::shared_ptr getFile(const std::string & path); + std::optional getFile(const std::string & path); public: @@ -117,7 +119,9 @@ public: void addSignatures(const StorePath & storePath, const StringSet & sigs) override; - std::shared_ptr getBuildLog(const StorePath & path) override; + std::optional getBuildLog(const StorePath & path) override; + + void addBuildLog(const StorePath & drvPath, std::string_view log) override; }; diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc index d34e53fe8..151217b8b 100644 --- a/src/libstore/build/derivation-goal.cc +++ b/src/libstore/build/derivation-goal.cc @@ -278,7 +278,7 @@ void DerivationGoal::outputsSubstitutionTried() if (nrFailed > 0 && nrFailed > nrNoSubstituters + nrIncompleteClosure && !settings.tryFallback) { done(BuildResult::TransientFailure, - fmt("some substitutes for the outputs of derivation '%s' failed (usually happens due to networking issues); try '--fallback' to build derivation from source ", + Error("some substitutes for the outputs of derivation '%s' failed (usually happens due to networking issues); try '--fallback' to build derivation from source ", worker.store.printStorePath(drvPath))); return; } diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc index e4d2add73..0d0afea2d 100644 --- a/src/libstore/build/local-derivation-goal.cc +++ b/src/libstore/build/local-derivation-goal.cc @@ -2226,8 +2226,8 @@ void LocalDerivationGoal::registerOutputs() StringSink sink; dumpPath(actualPath, sink); deletePath(actualPath); - sink.s = make_ref(rewriteStrings(*sink.s, outputRewrites)); - StringSource source(*sink.s); + sink.s = rewriteStrings(sink.s, outputRewrites); + StringSource source(sink.s); restorePath(actualPath, source); } }; @@ -2295,7 +2295,7 @@ void LocalDerivationGoal::registerOutputs() StringSink sink; dumpPath(actualPath, sink); RewritingSink rsink2(oldHashPart, std::string(finalPath.hashPart()), nextSink); - rsink2(*sink.s); + rsink2(sink.s); rsink2.flush(); }); Path tmpPath = actualPath + ".tmp"; diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc index 59325da79..101aa13a5 100644 --- a/src/libstore/daemon.cc +++ b/src/libstore/daemon.cc @@ -69,7 +69,7 @@ struct TunnelLogger : public Logger StringSink buf; buf << STDERR_NEXT << (fs.s + "\n"); - enqueueMsg(*buf.s); + enqueueMsg(buf.s); } void logEI(const ErrorInfo & ei) override @@ -81,7 +81,7 @@ struct TunnelLogger : public Logger StringSink buf; buf << STDERR_NEXT << oss.str(); - enqueueMsg(*buf.s); + enqueueMsg(buf.s); } /* startWork() means that we're starting an operation for which we @@ -129,7 +129,7 @@ struct TunnelLogger : public Logger StringSink buf; buf << STDERR_START_ACTIVITY << act << lvl << type << s << fields << parent; - enqueueMsg(*buf.s); + enqueueMsg(buf.s); } void stopActivity(ActivityId act) override @@ -137,7 +137,7 @@ struct TunnelLogger : public Logger if (GET_PROTOCOL_MINOR(clientVersion) < 20) return; StringSink buf; buf << STDERR_STOP_ACTIVITY << act; - enqueueMsg(*buf.s); + enqueueMsg(buf.s); } void result(ActivityId act, ResultType type, const Fields & fields) override @@ -145,7 +145,7 @@ struct TunnelLogger : public Logger if (GET_PROTOCOL_MINOR(clientVersion) < 20) return; StringSink buf; buf << STDERR_RESULT << act << type << fields; - enqueueMsg(*buf.s); + enqueueMsg(buf.s); 
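Many of the mechanical `*sink.s` → `sink.s` and `*buf.s` → `buf.s` edits in this patch (including the ones right above) come from one underlying change: `StringSink` now owns its buffer as a plain `std::string` value rather than a `ref<std::string>`. A toy version of the new shape, showing that the buffer can still be handed off cheaply with a move:

```cpp
#include <iostream>
#include <string>
#include <string_view>
#include <utility>

// Minimal stand-in for the new StringSink: the buffer is a value member,
// so callers write sink.s directly instead of dereferencing *sink.s.
struct ToyStringSink
{
    std::string s;
    void operator()(std::string_view data) { s.append(data); }
};

int main()
{
    ToyStringSink sink;
    sink("hello ");
    sink("world");
    std::string result = std::move(sink.s);   // take the buffer without copying
    std::cout << result << "\n";
}
```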
} }; @@ -468,10 +468,12 @@ static void performOp(TunnelLogger * logger, ref store, dontCheckSigs = false; logger->startWork(); - FramedSource source(from); - store->addMultipleToStore(source, - RepairFlag{repair}, - dontCheckSigs ? NoCheckSigs : CheckSigs); + { + FramedSource source(from); + store->addMultipleToStore(source, + RepairFlag{repair}, + dontCheckSigs ? NoCheckSigs : CheckSigs); + } logger->stopWork(); break; } @@ -852,14 +854,14 @@ static void performOp(TunnelLogger * logger, ref store, else { std::unique_ptr source; + StringSink saved; if (GET_PROTOCOL_MINOR(clientVersion) >= 21) source = std::make_unique(from, to); else { - StringSink saved; TeeSource tee { from, saved }; ParseSink ether; parseDump(ether, tee); - source = std::make_unique(std::move(*saved.s)); + source = std::make_unique(saved.s); } logger->startWork(); @@ -920,6 +922,22 @@ static void performOp(TunnelLogger * logger, ref store, break; } + case wopAddBuildLog: { + StorePath path{readString(from)}; + logger->startWork(); + if (!trusted) + throw Error("you are not privileged to add logs"); + { + FramedSource source(from); + StringSink sink; + source.drainInto(sink); + store->addBuildLog(path, sink.s); + } + logger->stopWork(); + to << 1; + break; + } + default: throw Error("invalid operation %1%", op); } diff --git a/src/libstore/export-import.cc b/src/libstore/export-import.cc index 02c839520..9875da909 100644 --- a/src/libstore/export-import.cc +++ b/src/libstore/export-import.cc @@ -75,20 +75,20 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs) auto references = worker_proto::read(*this, source, Phantom {}); auto deriver = readString(source); - auto narHash = hashString(htSHA256, *saved.s); + auto narHash = hashString(htSHA256, saved.s); ValidPathInfo info { path, narHash }; if (deriver != "") info.deriver = parseStorePath(deriver); info.references = references; - info.narSize = saved.s->size(); + info.narSize = saved.s.size(); // Ignore optional legacy signature. if (readInt(source) == 1) readString(source); // Can't use underlying source, which would have been exhausted - auto source = StringSource { *saved.s }; + auto source = StringSource(saved.s); addToStore(info, source, NoRepair, checkSigs); res.push_back(info.path); diff --git a/src/libstore/filetransfer.cc b/src/libstore/filetransfer.cc index 17980ab22..6b62311cf 100644 --- a/src/libstore/filetransfer.cc +++ b/src/libstore/filetransfer.cc @@ -106,7 +106,7 @@ struct curlFileTransfer : public FileTransfer this->request.dataCallback(data); } } else - this->result.data->append(data); + this->result.data.append(data); }) { if (!request.expectedETag.empty()) @@ -195,7 +195,7 @@ struct curlFileTransfer : public FileTransfer std::smatch match; if (std::regex_match(line, match, statusLine)) { result.etag = ""; - result.data = std::make_shared(); + result.data.clear(); result.bodySize = 0; statusMsg = trim(match[1]); acceptRanges = false; @@ -340,7 +340,7 @@ struct curlFileTransfer : public FileTransfer if (writtenToSink) curl_easy_setopt(req, CURLOPT_RESUME_FROM_LARGE, writtenToSink); - result.data = std::make_shared(); + result.data.clear(); result.bodySize = 0; } @@ -434,21 +434,21 @@ struct curlFileTransfer : public FileTransfer attempt++; - std::shared_ptr response; + std::optional response; if (errorSink) - response = errorSink->s; + response = std::move(errorSink->s); auto exc = code == CURLE_ABORTED_BY_CALLBACK && _isInterrupted - ? 
FileTransferError(Interrupted, response, "%s of '%s' was interrupted", request.verb(), request.uri) + ? FileTransferError(Interrupted, std::move(response), "%s of '%s' was interrupted", request.verb(), request.uri) : httpStatus != 0 ? FileTransferError(err, - response, + std::move(response), fmt("unable to %s '%s': HTTP error %d ('%s')", request.verb(), request.uri, httpStatus, statusMsg) + (code == CURLE_OK ? "" : fmt(" (curl error: %s)", curl_easy_strerror(code))) ) : FileTransferError(err, - response, + std::move(response), fmt("unable to %s '%s': %s (%d)", request.verb(), request.uri, curl_easy_strerror(code), code)); @@ -705,7 +705,7 @@ struct curlFileTransfer : public FileTransfer FileTransferResult res; if (!s3Res.data) throw FileTransferError(NotFound, nullptr, "S3 object '%s' does not exist", request.uri); - res.data = s3Res.data; + res.data = std::move(*s3Res.data); callback(std::move(res)); #else throw nix::Error("cannot download '%s' because Nix is not built with S3 support", request.uri); @@ -859,7 +859,7 @@ void FileTransfer::download(FileTransferRequest && request, Sink & sink) } template -FileTransferError::FileTransferError(FileTransfer::Error error, std::shared_ptr response, const Args & ... args) +FileTransferError::FileTransferError(FileTransfer::Error error, std::optional response, const Args & ... args) : Error(args...), error(error), response(response) { const auto hf = hintfmt(args...); diff --git a/src/libstore/filetransfer.hh b/src/libstore/filetransfer.hh index 45d9ccf89..3e61b23b1 100644 --- a/src/libstore/filetransfer.hh +++ b/src/libstore/filetransfer.hh @@ -59,7 +59,7 @@ struct FileTransferRequest unsigned int baseRetryTimeMs = 250; ActivityId parentAct; bool decompress = true; - std::shared_ptr data; + std::optional data; std::string mimeType; std::function dataCallback; @@ -77,7 +77,7 @@ struct FileTransferResult bool cached = false; std::string etag; std::string effectiveUri; - std::shared_ptr data; + std::string data; uint64_t bodySize = 0; }; @@ -119,10 +119,10 @@ class FileTransferError : public Error { public: FileTransfer::Error error; - std::shared_ptr response; // intentionally optional + std::optional response; // intentionally optional template - FileTransferError(FileTransfer::Error error, std::shared_ptr response, const Args & ... args); + FileTransferError(FileTransfer::Error error, std::optional response, const Args & ... args); virtual const char* sname() const override { return "FileTransferError"; } }; diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index 1911ec855..f65893b10 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -966,6 +966,13 @@ public: Setting acceptFlakeConfig{this, false, "accept-flake-config", "Whether to accept nix configuration from a flake without prompting."}; + + Setting commitLockFileSummary{ + this, "", "commit-lockfile-summary", + R"( + The commit summary to use when commiting changed flake lock files. If + empty, the summary is generated based on the action performed. 
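This setting feeds the `lockFlake()` change earlier in the patch: a non-empty `commit-lockfile-summary` replaces the generated first line of the commit message, otherwise the summary is derived from whether `flake.lock` already existed. A standalone model of that selection logic:

```cpp
#include <iostream>
#include <string>

// Mirror of the commit-message logic added to lockFlake(): an explicit
// commit-lockfile-summary wins; an empty setting falls back to
// "<relPath>: Update" or "<relPath>: Add".
static std::string commitSummary(const std::string & configured,
    const std::string & relPath, bool lockFileExists)
{
    if (!configured.empty()) return configured;
    return relPath + ": " + (lockFileExists ? "Update" : "Add");
}

int main()
{
    std::cout << commitSummary("", "flake.lock", true) << "\n";               // flake.lock: Update
    std::cout << commitSummary("chore: bump flake inputs", "flake.lock", true) << "\n";
}
```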
+ )"}; }; diff --git a/src/libstore/http-binary-cache-store.cc b/src/libstore/http-binary-cache-store.cc index 605ec4b28..3cb5efdbf 100644 --- a/src/libstore/http-binary-cache-store.cc +++ b/src/libstore/http-binary-cache-store.cc @@ -126,7 +126,7 @@ protected: const std::string & mimeType) override { auto req = makeRequest(path); - req.data = std::make_shared(StreamToSourceAdapter(istream).drain()); + req.data = StreamToSourceAdapter(istream).drain(); req.mimeType = mimeType; try { getFileTransfer()->upload(req); @@ -159,7 +159,7 @@ protected: } void getFile(const std::string & path, - Callback> callback) noexcept override + Callback> callback) noexcept override { checkEnabled(); @@ -170,10 +170,10 @@ protected: getFileTransfer()->enqueueFileTransfer(request, {[callbackPtr, this](std::future result) { try { - (*callbackPtr)(result.get().data); + (*callbackPtr)(std::move(result.get().data)); } catch (FileTransferError & e) { if (e.error == FileTransfer::NotFound || e.error == FileTransfer::Forbidden) - return (*callbackPtr)(std::shared_ptr()); + return (*callbackPtr)({}); maybeDisable(); callbackPtr->rethrow(); } catch (...) { diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index 4861d185e..f8b2662af 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -94,7 +94,7 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor conn->sshConn->in.close(); auto msg = conn->from.drain(); throw Error("'nix-store --serve' protocol mismatch from '%s', got '%s'", - host, chomp(*saved.s + msg)); + host, chomp(saved.s + msg)); } conn->remoteVersion = readInt(conn->from); if (GET_PROTOCOL_MAJOR(conn->remoteVersion) != 0x200) diff --git a/src/libstore/local-binary-cache-store.cc b/src/libstore/local-binary-cache-store.cc index f93111fce..f754770f9 100644 --- a/src/libstore/local-binary-cache-store.cc +++ b/src/libstore/local-binary-cache-store.cc @@ -96,6 +96,7 @@ void LocalBinaryCacheStore::init() createDirs(binaryCacheDir + "/" + realisationsPrefix); if (writeDebugInfo) createDirs(binaryCacheDir + "/debuginfo"); + createDirs(binaryCacheDir + "/log"); BinaryCacheStore::init(); } diff --git a/src/libstore/local-fs-store.cc b/src/libstore/local-fs-store.cc index 6de13c73a..c933251db 100644 --- a/src/libstore/local-fs-store.cc +++ b/src/libstore/local-fs-store.cc @@ -87,34 +87,32 @@ void LocalFSStore::narFromPath(const StorePath & path, Sink & sink) const string LocalFSStore::drvsLogDir = "drvs"; - - -std::shared_ptr LocalFSStore::getBuildLog(const StorePath & path_) +std::optional LocalFSStore::getBuildLog(const StorePath & path_) { auto path = path_; if (!path.isDerivation()) { try { auto info = queryPathInfo(path); - if (!info->deriver) return nullptr; + if (!info->deriver) return std::nullopt; path = *info->deriver; } catch (InvalidPath &) { - return nullptr; + return std::nullopt; } } - auto baseName = std::string(baseNameOf(printStorePath(path))); + auto baseName = path.to_string(); for (int j = 0; j < 2; j++) { Path logPath = j == 0 - ? fmt("%s/%s/%s/%s", logDir, drvsLogDir, string(baseName, 0, 2), string(baseName, 2)) + ? 
fmt("%s/%s/%s/%s", logDir, drvsLogDir, baseName.substr(0, 2), baseName.substr(2)) : fmt("%s/%s/%s", logDir, drvsLogDir, baseName); Path logBz2Path = logPath + ".bz2"; if (pathExists(logPath)) - return std::make_shared(readFile(logPath)); + return readFile(logPath); else if (pathExists(logBz2Path)) { try { @@ -124,7 +122,7 @@ std::shared_ptr LocalFSStore::getBuildLog(const StorePath & path_) } - return nullptr; + return std::nullopt; } } diff --git a/src/libstore/local-fs-store.hh b/src/libstore/local-fs-store.hh index f8b19d00d..e44b27cc2 100644 --- a/src/libstore/local-fs-store.hh +++ b/src/libstore/local-fs-store.hh @@ -45,7 +45,8 @@ public: return getRealStoreDir() + "/" + std::string(storePath, storeDir.size() + 1); } - std::shared_ptr getBuildLog(const StorePath & path) override; + std::optional getBuildLog(const StorePath & path) override; + }; } diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 9ebdfd6ed..1807940d8 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -9,6 +9,7 @@ #include "callback.hh" #include "topo-sort.hh" #include "finally.hh" +#include "compression.hh" #include #include @@ -1461,12 +1462,12 @@ StorePath LocalStore::addTextToStore(const string & name, const string & s, StringSink sink; dumpString(s, sink); - auto narHash = hashString(htSHA256, *sink.s); + auto narHash = hashString(htSHA256, sink.s); optimisePath(realPath, repair); ValidPathInfo info { dstPath, narHash }; - info.narSize = sink.s->size(); + info.narSize = sink.s.size(); info.references = references; info.ca = TextHash { .hash = hash }; registerValidPath(info); @@ -1898,4 +1899,24 @@ FixedOutputHash LocalStore::hashCAPath( }; } +void LocalStore::addBuildLog(const StorePath & drvPath, std::string_view log) +{ + assert(drvPath.isDerivation()); + + auto baseName = drvPath.to_string(); + + auto logPath = fmt("%s/%s/%s/%s.bz2", logDir, drvsLogDir, baseName.substr(0, 2), baseName.substr(2)); + + if (pathExists(logPath)) return; + + createDirs(dirOf(logPath)); + + auto tmpFile = fmt("%s.tmp.%d", logPath, getpid()); + + writeFile(tmpFile, compress("bzip2", log)); + + if (rename(tmpFile.c_str(), logPath.c_str()) != 0) + throw SysError("renaming '%1%' to '%2%'", tmpFile, logPath); +} + } // namespace nix diff --git a/src/libstore/local-store.hh b/src/libstore/local-store.hh index c4d7b80bd..6d867d778 100644 --- a/src/libstore/local-store.hh +++ b/src/libstore/local-store.hh @@ -280,6 +280,8 @@ private: const std::string_view pathHash ); + void addBuildLog(const StorePath & drvPath, std::string_view log) override; + friend struct LocalDerivationGoal; friend struct PathSubstitutionGoal; friend struct SubstitutionGoal; diff --git a/src/libstore/nar-accessor.cc b/src/libstore/nar-accessor.cc index 784ebb719..7d27d7667 100644 --- a/src/libstore/nar-accessor.cc +++ b/src/libstore/nar-accessor.cc @@ -28,7 +28,7 @@ struct NarMember struct NarAccessor : public FSAccessor { - std::shared_ptr nar; + std::optional nar; GetNarBytes getNarBytes; @@ -104,7 +104,7 @@ struct NarAccessor : public FSAccessor } }; - NarAccessor(ref nar) : nar(nar) + NarAccessor(std::string && _nar) : nar(_nar) { StringSource source(*nar); NarIndexer indexer(*this, source); @@ -224,9 +224,9 @@ struct NarAccessor : public FSAccessor } }; -ref makeNarAccessor(ref nar) +ref makeNarAccessor(std::string && nar) { - return make_ref(nar); + return make_ref(std::move(nar)); } ref makeNarAccessor(Source & source) diff --git a/src/libstore/nar-accessor.hh b/src/libstore/nar-accessor.hh index 
8af1272f6..c2241a04c 100644 --- a/src/libstore/nar-accessor.hh +++ b/src/libstore/nar-accessor.hh @@ -10,7 +10,7 @@ struct Source; /* Return an object that provides access to the contents of a NAR file. */ -ref makeNarAccessor(ref nar); +ref makeNarAccessor(std::string && nar); ref makeNarAccessor(Source & source); diff --git a/src/libstore/realisation.cc b/src/libstore/realisation.cc index f871e6437..d63ec5ea2 100644 --- a/src/libstore/realisation.cc +++ b/src/libstore/realisation.cc @@ -78,7 +78,7 @@ Realisation Realisation::fromJSON( auto fieldIterator = json.find(fieldName); if (fieldIterator == json.end()) return std::nullopt; - return *fieldIterator; + return {*fieldIterator}; }; auto getField = [&](std::string fieldName) -> std::string { if (auto field = getOptionalField(fieldName)) diff --git a/src/libstore/remote-fs-accessor.cc b/src/libstore/remote-fs-accessor.cc index f43456f0b..0ce335646 100644 --- a/src/libstore/remote-fs-accessor.cc +++ b/src/libstore/remote-fs-accessor.cc @@ -22,9 +22,18 @@ Path RemoteFSAccessor::makeCacheFile(std::string_view hashPart, const std::strin return fmt("%s/%s.%s", cacheDir, hashPart, ext); } -void RemoteFSAccessor::addToCache(std::string_view hashPart, const std::string & nar, - ref narAccessor) +ref RemoteFSAccessor::addToCache(std::string_view hashPart, std::string && nar) { + if (cacheDir != "") { + try { + /* FIXME: do this asynchronously. */ + writeFile(makeCacheFile(hashPart, "nar"), nar); + } catch (...) { + ignoreException(); + } + } + + auto narAccessor = makeNarAccessor(std::move(nar)); nars.emplace(hashPart, narAccessor); if (cacheDir != "") { @@ -33,14 +42,12 @@ void RemoteFSAccessor::addToCache(std::string_view hashPart, const std::string & JSONPlaceholder jsonRoot(str); listNar(jsonRoot, narAccessor, "", true); writeFile(makeCacheFile(hashPart, "ls"), str.str()); - - /* FIXME: do this asynchronously. */ - writeFile(makeCacheFile(hashPart, "nar"), nar); - } catch (...) 
{ ignoreException(); } } + + return narAccessor; } std::pair, Path> RemoteFSAccessor::fetch(const Path & path_, bool requireValidPath) @@ -55,7 +62,6 @@ std::pair, Path> RemoteFSAccessor::fetch(const Path & path_, boo auto i = nars.find(std::string(storePath.hashPart())); if (i != nars.end()) return {i->second, restPath}; - StringSink sink; std::string listing; Path cacheFile; @@ -86,19 +92,15 @@ std::pair, Path> RemoteFSAccessor::fetch(const Path & path_, boo } catch (SysError &) { } try { - *sink.s = nix::readFile(cacheFile); - - auto narAccessor = makeNarAccessor(sink.s); + auto narAccessor = makeNarAccessor(nix::readFile(cacheFile)); nars.emplace(storePath.hashPart(), narAccessor); return {narAccessor, restPath}; - } catch (SysError &) { } } + StringSink sink; store->narFromPath(storePath, sink); - auto narAccessor = makeNarAccessor(sink.s); - addToCache(storePath.hashPart(), *sink.s, narAccessor); - return {narAccessor, restPath}; + return {addToCache(storePath.hashPart(), std::move(sink.s)), restPath}; } FSAccessor::Stat RemoteFSAccessor::stat(const Path & path) diff --git a/src/libstore/remote-fs-accessor.hh b/src/libstore/remote-fs-accessor.hh index 594852d0e..99f5544ef 100644 --- a/src/libstore/remote-fs-accessor.hh +++ b/src/libstore/remote-fs-accessor.hh @@ -20,8 +20,7 @@ class RemoteFSAccessor : public FSAccessor Path makeCacheFile(std::string_view hashPart, const std::string & ext); - void addToCache(std::string_view hashPart, const std::string & nar, - ref narAccessor); + ref addToCache(std::string_view hashPart, std::string && nar); public: diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index 57cc260b0..aac2965e0 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -172,7 +172,7 @@ void RemoteStore::initConnection(Connection & conn) it. 
*/ conn.closeWrite(); auto msg = conn.from.drain(); - throw Error("protocol mismatch, got '%s'", chomp(*saved.s + msg)); + throw Error("protocol mismatch, got '%s'", chomp(saved.s + msg)); } conn.from >> conn.daemonVersion; @@ -908,6 +908,18 @@ void RemoteStore::queryMissing(const std::vector & targets, } +void RemoteStore::addBuildLog(const StorePath & drvPath, std::string_view log) +{ + auto conn(getConnection()); + conn->to << wopAddBuildLog << drvPath.to_string(); + StringSource source(log); + conn.withFramedSink([&](Sink & sink) { + source.drainInto(sink); + }); + readInt(conn->from); +} + + void RemoteStore::connect() { auto conn(getConnection()); diff --git a/src/libstore/remote-store.hh b/src/libstore/remote-store.hh index 0fd67f371..4754ff45a 100644 --- a/src/libstore/remote-store.hh +++ b/src/libstore/remote-store.hh @@ -116,6 +116,8 @@ public: StorePathSet & willBuild, StorePathSet & willSubstitute, StorePathSet & unknown, uint64_t & downloadSize, uint64_t & narSize) override; + void addBuildLog(const StorePath & drvPath, std::string_view log) override; + void connect() override; unsigned int getProtocol() override; diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index 7accad7f4..a024e971d 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -385,7 +385,7 @@ struct S3BinaryCacheStoreImpl : virtual S3BinaryCacheStoreConfig, public virtual auto compress = [&](std::string compression) { auto compressed = nix::compress(compression, StreamToSourceAdapter(istream).drain()); - return std::make_shared(std::move(*compressed)); + return std::make_shared(std::move(compressed)); }; if (narinfoCompression != "" && hasSuffix(path, ".narinfo")) diff --git a/src/libstore/s3.hh b/src/libstore/s3.hh index 2042bffcf..3f55c74db 100644 --- a/src/libstore/s3.hh +++ b/src/libstore/s3.hh @@ -4,6 +4,8 @@ #include "ref.hh" +#include + namespace Aws { namespace Client { class ClientConfiguration; } } namespace Aws { namespace S3 { class S3Client; } } @@ -20,7 +22,7 @@ struct S3Helper struct FileTransferResult { - std::shared_ptr data; + std::optional data; unsigned int durationMs; }; diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index aab4ce94c..84767e917 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -1109,6 +1109,21 @@ void copyClosure( copyPaths(srcStore, dstStore, closure, repair, checkSigs, substitute); } +void copyClosure( + Store & srcStore, + Store & dstStore, + const StorePathSet & storePaths, + RepairFlag repair, + CheckSigsFlag checkSigs, + SubstituteFlag substitute) +{ + if (&srcStore == &dstStore) return; + + StorePathSet closure; + srcStore.computeFSClosure(storePaths, closure); + copyPaths(srcStore, dstStore, closure, repair, checkSigs, substitute); +} + std::optional decodeValidPathInfo(const Store & store, std::istream & str, std::optional hashGiven) { std::string path; diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index 3dd446f23..8306509f3 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -724,8 +724,11 @@ public: /* Return the build log of the specified store path, if available, or null otherwise. 
*/ - virtual std::shared_ptr getBuildLog(const StorePath & path) - { return nullptr; } + virtual std::optional getBuildLog(const StorePath & path) + { return std::nullopt; } + + virtual void addBuildLog(const StorePath & path, std::string_view log) + { unsupported("addBuildLog"); } /* Hack to allow long-running processes like hydra-queue-runner to occasionally flush their path info cache. */ @@ -812,6 +815,13 @@ void copyClosure( CheckSigsFlag checkSigs = CheckSigs, SubstituteFlag substitute = NoSubstitute); +void copyClosure( + Store & srcStore, Store & dstStore, + const StorePathSet & paths, + RepairFlag repair = NoRepair, + CheckSigsFlag checkSigs = CheckSigs, + SubstituteFlag substitute = NoSubstitute); + /* Remove the temporary roots file for this process. Any temporary root becomes garbage after this point unless it has been registered as a (permanent) root. */ diff --git a/src/libstore/worker-protocol.hh b/src/libstore/worker-protocol.hh index 93cf546d2..ecf42a5d0 100644 --- a/src/libstore/worker-protocol.hh +++ b/src/libstore/worker-protocol.hh @@ -56,6 +56,7 @@ typedef enum { wopRegisterDrvOutput = 42, wopQueryRealisation = 43, wopAddMultipleToStore = 44, + wopAddBuildLog = 45, } WorkerOp; diff --git a/src/libutil/compression.cc b/src/libutil/compression.cc index f80ca664c..89180e7a7 100644 --- a/src/libutil/compression.cc +++ b/src/libutil/compression.cc @@ -190,13 +190,13 @@ struct BrotliDecompressionSink : ChunkedCompressionSink } }; -ref decompress(const std::string & method, const std::string & in) +std::string decompress(const std::string & method, std::string_view in) { StringSink ssink; auto sink = makeDecompressionSink(method, ssink); (*sink)(in); sink->finish(); - return ssink.s; + return std::move(ssink.s); } std::unique_ptr makeDecompressionSink(const std::string & method, Sink & nextSink) @@ -281,13 +281,13 @@ ref makeCompressionSink(const std::string & method, Sink & next throw UnknownCompressionMethod("unknown compression method '%s'", method); } -ref compress(const std::string & method, const std::string & in, const bool parallel, int level) +std::string compress(const std::string & method, std::string_view in, const bool parallel, int level) { StringSink ssink; auto sink = makeCompressionSink(method, ssink, parallel, level); (*sink)(in); sink->finish(); - return ssink.s; + return std::move(ssink.s); } } diff --git a/src/libutil/compression.hh b/src/libutil/compression.hh index 9b1e4a9d4..c470b82a5 100644 --- a/src/libutil/compression.hh +++ b/src/libutil/compression.hh @@ -15,11 +15,11 @@ struct CompressionSink : BufferedSink, FinishSink using FinishSink::finish; }; -ref decompress(const std::string & method, const std::string & in); +std::string decompress(const std::string & method, std::string_view in); std::unique_ptr makeDecompressionSink(const std::string & method, Sink & nextSink); -ref compress(const std::string & method, const std::string & in, const bool parallel = false, int level = -1); +std::string compress(const std::string & method, std::string_view in, const bool parallel = false, int level = -1); ref makeCompressionSink(const std::string & method, Sink & nextSink, const bool parallel = false, int level = -1); diff --git a/src/libutil/error.hh b/src/libutil/error.hh index ff58d3e00..6fe5e4857 100644 --- a/src/libutil/error.hh +++ b/src/libutil/error.hh @@ -137,7 +137,7 @@ public: { } template - BaseError(const std::string & fs, const Args & ... args) + explicit BaseError(const std::string & fs, const Args & ... 
args) : err { .level = lvlError, .msg = hintfmt(fs, args...) } { } diff --git a/src/libutil/json.cc b/src/libutil/json.cc index 01331947e..3a981376f 100644 --- a/src/libutil/json.cc +++ b/src/libutil/json.cc @@ -7,16 +7,38 @@ namespace nix { void toJSON(std::ostream & str, const char * start, const char * end) { - str << '"'; - for (auto i = start; i != end; i++) - if (*i == '\"' || *i == '\\') str << '\\' << *i; - else if (*i == '\n') str << "\\n"; - else if (*i == '\r') str << "\\r"; - else if (*i == '\t') str << "\\t"; - else if (*i >= 0 && *i < 32) - str << "\\u" << std::setfill('0') << std::setw(4) << std::hex << (uint16_t) *i << std::dec; - else str << *i; - str << '"'; + constexpr size_t BUF_SIZE = 4096; + char buf[BUF_SIZE + 7]; // BUF_SIZE + largest single sequence of puts + size_t bufPos = 0; + + const auto flush = [&] { + str.write(buf, bufPos); + bufPos = 0; + }; + const auto put = [&] (char c) { + buf[bufPos++] = c; + }; + + put('"'); + for (auto i = start; i != end; i++) { + if (bufPos >= BUF_SIZE) flush(); + if (*i == '\"' || *i == '\\') { put('\\'); put(*i); } + else if (*i == '\n') { put('\\'); put('n'); } + else if (*i == '\r') { put('\\'); put('r'); } + else if (*i == '\t') { put('\\'); put('t'); } + else if (*i >= 0 && *i < 32) { + const char hex[17] = "0123456789abcdef"; + put('\\'); + put('u'); + put(hex[(uint16_t(*i) >> 12) & 0xf]); + put(hex[(uint16_t(*i) >> 8) & 0xf]); + put(hex[(uint16_t(*i) >> 4) & 0xf]); + put(hex[(uint16_t(*i) >> 0) & 0xf]); + } + else put(*i); + } + put('"'); + flush(); } void toJSON(std::ostream & str, const char * s) diff --git a/src/libutil/serialise.cc b/src/libutil/serialise.cc index 16f3476c2..fe703de06 100644 --- a/src/libutil/serialise.cc +++ b/src/libutil/serialise.cc @@ -110,7 +110,7 @@ std::string Source::drain() { StringSink s; drainInto(s); - return *s.s; + return std::move(s.s); } @@ -325,7 +325,7 @@ void writeString(std::string_view data, Sink & sink) } -Sink & operator << (Sink & sink, const string & s) +Sink & operator << (Sink & sink, std::string_view s) { writeString(s, sink); return sink; @@ -450,11 +450,11 @@ Error readError(Source & source) void StringSink::operator () (std::string_view data) { static bool warned = false; - if (!warned && s->size() > threshold) { + if (!warned && s.size() > threshold) { warnLargeDump(); warned = true; } - s->append(data); + s.append(data); } size_t ChainSource::read(char * data, size_t len) diff --git a/src/libutil/serialise.hh b/src/libutil/serialise.hh index 0fe6e8332..fdd35aa00 100644 --- a/src/libutil/serialise.hh +++ b/src/libutil/serialise.hh @@ -154,12 +154,13 @@ private: /* A sink that writes data to a string. */ struct StringSink : Sink { - ref s; - StringSink() : s(make_ref()) { }; - explicit StringSink(const size_t reservedSize) : s(make_ref()) { - s->reserve(reservedSize); + std::string s; + StringSink() { } + explicit StringSink(const size_t reservedSize) + { + s.reserve(reservedSize); }; - StringSink(ref s) : s(s) { }; + StringSink(std::string && s) : s(std::move(s)) { }; void operator () (std::string_view data) override; }; @@ -167,9 +168,9 @@ struct StringSink : Sink /* A source that reads data from a string. 
*/ struct StringSource : Source { - const string & s; + std::string_view s; size_t pos; - StringSource(const string & _s) : s(_s), pos(0) { } + StringSource(std::string_view s) : s(s), pos(0) { } size_t read(char * data, size_t len) override; }; @@ -317,10 +318,10 @@ inline Sink & operator << (Sink & sink, uint64_t n) return sink; } -Sink & operator << (Sink & sink, const string & s); +Sink & operator << (Sink & in, const Error & ex); +Sink & operator << (Sink & sink, std::string_view s); Sink & operator << (Sink & sink, const Strings & s); Sink & operator << (Sink & sink, const StringSet & s); -Sink & operator << (Sink & in, const Error & ex); MakeError(SerialisationError, Error); diff --git a/src/libutil/tests/compression.cc b/src/libutil/tests/compression.cc index 2efa3266b..bbbf3500f 100644 --- a/src/libutil/tests/compression.cc +++ b/src/libutil/tests/compression.cc @@ -12,17 +12,17 @@ namespace nix { } TEST(compress, noneMethodDoesNothingToTheInput) { - ref o = compress("none", "this-is-a-test"); + auto o = compress("none", "this-is-a-test"); - ASSERT_EQ(*o, "this-is-a-test"); + ASSERT_EQ(o, "this-is-a-test"); } TEST(decompress, decompressNoneCompressed) { auto method = "none"; auto str = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf"; - ref o = decompress(method, str); + auto o = decompress(method, str); - ASSERT_EQ(*o, str); + ASSERT_EQ(o, str); } TEST(decompress, decompressEmptyCompressed) { @@ -30,33 +30,33 @@ namespace nix { // (Content-Encoding == ""). auto method = ""; auto str = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf"; - ref o = decompress(method, str); + auto o = decompress(method, str); - ASSERT_EQ(*o, str); + ASSERT_EQ(o, str); } TEST(decompress, decompressXzCompressed) { auto method = "xz"; auto str = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf"; - ref o = decompress(method, *compress(method, str)); + auto o = decompress(method, compress(method, str)); - ASSERT_EQ(*o, str); + ASSERT_EQ(o, str); } TEST(decompress, decompressBzip2Compressed) { auto method = "bzip2"; auto str = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf"; - ref o = decompress(method, *compress(method, str)); + auto o = decompress(method, compress(method, str)); - ASSERT_EQ(*o, str); + ASSERT_EQ(o, str); } TEST(decompress, decompressBrCompressed) { auto method = "br"; auto str = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf"; - ref o = decompress(method, *compress(method, str)); + auto o = decompress(method, compress(method, str)); - ASSERT_EQ(*o, str); + ASSERT_EQ(o, str); } TEST(decompress, decompressInvalidInputThrowsCompressionError) { @@ -77,7 +77,7 @@ namespace nix { (*sink)(inputString); sink->finish(); - ASSERT_STREQ((*strSink.s).c_str(), inputString); + ASSERT_STREQ(strSink.s.c_str(), inputString); } TEST(makeCompressionSink, compressAndDecompress) { @@ -90,7 +90,7 @@ namespace nix { sink->finish(); decompressionSink->finish(); - ASSERT_STREQ((*strSink.s).c_str(), inputString); + ASSERT_STREQ(strSink.s.c_str(), inputString); } } diff --git a/src/libutil/types.hh b/src/libutil/types.hh index 9c85fef62..8f72c926f 100644 --- a/src/libutil/types.hh +++ b/src/libutil/types.hh @@ -22,6 +22,7 @@ typedef std::map StringMap; /* Paths are just strings. 
*/ typedef string Path; +typedef std::string_view PathView; typedef list Paths; typedef set PathSet; diff --git a/src/libutil/util.cc b/src/libutil/util.cc index 9edd69c64..1f1f2c861 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -106,16 +106,16 @@ Path absPath(Path path, std::optional dir, bool resolveSymlinks) } -Path canonPath(const Path & path, bool resolveSymlinks) +Path canonPath(PathView path, bool resolveSymlinks) { assert(path != ""); string s; + s.reserve(256); if (path[0] != '/') throw Error("not an absolute path: '%1%'", path); - string::const_iterator i = path.begin(), end = path.end(); string temp; /* Count the number of times we follow a symlink and stop at some @@ -125,33 +125,37 @@ Path canonPath(const Path & path, bool resolveSymlinks) while (1) { /* Skip slashes. */ - while (i != end && *i == '/') i++; - if (i == end) break; + while (!path.empty() && path[0] == '/') path.remove_prefix(1); + if (path.empty()) break; /* Ignore `.'. */ - if (*i == '.' && (i + 1 == end || i[1] == '/')) - i++; + if (path == "." || path.substr(0, 2) == "./") + path.remove_prefix(1); /* If `..', delete the last component. */ - else if (*i == '.' && i + 1 < end && i[1] == '.' && - (i + 2 == end || i[2] == '/')) + else if (path == ".." || path.substr(0, 3) == "../") { if (!s.empty()) s.erase(s.rfind('/')); - i += 2; + path.remove_prefix(2); } /* Normal component; copy it. */ else { s += '/'; - while (i != end && *i != '/') s += *i++; + if (const auto slash = path.find('/'); slash == string::npos) { + s += path; + path = {}; + } else { + s += path.substr(0, slash); + path = path.substr(slash + 1); + } /* If s points to a symlink, resolve it and continue from there */ if (resolveSymlinks && isLink(s)) { if (++followCount >= maxFollow) throw Error("infinite symlink recursion in path '%1%'", path); - temp = readLink(s) + string(i, end); - i = temp.begin(); - end = temp.end(); + temp = concatStrings(readLink(s), path); + path = temp; if (!temp.empty() && temp[0] == '/') { s.clear(); /* restart for symlinks pointing to absolute path */ } else { @@ -164,7 +168,7 @@ Path canonPath(const Path & path, bool resolveSymlinks) } } - return s.empty() ? "/" : s; + return s.empty() ? "/" : std::move(s); } @@ -668,9 +672,11 @@ void writeFull(int fd, std::string_view s, bool allowInterrupts) string drainFD(int fd, bool block, const size_t reserveSize) { - StringSink sink(reserveSize); + // the parser needs two extra bytes to append terminating characters, other users will + // not care very much about the extra memory. 
+ StringSink sink(reserveSize + 2); drainFD(fd, sink, block); - return std::move(*sink.s); + return std::move(sink.s); } @@ -1055,7 +1061,7 @@ std::pair runProgram(RunOptions && options) status = e.status; } - return {status, std::move(*sink.s)}; + return {status, std::move(sink.s)}; } void runProgram2(const RunOptions & options) @@ -1229,23 +1235,22 @@ void _interrupted() ////////////////////////////////////////////////////////////////////// -template C tokenizeString(std::string_view s, const string & separators) +template C tokenizeString(std::string_view s, std::string_view separators) { C result; string::size_type pos = s.find_first_not_of(separators, 0); while (pos != string::npos) { string::size_type end = s.find_first_of(separators, pos + 1); if (end == string::npos) end = s.size(); - string token(s, pos, end - pos); - result.insert(result.end(), token); + result.insert(result.end(), string(s, pos, end - pos)); pos = s.find_first_not_of(separators, end); } return result; } -template Strings tokenizeString(std::string_view s, const string & separators); -template StringSet tokenizeString(std::string_view s, const string & separators); -template vector tokenizeString(std::string_view s, const string & separators); +template Strings tokenizeString(std::string_view s, std::string_view separators); +template StringSet tokenizeString(std::string_view s, std::string_view separators); +template vector tokenizeString(std::string_view s, std::string_view separators); string chomp(std::string_view s) diff --git a/src/libutil/util.hh b/src/libutil/util.hh index dfad33ed2..369c44f78 100644 --- a/src/libutil/util.hh +++ b/src/libutil/util.hh @@ -11,6 +11,8 @@ #include #include +#include + #include #include #include @@ -54,7 +56,7 @@ Path absPath(Path path, double or trailing slashes. Optionally resolves all symlink components such that each component of the resulting path is *not* a symbolic link. */ -Path canonPath(const Path & path, bool resolveSymlinks = false); +Path canonPath(PathView path, bool resolveSymlinks = false); /* Return the directory part of the given canonical path, i.e., everything before the final `/'. If the path is the root or an @@ -366,15 +368,19 @@ MakeError(FormatError, Error); /* String tokenizer. */ -template C tokenizeString(std::string_view s, const string & separators = " \t\n\r"); +template C tokenizeString(std::string_view s, std::string_view separators = " \t\n\r"); /* Concatenate the given strings with a separator between the elements. */ template -string concatStringsSep(const string & sep, const C & ss) +string concatStringsSep(const std::string_view sep, const C & ss) { + size_t size = 0; + // need a cast to string_view since this is also called with Symbols + for (const auto & s : ss) size += sep.size() + std::string_view(s).size(); string s; + s.reserve(size); for (auto & i : ss) { if (s.size() != 0) s += sep; s += i; @@ -382,6 +388,14 @@ string concatStringsSep(const string & sep, const C & ss) return s; } +template +auto concatStrings(Parts && ... parts) + -> std::enable_if_t<(... && std::is_convertible_v), string> +{ + std::string_view views[sizeof...(parts)] = { parts... }; + return concatStringsSep({}, views); +} + /* Add quotes around a collection of strings. */ template Strings quoteStrings(const C & c) @@ -419,21 +433,21 @@ bool statusOk(int status); /* Parse a string into an integer. 
*/ template -std::optional string2Int(const std::string & s) +std::optional string2Int(const std::string_view s) { if (s.substr(0, 1) == "-" && !std::numeric_limits::is_signed) return std::nullopt; - std::istringstream str(s); - N n; - str >> n; - if (str && str.get() == EOF) return n; - return std::nullopt; + try { + return boost::lexical_cast(s.data(), s.size()); + } catch (const boost::bad_lexical_cast &) { + return std::nullopt; + } } /* Like string2Int(), but support an optional suffix 'K', 'M', 'G' or 'T' denoting a binary unit prefix. */ template -N string2IntWithUnitPrefix(std::string s) +N string2IntWithUnitPrefix(std::string_view s) { N multiplier = 1; if (!s.empty()) { @@ -444,7 +458,7 @@ N string2IntWithUnitPrefix(std::string s) else if (u == 'G') multiplier = 1ULL << 30; else if (u == 'T') multiplier = 1ULL << 40; else throw UsageError("invalid unit specifier '%1%'", u); - s.resize(s.size() - 1); + s.remove_suffix(1); } } if (auto n = string2Int(s)) @@ -454,13 +468,13 @@ N string2IntWithUnitPrefix(std::string s) /* Parse a string into a float. */ template -std::optional string2Float(const string & s) +std::optional string2Float(const std::string_view s) { - std::istringstream str(s); - N n; - str >> n; - if (str && str.get() == EOF) return n; - return std::nullopt; + try { + return boost::lexical_cast(s.data(), s.size()); + } catch (const boost::bad_lexical_cast &) { + return std::nullopt; + } } diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc index b73eb0795..471a03ac7 100755 --- a/src/nix-build/nix-build.cc +++ b/src/nix-build/nix-build.cc @@ -292,7 +292,7 @@ static void main_nix_build(int argc, char * * argv) else for (auto i : left) { if (fromArgs) - exprs.push_back(state->parseExprFromString(i, absPath("."))); + exprs.push_back(state->parseExprFromString(std::move(i), absPath("."))); else { auto absolute = i; try { diff --git a/src/nix/add-to-store.cc b/src/nix/add-to-store.cc index 2ae042789..5168413d2 100644 --- a/src/nix/add-to-store.cc +++ b/src/nix/add-to-store.cc @@ -32,7 +32,7 @@ struct CmdAddToStore : MixDryRun, StoreCommand StringSink sink; dumpPath(path, sink); - auto narHash = hashString(htSHA256, *sink.s); + auto narHash = hashString(htSHA256, sink.s); Hash hash = narHash; if (ingestionMethod == FileIngestionMethod::Flat) { @@ -45,14 +45,14 @@ struct CmdAddToStore : MixDryRun, StoreCommand store->makeFixedOutputPath(ingestionMethod, hash, *namePart), narHash, }; - info.narSize = sink.s->size(); + info.narSize = sink.s.size(); info.ca = std::optional { FixedOutputHash { .method = ingestionMethod, .hash = hash, } }; if (!dryRun) { - auto source = StringSource { *sink.s }; + auto source = StringSource(sink.s); store->addToStore(info, source); } diff --git a/src/nix/app.cc b/src/nix/app.cc index 2fcf4752c..e104cc9c1 100644 --- a/src/nix/app.cc +++ b/src/nix/app.cc @@ -19,7 +19,7 @@ struct InstallableDerivedPath : Installable } - std::string what() override { return derivedPath.to_string(*store); } + std::string what() const override { return derivedPath.to_string(*store); } DerivedPaths toDerivedPaths() override { diff --git a/src/nix/cat.cc b/src/nix/cat.cc index e28ee3c50..6420a0f79 100644 --- a/src/nix/cat.cc +++ b/src/nix/cat.cc @@ -78,7 +78,7 @@ struct CmdCatNar : StoreCommand, MixCat void run(ref store) override { - cat(makeNarAccessor(make_ref(readFile(narPath)))); + cat(makeNarAccessor(readFile(narPath))); } }; diff --git a/src/nix/copy.cc b/src/nix/copy.cc index 197c85316..8730a9a5c 100644 --- a/src/nix/copy.cc +++ b/src/nix/copy.cc @@ 
-1,17 +1,11 @@ #include "command.hh" #include "shared.hh" #include "store-api.hh" -#include "sync.hh" -#include "thread-pool.hh" - -#include using namespace nix; -struct CmdCopy : BuiltPathsCommand +struct CmdCopy : virtual CopyCommand, virtual BuiltPathsCommand { - std::string srcUri, dstUri; - CheckSigsFlag checkSigs = CheckSigs; SubstituteFlag substitute = NoSubstitute; @@ -21,20 +15,6 @@ struct CmdCopy : BuiltPathsCommand CmdCopy() : BuiltPathsCommand(true) { - addFlag({ - .longName = "from", - .description = "URL of the source Nix store.", - .labels = {"store-uri"}, - .handler = {&srcUri}, - }); - - addFlag({ - .longName = "to", - .description = "URL of the destination Nix store.", - .labels = {"store-uri"}, - .handler = {&dstUri}, - }); - addFlag({ .longName = "no-check-sigs", .description = "Do not require that paths are signed by trusted keys.", @@ -65,22 +45,9 @@ struct CmdCopy : BuiltPathsCommand Category category() override { return catSecondary; } - ref createStore() override - { - return srcUri.empty() ? StoreCommand::createStore() : openStore(srcUri); - } - - void run(ref store) override - { - if (srcUri.empty() && dstUri.empty()) - throw UsageError("you must pass '--from' and/or '--to'"); - - BuiltPathsCommand::run(store); - } - void run(ref srcStore, BuiltPaths && paths) override { - ref dstStore = dstUri.empty() ? openStore() : openStore(dstUri); + auto dstStore = getDstStore(); RealisedPath::Set stuffToCopy; diff --git a/src/nix/flake.md b/src/nix/flake.md index 3b5812a0a..a8436bcaa 100644 --- a/src/nix/flake.md +++ b/src/nix/flake.md @@ -137,15 +137,6 @@ Currently the `type` attribute can be one of the following: *path* must be a directory in the file system containing a file named `flake.nix`. - If the directory or any of its parents is a Git repository, then - this is essentially equivalent to `git+file://` (see below), - except that the `dir` parameter is derived automatically. For - example, if `/foo/bar` is a Git repository, then the flake reference - `/foo/bar/flake` is equivalent to `/foo/bar?dir=flake`. - - If the directory is not inside a Git repository, then the flake - contents is the entire contents of *path*. - *path* generally must be an absolute path. However, on the command line, it can be a relative path (e.g. `.` or `./foo`) which is interpreted as relative to the current directory. 
In this case, it diff --git a/src/nix/ls.cc b/src/nix/ls.cc index c1dc9a95b..07554994b 100644 --- a/src/nix/ls.cc +++ b/src/nix/ls.cc @@ -157,7 +157,7 @@ struct CmdLsNar : Command, MixLs void run() override { - list(makeNarAccessor(make_ref(readFile(narPath)))); + list(makeNarAccessor(readFile(narPath))); } }; diff --git a/src/nix/main.cc b/src/nix/main.cc index fe7469be4..b923f2535 100644 --- a/src/nix/main.cc +++ b/src/nix/main.cc @@ -59,7 +59,6 @@ struct HelpRequested { }; struct NixArgs : virtual MultiCommand, virtual MixCommonArgs { - bool printBuildLogs = false; bool useNet = true; bool refresh = false; bool showVersion = false; @@ -270,11 +269,15 @@ void mainWrapped(int argc, char * * argv) if (legacy) return legacy(argc, argv); } - verbosity = lvlNotice; - settings.verboseBuild = false; evalSettings.pureEval = true; setLogFormat("bar"); + settings.verboseBuild = false; + if (isatty(STDERR_FILENO)) { + verbosity = lvlNotice; + } else { + verbosity = lvlInfo; + } Finally f([] { logger->stop(); }); diff --git a/src/nix/make-content-addressable.cc b/src/nix/make-content-addressable.cc index 12f303a10..2e75a3b61 100644 --- a/src/nix/make-content-addressable.cc +++ b/src/nix/make-content-addressable.cc @@ -61,10 +61,10 @@ struct CmdMakeContentAddressable : StorePathsCommand, MixJSON } } - *sink.s = rewriteStrings(*sink.s, rewrites); + sink.s = rewriteStrings(sink.s, rewrites); HashModuloSink hashModuloSink(htSHA256, oldHashPart); - hashModuloSink(*sink.s); + hashModuloSink(sink.s); auto narHash = hashModuloSink.finish().first; @@ -74,7 +74,7 @@ struct CmdMakeContentAddressable : StorePathsCommand, MixJSON }; info.references = std::move(references); if (hasSelfReference) info.references.insert(info.path); - info.narSize = sink.s->size(); + info.narSize = sink.s.size(); info.ca = FixedOutputHash { .method = FileIngestionMethod::Recursive, .hash = info.narHash, @@ -85,7 +85,7 @@ struct CmdMakeContentAddressable : StorePathsCommand, MixJSON auto source = sinkToSource([&](Sink & nextSink) { RewritingSink rsink2(oldHashPart, std::string(info.path.hashPart()), nextSink); - rsink2(*sink.s); + rsink2(sink.s); rsink2.flush(); }); diff --git a/src/nix/nix.md b/src/nix/nix.md index d10de7c01..2f54c5e2c 100644 --- a/src/nix/nix.md +++ b/src/nix/nix.md @@ -57,9 +57,47 @@ the Nix store. Here are the recognised types of installables: These have the form *flakeref*[`#`*attrpath*], where *flakeref* is a flake reference and *attrpath* is an optional attribute path. For more information on flakes, see [the `nix flake` manual - page](./nix3-flake.md). Flake references are most commonly a flake - identifier in the flake registry (e.g. `nixpkgs`) or a path - (e.g. `/path/to/my-flake` or `.`). + page](./nix3-flake.md). Flake references are most commonly a flake + identifier in the flake registry (e.g. `nixpkgs`), or a raw path + (e.g. `/path/to/my-flake` or `.` or `../foo`), or a full URL + (e.g. `github:nixos/nixpkgs` or `path:.`) + + When the flake reference is a raw path (a path without any URL + scheme), it is interpreted as a `path:` or `git+file:` url in the following + way: + + - If the path is within a Git repository, then the url will be of the form + `git+file://[GIT_REPO_ROOT]?dir=[RELATIVE_FLAKE_DIR_PATH]` + where `GIT_REPO_ROOT` is the path to the root of the git repository, + and `RELATIVE_FLAKE_DIR_PATH` is the path (relative to the directory + root) of the closest parent of the given path that contains a `flake.nix` within + the git repository. 
+ If no such directory exists, then Nix will error out. + + Note that the search will only include files indexed by git. In particular, files + which are matched by `.gitignore` or have never been `git add`-ed will not be + available in the flake. If this is undesirable, specify `path:` explicitly. + + For example, if `/foo/bar` is a git repository with the following structure: + ``` + . + └── baz + ├── blah + │  └── file.txt + └── flake.nix + ``` + + Then `/foo/bar/baz/blah` will resolve to `git+file:///foo/bar?dir=baz`. + + - If the supplied path is not a git repository, then the url will have the form + `path:FLAKE_DIR_PATH` where `FLAKE_DIR_PATH` is the closest parent + of the supplied path that contains a `flake.nix` file (within the same file-system). + If no such directory exists, then Nix will error out. + + For example, if `/foo/bar/flake.nix` exists, then `/foo/bar/baz/` will resolve to + `path:/foo/bar`. + + If *attrpath* is omitted, Nix tries some default values; for most subcommands, the default is `defaultPackage.`*system* diff --git a/src/nix/profile.cc index 96a20f673..9b7c999af 100644 --- a/src/nix/profile.cc +++ b/src/nix/profile.cc @@ -157,17 +157,17 @@ struct ProfileManifest StringSink sink; dumpPath(tempDir, sink); - auto narHash = hashString(htSHA256, *sink.s); + auto narHash = hashString(htSHA256, sink.s); ValidPathInfo info { store->makeFixedOutputPath(FileIngestionMethod::Recursive, narHash, "profile", references), narHash, }; info.references = std::move(references); - info.narSize = sink.s->size(); + info.narSize = sink.s.size(); info.ca = FixedOutputHash { .method = FileIngestionMethod::Recursive, .hash = info.narHash }; - auto source = StringSource { *sink.s }; + StringSource source(sink.s); store->addToStore(info, source); return std::move(info.path); diff --git a/src/nix/repl.cc index a62ad66c2..be9f96836 100644 --- a/src/nix/repl.cc +++ b/src/nix/repl.cc @@ -703,7 +703,7 @@ void NixRepl::addVarToScope(const Symbol & name, Value & v) Expr * NixRepl::parseString(string s) { - Expr * e = state->parseExprFromString(s, curDir, staticEnv); + Expr * e = state->parseExprFromString(std::move(s), curDir, staticEnv); return e; } diff --git a/src/nix/store-copy-log.cc b/src/nix/store-copy-log.cc new file mode 100644 index 000000000..079cd6b3e --- /dev/null +++ b/src/nix/store-copy-log.cc @@ -0,0 +1,46 @@ +#include "command.hh" +#include "shared.hh" +#include "store-api.hh" +#include "sync.hh" +#include "thread-pool.hh" + +#include + +using namespace nix; + +struct CmdCopyLog : virtual CopyCommand, virtual InstallablesCommand +{ + std::string description() override + { + return "copy build logs between Nix stores"; + } + + std::string doc() override + { + return + #include "store-copy-log.md" + ; + } + + Category category() override { return catUtility; } + + void run(ref srcStore) override + { + auto dstStore = getDstStore(); + + StorePathSet drvPaths; + + for (auto & i : installables) + for (auto & drvPath : i->toDrvPaths(getEvalStore())) + drvPaths.insert(drvPath); + + for (auto & drvPath : drvPaths) { + if (auto log = srcStore->getBuildLog(drvPath)) + dstStore->addBuildLog(drvPath, *log); + else + throw Error("build log for '%s' is not available", srcStore->printStorePath(drvPath)); + } + } +}; + +static auto rCmdCopyLog = registerCommand2({"store", "copy-log"}); diff --git a/src/nix/store-copy-log.md b/src/nix/store-copy-log.md new file mode 100644 index 000000000..19ae57079 --- /dev/null +++ b/src/nix/store-copy-log.md @@ -0,0
+1,33 @@ +R""( + +# Examples + +* To copy the build log of the `hello` package from + https://cache.nixos.org to the local store: + + ```console + # nix store copy-log --from https://cache.nixos.org --eval-store auto nixpkgs#hello + ``` + + You can verify that the log is available locally: + + ```console + # nix log --substituters '' nixpkgs#hello + ``` + + (The flag `--substituters ''` avoids querying + `https://cache.nixos.org` for the log.) + +* To copy the log for a specific store derivation via SSH: + + ```console + # nix store copy-log --to ssh-ng://machine /nix/store/ilgm50plpmcgjhcp33z6n4qbnpqfhxym-glibc-2.33-59.drv + ``` + +# Description + +`nix store copy-log` copies build logs between two Nix stores. The +source store is specified using `--from` and the destination using +`--to`. If one of these is omitted, it defaults to the local store. + +)"" diff --git a/src/nix/upgrade-nix.cc b/src/nix/upgrade-nix.cc index 9cd567896..17a5a77ee 100644 --- a/src/nix/upgrade-nix.cc +++ b/src/nix/upgrade-nix.cc @@ -140,7 +140,7 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand auto state = std::make_unique(Strings(), store); auto v = state->allocValue(); - state->eval(state->parseExprFromString(*res.data, "/no-such-path"), *v); + state->eval(state->parseExprFromString(res.data, "/no-such-path"), *v); Bindings & bindings(*state->allocBindings(0)); auto v2 = findAlongAttrPath(*state, settings.thisSystem, bindings, *v).first; diff --git a/src/nix/why-depends.cc b/src/nix/why-depends.cc index e27f56ef5..657df30d7 100644 --- a/src/nix/why-depends.cc +++ b/src/nix/why-depends.cc @@ -31,6 +31,7 @@ struct CmdWhyDepends : SourceExprCommand { std::string _package, _dependency; bool all = false; + bool precise = false; CmdWhyDepends() { @@ -56,6 +57,12 @@ struct CmdWhyDepends : SourceExprCommand .description = "Show all edges in the dependency graph leading from *package* to *dependency*, rather than just a shortest path.", .handler = {&all, true}, }); + + addFlag({ + .longName = "precise", + .description = "For each edge in the dependency graph, show the files in the parent that cause the dependency.", + .handler = {&precise, true}, + }); } std::string description() override @@ -158,11 +165,19 @@ struct CmdWhyDepends : SourceExprCommand auto pathS = store->printStorePath(node.path); assert(node.dist != inf); - logger->cout("%s%s%s%s" ANSI_NORMAL, - firstPad, - node.visited ? "\e[38;5;244m" : "", - firstPad != "" ? "→ " : "", - pathS); + if (precise) { + logger->cout("%s%s%s%s" ANSI_NORMAL, + firstPad, + node.visited ? "\e[38;5;244m" : "", + firstPad != "" ? "→ " : "", + pathS); + } else { + logger->cout("%s%s%s%s" ANSI_NORMAL, + firstPad, + node.visited ? "\e[38;5;244m" : "", + firstPad != "" ? treeLast : "", + pathS); + } if (node.path == dependencyPath && !all && packagePath != dependencyPath) @@ -237,9 +252,8 @@ struct CmdWhyDepends : SourceExprCommand // FIXME: should use scanForReferences(). - visitPath(pathS); + if (precise) visitPath(pathS); - RunPager pager; for (auto & ref : refs) { std::string hash(ref.second->path.hashPart()); @@ -259,6 +273,7 @@ struct CmdWhyDepends : SourceExprCommand } }; + RunPager pager; try { printNode(graph.at(packagePath), "", ""); } catch (BailOut & ) { } diff --git a/tests/binary-cache.sh b/tests/binary-cache.sh index d7bc1507b..2368884f7 100644 --- a/tests/binary-cache.sh +++ b/tests/binary-cache.sh @@ -14,6 +14,17 @@ outPath=$(nix-build dependencies.nix --no-out-link) nix copy --to file://$cacheDir $outPath +# Test copying build logs to the binary cache. 
+nix log --store file://$cacheDir $outPath 2>&1 | grep 'is not available' +nix store copy-log --to file://$cacheDir $outPath +nix log --store file://$cacheDir $outPath | grep FOO +rm -rf $TEST_ROOT/var/log/nix +nix log $outPath 2>&1 | grep 'is not available' +nix log --substituters file://$cacheDir $outPath | grep FOO + +# Test copying build logs from the binary cache. +nix store copy-log --from file://$cacheDir $(nix-store -qd $outPath) +nix log $outPath | grep FOO basicDownloadTests() { # No uploading tests bcause upload with force HTTP doesn't work. diff --git a/tests/dependencies.builder0.sh b/tests/dependencies.builder0.sh index c37bf909a..9b11576e0 100644 --- a/tests/dependencies.builder0.sh +++ b/tests/dependencies.builder0.sh @@ -4,7 +4,7 @@ mkdir $out echo $(cat $input1/foo)$(cat $input2/bar) > $out/foobar -ln -s $input2 $out/input-2 +ln -s $input2 $out/reference-to-input-2 # Self-reference. ln -s $out $out/self diff --git a/tests/dependencies.nix b/tests/dependencies.nix index e320d81c9..45aca1793 100644 --- a/tests/dependencies.nix +++ b/tests/dependencies.nix @@ -27,6 +27,8 @@ let { input1 = input1 + "/."; input2 = "${input2}/."; input1_drv = input1; + input2_drv = input2; + input0_drv = input0; meta.description = "Random test package"; }; diff --git a/tests/flake-searching.sh b/tests/flake-searching.sh new file mode 100644 index 000000000..6558aff9a --- /dev/null +++ b/tests/flake-searching.sh @@ -0,0 +1,45 @@ +source common.sh + +clearStore + +cp ./simple.nix ./simple.builder.sh ./config.nix $TEST_HOME +cd $TEST_HOME +mkdir -p foo/subdir +echo '{ outputs = _: {}; }' > foo/flake.nix +cat < flake.nix +{ + inputs.foo.url = "$PWD/foo"; + outputs = a: { + defaultPackage.$system = import ./simple.nix; + packages.$system.test = import ./simple.nix; + }; +} +EOF +mkdir subdir +pushd subdir + +success=("" . .# .#test ../subdir ../subdir#test "$PWD") +failure=("path:$PWD") + +for i in "${success[@]}"; do + nix build $i || fail "flake should be found by searching up directories" +done + +for i in "${failure[@]}"; do + ! nix build $i || fail "flake should not search up directories when using 'path:'" +done + +popd + +nix build --override-input foo . || fail "flake should search up directories when not an installable" + +sed "s,$PWD/foo,$PWD/foo/subdir,g" -i flake.nix +! nix build || fail "flake should not search upwards when part of inputs" + +pushd subdir +git init +for i in "${success[@]}" "${failure[@]}"; do + ! nix build $i || fail "flake should not search past a git repository" +done +rm -rf .git +popd diff --git a/tests/gc.sh b/tests/gc.sh index a736b63db..ad09a8b39 100644 --- a/tests/gc.sh +++ b/tests/gc.sh @@ -18,7 +18,7 @@ if nix-store --gc --print-dead | grep -E $outPath$; then false; fi nix-store --gc --print-dead -inUse=$(readLink $outPath/input-2) +inUse=$(readLink $outPath/reference-to-input-2) if nix-store --delete $inUse; then false; fi test -e $inUse @@ -35,7 +35,7 @@ nix-collect-garbage # Check that the root and its dependencies haven't been deleted. cat $outPath/foobar -cat $outPath/input-2/bar +cat $outPath/reference-to-input-2/bar # Check that the derivation has been GC'd. 
if test -e $drvPath; then false; fi diff --git a/tests/local.mk index d2a46f22b..93cf20d43 100644 --- a/tests/local.mk +++ b/tests/local.mk @@ -47,6 +47,7 @@ nix_tests = \ describe-stores.sh \ flakes.sh \ flake-local-settings.sh \ + flake-searching.sh \ build.sh \ repl.sh ca/repl.sh \ ca/build.sh \ @@ -60,7 +61,8 @@ nix_tests = \ ca/concurrent-builds.sh \ ca/nix-copy.sh \ eval-store.sh \ - readfile-context.sh + readfile-context.sh \ + why-depends.sh # parallel.sh ifeq ($(HAVE_LIBCPUID), 1) diff --git a/tests/why-depends.sh b/tests/why-depends.sh new file mode 100644 index 000000000..c12941e76 --- /dev/null +++ b/tests/why-depends.sh @@ -0,0 +1,21 @@ +source common.sh + +clearStore + +cp ./dependencies.nix ./dependencies.builder0.sh ./config.nix $TEST_HOME + +cd $TEST_HOME + +nix-build ./dependencies.nix -A input0_drv -o dep +nix-build ./dependencies.nix -o toplevel + +FAST_WHY_DEPENDS_OUTPUT=$(nix why-depends ./toplevel ./dep) +PRECISE_WHY_DEPENDS_OUTPUT=$(nix why-depends ./toplevel ./dep --precise) + +# Both outputs should show that `input-2` is in the dependency chain +echo "$FAST_WHY_DEPENDS_OUTPUT" | grep -q input-2 +echo "$PRECISE_WHY_DEPENDS_OUTPUT" | grep -q input-2 + +# But only the “precise” one should refer to `reference-to-input-2` +echo "$FAST_WHY_DEPENDS_OUTPUT" | (! grep -q reference-to-input-2) +echo "$PRECISE_WHY_DEPENDS_OUTPUT" | grep -q reference-to-input-2
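For reviewers who want to exercise the new `--precise` flag outside the test suite, a minimal session could look like the sketch below. The package names are only illustrative (any pair of installables where the first depends on the second will do), and the exact output layout is whatever your `nix why-depends` build prints; the point is simply that `--precise` additionally scans each store path and names the files that carry the reference, per the flag description added above.

```console
# Default behaviour: print only the shortest dependency chain.
$ nix why-depends nixpkgs#hello nixpkgs#glibc

# With the new flag: for each edge, also show the files in the
# referring path that embed the dependency's store hash.
$ nix why-depends nixpkgs#hello nixpkgs#glibc --precise
```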