Merge remote-tracking branch 'origin/master' into push-docker-image-to-docker-hub

This commit is contained in:
Rok Garbas 2022-01-21 14:31:17 +01:00
commit a078a645da
GPG key ID: A0E01EF44C27BF00
82 changed files with 941 additions and 372 deletions

View file

@ -284,6 +284,10 @@ The points of interest are:
function is called with the `localServer` argument set to `true` but
the `db4` argument set to `null`, then the evaluation fails.
Note that `->` is the [logical
implication](https://en.wikipedia.org/wiki/Truth_table#Logical_implication)
Boolean operation.
2. This is a more subtle condition: if Subversion is built with Apache
(`httpServer`) support, then the Expat library (an XML library) used
by Subversion should be the same as the one used by Apache. This is

View file

@ -276,6 +276,9 @@ more than 2800 commits from 195 contributors since release 2.3.
* Plugins can now register `nix` subcommands.
* The `--indirect` flag to `nix-store --add-root` has become a no-op.
`--add-root` will always generate indirect GC roots from now on.
## Incompatible changes
* The `nix` command is now marked as an experimental feature. This

View file

@ -1,8 +1,15 @@
# Release X.Y (202?-??-??)
* The Nix CLI now searches for a `flake.nix` up to the root of the current Git repository or a filesystem boundary, rather than just in the current directory
* The TOML parser used by `builtins.fromTOML` has been replaced by [a
more compliant one](https://github.com/ToruNiina/toml11).
* Added `:st`/`:show-trace` commands to nix repl, which are used to
set or toggle display of error traces.
* New builtin function `builtins.zipAttrsWith` with same functionality
as `lib.zipAttrsWith` from nixpkgs, but much more efficient.
* New command `nix store copy-log` to copy build logs from one store
to another.
* The `commit-lockfile-summary` option can be set to a non-empty string
to override the commit summary used when committing an updated lockfile.
This may be used in conjunction with the nixConfig attribute in
`flake.nix` to better conform to repository conventions.

View file

@ -21,6 +21,7 @@ let
cacert.out
findutils
iana-etc
git
];
users = {
@ -200,6 +201,8 @@ let
mkdir $out/tmp
mkdir -p $out/var/tmp
mkdir -p $out/etc/nix
cat $nixConfContentsPath > $out/etc/nix/nix.conf
@ -235,6 +238,7 @@ pkgs.dockerTools.buildLayeredImageWithNixDb {
'';
fakeRootCommands = ''
chmod 1777 tmp
chmod 1777 var/tmp
'';
config = {

View file

@ -299,6 +299,8 @@
propagatedBuildInputs = propagatedDeps;
disallowedReferences = [ boost ];
preConfigure =
''
# Copy libboost_context so we don't get all of Boost in our closure.
@ -310,6 +312,13 @@
chmod u+w $out/lib/*.so.*
patchelf --set-rpath $out/lib:${currentStdenv.cc.cc.lib}/lib $out/lib/libboost_thread.so.*
''}
${lib.optionalString currentStdenv.isDarwin ''
for LIB in $out/lib/*.dylib; do
chmod u+w $LIB
install_name_tool -id $LIB $LIB
done
install_name_tool -change ${boost}/lib/libboost_system.dylib $out/lib/libboost_system.dylib $out/lib/libboost_thread.dylib
''}
'';
configureFlags = configureFlags ++
@ -326,6 +335,12 @@
postInstall = ''
mkdir -p $doc/nix-support
echo "doc manual $doc/share/doc/nix/manual" >> $doc/nix-support/hydra-build-products
${lib.optionalString currentStdenv.isDarwin ''
install_name_tool \
-change ${boost}/lib/libboost_context.dylib \
$out/lib/libboost_context.dylib \
$out/lib/libnixutil.dylib
''}
'';
doInstallCheck = true;

View file

@ -54,6 +54,36 @@ void StoreCommand::run()
run(getStore());
}
CopyCommand::CopyCommand()
{
addFlag({
.longName = "from",
.description = "URL of the source Nix store.",
.labels = {"store-uri"},
.handler = {&srcUri},
});
addFlag({
.longName = "to",
.description = "URL of the destination Nix store.",
.labels = {"store-uri"},
.handler = {&dstUri},
});
}
ref<Store> CopyCommand::createStore()
{
return srcUri.empty() ? StoreCommand::createStore() : openStore(srcUri);
}
ref<Store> CopyCommand::getDstStore()
{
if (srcUri.empty() && dstUri.empty())
throw UsageError("you must pass '--from' and/or '--to'");
return dstUri.empty() ? openStore() : openStore(dstUri);
}
EvalCommand::EvalCommand()
{
}
@ -73,13 +103,16 @@ ref<Store> EvalCommand::getEvalStore()
ref<EvalState> EvalCommand::getEvalState()
{
if (!evalState) evalState =
#if HAVE_BOEHMGC
std::allocate_shared<EvalState>(traceable_allocator<EvalState>(),
#else
std::make_shared<EvalState>(
#endif
searchPath, getEvalStore(), getStore());
if (!evalState)
evalState =
#if HAVE_BOEHMGC
std::allocate_shared<EvalState>(traceable_allocator<EvalState>(),
searchPath, getEvalStore(), getStore())
#else
std::make_shared<EvalState>(
searchPath, getEvalStore(), getStore())
#endif
;
return ref<EvalState>(evalState);
}
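When Nix is built with the Boehm GC (`HAVE_BOEHMGC`), `getEvalState` above allocates the `EvalState` with `std::allocate_shared` and `traceable_allocator` so the shared-pointer control block is visible to the collector; otherwise it falls back to `std::make_shared`. Below is a self-contained sketch of that allocation pattern with a stand-in allocator; the names are illustrative, not Boehm's API.

```cpp
#include <cstddef>
#include <iostream>
#include <memory>

// Stand-in for Boehm's traceable_allocator, only so this sketch compiles on
// its own; it simply forwards to global operator new/delete.
template<typename T>
struct TracingAllocator
{
    using value_type = T;
    TracingAllocator() = default;
    template<typename U> TracingAllocator(const TracingAllocator<U> &) {}
    T * allocate(std::size_t n) { return static_cast<T *>(::operator new(n * sizeof(T))); }
    void deallocate(T * p, std::size_t) { ::operator delete(p); }
    template<typename U> bool operator==(const TracingAllocator<U> &) const { return true; }
    template<typename U> bool operator!=(const TracingAllocator<U> &) const { return false; }
};

struct EvalStateLike { int evaluations = 0; };  // placeholder for EvalState

int main()
{
    std::shared_ptr<EvalStateLike> state =
#if HAVE_BOEHMGC
        // GC-aware allocation: both object and control block are traceable.
        std::allocate_shared<EvalStateLike>(TracingAllocator<EvalStateLike>());
#else
        std::make_shared<EvalStateLike>();
#endif
    std::cout << state->evaluations << '\n';
}
```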

View file

@ -43,6 +43,19 @@ private:
std::shared_ptr<Store> _store;
};
/* A command that copies something between `--from` and `--to`
stores. */
struct CopyCommand : virtual StoreCommand
{
std::string srcUri, dstUri;
CopyCommand();
ref<Store> createStore() override;
ref<Store> getDstStore();
};
struct EvalCommand : virtual StoreCommand, MixEvalArgs
{
EvalCommand();
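The `CopyCommand` base above supplies a source store (via the overridden `createStore`, honouring `--from`) and a destination store (via `getDstStore`, honouring `--to`). The following is a minimal hypothetical sketch of how a log-copying subcommand might build on it, using the `getBuildLog`/`addBuildLog` store methods introduced elsewhere in this commit; the class name and the `drvPaths` member are illustrative and this is not the actual `nix store copy-log` implementation.

```cpp
#include "command.hh"
#include "store-api.hh"

using namespace nix;

// Hypothetical sketch only: a subcommand built on CopyCommand.
struct CmdCopyLogSketch : CopyCommand
{
    StorePaths drvPaths; // assumed to be filled from command-line arguments

    std::string description() override
    {
        return "copy build logs between the --from and --to stores";
    }

    void run(ref<Store> srcStore) override
    {
        auto dstStore = getDstStore();
        for (auto & drvPath : drvPaths) {
            // getBuildLog now returns std::optional<std::string> (see the
            // store-api changes in this commit).
            if (auto log = srcStore->getBuildLog(drvPath))
                dstStore->addBuildLog(drvPath, *log);
            else
                throw Error("build log for '%s' is not available",
                    srcStore->printStorePath(drvPath));
        }
    }
};
```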

View file

@ -345,6 +345,18 @@ Installable::getCursor(EvalState & state)
return cursors[0];
}
static StorePath getDeriver(
ref<Store> store,
const Installable & i,
const StorePath & drvPath)
{
auto derivers = store->queryValidDerivers(drvPath);
if (derivers.empty())
throw Error("'%s' does not have a known deriver", i.what());
// FIXME: use all derivers?
return *derivers.begin();
}
struct InstallableStorePath : Installable
{
ref<Store> store;
@ -353,7 +365,7 @@ struct InstallableStorePath : Installable
InstallableStorePath(ref<Store> store, StorePath && storePath)
: store(store), storePath(std::move(storePath)) { }
std::string what() override { return store->printStorePath(storePath); }
std::string what() const override { return store->printStorePath(storePath); }
DerivedPaths toDerivedPaths() override
{
@ -374,6 +386,15 @@ struct InstallableStorePath : Installable
}
}
StorePathSet toDrvPaths(ref<Store> store) override
{
if (storePath.isDerivation()) {
return {storePath};
} else {
return {getDeriver(store, *this, storePath)};
}
}
std::optional<StorePath> getStorePath() override
{
return storePath;
@ -402,6 +423,14 @@ DerivedPaths InstallableValue::toDerivedPaths()
return res;
}
StorePathSet InstallableValue::toDrvPaths(ref<Store> store)
{
StorePathSet res;
for (auto & drv : toDerivations())
res.insert(drv.drvPath);
return res;
}
struct InstallableAttrPath : InstallableValue
{
SourceExprCommand & cmd;
@ -412,7 +441,7 @@ struct InstallableAttrPath : InstallableValue
: InstallableValue(state), cmd(cmd), v(allocRootValue(v)), attrPath(attrPath)
{ }
std::string what() override { return attrPath; }
std::string what() const override { return attrPath; }
std::pair<Value *, Pos> toValue(EvalState & state) override
{
@ -836,11 +865,7 @@ StorePathSet toDerivations(
[&](const DerivedPath::Opaque & bo) {
if (!useDeriver)
throw Error("argument '%s' did not evaluate to a derivation", i->what());
auto derivers = store->queryValidDerivers(bo.path);
if (derivers.empty())
throw Error("'%s' does not have a known deriver", i->what());
// FIXME: use all derivers?
drvPaths.insert(*derivers.begin());
drvPaths.insert(getDeriver(store, *i, bo.path));
},
[&](const DerivedPath::Built & bfd) {
drvPaths.insert(bfd.drvPath);

View file

@ -33,10 +33,15 @@ struct Installable
{
virtual ~Installable() { }
virtual std::string what() = 0;
virtual std::string what() const = 0;
virtual DerivedPaths toDerivedPaths() = 0;
virtual StorePathSet toDrvPaths(ref<Store> store)
{
throw Error("'%s' cannot be converted to a derivation path", what());
}
DerivedPath toDerivedPath();
UnresolvedApp toApp(EvalState & state);
@ -81,6 +86,8 @@ struct InstallableValue : Installable
virtual std::vector<DerivationInfo> toDerivations() = 0;
DerivedPaths toDerivedPaths() override;
StorePathSet toDrvPaths(ref<Store> store) override;
};
struct InstallableFlake : InstallableValue
@ -99,7 +106,7 @@ struct InstallableFlake : InstallableValue
Strings && prefixes,
const flake::LockFlags & lockFlags);
std::string what() override { return flakeRef.to_string() + "#" + *attrPaths.begin(); }
std::string what() const override { return flakeRef.to_string() + "#" + *attrPaths.begin(); }
std::vector<std::string> getActualAttrPaths();

View file

@ -121,6 +121,8 @@ class BindingsBuilder
Bindings * bindings;
public:
// needed by std::back_inserter
using value_type = Attr;
EvalState & state;
@ -134,6 +136,11 @@ public:
}
void insert(const Attr & attr)
{
push_back(attr);
}
void push_back(const Attr & attr)
{
bindings->push_back(attr);
}
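The `value_type` alias and the `push_back` overload exist so that `std::back_inserter` can target a `BindingsBuilder`; that is what the reworked `builtins.removeAttrs` in `primops.cc` relies on when it uses `std::set_difference`. Here is a self-contained sketch of that requirement with a toy builder; the names are illustrative, not Nix code.

```cpp
#include <algorithm>
#include <iostream>
#include <iterator>
#include <vector>

// A minimal builder satisfying what std::back_inserter needs:
// a value_type alias and a push_back member.
struct IntBuilder
{
    using value_type = int;                         // required by std::back_inserter
    std::vector<int> items;
    void push_back(int i) { items.push_back(i); }   // required as well
};

int main()
{
    std::vector<int> all    = {1, 2, 3, 4, 5};      // must be sorted
    std::vector<int> remove = {2, 4};               // must be sorted
    IntBuilder builder;
    // Write "all minus remove" straight into the builder.
    std::set_difference(all.begin(), all.end(),
                        remove.begin(), remove.end(),
                        std::back_inserter(builder));
    for (int i : builder.items) std::cout << i << '\n';  // prints 1 3 5
}
```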

View file

@ -36,6 +36,19 @@
namespace nix {
static char * allocString(size_t size)
{
char * t;
#if HAVE_BOEHMGC
t = (char *) GC_MALLOC_ATOMIC(size);
#else
t = (char *) malloc(size);
#endif
if (!t) throw std::bad_alloc();
return t;
}
static char * dupString(const char * s)
{
char * t;
@ -412,6 +425,11 @@ EvalState::EvalState(
, sDescription(symbols.create("description"))
, sSelf(symbols.create("self"))
, sEpsilon(symbols.create(""))
, sStartSet(symbols.create("startSet"))
, sOperator(symbols.create("operator"))
, sKey(symbols.create("key"))
, sPath(symbols.create("path"))
, sPrefix(symbols.create("prefix"))
, repair(NoRepair)
, emptyBindings(0)
, store(store)
@ -771,17 +789,28 @@ void Value::mkString(std::string_view s)
}
static void copyContextToValue(Value & v, const PathSet & context)
{
if (!context.empty()) {
size_t n = 0;
v.string.context = (const char * *)
allocBytes((context.size() + 1) * sizeof(char *));
for (auto & i : context)
v.string.context[n++] = dupString(i.c_str());
v.string.context[n] = 0;
}
}
void Value::mkString(std::string_view s, const PathSet & context)
{
mkString(s);
if (!context.empty()) {
size_t n = 0;
string.context = (const char * *)
allocBytes((context.size() + 1) * sizeof(char *));
for (auto & i : context)
string.context[n++] = dupString(i.c_str());
string.context[n] = 0;
}
copyContextToValue(*this, context);
}
void Value::mkStringMove(const char * s, const PathSet & context)
{
mkString(s);
copyContextToValue(*this, context);
}
@ -1660,13 +1689,34 @@ void EvalState::concatLists(Value & v, size_t nrLists, Value * * lists, const Po
void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v)
{
PathSet context;
std::ostringstream s;
std::vector<std::string> s;
size_t sSize = 0;
NixInt n = 0;
NixFloat nf = 0;
bool first = !forceString;
ValueType firstType = nString;
const auto str = [&] {
std::string result;
result.reserve(sSize);
for (const auto & part : s) result += part;
return result;
};
/* c_str() is not str().c_str() because we want to create a string
Value. allocating a GC'd string directly and moving it into a
Value lets us avoid an allocation and copy. */
const auto c_str = [&] {
char * result = allocString(sSize + 1);
char * tmp = result;
for (const auto & part : s) {
memcpy(tmp, part.c_str(), part.size());
tmp += part.size();
}
*tmp = 0;
return result;
};
for (auto & [i_pos, i] : *es) {
Value vTmp;
i->eval(state, env, vTmp);
@ -1696,11 +1746,15 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v)
nf += vTmp.fpoint;
} else
throwEvalError(i_pos, "cannot add %1% to a float", showType(vTmp));
} else
} else {
if (s.empty()) s.reserve(es->size());
/* skip canonization of first path, which would only be not
canonized in the first place if it's coming from a ./${foo} type
path */
s << state.coerceToString(i_pos, vTmp, context, false, firstType == nString, !first);
s.emplace_back(
state.coerceToString(i_pos, vTmp, context, false, firstType == nString, !first));
sSize += s.back().size();
}
first = false;
}
@ -1712,9 +1766,9 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v)
else if (firstType == nPath) {
if (!context.empty())
throwEvalError(pos, "a string that refers to a store path cannot be appended to a path");
v.mkPath(canonPath(s.str()));
v.mkPath(canonPath(str()));
} else
v.mkString(s.str(), context);
v.mkStringMove(c_str(), context);
}
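The rewritten `ExprConcatStrings::eval` gathers the coerced parts into a vector while tracking their combined size, then materialises the result exactly once: `str()` builds a `std::string` for the path case, and `c_str()` fills a freshly allocated buffer that `mkStringMove` can take ownership of without another copy. The following standalone sketch shows the same single-allocation strategy; it uses an ordinary buffer where Nix would use `allocString` (the GC heap).

```cpp
#include <cstring>
#include <iostream>
#include <string>
#include <vector>

int main()
{
    std::vector<std::string> parts = {"foo", "-", "bar", "-", "baz"};

    // Track the total size while collecting parts (as sSize does above).
    size_t total = 0;
    for (const auto & p : parts) total += p.size();

    // Equivalent of the `str` lambda: one reserve, then append.
    std::string result;
    result.reserve(total);
    for (const auto & p : parts) result += p;

    // Equivalent of the `c_str` lambda: copy parts into one raw buffer and
    // NUL-terminate it (in Nix this buffer would come from allocString).
    std::vector<char> buf(total + 1);
    char * tmp = buf.data();
    for (const auto & p : parts) {
        std::memcpy(tmp, p.data(), p.size());
        tmp += p.size();
    }
    *tmp = '\0';

    std::cout << result << '\n' << buf.data() << '\n';
}
```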

View file

@ -80,7 +80,8 @@ public:
sContentAddressed,
sOutputHash, sOutputHashAlgo, sOutputHashMode,
sRecurseForDerivations,
sDescription, sSelf, sEpsilon;
sDescription, sSelf, sEpsilon, sStartSet, sOperator, sKey, sPath,
sPrefix;
Symbol sDerivationNix;
/* If set, force copying files to the Nix store even if they
@ -179,8 +180,8 @@ public:
Expr * parseExprFromFile(const Path & path, StaticEnv & staticEnv);
/* Parse a Nix expression from the specified string. */
Expr * parseExprFromString(std::string_view s, const Path & basePath, StaticEnv & staticEnv);
Expr * parseExprFromString(std::string_view s, const Path & basePath);
Expr * parseExprFromString(std::string s, const Path & basePath, StaticEnv & staticEnv);
Expr * parseExprFromString(std::string s, const Path & basePath);
Expr * parseStdin();
@ -308,7 +309,7 @@ private:
friend struct ExprAttrs;
friend struct ExprLet;
Expr * parse(const char * text, FileOrigin origin, const Path & path,
Expr * parse(char * text, size_t length, FileOrigin origin, const Path & path,
const Path & basePath, StaticEnv & staticEnv);
public:
@ -399,6 +400,7 @@ private:
friend struct ExprSelect;
friend void prim_getAttr(EvalState & state, const Pos & pos, Value * * args, Value & v);
friend void prim_match(EvalState & state, const Pos & pos, Value * * args, Value & v);
friend void prim_split(EvalState & state, const Pos & pos, Value * * args, Value & v);
friend struct Value;
};

View file

@ -633,12 +633,24 @@ LockedFlake lockFlake(
newLockFile.write(path);
std::optional<std::string> commitMessage = std::nullopt;
if (lockFlags.commitLockFile) {
std::string cm;
cm = settings.commitLockFileSummary.get();
if (cm == "") {
cm = fmt("%s: %s", relPath, lockFileExists ? "Update" : "Add");
}
cm += "\n\nFlake lock file updates:\n\n";
cm += filterANSIEscapes(diff, true);
commitMessage = cm;
}
topRef.input.markChangedFile(
(topRef.subdir == "" ? "" : topRef.subdir + "/") + "flake.lock",
lockFlags.commitLockFile
? std::optional<std::string>(fmt("%s: %s\n\nFlake lock file changes:\n\n%s",
relPath, lockFileExists ? "Update" : "Add", filterANSIEscapes(diff, true)))
: std::nullopt);
commitMessage);
/* Rewriting the lockfile changed the top-level
repo, so we should re-read it. FIXME: we could

View file

@ -122,6 +122,28 @@ std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
if (isFlake) {
if (!allowMissing && !pathExists(path + "/flake.nix")){
notice("path '%s' does not contain a 'flake.nix', searching up",path);
// Save device to detect filesystem boundary
dev_t device = lstat(path).st_dev;
bool found = false;
while (path != "/") {
if (pathExists(path + "/flake.nix")) {
found = true;
break;
} else if (pathExists(path + "/.git"))
throw Error("path '%s' is not part of a flake (neither it nor its parent directories contain a 'flake.nix' file)", path);
else {
if (lstat(path).st_dev != device)
throw Error("unable to find a flake before encountering filesystem boundary at '%s'", path);
}
path = dirOf(path);
}
if (!found)
throw BadURL("could not find a flake.nix file");
}
if (!S_ISDIR(lstat(path).st_mode))
throw BadURL("path '%s' is not a flake (because it's not a directory)", path);

View file

@ -64,29 +64,32 @@ static void adjustLoc(YYLTYPE * loc, const char * s, size_t len)
}
// FIXME: optimize
static Expr * unescapeStr(SymbolTable & symbols, const char * s, size_t length)
// we make use of the fact that the parser receives a private copy of the input
// string and can munge around in it.
static Expr * unescapeStr(SymbolTable & symbols, char * s, size_t length)
{
string t;
t.reserve(length);
char * result = s;
char * t = s;
char c;
// the input string is terminated with *two* NULs, so we can safely take
// *one* character after the one being checked against.
while ((c = *s++)) {
if (c == '\\') {
assert(*s);
c = *s++;
if (c == 'n') t += '\n';
else if (c == 'r') t += '\r';
else if (c == 't') t += '\t';
else t += c;
if (c == 'n') *t = '\n';
else if (c == 'r') *t = '\r';
else if (c == 't') *t = '\t';
else *t = c;
}
else if (c == '\r') {
/* Normalise CR and CR/LF into LF. */
t += '\n';
*t = '\n';
if (*s == '\n') s++; /* cr/lf */
}
else t += c;
else *t = c;
t++;
}
return new ExprString(symbols.create(t));
return new ExprString(symbols.create({result, size_t(t - result)}));
}
@ -139,7 +142,7 @@ or { return OR_KW; }
\/\/ { return UPDATE; }
\+\+ { return CONCAT; }
{ID} { yylval->id = strdup(yytext); return ID; }
{ID} { yylval->id = {yytext, (size_t) yyleng}; return ID; }
{INT} { errno = 0;
try {
yylval->n = boost::lexical_cast<int64_t>(yytext);
@ -221,14 +224,14 @@ or { return OR_KW; }
<PATH_START>{PATH_SEG} {
POP_STATE();
PUSH_STATE(INPATH_SLASH);
yylval->path = strdup(yytext);
yylval->path = {yytext, (size_t) yyleng};
return PATH;
}
<PATH_START>{HPATH_START} {
POP_STATE();
PUSH_STATE(INPATH_SLASH);
yylval->path = strdup(yytext);
yylval->path = {yytext, (size_t) yyleng};
return HPATH;
}
@ -237,7 +240,7 @@ or { return OR_KW; }
PUSH_STATE(INPATH_SLASH);
else
PUSH_STATE(INPATH);
yylval->path = strdup(yytext);
yylval->path = {yytext, (size_t) yyleng};
return PATH;
}
{HPATH} {
@ -245,7 +248,7 @@ or { return OR_KW; }
PUSH_STATE(INPATH_SLASH);
else
PUSH_STATE(INPATH);
yylval->path = strdup(yytext);
yylval->path = {yytext, (size_t) yyleng};
return HPATH;
}
@ -280,8 +283,8 @@ or { return OR_KW; }
throw ParseError("path has a trailing slash");
}
{SPATH} { yylval->path = strdup(yytext); return SPATH; }
{URI} { yylval->uri = strdup(yytext); return URI; }
{SPATH} { yylval->path = {yytext, (size_t) yyleng}; return SPATH; }
{URI} { yylval->uri = {yytext, (size_t) yyleng}; return URI; }
[ \t\r\n]+ /* eat up whitespace */
\#[^\r\n]* /* single-line comments */
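`unescapeStr` now rewrites the token text in place, which is safe because the lexer hands it a private, writable copy of the input terminated by two NUL bytes. The following standalone sketch performs the same in-place rewrite on an ordinary buffer; the function name is illustrative.

```cpp
#include <cassert>
#include <cstddef>
#include <iostream>

// Unescape \n, \r, \t and normalise CR / CR-LF to LF, writing over the input
// buffer. Returns the new length. 's' must be writable and NUL-terminated.
std::size_t unescapeInPlace(char * s)
{
    char * in = s;
    char * out = s;
    char c;
    while ((c = *in++)) {
        if (c == '\\') {
            assert(*in);
            c = *in++;
            if (c == 'n') *out = '\n';
            else if (c == 'r') *out = '\r';
            else if (c == 't') *out = '\t';
            else *out = c;
        } else if (c == '\r') {
            *out = '\n';                 // normalise CR and CR/LF into LF
            if (*in == '\n') in++;
        } else
            *out = c;
        out++;
    }
    *out = '\0';
    return out - s;
}

int main()
{
    char buf[] = "line1\\nline2\r\nline3";
    std::size_t len = unescapeInPlace(buf);
    std::cout << buf << "\n(" << len << " bytes)\n";
}
```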

View file

@ -473,7 +473,7 @@ string ExprLambda::showNamePos() const
size_t SymbolTable::totalSize() const
{
size_t n = 0;
for (auto & i : symbols)
for (auto & i : store)
n += i.size();
return n;
}

View file

@ -273,9 +273,16 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParseData * data, const char * err
nix::Formal * formal;
nix::NixInt n;
nix::NixFloat nf;
const char * id; // !!! -> Symbol
char * path;
char * uri;
// using a C struct allows us to avoid having to define the special
// members that using string_view here would implicitly delete.
struct StringToken {
const char * p;
size_t l;
operator std::string_view() const { return {p, l}; }
};
StringToken id; // !!! -> Symbol
StringToken path;
StringToken uri;
std::vector<nix::AttrName> * attrNames;
std::vector<std::pair<nix::Pos, nix::Expr *> > * string_parts;
}
@ -397,7 +404,7 @@ expr_select
expr_simple
: ID {
if (strcmp($1, "__curPos") == 0)
if (strncmp($1.p, "__curPos", $1.l) == 0)
$$ = new ExprPos(CUR_POS);
else
$$ = new ExprVar(CUR_POS, data->symbols.create($1));
@ -414,7 +421,7 @@ expr_simple
$$ = new ExprConcatStrings(CUR_POS, false, $2);
}
| SPATH {
string path($1 + 1, strlen($1) - 2);
string path($1.p + 1, $1.l - 2);
$$ = new ExprCall(CUR_POS,
new ExprVar(data->symbols.create("__findFile")),
{new ExprVar(data->symbols.create("__nixPath")),
@ -460,14 +467,14 @@ string_parts_interpolated
path_start
: PATH {
Path path(absPath($1, data->basePath));
Path path(absPath({$1.p, $1.l}, data->basePath));
/* add back in the trailing '/' to the first segment */
if ($1[strlen($1)-1] == '/' && strlen($1) > 1)
if ($1.p[$1.l-1] == '/' && $1.l > 1)
path += "/";
$$ = new ExprPath(path);
}
| HPATH {
Path path(getHome() + string($1 + 1));
Path path(getHome() + string($1.p + 1, $1.l - 1));
$$ = new ExprPath(path);
}
;
@ -543,7 +550,7 @@ attrpath
attr
: ID { $$ = $1; }
| OR_KW { $$ = "or"; }
| OR_KW { $$ = {"or", 2}; }
;
string_attr
@ -589,7 +596,7 @@ formal
namespace nix {
Expr * EvalState::parse(const char * text, FileOrigin origin,
Expr * EvalState::parse(char * text, size_t length, FileOrigin origin,
const Path & path, const Path & basePath, StaticEnv & staticEnv)
{
yyscan_t scanner;
@ -609,7 +616,7 @@ Expr * EvalState::parse(const char * text, FileOrigin origin,
data.basePath = basePath;
yylex_init(&scanner);
yy_scan_string(text, scanner);
yy_scan_buffer(text, length, scanner);
int res = yyparse(scanner, &data);
yylex_destroy(scanner);
@ -655,26 +662,33 @@ Expr * EvalState::parseExprFromFile(const Path & path)
Expr * EvalState::parseExprFromFile(const Path & path, StaticEnv & staticEnv)
{
return parse(readFile(path).c_str(), foFile, path, dirOf(path), staticEnv);
auto buffer = readFile(path);
// readFile should have left some extra space for terminators
buffer.append("\0\0", 2);
return parse(buffer.data(), buffer.size(), foFile, path, dirOf(path), staticEnv);
}
Expr * EvalState::parseExprFromString(std::string_view s, const Path & basePath, StaticEnv & staticEnv)
Expr * EvalState::parseExprFromString(std::string s, const Path & basePath, StaticEnv & staticEnv)
{
return parse(s.data(), foString, "", basePath, staticEnv);
s.append("\0\0", 2);
return parse(s.data(), s.size(), foString, "", basePath, staticEnv);
}
Expr * EvalState::parseExprFromString(std::string_view s, const Path & basePath)
Expr * EvalState::parseExprFromString(std::string s, const Path & basePath)
{
return parseExprFromString(s, basePath, staticBaseEnv);
return parseExprFromString(std::move(s), basePath, staticBaseEnv);
}
Expr * EvalState::parseStdin()
{
//Activity act(*logger, lvlTalkative, format("parsing standard input"));
return parse(drainFD(0).data(), foStdin, "", absPath("."), staticBaseEnv);
auto buffer = drainFD(0);
// drainFD should have left some extra space for terminators
buffer.append("\0\0", 2);
return parse(buffer.data(), buffer.size(), foStdin, "", absPath("."), staticBaseEnv);
}

View file

@ -12,6 +12,8 @@
#include "value-to-xml.hh"
#include "primops.hh"
#include <boost/container/small_vector.hpp>
#include <sys/types.h>
#include <sys/stat.h>
#include <unistd.h>
@ -378,7 +380,7 @@ void prim_exec(EvalState & state, const Pos & pos, Value * * args, Value & v)
auto output = runProgram(program, true, commandArgs);
Expr * parsed;
try {
parsed = state.parseExprFromString(output, pos.file);
parsed = state.parseExprFromString(std::move(output), pos.file);
} catch (Error & e) {
e.addTrace(pos, "While parsing the output from '%1%'", program);
throw;
@ -592,16 +594,16 @@ typedef list<Value *> ValueList;
static Bindings::iterator getAttr(
EvalState & state,
string funcName,
string attrName,
std::string_view funcName,
Symbol attrSym,
Bindings * attrSet,
const Pos & pos)
{
Bindings::iterator value = attrSet->find(state.symbols.create(attrName));
Bindings::iterator value = attrSet->find(attrSym);
if (value == attrSet->end()) {
hintformat errorMsg = hintfmt(
"attribute '%s' missing for call to '%s'",
attrName,
attrSym,
funcName
);
@ -635,7 +637,7 @@ static void prim_genericClosure(EvalState & state, const Pos & pos, Value * * ar
Bindings::iterator startSet = getAttr(
state,
"genericClosure",
"startSet",
state.sStartSet,
args[0]->attrs,
pos
);
@ -650,7 +652,7 @@ static void prim_genericClosure(EvalState & state, const Pos & pos, Value * * ar
Bindings::iterator op = getAttr(
state,
"genericClosure",
"operator",
state.sOperator,
args[0]->attrs,
pos
);
@ -672,7 +674,7 @@ static void prim_genericClosure(EvalState & state, const Pos & pos, Value * * ar
state.forceAttrs(*e, pos);
Bindings::iterator key =
e->attrs->find(state.symbols.create("key"));
e->attrs->find(state.sKey);
if (key == e->attrs->end())
throw EvalError({
.msg = hintfmt("attribute 'key' required"),
@ -1079,10 +1081,10 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
} else {
auto s = state.coerceToString(*i->pos, *i->value, context, true);
drv.env.emplace(key, s);
if (i->name == state.sBuilder) drv.builder = s;
else if (i->name == state.sSystem) drv.platform = s;
else if (i->name == state.sOutputHash) outputHash = s;
else if (i->name == state.sOutputHashAlgo) outputHashAlgo = s;
if (i->name == state.sBuilder) drv.builder = std::move(s);
else if (i->name == state.sSystem) drv.platform = std::move(s);
else if (i->name == state.sOutputHash) outputHash = std::move(s);
else if (i->name == state.sOutputHashAlgo) outputHashAlgo = std::move(s);
else if (i->name == state.sOutputHashMode) handleHashMode(s);
else if (i->name == state.sOutputs)
handleOutputs(tokenizeString<Strings>(s));
@ -1498,14 +1500,14 @@ static void prim_findFile(EvalState & state, const Pos & pos, Value * * args, Va
state.forceAttrs(*v2, pos);
string prefix;
Bindings::iterator i = v2->attrs->find(state.symbols.create("prefix"));
Bindings::iterator i = v2->attrs->find(state.sPrefix);
if (i != v2->attrs->end())
prefix = state.forceStringNoCtx(*i->value, pos);
i = getAttr(
state,
"findFile",
"path",
state.sPath,
v2->attrs,
pos
);
@ -2163,7 +2165,10 @@ static void prim_attrValues(EvalState & state, const Pos & pos, Value * * args,
v.listElems()[n++] = (Value *) &i;
std::sort(v.listElems(), v.listElems() + n,
[](Value * v1, Value * v2) { return (string) ((Attr *) v1)->name < (string) ((Attr *) v2)->name; });
[](Value * v1, Value * v2) {
std::string_view s1 = ((Attr *) v1)->name, s2 = ((Attr *) v2)->name;
return s1 < s2;
});
for (unsigned int i = 0; i < n; ++i)
v.listElems()[i] = ((Attr *) v.listElems()[i])->value;
@ -2184,11 +2189,10 @@ void prim_getAttr(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
string attr = state.forceStringNoCtx(*args[0], pos);
state.forceAttrs(*args[1], pos);
// !!! Should we create a symbol here or just do a lookup?
Bindings::iterator i = getAttr(
state,
"getAttr",
attr,
state.symbols.create(attr),
args[1]->attrs,
pos
);
@ -2268,21 +2272,25 @@ static void prim_removeAttrs(EvalState & state, const Pos & pos, Value * * args,
state.forceAttrs(*args[0], pos);
state.forceList(*args[1], pos);
/* Get the attribute names to be removed. */
std::set<Symbol> names;
/* Get the attribute names to be removed.
We keep them as Attrs instead of Symbols so std::set_difference
can be used to remove them from attrs[0]. */
boost::container::small_vector<Attr, 64> names;
names.reserve(args[1]->listSize());
for (auto elem : args[1]->listItems()) {
state.forceStringNoCtx(*elem, pos);
names.insert(state.symbols.create(elem->string.s));
names.emplace_back(state.symbols.create(elem->string.s), nullptr);
}
std::sort(names.begin(), names.end());
/* Copy all attributes not in that set. Note that we don't need
to sort v.attrs because it's a subset of an already sorted
vector. */
auto attrs = state.buildBindings(args[0]->attrs->size());
for (auto & i : *args[0]->attrs) {
if (!names.count(i.name))
attrs.insert(i);
}
std::set_difference(
args[0]->attrs->begin(), args[0]->attrs->end(),
names.begin(), names.end(),
std::back_inserter(attrs));
v.mkAttrs(attrs.alreadySorted());
}
@ -3520,18 +3528,20 @@ static RegisterPrimOp primop_match({
/* Split a string with a regular expression, and return a list of the
non-matching parts interleaved by the lists of the matching groups. */
static void prim_split(EvalState & state, const Pos & pos, Value * * args, Value & v)
void prim_split(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
auto re = state.forceStringNoCtx(*args[0], pos);
try {
std::regex regex(re, std::regex::extended);
auto regex = state.regexCache->cache.find(re);
if (regex == state.regexCache->cache.end())
regex = state.regexCache->cache.emplace(re, std::regex(re, std::regex::extended)).first;
PathSet context;
const std::string str = state.forceString(*args[1], context, pos);
auto begin = std::sregex_iterator(str.begin(), str.end(), regex);
auto begin = std::sregex_iterator(str.begin(), str.end(), regex->second);
auto end = std::sregex_iterator();
// Any match results are surrounded by non-matching results.
@ -3915,9 +3925,12 @@ void EvalState::createBaseEnv()
/* Note: we have to initialize the 'derivation' constant *after*
building baseEnv/staticBaseEnv because it uses 'builtins'. */
eval(parse(
char code[] =
#include "primops/derivation.nix.gen.hh"
, foFile, sDerivationNix, "/", staticBaseEnv), *vDerivation);
// the parser needs two NUL bytes as terminators; one of them
// is implied by being a C string.
"\0";
eval(parse(code, sizeof(code), foFile, sDerivationNix, "/", staticBaseEnv), *vDerivation);
}
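Two of the changes above trade repeated work for a single lookup: attribute names such as `startSet`, `operator`, `key`, `path` and `prefix` become pre-created symbols, and `builtins.split` now reuses compiled regexes from `EvalState::regexCache` instead of recompiling the pattern on every call. The following standalone sketch shows the regex-caching half with an illustrative cache; it is not the Nix implementation itself.

```cpp
#include <iostream>
#include <map>
#include <regex>
#include <string>

// Illustrative cache: compile each extended regex once and reuse it.
std::map<std::string, std::regex> regexCache;

const std::regex & cachedRegex(const std::string & re)
{
    auto it = regexCache.find(re);
    if (it == regexCache.end())
        it = regexCache.emplace(re, std::regex(re, std::regex::extended)).first;
    return it->second;
}

int main()
{
    const std::string str = "a,b,,c";
    const auto & re = cachedRegex(",");   // second call with "," would be a pure lookup
    for (auto i = std::sregex_iterator(str.begin(), str.end(), re);
         i != std::sregex_iterator(); ++i)
        std::cout << "match at " << i->position() << '\n';
}
```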

View file

@ -1,7 +1,8 @@
#pragma once
#include <list>
#include <map>
#include <unordered_set>
#include <unordered_map>
#include "types.hh"
@ -70,15 +71,21 @@ public:
class SymbolTable
{
private:
typedef std::unordered_set<string> Symbols;
Symbols symbols;
std::unordered_map<std::string_view, Symbol> symbols;
std::list<string> store;
public:
Symbol create(std::string_view s)
{
// FIXME: avoid allocation if 's' already exists in the symbol table.
std::pair<Symbols::iterator, bool> res = symbols.emplace(std::string(s));
return Symbol(&*res.first);
// Most symbols are looked up more than once, so we trade off insertion performance
// for lookup performance.
// TODO: could probably be done more efficiently with transparent Hash and Equals
// on the original implementation using unordered_set
auto it = symbols.find(s);
if (it != symbols.end()) return it->second;
const string & rawSym = store.emplace_back(s);
return symbols.emplace(rawSym, Symbol(&rawSym)).first->second;
}
size_t size() const
@ -91,7 +98,7 @@ public:
template<typename T>
void dump(T callback)
{
for (auto & s : symbols)
for (auto & s : store)
callback(s);
}
};
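The symbol table now keeps its strings in a `std::list` (so their addresses stay stable as the table grows) and indexes them with an `unordered_map` keyed by `string_view` into that storage, turning repeated `create` calls into a single hash lookup. Here is a self-contained sketch of the same interning scheme; `Interner` is an illustrative name, not the Nix class.

```cpp
#include <iostream>
#include <list>
#include <string>
#include <string_view>
#include <unordered_map>

class Interner
{
    std::unordered_map<std::string_view, const std::string *> symbols;
    std::list<std::string> store;   // a list never relocates its elements

public:
    const std::string * intern(std::string_view s)
    {
        auto it = symbols.find(s);
        if (it != symbols.end()) return it->second;   // already interned
        const std::string & raw = store.emplace_back(s);
        // The map key is a view into 'store', so it stays valid forever.
        return symbols.emplace(raw, &raw).first->second;
    }
};

int main()
{
    Interner t;
    auto a = t.intern("foo");
    auto b = t.intern("foo");
    std::cout << std::boolalpha << (a == b) << '\n';   // true: same interned string
}
```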

View file

@ -241,6 +241,8 @@ public:
void mkString(std::string_view s, const PathSet & context);
void mkStringMove(const char * s, const PathSet & context);
inline void mkString(const Symbol & s)
{
mkString(((const std::string &) s).c_str());

View file

@ -67,18 +67,18 @@ DownloadFileResult downloadFile(
storePath = std::move(cached->storePath);
} else {
StringSink sink;
dumpString(*res.data, sink);
auto hash = hashString(htSHA256, *res.data);
dumpString(res.data, sink);
auto hash = hashString(htSHA256, res.data);
ValidPathInfo info {
store->makeFixedOutputPath(FileIngestionMethod::Flat, hash, name),
hashString(htSHA256, *sink.s),
hashString(htSHA256, sink.s),
};
info.narSize = sink.s->size();
info.narSize = sink.s.size();
info.ca = FixedOutputHash {
.method = FileIngestionMethod::Flat,
.hash = hash,
};
auto source = StringSource { *sink.s };
auto source = StringSource(sink.s);
store->addToStore(info, source, NoRepair, NoCheckSigs);
storePath = std::move(info.path);
}

View file

@ -31,7 +31,7 @@ BinaryCacheStore::BinaryCacheStore(const Params & params)
StringSink sink;
sink << narVersionMagic1;
narMagic = *sink.s;
narMagic = sink.s;
}
void BinaryCacheStore::init()
@ -68,7 +68,7 @@ void BinaryCacheStore::upsertFile(const std::string & path,
}
void BinaryCacheStore::getFile(const std::string & path,
Callback<std::shared_ptr<std::string>> callback) noexcept
Callback<std::optional<std::string>> callback) noexcept
{
try {
callback(getFile(path));
@ -77,9 +77,9 @@ void BinaryCacheStore::getFile(const std::string & path,
void BinaryCacheStore::getFile(const std::string & path, Sink & sink)
{
std::promise<std::shared_ptr<std::string>> promise;
std::promise<std::optional<std::string>> promise;
getFile(path,
{[&](std::future<std::shared_ptr<std::string>> result) {
{[&](std::future<std::optional<std::string>> result) {
try {
promise.set_value(result.get());
} catch (...) {
@ -89,15 +89,15 @@ void BinaryCacheStore::getFile(const std::string & path, Sink & sink)
sink(*promise.get_future().get());
}
std::shared_ptr<std::string> BinaryCacheStore::getFile(const std::string & path)
std::optional<std::string> BinaryCacheStore::getFile(const std::string & path)
{
StringSink sink;
try {
getFile(path, sink);
} catch (NoSuchBinaryCacheFile &) {
return nullptr;
return std::nullopt;
}
return sink.s;
return std::move(sink.s);
}
std::string BinaryCacheStore::narInfoFileFor(const StorePath & storePath)
@ -367,11 +367,11 @@ void BinaryCacheStore::queryPathInfoUncached(const StorePath & storePath,
auto callbackPtr = std::make_shared<decltype(callback)>(std::move(callback));
getFile(narInfoFile,
{[=](std::future<std::shared_ptr<std::string>> fut) {
{[=](std::future<std::optional<std::string>> fut) {
try {
auto data = fut.get();
if (!data) return (*callbackPtr)(nullptr);
if (!data) return (*callbackPtr)({});
stats.narInfoRead++;
@ -429,7 +429,7 @@ StorePath BinaryCacheStore::addTextToStore(const string & name, const string & s
StringSink sink;
dumpString(s, sink);
auto source = StringSource { *sink.s };
StringSource source(sink.s);
return addToStoreCommon(source, repair, CheckSigs, [&](HashResult nar) {
ValidPathInfo info { path, nar.first };
info.narSize = nar.second;
@ -446,11 +446,11 @@ void BinaryCacheStore::queryRealisationUncached(const DrvOutput & id,
auto callbackPtr = std::make_shared<decltype(callback)>(std::move(callback));
Callback<std::shared_ptr<std::string>> newCallback = {
[=](std::future<std::shared_ptr<std::string>> fut) {
Callback<std::optional<std::string>> newCallback = {
[=](std::future<std::optional<std::string>> fut) {
try {
auto data = fut.get();
if (!data) return (*callbackPtr)(nullptr);
if (!data) return (*callbackPtr)({});
auto realisation = Realisation::fromJSON(
nlohmann::json::parse(*data), outputInfoFilePath);
@ -490,7 +490,7 @@ void BinaryCacheStore::addSignatures(const StorePath & storePath, const StringSe
writeNarInfo(narInfo);
}
std::shared_ptr<std::string> BinaryCacheStore::getBuildLog(const StorePath & path)
std::optional<std::string> BinaryCacheStore::getBuildLog(const StorePath & path)
{
auto drvPath = path;
@ -498,10 +498,10 @@ std::shared_ptr<std::string> BinaryCacheStore::getBuildLog(const StorePath & pat
try {
auto info = queryPathInfo(path);
// FIXME: add a "Log" field to .narinfo
if (!info->deriver) return nullptr;
if (!info->deriver) return std::nullopt;
drvPath = *info->deriver;
} catch (InvalidPath &) {
return nullptr;
return std::nullopt;
}
}
@ -512,4 +512,14 @@ std::shared_ptr<std::string> BinaryCacheStore::getBuildLog(const StorePath & pat
return getFile(logPath);
}
void BinaryCacheStore::addBuildLog(const StorePath & drvPath, std::string_view log)
{
assert(drvPath.isDerivation());
upsertFile(
"log/" + std::string(drvPath.to_string()),
(std::string) log, // FIXME: don't copy
"text/plain; charset=utf-8");
}
}

View file

@ -51,6 +51,7 @@ public:
const std::string & mimeType) = 0;
void upsertFile(const std::string & path,
// FIXME: use std::string_view
std::string && data,
const std::string & mimeType);
@ -62,10 +63,11 @@ public:
/* Fetch the specified file and call the specified callback with
the result. A subclass may implement this asynchronously. */
virtual void getFile(const std::string & path,
Callback<std::shared_ptr<std::string>> callback) noexcept;
virtual void getFile(
const std::string & path,
Callback<std::optional<std::string>> callback) noexcept;
std::shared_ptr<std::string> getFile(const std::string & path);
std::optional<std::string> getFile(const std::string & path);
public:
@ -117,7 +119,9 @@ public:
void addSignatures(const StorePath & storePath, const StringSet & sigs) override;
std::shared_ptr<std::string> getBuildLog(const StorePath & path) override;
std::optional<std::string> getBuildLog(const StorePath & path) override;
void addBuildLog(const StorePath & drvPath, std::string_view log) override;
};

View file

@ -278,7 +278,7 @@ void DerivationGoal::outputsSubstitutionTried()
if (nrFailed > 0 && nrFailed > nrNoSubstituters + nrIncompleteClosure && !settings.tryFallback) {
done(BuildResult::TransientFailure,
fmt("some substitutes for the outputs of derivation '%s' failed (usually happens due to networking issues); try '--fallback' to build derivation from source ",
Error("some substitutes for the outputs of derivation '%s' failed (usually happens due to networking issues); try '--fallback' to build derivation from source ",
worker.store.printStorePath(drvPath)));
return;
}

View file

@ -2226,8 +2226,8 @@ void LocalDerivationGoal::registerOutputs()
StringSink sink;
dumpPath(actualPath, sink);
deletePath(actualPath);
sink.s = make_ref<std::string>(rewriteStrings(*sink.s, outputRewrites));
StringSource source(*sink.s);
sink.s = rewriteStrings(sink.s, outputRewrites);
StringSource source(sink.s);
restorePath(actualPath, source);
}
};
@ -2295,7 +2295,7 @@ void LocalDerivationGoal::registerOutputs()
StringSink sink;
dumpPath(actualPath, sink);
RewritingSink rsink2(oldHashPart, std::string(finalPath.hashPart()), nextSink);
rsink2(*sink.s);
rsink2(sink.s);
rsink2.flush();
});
Path tmpPath = actualPath + ".tmp";

View file

@ -69,7 +69,7 @@ struct TunnelLogger : public Logger
StringSink buf;
buf << STDERR_NEXT << (fs.s + "\n");
enqueueMsg(*buf.s);
enqueueMsg(buf.s);
}
void logEI(const ErrorInfo & ei) override
@ -81,7 +81,7 @@ struct TunnelLogger : public Logger
StringSink buf;
buf << STDERR_NEXT << oss.str();
enqueueMsg(*buf.s);
enqueueMsg(buf.s);
}
/* startWork() means that we're starting an operation for which we
@ -129,7 +129,7 @@ struct TunnelLogger : public Logger
StringSink buf;
buf << STDERR_START_ACTIVITY << act << lvl << type << s << fields << parent;
enqueueMsg(*buf.s);
enqueueMsg(buf.s);
}
void stopActivity(ActivityId act) override
@ -137,7 +137,7 @@ struct TunnelLogger : public Logger
if (GET_PROTOCOL_MINOR(clientVersion) < 20) return;
StringSink buf;
buf << STDERR_STOP_ACTIVITY << act;
enqueueMsg(*buf.s);
enqueueMsg(buf.s);
}
void result(ActivityId act, ResultType type, const Fields & fields) override
@ -145,7 +145,7 @@ struct TunnelLogger : public Logger
if (GET_PROTOCOL_MINOR(clientVersion) < 20) return;
StringSink buf;
buf << STDERR_RESULT << act << type << fields;
enqueueMsg(*buf.s);
enqueueMsg(buf.s);
}
};
@ -468,10 +468,12 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
dontCheckSigs = false;
logger->startWork();
FramedSource source(from);
store->addMultipleToStore(source,
RepairFlag{repair},
dontCheckSigs ? NoCheckSigs : CheckSigs);
{
FramedSource source(from);
store->addMultipleToStore(source,
RepairFlag{repair},
dontCheckSigs ? NoCheckSigs : CheckSigs);
}
logger->stopWork();
break;
}
@ -852,14 +854,14 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
else {
std::unique_ptr<Source> source;
StringSink saved;
if (GET_PROTOCOL_MINOR(clientVersion) >= 21)
source = std::make_unique<TunnelSource>(from, to);
else {
StringSink saved;
TeeSource tee { from, saved };
ParseSink ether;
parseDump(ether, tee);
source = std::make_unique<StringSource>(std::move(*saved.s));
source = std::make_unique<StringSource>(saved.s);
}
logger->startWork();
@ -920,6 +922,22 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
break;
}
case wopAddBuildLog: {
StorePath path{readString(from)};
logger->startWork();
if (!trusted)
throw Error("you are not privileged to add logs");
{
FramedSource source(from);
StringSink sink;
source.drainInto(sink);
store->addBuildLog(path, sink.s);
}
logger->stopWork();
to << 1;
break;
}
default:
throw Error("invalid operation %1%", op);
}

View file

@ -75,20 +75,20 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs)
auto references = worker_proto::read(*this, source, Phantom<StorePathSet> {});
auto deriver = readString(source);
auto narHash = hashString(htSHA256, *saved.s);
auto narHash = hashString(htSHA256, saved.s);
ValidPathInfo info { path, narHash };
if (deriver != "")
info.deriver = parseStorePath(deriver);
info.references = references;
info.narSize = saved.s->size();
info.narSize = saved.s.size();
// Ignore optional legacy signature.
if (readInt(source) == 1)
readString(source);
// Can't use underlying source, which would have been exhausted
auto source = StringSource { *saved.s };
auto source = StringSource(saved.s);
addToStore(info, source, NoRepair, checkSigs);
res.push_back(info.path);

View file

@ -106,7 +106,7 @@ struct curlFileTransfer : public FileTransfer
this->request.dataCallback(data);
}
} else
this->result.data->append(data);
this->result.data.append(data);
})
{
if (!request.expectedETag.empty())
@ -195,7 +195,7 @@ struct curlFileTransfer : public FileTransfer
std::smatch match;
if (std::regex_match(line, match, statusLine)) {
result.etag = "";
result.data = std::make_shared<std::string>();
result.data.clear();
result.bodySize = 0;
statusMsg = trim(match[1]);
acceptRanges = false;
@ -340,7 +340,7 @@ struct curlFileTransfer : public FileTransfer
if (writtenToSink)
curl_easy_setopt(req, CURLOPT_RESUME_FROM_LARGE, writtenToSink);
result.data = std::make_shared<std::string>();
result.data.clear();
result.bodySize = 0;
}
@ -434,21 +434,21 @@ struct curlFileTransfer : public FileTransfer
attempt++;
std::shared_ptr<std::string> response;
std::optional<std::string> response;
if (errorSink)
response = errorSink->s;
response = std::move(errorSink->s);
auto exc =
code == CURLE_ABORTED_BY_CALLBACK && _isInterrupted
? FileTransferError(Interrupted, response, "%s of '%s' was interrupted", request.verb(), request.uri)
? FileTransferError(Interrupted, std::move(response), "%s of '%s' was interrupted", request.verb(), request.uri)
: httpStatus != 0
? FileTransferError(err,
response,
std::move(response),
fmt("unable to %s '%s': HTTP error %d ('%s')",
request.verb(), request.uri, httpStatus, statusMsg)
+ (code == CURLE_OK ? "" : fmt(" (curl error: %s)", curl_easy_strerror(code)))
)
: FileTransferError(err,
response,
std::move(response),
fmt("unable to %s '%s': %s (%d)",
request.verb(), request.uri, curl_easy_strerror(code), code));
@ -705,7 +705,7 @@ struct curlFileTransfer : public FileTransfer
FileTransferResult res;
if (!s3Res.data)
throw FileTransferError(NotFound, nullptr, "S3 object '%s' does not exist", request.uri);
res.data = s3Res.data;
res.data = std::move(*s3Res.data);
callback(std::move(res));
#else
throw nix::Error("cannot download '%s' because Nix is not built with S3 support", request.uri);
@ -859,7 +859,7 @@ void FileTransfer::download(FileTransferRequest && request, Sink & sink)
}
template<typename... Args>
FileTransferError::FileTransferError(FileTransfer::Error error, std::shared_ptr<string> response, const Args & ... args)
FileTransferError::FileTransferError(FileTransfer::Error error, std::optional<std::string> response, const Args & ... args)
: Error(args...), error(error), response(response)
{
const auto hf = hintfmt(args...);

View file

@ -59,7 +59,7 @@ struct FileTransferRequest
unsigned int baseRetryTimeMs = 250;
ActivityId parentAct;
bool decompress = true;
std::shared_ptr<std::string> data;
std::optional<std::string> data;
std::string mimeType;
std::function<void(std::string_view data)> dataCallback;
@ -77,7 +77,7 @@ struct FileTransferResult
bool cached = false;
std::string etag;
std::string effectiveUri;
std::shared_ptr<std::string> data;
std::string data;
uint64_t bodySize = 0;
};
@ -119,10 +119,10 @@ class FileTransferError : public Error
{
public:
FileTransfer::Error error;
std::shared_ptr<string> response; // intentionally optional
std::optional<string> response; // intentionally optional
template<typename... Args>
FileTransferError(FileTransfer::Error error, std::shared_ptr<string> response, const Args & ... args);
FileTransferError(FileTransfer::Error error, std::optional<string> response, const Args & ... args);
virtual const char* sname() const override { return "FileTransferError"; }
};

View file

@ -966,6 +966,13 @@ public:
Setting<bool> acceptFlakeConfig{this, false, "accept-flake-config",
"Whether to accept nix configuration from a flake without prompting."};
Setting<std::string> commitLockFileSummary{
this, "", "commit-lockfile-summary",
R"(
The commit summary to use when committing changed flake lock files. If
empty, the summary is generated based on the action performed.
)"};
};

View file

@ -126,7 +126,7 @@ protected:
const std::string & mimeType) override
{
auto req = makeRequest(path);
req.data = std::make_shared<string>(StreamToSourceAdapter(istream).drain());
req.data = StreamToSourceAdapter(istream).drain();
req.mimeType = mimeType;
try {
getFileTransfer()->upload(req);
@ -159,7 +159,7 @@ protected:
}
void getFile(const std::string & path,
Callback<std::shared_ptr<std::string>> callback) noexcept override
Callback<std::optional<std::string>> callback) noexcept override
{
checkEnabled();
@ -170,10 +170,10 @@ protected:
getFileTransfer()->enqueueFileTransfer(request,
{[callbackPtr, this](std::future<FileTransferResult> result) {
try {
(*callbackPtr)(result.get().data);
(*callbackPtr)(std::move(result.get().data));
} catch (FileTransferError & e) {
if (e.error == FileTransfer::NotFound || e.error == FileTransfer::Forbidden)
return (*callbackPtr)(std::shared_ptr<std::string>());
return (*callbackPtr)({});
maybeDisable();
callbackPtr->rethrow();
} catch (...) {

View file

@ -94,7 +94,7 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
conn->sshConn->in.close();
auto msg = conn->from.drain();
throw Error("'nix-store --serve' protocol mismatch from '%s', got '%s'",
host, chomp(*saved.s + msg));
host, chomp(saved.s + msg));
}
conn->remoteVersion = readInt(conn->from);
if (GET_PROTOCOL_MAJOR(conn->remoteVersion) != 0x200)

View file

@ -96,6 +96,7 @@ void LocalBinaryCacheStore::init()
createDirs(binaryCacheDir + "/" + realisationsPrefix);
if (writeDebugInfo)
createDirs(binaryCacheDir + "/debuginfo");
createDirs(binaryCacheDir + "/log");
BinaryCacheStore::init();
}

View file

@ -87,34 +87,32 @@ void LocalFSStore::narFromPath(const StorePath & path, Sink & sink)
const string LocalFSStore::drvsLogDir = "drvs";
std::shared_ptr<std::string> LocalFSStore::getBuildLog(const StorePath & path_)
std::optional<std::string> LocalFSStore::getBuildLog(const StorePath & path_)
{
auto path = path_;
if (!path.isDerivation()) {
try {
auto info = queryPathInfo(path);
if (!info->deriver) return nullptr;
if (!info->deriver) return std::nullopt;
path = *info->deriver;
} catch (InvalidPath &) {
return nullptr;
return std::nullopt;
}
}
auto baseName = std::string(baseNameOf(printStorePath(path)));
auto baseName = path.to_string();
for (int j = 0; j < 2; j++) {
Path logPath =
j == 0
? fmt("%s/%s/%s/%s", logDir, drvsLogDir, string(baseName, 0, 2), string(baseName, 2))
? fmt("%s/%s/%s/%s", logDir, drvsLogDir, baseName.substr(0, 2), baseName.substr(2))
: fmt("%s/%s/%s", logDir, drvsLogDir, baseName);
Path logBz2Path = logPath + ".bz2";
if (pathExists(logPath))
return std::make_shared<std::string>(readFile(logPath));
return readFile(logPath);
else if (pathExists(logBz2Path)) {
try {
@ -124,7 +122,7 @@ std::shared_ptr<std::string> LocalFSStore::getBuildLog(const StorePath & path_)
}
return nullptr;
return std::nullopt;
}
}

View file

@ -45,7 +45,8 @@ public:
return getRealStoreDir() + "/" + std::string(storePath, storeDir.size() + 1);
}
std::shared_ptr<std::string> getBuildLog(const StorePath & path) override;
std::optional<std::string> getBuildLog(const StorePath & path) override;
};
}

View file

@ -9,6 +9,7 @@
#include "callback.hh"
#include "topo-sort.hh"
#include "finally.hh"
#include "compression.hh"
#include <iostream>
#include <algorithm>
@ -1461,12 +1462,12 @@ StorePath LocalStore::addTextToStore(const string & name, const string & s,
StringSink sink;
dumpString(s, sink);
auto narHash = hashString(htSHA256, *sink.s);
auto narHash = hashString(htSHA256, sink.s);
optimisePath(realPath, repair);
ValidPathInfo info { dstPath, narHash };
info.narSize = sink.s->size();
info.narSize = sink.s.size();
info.references = references;
info.ca = TextHash { .hash = hash };
registerValidPath(info);
@ -1898,4 +1899,24 @@ FixedOutputHash LocalStore::hashCAPath(
};
}
void LocalStore::addBuildLog(const StorePath & drvPath, std::string_view log)
{
assert(drvPath.isDerivation());
auto baseName = drvPath.to_string();
auto logPath = fmt("%s/%s/%s/%s.bz2", logDir, drvsLogDir, baseName.substr(0, 2), baseName.substr(2));
if (pathExists(logPath)) return;
createDirs(dirOf(logPath));
auto tmpFile = fmt("%s.tmp.%d", logPath, getpid());
writeFile(tmpFile, compress("bzip2", log));
if (rename(tmpFile.c_str(), logPath.c_str()) != 0)
throw SysError("renaming '%1%' to '%2%'", tmpFile, logPath);
}
} // namespace nix

View file

@ -280,6 +280,8 @@ private:
const std::string_view pathHash
);
void addBuildLog(const StorePath & drvPath, std::string_view log) override;
friend struct LocalDerivationGoal;
friend struct PathSubstitutionGoal;
friend struct SubstitutionGoal;

View file

@ -28,7 +28,7 @@ struct NarMember
struct NarAccessor : public FSAccessor
{
std::shared_ptr<const std::string> nar;
std::optional<const std::string> nar;
GetNarBytes getNarBytes;
@ -104,7 +104,7 @@ struct NarAccessor : public FSAccessor
}
};
NarAccessor(ref<const std::string> nar) : nar(nar)
NarAccessor(std::string && _nar) : nar(_nar)
{
StringSource source(*nar);
NarIndexer indexer(*this, source);
@ -224,9 +224,9 @@ struct NarAccessor : public FSAccessor
}
};
ref<FSAccessor> makeNarAccessor(ref<const std::string> nar)
ref<FSAccessor> makeNarAccessor(std::string && nar)
{
return make_ref<NarAccessor>(nar);
return make_ref<NarAccessor>(std::move(nar));
}
ref<FSAccessor> makeNarAccessor(Source & source)

View file

@ -10,7 +10,7 @@ struct Source;
/* Return an object that provides access to the contents of a NAR
file. */
ref<FSAccessor> makeNarAccessor(ref<const std::string> nar);
ref<FSAccessor> makeNarAccessor(std::string && nar);
ref<FSAccessor> makeNarAccessor(Source & source);

View file

@ -78,7 +78,7 @@ Realisation Realisation::fromJSON(
auto fieldIterator = json.find(fieldName);
if (fieldIterator == json.end())
return std::nullopt;
return *fieldIterator;
return {*fieldIterator};
};
auto getField = [&](std::string fieldName) -> std::string {
if (auto field = getOptionalField(fieldName))

View file

@ -22,9 +22,18 @@ Path RemoteFSAccessor::makeCacheFile(std::string_view hashPart, const std::strin
return fmt("%s/%s.%s", cacheDir, hashPart, ext);
}
void RemoteFSAccessor::addToCache(std::string_view hashPart, const std::string & nar,
ref<FSAccessor> narAccessor)
ref<FSAccessor> RemoteFSAccessor::addToCache(std::string_view hashPart, std::string && nar)
{
if (cacheDir != "") {
try {
/* FIXME: do this asynchronously. */
writeFile(makeCacheFile(hashPart, "nar"), nar);
} catch (...) {
ignoreException();
}
}
auto narAccessor = makeNarAccessor(std::move(nar));
nars.emplace(hashPart, narAccessor);
if (cacheDir != "") {
@ -33,14 +42,12 @@ void RemoteFSAccessor::addToCache(std::string_view hashPart, const std::string &
JSONPlaceholder jsonRoot(str);
listNar(jsonRoot, narAccessor, "", true);
writeFile(makeCacheFile(hashPart, "ls"), str.str());
/* FIXME: do this asynchronously. */
writeFile(makeCacheFile(hashPart, "nar"), nar);
} catch (...) {
ignoreException();
}
}
return narAccessor;
}
std::pair<ref<FSAccessor>, Path> RemoteFSAccessor::fetch(const Path & path_, bool requireValidPath)
@ -55,7 +62,6 @@ std::pair<ref<FSAccessor>, Path> RemoteFSAccessor::fetch(const Path & path_, boo
auto i = nars.find(std::string(storePath.hashPart()));
if (i != nars.end()) return {i->second, restPath};
StringSink sink;
std::string listing;
Path cacheFile;
@ -86,19 +92,15 @@ std::pair<ref<FSAccessor>, Path> RemoteFSAccessor::fetch(const Path & path_, boo
} catch (SysError &) { }
try {
*sink.s = nix::readFile(cacheFile);
auto narAccessor = makeNarAccessor(sink.s);
auto narAccessor = makeNarAccessor(nix::readFile(cacheFile));
nars.emplace(storePath.hashPart(), narAccessor);
return {narAccessor, restPath};
} catch (SysError &) { }
}
StringSink sink;
store->narFromPath(storePath, sink);
auto narAccessor = makeNarAccessor(sink.s);
addToCache(storePath.hashPart(), *sink.s, narAccessor);
return {narAccessor, restPath};
return {addToCache(storePath.hashPart(), std::move(sink.s)), restPath};
}
FSAccessor::Stat RemoteFSAccessor::stat(const Path & path)

View file

@ -20,8 +20,7 @@ class RemoteFSAccessor : public FSAccessor
Path makeCacheFile(std::string_view hashPart, const std::string & ext);
void addToCache(std::string_view hashPart, const std::string & nar,
ref<FSAccessor> narAccessor);
ref<FSAccessor> addToCache(std::string_view hashPart, std::string && nar);
public:

View file

@ -172,7 +172,7 @@ void RemoteStore::initConnection(Connection & conn)
it. */
conn.closeWrite();
auto msg = conn.from.drain();
throw Error("protocol mismatch, got '%s'", chomp(*saved.s + msg));
throw Error("protocol mismatch, got '%s'", chomp(saved.s + msg));
}
conn.from >> conn.daemonVersion;
@ -908,6 +908,18 @@ void RemoteStore::queryMissing(const std::vector<DerivedPath> & targets,
}
void RemoteStore::addBuildLog(const StorePath & drvPath, std::string_view log)
{
auto conn(getConnection());
conn->to << wopAddBuildLog << drvPath.to_string();
StringSource source(log);
conn.withFramedSink([&](Sink & sink) {
source.drainInto(sink);
});
readInt(conn->from);
}
void RemoteStore::connect()
{
auto conn(getConnection());

View file

@ -116,6 +116,8 @@ public:
StorePathSet & willBuild, StorePathSet & willSubstitute, StorePathSet & unknown,
uint64_t & downloadSize, uint64_t & narSize) override;
void addBuildLog(const StorePath & drvPath, std::string_view log) override;
void connect() override;
unsigned int getProtocol() override;

View file

@ -385,7 +385,7 @@ struct S3BinaryCacheStoreImpl : virtual S3BinaryCacheStoreConfig, public virtual
auto compress = [&](std::string compression)
{
auto compressed = nix::compress(compression, StreamToSourceAdapter(istream).drain());
return std::make_shared<std::stringstream>(std::move(*compressed));
return std::make_shared<std::stringstream>(std::move(compressed));
};
if (narinfoCompression != "" && hasSuffix(path, ".narinfo"))

View file

@ -4,6 +4,8 @@
#include "ref.hh"
#include <optional>
namespace Aws { namespace Client { class ClientConfiguration; } }
namespace Aws { namespace S3 { class S3Client; } }
@ -20,7 +22,7 @@ struct S3Helper
struct FileTransferResult
{
std::shared_ptr<std::string> data;
std::optional<std::string> data;
unsigned int durationMs;
};

View file

@ -1109,6 +1109,21 @@ void copyClosure(
copyPaths(srcStore, dstStore, closure, repair, checkSigs, substitute);
}
void copyClosure(
Store & srcStore,
Store & dstStore,
const StorePathSet & storePaths,
RepairFlag repair,
CheckSigsFlag checkSigs,
SubstituteFlag substitute)
{
if (&srcStore == &dstStore) return;
StorePathSet closure;
srcStore.computeFSClosure(storePaths, closure);
copyPaths(srcStore, dstStore, closure, repair, checkSigs, substitute);
}
std::optional<ValidPathInfo> decodeValidPathInfo(const Store & store, std::istream & str, std::optional<HashResult> hashGiven)
{
std::string path;
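The new `copyClosure` overload takes plain `Store &` references (the existing one takes `ref<Store>`), computes the closure of the given paths, and forwards to `copyPaths`. A hedged usage sketch against the signature shown above; the helper name is illustrative.

```cpp
#include "store-api.hh"

// Illustrative helper: mirror the full closure of 'root' from 'src' to 'dst'.
void mirrorClosure(nix::Store & src, nix::Store & dst, const nix::StorePath & root)
{
    nix::copyClosure(src, dst, nix::StorePathSet{root},
        nix::NoRepair, nix::CheckSigs, nix::NoSubstitute);
}
```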

View file

@ -724,8 +724,11 @@ public:
/* Return the build log of the specified store path, if available,
or null otherwise. */
virtual std::shared_ptr<std::string> getBuildLog(const StorePath & path)
{ return nullptr; }
virtual std::optional<std::string> getBuildLog(const StorePath & path)
{ return std::nullopt; }
virtual void addBuildLog(const StorePath & path, std::string_view log)
{ unsupported("addBuildLog"); }
/* Hack to allow long-running processes like hydra-queue-runner to
occasionally flush their path info cache. */
@ -812,6 +815,13 @@ void copyClosure(
CheckSigsFlag checkSigs = CheckSigs,
SubstituteFlag substitute = NoSubstitute);
void copyClosure(
Store & srcStore, Store & dstStore,
const StorePathSet & paths,
RepairFlag repair = NoRepair,
CheckSigsFlag checkSigs = CheckSigs,
SubstituteFlag substitute = NoSubstitute);
/* Remove the temporary roots file for this process. Any temporary
root becomes garbage after this point unless it has been registered
as a (permanent) root. */

View file

@ -56,6 +56,7 @@ typedef enum {
wopRegisterDrvOutput = 42,
wopQueryRealisation = 43,
wopAddMultipleToStore = 44,
wopAddBuildLog = 45,
} WorkerOp;

View file

@ -190,13 +190,13 @@ struct BrotliDecompressionSink : ChunkedCompressionSink
}
};
ref<std::string> decompress(const std::string & method, const std::string & in)
std::string decompress(const std::string & method, std::string_view in)
{
StringSink ssink;
auto sink = makeDecompressionSink(method, ssink);
(*sink)(in);
sink->finish();
return ssink.s;
return std::move(ssink.s);
}
std::unique_ptr<FinishSink> makeDecompressionSink(const std::string & method, Sink & nextSink)
@ -281,13 +281,13 @@ ref<CompressionSink> makeCompressionSink(const std::string & method, Sink & next
throw UnknownCompressionMethod("unknown compression method '%s'", method);
}
ref<std::string> compress(const std::string & method, const std::string & in, const bool parallel, int level)
std::string compress(const std::string & method, std::string_view in, const bool parallel, int level)
{
StringSink ssink;
auto sink = makeCompressionSink(method, ssink, parallel, level);
(*sink)(in);
sink->finish();
return ssink.s;
return std::move(ssink.s);
}
}

View file

@ -15,11 +15,11 @@ struct CompressionSink : BufferedSink, FinishSink
using FinishSink::finish;
};
ref<std::string> decompress(const std::string & method, const std::string & in);
std::string decompress(const std::string & method, std::string_view in);
std::unique_ptr<FinishSink> makeDecompressionSink(const std::string & method, Sink & nextSink);
ref<std::string> compress(const std::string & method, const std::string & in, const bool parallel = false, int level = -1);
std::string compress(const std::string & method, std::string_view in, const bool parallel = false, int level = -1);
ref<CompressionSink> makeCompressionSink(const std::string & method, Sink & nextSink, const bool parallel = false, int level = -1);
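
With `compress()`/`decompress()` returning a plain `std::string`, call sites no longer dereference a `ref<std::string>`. A minimal round-trip sketch, assuming the Nix source tree and that xz support is compiled in:

```cpp
#include "compression.hh"

#include <cassert>
#include <string>

using namespace nix;

int main()
{
    std::string original = "some text worth compressing";

    std::string packed   = compress("xz", original);   // was: ref<std::string>, needed '*'
    std::string restored = decompress("xz", packed);

    assert(restored == original);
}
```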

View file

@ -137,7 +137,7 @@ public:
{ }
template<typename... Args>
BaseError(const std::string & fs, const Args & ... args)
explicit BaseError(const std::string & fs, const Args & ... args)
: err { .level = lvlError, .msg = hintfmt(fs, args...) }
{ }

View file

@ -7,16 +7,38 @@ namespace nix {
void toJSON(std::ostream & str, const char * start, const char * end)
{
str << '"';
for (auto i = start; i != end; i++)
if (*i == '\"' || *i == '\\') str << '\\' << *i;
else if (*i == '\n') str << "\\n";
else if (*i == '\r') str << "\\r";
else if (*i == '\t') str << "\\t";
else if (*i >= 0 && *i < 32)
str << "\\u" << std::setfill('0') << std::setw(4) << std::hex << (uint16_t) *i << std::dec;
else str << *i;
str << '"';
constexpr size_t BUF_SIZE = 4096;
char buf[BUF_SIZE + 7]; // BUF_SIZE + largest single sequence of puts
size_t bufPos = 0;
const auto flush = [&] {
str.write(buf, bufPos);
bufPos = 0;
};
const auto put = [&] (char c) {
buf[bufPos++] = c;
};
put('"');
for (auto i = start; i != end; i++) {
if (bufPos >= BUF_SIZE) flush();
if (*i == '\"' || *i == '\\') { put('\\'); put(*i); }
else if (*i == '\n') { put('\\'); put('n'); }
else if (*i == '\r') { put('\\'); put('r'); }
else if (*i == '\t') { put('\\'); put('t'); }
else if (*i >= 0 && *i < 32) {
const char hex[17] = "0123456789abcdef";
put('\\');
put('u');
put(hex[(uint16_t(*i) >> 12) & 0xf]);
put(hex[(uint16_t(*i) >> 8) & 0xf]);
put(hex[(uint16_t(*i) >> 4) & 0xf]);
put(hex[(uint16_t(*i) >> 0) & 0xf]);
}
else put(*i);
}
put('"');
flush();
}
void toJSON(std::ostream & str, const char * s)
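
The rewrite batches output into a local buffer and emits escape sequences with plain `put()` calls instead of going through `operator<<` and stream manipulators for every character. The same technique in a self-contained form; this is a simplified re-implementation for illustration (it writes `\u00XX` with a fixed `00` prefix, which is equivalent for the control characters below 32), not the Nix function itself:

```cpp
#include <iostream>
#include <string_view>

// Escape a string as a JSON string literal, buffering writes locally
// so the ostream is only touched every few KiB.
void writeJsonString(std::ostream & out, std::string_view s)
{
    constexpr size_t BUF_SIZE = 4096;
    char buf[BUF_SIZE + 7];        // room for the largest single escape sequence
    size_t pos = 0;

    auto flush = [&] { out.write(buf, pos); pos = 0; };
    auto put   = [&](char c) { buf[pos++] = c; };

    static const char hex[] = "0123456789abcdef";

    put('"');
    for (char c : s) {
        if (pos >= BUF_SIZE) flush();
        if (c == '"' || c == '\\') { put('\\'); put(c); }
        else if (c == '\n') { put('\\'); put('n'); }
        else if (c == '\r') { put('\\'); put('r'); }
        else if (c == '\t') { put('\\'); put('t'); }
        else if (static_cast<unsigned char>(c) < 32) {
            put('\\'); put('u'); put('0'); put('0');
            put(hex[(c >> 4) & 0xf]);
            put(hex[c & 0xf]);
        }
        else put(c);
    }
    put('"');
    flush();
}

int main()
{
    writeJsonString(std::cout, "a \"quoted\"\nline\twith\ttabs");
    std::cout << '\n';
}
```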

View file

@ -110,7 +110,7 @@ std::string Source::drain()
{
StringSink s;
drainInto(s);
return *s.s;
return std::move(s.s);
}
@ -325,7 +325,7 @@ void writeString(std::string_view data, Sink & sink)
}
Sink & operator << (Sink & sink, const string & s)
Sink & operator << (Sink & sink, std::string_view s)
{
writeString(s, sink);
return sink;
@ -450,11 +450,11 @@ Error readError(Source & source)
void StringSink::operator () (std::string_view data)
{
static bool warned = false;
if (!warned && s->size() > threshold) {
if (!warned && s.size() > threshold) {
warnLargeDump();
warned = true;
}
s->append(data);
s.append(data);
}
size_t ChainSource::read(char * data, size_t len)

View file

@ -154,12 +154,13 @@ private:
/* A sink that writes data to a string. */
struct StringSink : Sink
{
ref<std::string> s;
StringSink() : s(make_ref<std::string>()) { };
explicit StringSink(const size_t reservedSize) : s(make_ref<std::string>()) {
s->reserve(reservedSize);
std::string s;
StringSink() { }
explicit StringSink(const size_t reservedSize)
{
s.reserve(reservedSize);
};
StringSink(ref<std::string> s) : s(s) { };
StringSink(std::string && s) : s(std::move(s)) { };
void operator () (std::string_view data) override;
};
@ -167,9 +168,9 @@ struct StringSink : Sink
/* A source that reads data from a string. */
struct StringSource : Source
{
const string & s;
std::string_view s;
size_t pos;
StringSource(const string & _s) : s(_s), pos(0) { }
StringSource(std::string_view s) : s(s), pos(0) { }
size_t read(char * data, size_t len) override;
};
@ -317,10 +318,10 @@ inline Sink & operator << (Sink & sink, uint64_t n)
return sink;
}
Sink & operator << (Sink & sink, const string & s);
Sink & operator << (Sink & in, const Error & ex);
Sink & operator << (Sink & sink, std::string_view s);
Sink & operator << (Sink & sink, const Strings & s);
Sink & operator << (Sink & sink, const StringSet & s);
Sink & operator << (Sink & in, const Error & ex);
MakeError(SerialisationError, Error);
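
With `StringSink` now owning a plain `std::string` and `StringSource` wrapping a `std::string_view`, the usual dump/hash/add pattern accesses `sink.s` directly and constructs a cheap source view over it, as the updated `add-to-store` and profile code elsewhere in this diff does. A sketch of the new call-site shape, assuming the Nix source tree; the function name is a placeholder:

```cpp
#include "archive.hh"
#include "hash.hh"
#include "serialise.hh"

using namespace nix;

// Serialise a filesystem path to a NAR and hash it.
std::string narHashOf(const Path & path)
{
    StringSink sink;
    dumpPath(path, sink);                        // fills sink.s in place

    auto narHash = hashString(htSHA256, sink.s); // direct access, no '*' any more

    StringSource source(sink.s);                 // string_view over the buffer
    (void) source;                               // would typically feed Store::addToStore()

    return narHash.to_string(Base32, true);
}
```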

View file

@ -12,17 +12,17 @@ namespace nix {
}
TEST(compress, noneMethodDoesNothingToTheInput) {
ref<std::string> o = compress("none", "this-is-a-test");
auto o = compress("none", "this-is-a-test");
ASSERT_EQ(*o, "this-is-a-test");
ASSERT_EQ(o, "this-is-a-test");
}
TEST(decompress, decompressNoneCompressed) {
auto method = "none";
auto str = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf";
ref<std::string> o = decompress(method, str);
auto o = decompress(method, str);
ASSERT_EQ(*o, str);
ASSERT_EQ(o, str);
}
TEST(decompress, decompressEmptyCompressed) {
@ -30,33 +30,33 @@ namespace nix {
// (Content-Encoding == "").
auto method = "";
auto str = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf";
ref<std::string> o = decompress(method, str);
auto o = decompress(method, str);
ASSERT_EQ(*o, str);
ASSERT_EQ(o, str);
}
TEST(decompress, decompressXzCompressed) {
auto method = "xz";
auto str = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf";
ref<std::string> o = decompress(method, *compress(method, str));
auto o = decompress(method, compress(method, str));
ASSERT_EQ(*o, str);
ASSERT_EQ(o, str);
}
TEST(decompress, decompressBzip2Compressed) {
auto method = "bzip2";
auto str = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf";
ref<std::string> o = decompress(method, *compress(method, str));
auto o = decompress(method, compress(method, str));
ASSERT_EQ(*o, str);
ASSERT_EQ(o, str);
}
TEST(decompress, decompressBrCompressed) {
auto method = "br";
auto str = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf";
ref<std::string> o = decompress(method, *compress(method, str));
auto o = decompress(method, compress(method, str));
ASSERT_EQ(*o, str);
ASSERT_EQ(o, str);
}
TEST(decompress, decompressInvalidInputThrowsCompressionError) {
@ -77,7 +77,7 @@ namespace nix {
(*sink)(inputString);
sink->finish();
ASSERT_STREQ((*strSink.s).c_str(), inputString);
ASSERT_STREQ(strSink.s.c_str(), inputString);
}
TEST(makeCompressionSink, compressAndDecompress) {
@ -90,7 +90,7 @@ namespace nix {
sink->finish();
decompressionSink->finish();
ASSERT_STREQ((*strSink.s).c_str(), inputString);
ASSERT_STREQ(strSink.s.c_str(), inputString);
}
}

View file

@ -22,6 +22,7 @@ typedef std::map<string, string> StringMap;
/* Paths are just strings. */
typedef string Path;
typedef std::string_view PathView;
typedef list<Path> Paths;
typedef set<Path> PathSet;

View file

@ -106,16 +106,16 @@ Path absPath(Path path, std::optional<Path> dir, bool resolveSymlinks)
}
Path canonPath(const Path & path, bool resolveSymlinks)
Path canonPath(PathView path, bool resolveSymlinks)
{
assert(path != "");
string s;
s.reserve(256);
if (path[0] != '/')
throw Error("not an absolute path: '%1%'", path);
string::const_iterator i = path.begin(), end = path.end();
string temp;
/* Count the number of times we follow a symlink and stop at some
@ -125,33 +125,37 @@ Path canonPath(const Path & path, bool resolveSymlinks)
while (1) {
/* Skip slashes. */
while (i != end && *i == '/') i++;
if (i == end) break;
while (!path.empty() && path[0] == '/') path.remove_prefix(1);
if (path.empty()) break;
/* Ignore `.'. */
if (*i == '.' && (i + 1 == end || i[1] == '/'))
i++;
if (path == "." || path.substr(0, 2) == "./")
path.remove_prefix(1);
/* If `..', delete the last component. */
else if (*i == '.' && i + 1 < end && i[1] == '.' &&
(i + 2 == end || i[2] == '/'))
else if (path == ".." || path.substr(0, 3) == "../")
{
if (!s.empty()) s.erase(s.rfind('/'));
i += 2;
path.remove_prefix(2);
}
/* Normal component; copy it. */
else {
s += '/';
while (i != end && *i != '/') s += *i++;
if (const auto slash = path.find('/'); slash == string::npos) {
s += path;
path = {};
} else {
s += path.substr(0, slash);
path = path.substr(slash + 1);
}
/* If s points to a symlink, resolve it and continue from there */
if (resolveSymlinks && isLink(s)) {
if (++followCount >= maxFollow)
throw Error("infinite symlink recursion in path '%1%'", path);
temp = readLink(s) + string(i, end);
i = temp.begin();
end = temp.end();
temp = concatStrings(readLink(s), path);
path = temp;
if (!temp.empty() && temp[0] == '/') {
s.clear(); /* restart for symlinks pointing to absolute path */
} else {
@ -164,7 +168,7 @@ Path canonPath(const Path & path, bool resolveSymlinks)
}
}
return s.empty() ? "/" : s;
return s.empty() ? "/" : std::move(s);
}
@ -668,9 +672,11 @@ void writeFull(int fd, std::string_view s, bool allowInterrupts)
string drainFD(int fd, bool block, const size_t reserveSize)
{
StringSink sink(reserveSize);
// the parser needs two extra bytes to append terminating characters; other users will
// not care very much about the extra memory.
StringSink sink(reserveSize + 2);
drainFD(fd, sink, block);
return std::move(*sink.s);
return std::move(sink.s);
}
@ -1055,7 +1061,7 @@ std::pair<int, std::string> runProgram(RunOptions && options)
status = e.status;
}
return {status, std::move(*sink.s)};
return {status, std::move(sink.s)};
}
void runProgram2(const RunOptions & options)
@ -1229,23 +1235,22 @@ void _interrupted()
//////////////////////////////////////////////////////////////////////
template<class C> C tokenizeString(std::string_view s, const string & separators)
template<class C> C tokenizeString(std::string_view s, std::string_view separators)
{
C result;
string::size_type pos = s.find_first_not_of(separators, 0);
while (pos != string::npos) {
string::size_type end = s.find_first_of(separators, pos + 1);
if (end == string::npos) end = s.size();
string token(s, pos, end - pos);
result.insert(result.end(), token);
result.insert(result.end(), string(s, pos, end - pos));
pos = s.find_first_not_of(separators, end);
}
return result;
}
template Strings tokenizeString(std::string_view s, const string & separators);
template StringSet tokenizeString(std::string_view s, const string & separators);
template vector<string> tokenizeString(std::string_view s, const string & separators);
template Strings tokenizeString(std::string_view s, std::string_view separators);
template StringSet tokenizeString(std::string_view s, std::string_view separators);
template vector<string> tokenizeString(std::string_view s, std::string_view separators);
string chomp(std::string_view s)
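
The `canonPath()` rewrite consumes its input as a `std::string_view`, chopping components off the front with `remove_prefix()` instead of walking iterators. A self-contained, simplified version of the same idea (symlink resolution omitted), for illustration only:

```cpp
#include <cassert>
#include <stdexcept>
#include <string>
#include <string_view>

// Simplified canonicalisation: collapse '//', drop '.', resolve '..'
// textually. The real canonPath() additionally resolves symlinks.
std::string canonicalise(std::string_view path)
{
    if (path.empty() || path[0] != '/')
        throw std::runtime_error("not an absolute path");

    std::string s;
    s.reserve(256);

    while (true) {
        while (!path.empty() && path[0] == '/') path.remove_prefix(1);  // skip slashes
        if (path.empty()) break;

        if (path == "." || path.substr(0, 2) == "./")
            path.remove_prefix(1);                                      // ignore '.'
        else if (path == ".." || path.substr(0, 3) == "../") {
            if (!s.empty()) s.erase(s.rfind('/'));                      // drop last component
            path.remove_prefix(2);
        }
        else if (auto slash = path.find('/'); slash == std::string_view::npos) {
            s += '/'; s += path; path = {};                             // last component
        }
        else {
            s += '/'; s += path.substr(0, slash);                       // normal component
            path = path.substr(slash + 1);
        }
    }

    return s.empty() ? "/" : s;
}

int main()
{
    assert(canonicalise("/a/b/../c//./d") == "/a/c/d");
    assert(canonicalise("/..") == "/");
}
```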

View file

@ -11,6 +11,8 @@
#include <unistd.h>
#include <signal.h>
#include <boost/lexical_cast.hpp>
#include <atomic>
#include <functional>
#include <map>
@ -54,7 +56,7 @@ Path absPath(Path path,
double or trailing slashes. Optionally resolves all symlink
components such that each component of the resulting path is *not*
a symbolic link. */
Path canonPath(const Path & path, bool resolveSymlinks = false);
Path canonPath(PathView path, bool resolveSymlinks = false);
/* Return the directory part of the given canonical path, i.e.,
everything before the final `/'. If the path is the root or an
@ -366,15 +368,19 @@ MakeError(FormatError, Error);
/* String tokenizer. */
template<class C> C tokenizeString(std::string_view s, const string & separators = " \t\n\r");
template<class C> C tokenizeString(std::string_view s, std::string_view separators = " \t\n\r");
/* Concatenate the given strings with a separator between the
elements. */
template<class C>
string concatStringsSep(const string & sep, const C & ss)
string concatStringsSep(const std::string_view sep, const C & ss)
{
size_t size = 0;
// need a cast to string_view since this is also called with Symbols
for (const auto & s : ss) size += sep.size() + std::string_view(s).size();
string s;
s.reserve(size);
for (auto & i : ss) {
if (s.size() != 0) s += sep;
s += i;
@ -382,6 +388,14 @@ string concatStringsSep(const string & sep, const C & ss)
return s;
}
template<class ... Parts>
auto concatStrings(Parts && ... parts)
-> std::enable_if_t<(... && std::is_convertible_v<Parts, std::string_view>), string>
{
std::string_view views[sizeof...(parts)] = { parts... };
return concatStringsSep({}, views);
}
/* Add quotes around a collection of strings. */
template<class C> Strings quoteStrings(const C & c)
@ -419,21 +433,21 @@ bool statusOk(int status);
/* Parse a string into an integer. */
template<class N>
std::optional<N> string2Int(const std::string & s)
std::optional<N> string2Int(const std::string_view s)
{
if (s.substr(0, 1) == "-" && !std::numeric_limits<N>::is_signed)
return std::nullopt;
std::istringstream str(s);
N n;
str >> n;
if (str && str.get() == EOF) return n;
return std::nullopt;
try {
return boost::lexical_cast<N>(s.data(), s.size());
} catch (const boost::bad_lexical_cast &) {
return std::nullopt;
}
}
/* Like string2Int(), but support an optional suffix 'K', 'M', 'G' or
'T' denoting a binary unit prefix. */
template<class N>
N string2IntWithUnitPrefix(std::string s)
N string2IntWithUnitPrefix(std::string_view s)
{
N multiplier = 1;
if (!s.empty()) {
@ -444,7 +458,7 @@ N string2IntWithUnitPrefix(std::string s)
else if (u == 'G') multiplier = 1ULL << 30;
else if (u == 'T') multiplier = 1ULL << 40;
else throw UsageError("invalid unit specifier '%1%'", u);
s.resize(s.size() - 1);
s.remove_suffix(1);
}
}
if (auto n = string2Int<N>(s))
@ -454,13 +468,13 @@ N string2IntWithUnitPrefix(std::string s)
/* Parse a string into a float. */
template<class N>
std::optional<N> string2Float(const string & s)
std::optional<N> string2Float(const std::string_view s)
{
std::istringstream str(s);
N n;
str >> n;
if (str && str.get() == EOF) return n;
return std::nullopt;
try {
return boost::lexical_cast<N>(s.data(), s.size());
} catch (const boost::bad_lexical_cast &) {
return std::nullopt;
}
}
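
Two of the helpers changed here are easy to exercise in isolation: the variadic `concatStrings` (a fold over `string_view`s) and the `boost::lexical_cast`-based integer parsing. A self-contained sketch of equivalent functions, simplified for illustration (the real `string2IntWithUnitPrefix` throws on an invalid unit rather than returning an empty optional):

```cpp
#include <boost/lexical_cast.hpp>
#include <cassert>
#include <cctype>
#include <limits>
#include <optional>
#include <string>
#include <string_view>
#include <type_traits>

// Concatenate any number of string-like arguments without a separator.
template<class... Parts>
auto concatStrings(Parts &&... parts)
    -> std::enable_if_t<(... && std::is_convertible_v<Parts, std::string_view>), std::string>
{
    std::string_view views[sizeof...(parts)] = { parts... };
    size_t size = 0;
    for (auto v : views) size += v.size();
    std::string result;
    result.reserve(size);
    for (auto v : views) result += v;
    return result;
}

// Parse an integer; empty optional on any garbage, matching the new behaviour.
template<class N>
std::optional<N> string2Int(std::string_view s)
{
    if (s.substr(0, 1) == "-" && !std::numeric_limits<N>::is_signed)
        return std::nullopt;
    try {
        return boost::lexical_cast<N>(s.data(), s.size());
    } catch (const boost::bad_lexical_cast &) {
        return std::nullopt;
    }
}

// Accept an optional binary-unit suffix, e.g. "64K" -> 65536.
template<class N>
std::optional<N> string2IntWithUnitPrefix(std::string_view s)
{
    N multiplier = 1;
    if (!s.empty()) {
        char u = std::toupper(static_cast<unsigned char>(s.back()));
        if (u == 'K' || u == 'M' || u == 'G' || u == 'T') {
            if (u == 'K') multiplier = N(1) << 10;
            else if (u == 'M') multiplier = N(1) << 20;
            else if (u == 'G') multiplier = N(1) << 30;
            else multiplier = N(1) << 40;
            s.remove_suffix(1);
        }
    }
    if (auto n = string2Int<N>(s)) return *n * multiplier;
    return std::nullopt;
}

int main()
{
    assert(concatStrings("foo", std::string("/"), "bar") == "foo/bar");
    assert(string2Int<int>("42") == 42);
    assert(!string2Int<unsigned>("-1"));
    assert(string2IntWithUnitPrefix<long long>("64K") == 65536);
}
```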

View file

@ -292,7 +292,7 @@ static void main_nix_build(int argc, char * * argv)
else
for (auto i : left) {
if (fromArgs)
exprs.push_back(state->parseExprFromString(i, absPath(".")));
exprs.push_back(state->parseExprFromString(std::move(i), absPath(".")));
else {
auto absolute = i;
try {

View file

@ -32,7 +32,7 @@ struct CmdAddToStore : MixDryRun, StoreCommand
StringSink sink;
dumpPath(path, sink);
auto narHash = hashString(htSHA256, *sink.s);
auto narHash = hashString(htSHA256, sink.s);
Hash hash = narHash;
if (ingestionMethod == FileIngestionMethod::Flat) {
@ -45,14 +45,14 @@ struct CmdAddToStore : MixDryRun, StoreCommand
store->makeFixedOutputPath(ingestionMethod, hash, *namePart),
narHash,
};
info.narSize = sink.s->size();
info.narSize = sink.s.size();
info.ca = std::optional { FixedOutputHash {
.method = ingestionMethod,
.hash = hash,
} };
if (!dryRun) {
auto source = StringSource { *sink.s };
auto source = StringSource(sink.s);
store->addToStore(info, source);
}

View file

@ -19,7 +19,7 @@ struct InstallableDerivedPath : Installable
}
std::string what() override { return derivedPath.to_string(*store); }
std::string what() const override { return derivedPath.to_string(*store); }
DerivedPaths toDerivedPaths() override
{

View file

@ -78,7 +78,7 @@ struct CmdCatNar : StoreCommand, MixCat
void run(ref<Store> store) override
{
cat(makeNarAccessor(make_ref<std::string>(readFile(narPath))));
cat(makeNarAccessor(readFile(narPath)));
}
};

View file

@ -1,17 +1,11 @@
#include "command.hh"
#include "shared.hh"
#include "store-api.hh"
#include "sync.hh"
#include "thread-pool.hh"
#include <atomic>
using namespace nix;
struct CmdCopy : BuiltPathsCommand
struct CmdCopy : virtual CopyCommand, virtual BuiltPathsCommand
{
std::string srcUri, dstUri;
CheckSigsFlag checkSigs = CheckSigs;
SubstituteFlag substitute = NoSubstitute;
@ -21,20 +15,6 @@ struct CmdCopy : BuiltPathsCommand
CmdCopy()
: BuiltPathsCommand(true)
{
addFlag({
.longName = "from",
.description = "URL of the source Nix store.",
.labels = {"store-uri"},
.handler = {&srcUri},
});
addFlag({
.longName = "to",
.description = "URL of the destination Nix store.",
.labels = {"store-uri"},
.handler = {&dstUri},
});
addFlag({
.longName = "no-check-sigs",
.description = "Do not require that paths are signed by trusted keys.",
@ -65,22 +45,9 @@ struct CmdCopy : BuiltPathsCommand
Category category() override { return catSecondary; }
ref<Store> createStore() override
{
return srcUri.empty() ? StoreCommand::createStore() : openStore(srcUri);
}
void run(ref<Store> store) override
{
if (srcUri.empty() && dstUri.empty())
throw UsageError("you must pass '--from' and/or '--to'");
BuiltPathsCommand::run(store);
}
void run(ref<Store> srcStore, BuiltPaths && paths) override
{
ref<Store> dstStore = dstUri.empty() ? openStore() : openStore(dstUri);
auto dstStore = getDstStore();
RealisedPath::Set stuffToCopy;

View file

@ -137,15 +137,6 @@ Currently the `type` attribute can be one of the following:
*path* must be a directory in the file system containing a file
named `flake.nix`.
If the directory or any of its parents is a Git repository, then
this is essentially equivalent to `git+file://<path>` (see below),
except that the `dir` parameter is derived automatically. For
example, if `/foo/bar` is a Git repository, then the flake reference
`/foo/bar/flake` is equivalent to `/foo/bar?dir=flake`.
If the directory is not inside a Git repository, then the flake
contents is the entire contents of *path*.
*path* generally must be an absolute path. However, on the command
line, it can be a relative path (e.g. `.` or `./foo`) which is
interpreted as relative to the current directory. In this case, it

View file

@ -157,7 +157,7 @@ struct CmdLsNar : Command, MixLs
void run() override
{
list(makeNarAccessor(make_ref<std::string>(readFile(narPath))));
list(makeNarAccessor(readFile(narPath)));
}
};

View file

@ -59,7 +59,6 @@ struct HelpRequested { };
struct NixArgs : virtual MultiCommand, virtual MixCommonArgs
{
bool printBuildLogs = false;
bool useNet = true;
bool refresh = false;
bool showVersion = false;
@ -270,11 +269,15 @@ void mainWrapped(int argc, char * * argv)
if (legacy) return legacy(argc, argv);
}
verbosity = lvlNotice;
settings.verboseBuild = false;
evalSettings.pureEval = true;
setLogFormat("bar");
settings.verboseBuild = false;
if (isatty(STDERR_FILENO)) {
verbosity = lvlNotice;
} else {
verbosity = lvlInfo;
}
Finally f([] { logger->stop(); });
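
The default verbosity now depends on whether stderr is a terminal: interactive runs keep the quieter `notice` level, while redirected output gets `info`. The decision itself is just an `isatty()` check; a minimal standalone illustration of that behaviour (the enum values are placeholders for the real ones):

```cpp
#include <cstdio>
#include <unistd.h>

enum Verbosity { lvlError, lvlWarn, lvlNotice, lvlInfo };

int main()
{
    // Interactive terminal: keep output terse; piped/redirected stderr: be
    // chattier, since the output is usually meant to be read later as a log.
    Verbosity verbosity = isatty(STDERR_FILENO) ? lvlNotice : lvlInfo;

    std::fprintf(stderr, "default verbosity = %d\n", static_cast<int>(verbosity));
}
```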

View file

@ -61,10 +61,10 @@ struct CmdMakeContentAddressable : StorePathsCommand, MixJSON
}
}
*sink.s = rewriteStrings(*sink.s, rewrites);
sink.s = rewriteStrings(sink.s, rewrites);
HashModuloSink hashModuloSink(htSHA256, oldHashPart);
hashModuloSink(*sink.s);
hashModuloSink(sink.s);
auto narHash = hashModuloSink.finish().first;
@ -74,7 +74,7 @@ struct CmdMakeContentAddressable : StorePathsCommand, MixJSON
};
info.references = std::move(references);
if (hasSelfReference) info.references.insert(info.path);
info.narSize = sink.s->size();
info.narSize = sink.s.size();
info.ca = FixedOutputHash {
.method = FileIngestionMethod::Recursive,
.hash = info.narHash,
@ -85,7 +85,7 @@ struct CmdMakeContentAddressable : StorePathsCommand, MixJSON
auto source = sinkToSource([&](Sink & nextSink) {
RewritingSink rsink2(oldHashPart, std::string(info.path.hashPart()), nextSink);
rsink2(*sink.s);
rsink2(sink.s);
rsink2.flush();
});

View file

@ -57,9 +57,47 @@ the Nix store. Here are the recognised types of installables:
These have the form *flakeref*[`#`*attrpath*], where *flakeref* is a
flake reference and *attrpath* is an optional attribute path. For
more information on flakes, see [the `nix flake` manual
page](./nix3-flake.md). Flake references are most commonly a flake
identifier in the flake registry (e.g. `nixpkgs`) or a path
(e.g. `/path/to/my-flake` or `.`).
page](./nix3-flake.md). Flake references are most commonly a flake
identifier in the flake registry (e.g. `nixpkgs`), or a raw path
(e.g. `/path/to/my-flake` or `.` or `../foo`), or a full URL
(e.g. `github:nixos/nixpkgs` or `path:.`).
When the flake reference is a raw path (a path without any URL
scheme), it is interpreted as a `path:` or `git+file:` URL in the following
way:
- If the path is within a Git repository, then the URL will be of the form
`git+file://[GIT_REPO_ROOT]?dir=[RELATIVE_FLAKE_DIR_PATH]`
where `GIT_REPO_ROOT` is the path to the root of the git repository,
and `RELATIVE_FLAKE_DIR_PATH` is the path (relative to the repository
root) of the closest parent of the given path that contains a `flake.nix` within
the git repository.
If no such directory exists, then Nix will error out.
Note that the search will only include files indexed by git. In particular, files
which are matched by `.gitignore` or have never been `git add`-ed will not be
available in the flake. If this is undesirable, specify `path:<directory>` explicitly.
For example, if `/foo/bar` is a git repository with the following structure:
```
.
└── baz
├── blah
│  └── file.txt
└── flake.nix
```
Then `/foo/bar/baz/blah` will resolve to `git+file:///foo/bar?dir=baz`
- If the supplied path is not inside a git repository, then the URL will have
the form `path:FLAKE_DIR_PATH` where `FLAKE_DIR_PATH` is the closest parent
of the supplied path that contains a `flake.nix` file (within the same filesystem).
If no such directory exists, then Nix will error out.
For example, if `/foo/bar/flake.nix` exists, then `/foo/bar/baz/` will resolve to
`path:/foo/bar`
If *attrpath* is omitted, Nix tries some default values; for most
subcommands, the default is `defaultPackage.`*system*
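
The lookup described above — walk up from the given path to the nearest `flake.nix`, then pick `git+file:` or `path:` depending on whether a git repository was found — can be summarised in a few lines. A simplified, self-contained sketch of that resolution logic, not Nix's actual implementation; it ignores `.gitignore`/index filtering and filesystem boundaries:

```cpp
#include <filesystem>
#include <optional>
#include <stdexcept>
#include <string>

namespace fs = std::filesystem;

// Resolve a raw path to a flake URL, per the rules described above (simplified).
std::string resolveRawPath(fs::path p)
{
    p = fs::absolute(p);

    std::optional<fs::path> flakeDir, gitRoot;

    for (fs::path dir = p; ; dir = dir.parent_path()) {
        if (!flakeDir && fs::exists(dir / "flake.nix")) flakeDir = dir;
        if (!gitRoot && fs::exists(dir / ".git")) gitRoot = dir;
        if (dir == dir.root_path()) break;
    }

    if (!flakeDir)
        throw std::runtime_error("could not find flake.nix in '" + p.string() + "' or any parent");

    // Simplification: assumes that, when a git repo was found, flake.nix lies inside it.
    if (gitRoot) {
        auto rel = fs::relative(*flakeDir, *gitRoot).string();
        std::string url = "git+file://" + gitRoot->string();
        if (!rel.empty() && rel != ".") url += "?dir=" + rel;
        return url;
    }

    return "path:" + flakeDir->string();
}
```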

View file

@ -157,17 +157,17 @@ struct ProfileManifest
StringSink sink;
dumpPath(tempDir, sink);
auto narHash = hashString(htSHA256, *sink.s);
auto narHash = hashString(htSHA256, sink.s);
ValidPathInfo info {
store->makeFixedOutputPath(FileIngestionMethod::Recursive, narHash, "profile", references),
narHash,
};
info.references = std::move(references);
info.narSize = sink.s->size();
info.narSize = sink.s.size();
info.ca = FixedOutputHash { .method = FileIngestionMethod::Recursive, .hash = info.narHash };
auto source = StringSource { *sink.s };
StringSource source(sink.s);
store->addToStore(info, source);
return std::move(info.path);

View file

@ -703,7 +703,7 @@ void NixRepl::addVarToScope(const Symbol & name, Value & v)
Expr * NixRepl::parseString(string s)
{
Expr * e = state->parseExprFromString(s, curDir, staticEnv);
Expr * e = state->parseExprFromString(std::move(s), curDir, staticEnv);
return e;
}

46
src/nix/store-copy-log.cc Normal file
View file

@ -0,0 +1,46 @@
#include "command.hh"
#include "shared.hh"
#include "store-api.hh"
#include "sync.hh"
#include "thread-pool.hh"
#include <atomic>
using namespace nix;
struct CmdCopyLog : virtual CopyCommand, virtual InstallablesCommand
{
std::string description() override
{
return "copy build logs between Nix stores";
}
std::string doc() override
{
return
#include "store-copy-log.md"
;
}
Category category() override { return catUtility; }
void run(ref<Store> srcStore) override
{
auto dstStore = getDstStore();
StorePathSet drvPaths;
for (auto & i : installables)
for (auto & drvPath : i->toDrvPaths(getEvalStore()))
drvPaths.insert(drvPath);
for (auto & drvPath : drvPaths) {
if (auto log = srcStore->getBuildLog(drvPath))
dstStore->addBuildLog(drvPath, *log);
else
throw Error("build log for '%s' is not available", srcStore->printStorePath(drvPath));
}
}
};
static auto rCmdCopyLog = registerCommand2<CmdCopyLog>({"store", "copy-log"});

33
src/nix/store-copy-log.md Normal file
View file

@ -0,0 +1,33 @@
R""(
# Examples
* To copy the build log of the `hello` package from
https://cache.nixos.org to the local store:
```console
# nix store copy-log --from https://cache.nixos.org --eval-store auto nixpkgs#hello
```
You can verify that the log is available locally:
```console
# nix log --substituters '' nixpkgs#hello
```
(The flag `--substituters ''` avoids querying
`https://cache.nixos.org` for the log.)
* To copy the log for a specific store derivation via SSH:
```console
# nix store copy-log --to ssh-ng://machine /nix/store/ilgm50plpmcgjhcp33z6n4qbnpqfhxym-glibc-2.33-59.drv
```
# Description
`nix store copy-log` copies build logs between two Nix stores. The
source store is specified using `--from` and the destination using
`--to`. If one of these is omitted, it defaults to the local store.
)""

View file

@ -140,7 +140,7 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand
auto state = std::make_unique<EvalState>(Strings(), store);
auto v = state->allocValue();
state->eval(state->parseExprFromString(*res.data, "/no-such-path"), *v);
state->eval(state->parseExprFromString(res.data, "/no-such-path"), *v);
Bindings & bindings(*state->allocBindings(0));
auto v2 = findAlongAttrPath(*state, settings.thisSystem, bindings, *v).first;

View file

@ -31,6 +31,7 @@ struct CmdWhyDepends : SourceExprCommand
{
std::string _package, _dependency;
bool all = false;
bool precise = false;
CmdWhyDepends()
{
@ -56,6 +57,12 @@ struct CmdWhyDepends : SourceExprCommand
.description = "Show all edges in the dependency graph leading from *package* to *dependency*, rather than just a shortest path.",
.handler = {&all, true},
});
addFlag({
.longName = "precise",
.description = "For each edge in the dependency graph, show the files in the parent that cause the dependency.",
.handler = {&precise, true},
});
}
std::string description() override
@ -158,11 +165,19 @@ struct CmdWhyDepends : SourceExprCommand
auto pathS = store->printStorePath(node.path);
assert(node.dist != inf);
logger->cout("%s%s%s%s" ANSI_NORMAL,
firstPad,
node.visited ? "\e[38;5;244m" : "",
firstPad != "" ? "" : "",
pathS);
if (precise) {
logger->cout("%s%s%s%s" ANSI_NORMAL,
firstPad,
node.visited ? "\e[38;5;244m" : "",
firstPad != "" ? "" : "",
pathS);
} else {
logger->cout("%s%s%s%s" ANSI_NORMAL,
firstPad,
node.visited ? "\e[38;5;244m" : "",
firstPad != "" ? treeLast : "",
pathS);
}
if (node.path == dependencyPath && !all
&& packagePath != dependencyPath)
@ -237,9 +252,8 @@ struct CmdWhyDepends : SourceExprCommand
// FIXME: should use scanForReferences().
visitPath(pathS);
if (precise) visitPath(pathS);
RunPager pager;
for (auto & ref : refs) {
std::string hash(ref.second->path.hashPart());
@ -259,6 +273,7 @@ struct CmdWhyDepends : SourceExprCommand
}
};
RunPager pager;
try {
printNode(graph.at(packagePath), "", "");
} catch (BailOut & ) { }

View file

@ -14,6 +14,17 @@ outPath=$(nix-build dependencies.nix --no-out-link)
nix copy --to file://$cacheDir $outPath
# Test copying build logs to the binary cache.
nix log --store file://$cacheDir $outPath 2>&1 | grep 'is not available'
nix store copy-log --to file://$cacheDir $outPath
nix log --store file://$cacheDir $outPath | grep FOO
rm -rf $TEST_ROOT/var/log/nix
nix log $outPath 2>&1 | grep 'is not available'
nix log --substituters file://$cacheDir $outPath | grep FOO
# Test copying build logs from the binary cache.
nix store copy-log --from file://$cacheDir $(nix-store -qd $outPath)
nix log $outPath | grep FOO
basicDownloadTests() {
# No uploading tests because upload with force HTTP doesn't work.

View file

@ -4,7 +4,7 @@
mkdir $out
echo $(cat $input1/foo)$(cat $input2/bar) > $out/foobar
ln -s $input2 $out/input-2
ln -s $input2 $out/reference-to-input-2
# Self-reference.
ln -s $out $out/self

View file

@ -27,6 +27,8 @@ let {
input1 = input1 + "/.";
input2 = "${input2}/.";
input1_drv = input1;
input2_drv = input2;
input0_drv = input0;
meta.description = "Random test package";
};

45
tests/flake-searching.sh Normal file
View file

@ -0,0 +1,45 @@
source common.sh
clearStore
cp ./simple.nix ./simple.builder.sh ./config.nix $TEST_HOME
cd $TEST_HOME
mkdir -p foo/subdir
echo '{ outputs = _: {}; }' > foo/flake.nix
cat <<EOF > flake.nix
{
inputs.foo.url = "$PWD/foo";
outputs = a: {
defaultPackage.$system = import ./simple.nix;
packages.$system.test = import ./simple.nix;
};
}
EOF
mkdir subdir
pushd subdir
success=("" . .# .#test ../subdir ../subdir#test "$PWD")
failure=("path:$PWD")
for i in "${success[@]}"; do
nix build $i || fail "flake should be found by searching up directories"
done
for i in "${failure[@]}"; do
! nix build $i || fail "flake should not search up directories when using 'path:'"
done
popd
nix build --override-input foo . || fail "flake should search up directories when not an installable"
sed "s,$PWD/foo,$PWD/foo/subdir,g" -i flake.nix
! nix build || fail "flake should not search upwards when part of inputs"
pushd subdir
git init
for i in "${success[@]}" "${failure[@]}"; do
! nix build $i || fail "flake should not search past a git repository"
done
rm -rf .git
popd

View file

@ -18,7 +18,7 @@ if nix-store --gc --print-dead | grep -E $outPath$; then false; fi
nix-store --gc --print-dead
inUse=$(readLink $outPath/input-2)
inUse=$(readLink $outPath/reference-to-input-2)
if nix-store --delete $inUse; then false; fi
test -e $inUse
@ -35,7 +35,7 @@ nix-collect-garbage
# Check that the root and its dependencies haven't been deleted.
cat $outPath/foobar
cat $outPath/input-2/bar
cat $outPath/reference-to-input-2/bar
# Check that the derivation has been GC'd.
if test -e $drvPath; then false; fi

View file

@ -47,6 +47,7 @@ nix_tests = \
describe-stores.sh \
flakes.sh \
flake-local-settings.sh \
flake-searching.sh \
build.sh \
repl.sh ca/repl.sh \
ca/build.sh \
@ -60,7 +61,8 @@ nix_tests = \
ca/concurrent-builds.sh \
ca/nix-copy.sh \
eval-store.sh \
readfile-context.sh
readfile-context.sh \
why-depends.sh
# parallel.sh
ifeq ($(HAVE_LIBCPUID), 1)

21
tests/why-depends.sh Normal file
View file

@ -0,0 +1,21 @@
source common.sh
clearStore
cp ./dependencies.nix ./dependencies.builder0.sh ./config.nix $TEST_HOME
cd $TEST_HOME
nix-build ./dependencies.nix -A input0_drv -o dep
nix-build ./dependencies.nix -o toplevel
FAST_WHY_DEPENDS_OUTPUT=$(nix why-depends ./toplevel ./dep)
PRECISE_WHY_DEPENDS_OUTPUT=$(nix why-depends ./toplevel ./dep --precise)
# Both outputs should show that `input-2` is in the dependency chain
echo "$FAST_WHY_DEPENDS_OUTPUT" | grep -q input-2
echo "$PRECISE_WHY_DEPENDS_OUTPUT" | grep -q input-2
# But only the “precise” one should refer to `reference-to-input-2`
echo "$FAST_WHY_DEPENDS_OUTPUT" | (! grep -q reference-to-input-2)
echo "$PRECISE_WHY_DEPENDS_OUTPUT" | grep -q reference-to-input-2