Merge remote-tracking branch 'origin/master' into flakes
commit 6fadb3fc03

@@ -37,6 +37,9 @@ channel is just a URL that points to a place containing a set of Nix
 expressions. <phrase condition="manual">See also <xref
 linkend="sec-channels" />.</phrase></para>

+<para>To see the list of official NixOS channels, visit <link
+xlink:href="https://nixos.org/channels" />.</para>
+
 <para>This command has the following operations:

 <variablelist>

@@ -18,6 +18,9 @@ linkend="sec-nix-channel"><command>nix-channel</command></link> you
 can automatically stay up to date with whatever is available at that
 URL.</para>

+<para>To see the list of official NixOS channels, visit <link
+xlink:href="https://nixos.org/channels" />.</para>
+
 <para>You can “subscribe” to a channel using
 <command>nix-channel --add</command>, e.g.,

@@ -33,9 +33,13 @@ incompatible changes:</para>
 </listitem>

 <listitem>
-<para>The installer now enables sandboxing by default on
-Linux. The <literal>max-jobs</literal> setting now defaults to
-1.</para>
+<para>The installer now enables sandboxing by default on Linux when the
+system has the necessary kernel support.
+</para>
 </listitem>

+<listitem>
+<para>The <literal>max-jobs</literal> setting now defaults to 1.</para>
+</listitem>
+
 <listitem>

@@ -82,11 +86,6 @@ incompatible changes:</para>
 the duration of Nix function calls to stderr.</para>
 </listitem>

-<listitem>
-<para>On Linux, sandboxing is now disabled by default on systems
-that don’t have the necessary kernel support.</para>
-</listitem>
-
 </itemizedlist>

 </section>

@@ -354,6 +354,8 @@
 name = "nix-build";
 src = self.hydraJobs.tarball;

+enableParallelBuilding = true;
+
 buildInputs = buildDeps;

 dontInstall = false;

@@ -366,6 +368,9 @@
 # syntax-check generated dot files, it still requires some
 # fonts. So provide those.
 FONTCONFIG_FILE = texFunctions.fontsConf;
+
+# To test building without precompiled headers.
+makeFlagsArray = [ "PRECOMPILE_HEADERS=0" ];
 };

 # System tests.

@@ -1,6 +1,6 @@
 { useClang ? false }:

-with import (builtins.fetchTarball https://github.com/NixOS/nixpkgs/archive/bb1013511e1e5edcf314df8321acf2f3c536df0d.tar.gz) {};
+with import (builtins.fetchTarball https://github.com/NixOS/nixpkgs/archive/nixos-19.09.tar.gz) {};

 with import ./release-common.nix { inherit pkgs; };

@@ -566,9 +566,9 @@ LocalNoInlineNoReturn(void throwTypeError(const char * s, const ExprLambda & fun
     throw TypeError(format(s) % fun.showNamePos() % s2 % pos);
 }

-LocalNoInlineNoReturn(void throwAssertionError(const char * s, const Pos & pos))
+LocalNoInlineNoReturn(void throwAssertionError(const char * s, const string & s1, const Pos & pos))
 {
-    throw AssertionError(format(s) % pos);
+    throw AssertionError(format(s) % s1 % pos);
 }

 LocalNoInlineNoReturn(void throwUndefinedVarError(const char * s, const string & s1, const Pos & pos))

@@ -1302,8 +1302,11 @@ void ExprIf::eval(EvalState & state, Env & env, Value & v)

 void ExprAssert::eval(EvalState & state, Env & env, Value & v)
 {
-    if (!state.evalBool(env, cond, pos))
-        throwAssertionError("assertion failed at %1%", pos);
+    if (!state.evalBool(env, cond, pos)) {
+        std::ostringstream out;
+        cond->show(out);
+        throwAssertionError("assertion '%1%' failed at %2%", out.str(), pos);
+    }
     body->eval(state, env, v);
 }

@@ -1,149 +1,161 @@
 #include "json-to-value.hh"

-#include <cstring>
+#include <variant>
+#include <nlohmann/json.hpp>

+using json = nlohmann::json;
+using std::unique_ptr;
+
 namespace nix {

-static void skipWhitespace(const char * & s)
-{
-    while (*s == ' ' || *s == '\t' || *s == '\n' || *s == '\r') s++;
-}
-
-
-static string parseJSONString(const char * & s)
-{
-    string res;
-    if (*s++ != '"') throw JSONParseError("expected JSON string");
-    while (*s != '"') {
-        if (!*s) throw JSONParseError("got end-of-string in JSON string");
-        if (*s == '\\') {
-            s++;
-            if (*s == '"') res += '"';
-            else if (*s == '\\') res += '\\';
-            else if (*s == '/') res += '/';
-            else if (*s == '/') res += '/';
-            else if (*s == 'b') res += '\b';
-            else if (*s == 'f') res += '\f';
-            else if (*s == 'n') res += '\n';
-            else if (*s == 'r') res += '\r';
-            else if (*s == 't') res += '\t';
-            else if (*s == 'u') throw JSONParseError("\\u characters in JSON strings are currently not supported");
-            else throw JSONParseError("invalid escaped character in JSON string");
-            s++;
-        } else
-            res += *s++;
-    }
-    s++;
-    return res;
-}
-
-
-static void parseJSON(EvalState & state, const char * & s, Value & v)
-{
-    skipWhitespace(s);
-
-    if (!*s) throw JSONParseError("expected JSON value");
-
-    if (*s == '[') {
-        s++;
-        ValueVector values;
-        values.reserve(128);
-        skipWhitespace(s);
-        while (1) {
-            if (values.empty() && *s == ']') break;
-            Value * v2 = state.allocValue();
-            parseJSON(state, s, *v2);
-            values.push_back(v2);
-            skipWhitespace(s);
-            if (*s == ']') break;
-            if (*s != ',') throw JSONParseError("expected ',' or ']' after JSON array element");
-            s++;
-        }
-        s++;
-        state.mkList(v, values.size());
-        for (size_t n = 0; n < values.size(); ++n)
-            v.listElems()[n] = values[n];
-    }
-
-    else if (*s == '{') {
-        s++;
-        ValueMap attrs;
-        while (1) {
-            skipWhitespace(s);
-            if (attrs.empty() && *s == '}') break;
-            string name = parseJSONString(s);
-            skipWhitespace(s);
-            if (*s != ':') throw JSONParseError("expected ':' in JSON object");
-            s++;
-            Value * v2 = state.allocValue();
-            parseJSON(state, s, *v2);
-            attrs[state.symbols.create(name)] = v2;
-            skipWhitespace(s);
-            if (*s == '}') break;
-            if (*s != ',') throw JSONParseError("expected ',' or '}' after JSON member");
-            s++;
-        }
-        state.mkAttrs(v, attrs.size());
-        for (auto & i : attrs)
-            v.attrs->push_back(Attr(i.first, i.second));
-        v.attrs->sort();
-        s++;
-    }
-
-    else if (*s == '"') {
-        mkString(v, parseJSONString(s));
-    }
-
-    else if (isdigit(*s) || *s == '-' || *s == '.' ) {
-        // Buffer into a string first, then use built-in C++ conversions
-        std::string tmp_number;
-        ValueType number_type = tInt;
-
-        while (isdigit(*s) || *s == '-' || *s == '.' || *s == 'e' || *s == 'E') {
-            if (*s == '.' || *s == 'e' || *s == 'E')
-                number_type = tFloat;
-            tmp_number += *s++;
-        }
-
-        try {
-            if (number_type == tFloat)
-                mkFloat(v, stod(tmp_number));
-            else
-                mkInt(v, stol(tmp_number));
-        } catch (std::invalid_argument & e) {
-            throw JSONParseError("invalid JSON number");
-        } catch (std::out_of_range & e) {
-            throw JSONParseError("out-of-range JSON number");
-        }
-    }
-
-    else if (strncmp(s, "true", 4) == 0) {
-        s += 4;
-        mkBool(v, true);
-    }
-
-    else if (strncmp(s, "false", 5) == 0) {
-        s += 5;
-        mkBool(v, false);
-    }
-
-    else if (strncmp(s, "null", 4) == 0) {
-        s += 4;
-        mkNull(v);
-    }
-
-    else throw JSONParseError("unrecognised JSON value");
-}
+// for more information, refer to
+// https://github.com/nlohmann/json/blob/master/include/nlohmann/detail/input/json_sax.hpp
+class JSONSax : nlohmann::json_sax<json> {
+    class JSONState {
+    protected:
+        unique_ptr<JSONState> parent;
+        Value * v;
+    public:
+        virtual unique_ptr<JSONState> resolve(EvalState &)
+        {
+            throw std::logic_error("tried to close toplevel json parser state");
+        };
+        explicit JSONState(unique_ptr<JSONState>&& p) : parent(std::move(p)), v(nullptr) {};
+        explicit JSONState(Value* v) : v(v) {};
+        JSONState(JSONState& p) = delete;
+        Value& value(EvalState & state)
+        {
+            if (v == nullptr)
+                v = state.allocValue();
+            return *v;
+        };
+        virtual ~JSONState() {};
+        virtual void add() {};
+    };
+
+    class JSONObjectState : public JSONState {
+        using JSONState::JSONState;
+        ValueMap attrs = ValueMap();
+        virtual unique_ptr<JSONState> resolve(EvalState & state) override
+        {
+            Value& v = parent->value(state);
+            state.mkAttrs(v, attrs.size());
+            for (auto & i : attrs)
+                v.attrs->push_back(Attr(i.first, i.second));
+            return std::move(parent);
+        }
+        virtual void add() override { v = nullptr; };
+    public:
+        void key(string_t& name, EvalState & state)
+        {
+            attrs[state.symbols.create(name)] = &value(state);
+        }
+    };
+
+    class JSONListState : public JSONState {
+        ValueVector values = ValueVector();
+        virtual unique_ptr<JSONState> resolve(EvalState & state) override
+        {
+            Value& v = parent->value(state);
+            state.mkList(v, values.size());
+            for (size_t n = 0; n < values.size(); ++n) {
+                v.listElems()[n] = values[n];
+            }
+            return std::move(parent);
+        }
+        virtual void add() override {
+            values.push_back(v);
+            v = nullptr;
+        };
+    public:
+        JSONListState(unique_ptr<JSONState>&& p, std::size_t reserve) : JSONState(std::move(p))
+        {
+            values.reserve(reserve);
+        }
+    };
+
+    EvalState & state;
+    unique_ptr<JSONState> rs;
+
+    template<typename T, typename... Args> inline bool handle_value(T f, Args... args)
+    {
+        f(rs->value(state), args...);
+        rs->add();
+        return true;
+    }
+
+public:
+    JSONSax(EvalState & state, Value & v) : state(state), rs(new JSONState(&v)) {};
+
+    bool null()
+    {
+        return handle_value(mkNull);
+    }
+
+    bool boolean(bool val)
+    {
+        return handle_value(mkBool, val);
+    }
+
+    bool number_integer(number_integer_t val)
+    {
+        return handle_value(mkInt, val);
+    }
+
+    bool number_unsigned(number_unsigned_t val)
+    {
+        return handle_value(mkInt, val);
+    }
+
+    bool number_float(number_float_t val, const string_t& s)
+    {
+        return handle_value(mkFloat, val);
+    }
+
+    bool string(string_t& val)
+    {
+        return handle_value<void(Value&, const char*)>(mkString, val.c_str());
+    }
+
+    bool start_object(std::size_t len)
+    {
+        rs = std::make_unique<JSONObjectState>(std::move(rs));
+        return true;
+    }
+
+    bool key(string_t& name)
+    {
+        dynamic_cast<JSONObjectState*>(rs.get())->key(name, state);
+        return true;
+    }
+
+    bool end_object() {
+        rs = rs->resolve(state);
+        rs->add();
+        return true;
+    }
+
+    bool end_array() {
+        return end_object();
+    }
+
+    bool start_array(size_t len) {
+        rs = std::make_unique<JSONListState>(std::move(rs),
+            len != std::numeric_limits<size_t>::max() ? len : 128);
+        return true;
+    }
+
+    bool parse_error(std::size_t, const std::string&, const nlohmann::detail::exception& ex) {
+        throw JSONParseError(ex.what());
+    }
+};

 void parseJSON(EvalState & state, const string & s_, Value & v)
 {
-    const char * s = s_.c_str();
-    parseJSON(state, s, v);
-    skipWhitespace(s);
-    if (*s) throw JSONParseError(format("expected end-of-string while parsing JSON value: %1%") % s);
+    JSONSax parser(state, v);
+    bool res = json::sax_parse(s_, &parser);
+    if (!res)
+        throw JSONParseError("Invalid JSON Value");
 }


 }

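The hunk above replaces the hand-written recursive-descent JSON parser with one driven by nlohmann/json's SAX interface. As a minimal, self-contained sketch of that interface (not part of this commit; it assumes nlohmann/json with the binary() callback, i.e. version 3.8 or later), here is a handler that merely counts events:

    #include <cstdio>
    #include <string>
    #include <nlohmann/json.hpp>

    // Counts parse events; each callback returns true to keep parsing.
    struct CountingSax : nlohmann::json::json_sax_t {
        std::size_t events = 0;
        bool null() override { ++events; return true; }
        bool boolean(bool) override { ++events; return true; }
        bool number_integer(number_integer_t) override { ++events; return true; }
        bool number_unsigned(number_unsigned_t) override { ++events; return true; }
        bool number_float(number_float_t, const string_t &) override { ++events; return true; }
        bool string(string_t &) override { ++events; return true; }
        bool binary(binary_t &) override { ++events; return true; }
        bool start_object(std::size_t) override { ++events; return true; }
        bool key(string_t &) override { ++events; return true; }
        bool end_object() override { ++events; return true; }
        bool start_array(std::size_t) override { ++events; return true; }
        bool end_array() override { ++events; return true; }
        bool parse_error(std::size_t, const std::string &, const nlohmann::detail::exception & ex) override {
            std::fprintf(stderr, "parse error: %s\n", ex.what());
            return false; // stop parsing on error
        }
    };

    int main() {
        CountingSax sax;
        bool ok = nlohmann::json::sax_parse(R"({"a": [1, 2, "\u2192"]})", &sax);
        std::printf("ok=%d events=%zu\n", ok, sax.events);
    }

The commit's JSONSax handler follows the same shape, but instead of counting it keeps a stack of JSONState objects (JSONObjectState, JSONListState) that resolve into their parent's Value when end_object/end_array fires.
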
@@ -732,6 +732,8 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
        the hash. */
     for (auto & i : outputs) {
         if (!jsonObject) drv.env[i] = "";
+        drv.outputs.insert_or_assign(i,
+            DerivationOutput(StorePath::dummy.clone(), "", ""));
     }

     Hash h = hashDerivationModulo(*state.store, Derivation(drv), true);

@@ -1017,7 +1017,7 @@ DerivationGoal::DerivationGoal(StorePath && drvPath, const StringSet & wantedOut
     , buildMode(buildMode)
 {
     state = &DerivationGoal::getDerivation;
-    name = fmt("building of '%s'", worker.store.printStorePath(drvPath));
+    name = fmt("building of '%s'", worker.store.printStorePath(this->drvPath));
     trace("created");

     mcExpectedBuilds = std::make_unique<MaintainCount<uint64_t>>(worker.expectedBuilds);

@@ -1042,7 +1042,7 @@ DerivationGoal::DerivationGoal(StorePath && drvPath, const BasicDerivation & drv

     /* Prevent the .chroot directory from being
        garbage-collected. (See isActiveTempFile() in gc.cc.) */
-    worker.store.addTempRoot(drvPath);
+    worker.store.addTempRoot(this->drvPath);
 }

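The two one-line changes above (drvPath to this->drvPath) follow the same pattern: the constructor receives the store path as an rvalue reference and moves it into a member, so later statements in the constructor body must read the member rather than the now moved-from parameter. A hedged, self-contained illustration of that pitfall, with hypothetical Goal/path names rather than Nix's real classes:

    #include <iostream>
    #include <string>
    #include <utility>

    struct Goal {
        std::string path;

        explicit Goal(std::string && path)
            : path(std::move(path))   // the parameter is moved into the member...
        {
            // ...so read this->path here; the local `path` may now be empty.
            std::cout << "building " << this->path << "\n";
        }
    };

    int main() {
        Goal g("example-path");
        std::cout << g.path << "\n";
    }
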
@@ -3341,7 +3341,7 @@ void DerivationGoal::runChild()
         ;
     }
 #if __APPLE__
-    else if (getEnv("_NIX_TEST_NO_SANDBOX") == "") {
+    else {
         /* This has to appear before import statements. */
         std::string sandboxProfile = "(version 1)\n";

@@ -3450,25 +3450,31 @@ void DerivationGoal::runChild()
         /* They don't like trailing slashes on subpath directives */
         if (globalTmpDir.back() == '/') globalTmpDir.pop_back();

-        builder = "/usr/bin/sandbox-exec";
-        args.push_back("sandbox-exec");
-        args.push_back("-f");
-        args.push_back(sandboxFile);
-        args.push_back("-D");
-        args.push_back("_GLOBAL_TMP_DIR=" + globalTmpDir);
-        args.push_back("-D");
-        args.push_back("IMPORT_DIR=" + settings.nixDataDir + "/nix/sandbox/");
-        if (allowLocalNetworking) {
-            args.push_back("-D");
-            args.push_back(string("_ALLOW_LOCAL_NETWORKING=1"));
-        }
-        args.push_back(drv->builder);
-    }
-#endif
+        if (getEnv("_NIX_TEST_NO_SANDBOX") != "1") {
+            builder = "/usr/bin/sandbox-exec";
+            args.push_back("sandbox-exec");
+            args.push_back("-f");
+            args.push_back(sandboxFile);
+            args.push_back("-D");
+            args.push_back("_GLOBAL_TMP_DIR=" + globalTmpDir);
+            args.push_back("-D");
+            args.push_back("IMPORT_DIR=" + settings.nixDataDir + "/nix/sandbox/");
+            if (allowLocalNetworking) {
+                args.push_back("-D");
+                args.push_back(string("_ALLOW_LOCAL_NETWORKING=1"));
+            }
+            args.push_back(drv->builder);
+        } else {
+            builder = drv->builder.c_str();
+            args.push_back(std::string(baseNameOf(drv->builder)));
+        }
+    }
+#else
     else {
         builder = drv->builder.c_str();
         args.push_back(std::string(baseNameOf(drv->builder)));
     }
+#endif

     for (auto & i : drv->args)
         args.push_back(rewriteStrings(i, inputRewrites));

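On macOS the change wraps the builder invocation in sandbox-exec only when the _NIX_TEST_NO_SANDBOX test override is not set to "1". A rough sketch of that argv-selection logic in isolation (hypothetical paths and names; the real code also passes further -D definitions for the sandbox profile):

    #include <cstdlib>
    #include <iostream>
    #include <string>
    #include <vector>

    int main() {
        const std::string builderPath = "/bin/sh";          // hypothetical builder
        const std::string sandboxFile = "/tmp/sandbox.sb";  // hypothetical profile

        const char * noSandbox = std::getenv("_NIX_TEST_NO_SANDBOX");
        bool useSandbox = !(noSandbox && std::string(noSandbox) == "1");

        std::string program;
        std::vector<std::string> args;
        if (useSandbox) {
            program = "/usr/bin/sandbox-exec";
            args = { "sandbox-exec", "-f", sandboxFile, builderPath };
        } else {
            program = builderPath;
            args = { "sh" };  // argv[0] is the builder's base name
        }

        std::cout << "would exec " << program << " with " << args.size() << " args\n";
    }
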
@@ -4253,7 +4259,7 @@ SubstitutionGoal::SubstitutionGoal(StorePath && storePath, Worker & worker, Repa
     , repair(repair)
 {
     state = &SubstitutionGoal::init;
-    name = fmt("substitution of '%s'", worker.store.printStorePath(storePath));
+    name = fmt("substitution of '%s'", worker.store.printStorePath(this->storePath));
     trace("created");
     maintainExpectedSubstitutions = std::make_unique<MaintainCount<uint64_t>>(worker.expectedSubstitutions);
 }

@@ -246,30 +246,18 @@ string Derivation::unparse(const Store & store, bool maskOutputs,
     s.reserve(65536);
     s += "Derive([";

-    StringSet maskedOutputs;
-
-    if (maskOutputs) {
-        bool first = true;
-        maskedOutputs = tokenizeString<StringSet>(get(env, "outputs").value_or("out"), " ");
-        for (auto & i : maskedOutputs) {
-            if (first) first = false; else s += ',';
-            s += '('; printString(s, i);
-            s += ",\"\",\"\",\"\")";
-        }
-    } else {
-        bool first = true;
-        for (auto & i : outputs) {
-            if (first) first = false; else s += ',';
-            s += '('; printString(s, i.first);
-            s += ','; printString(s, store.printStorePath(i.second.path));
-            s += ','; printString(s, i.second.hashAlgo);
-            s += ','; printString(s, i.second.hash);
-            s += ')';
-        }
-    }
+    bool first = true;
+    for (auto & i : outputs) {
+        if (first) first = false; else s += ',';
+        s += '('; printString(s, i.first);
+        s += ','; printString(s, maskOutputs ? "" : store.printStorePath(i.second.path));
+        s += ','; printString(s, i.second.hashAlgo);
+        s += ','; printString(s, i.second.hash);
+        s += ')';
+    }

     s += "],[";
-    bool first = true;
+    first = true;
     if (actualInputs) {
         for (auto & i : *actualInputs) {
             if (first) first = false; else s += ',';

@@ -299,7 +287,7 @@ string Derivation::unparse(const Store & store, bool maskOutputs,
     for (auto & i : env) {
         if (first) first = false; else s += ',';
         s += '('; printString(s, i.first);
-        s += ','; printString(s, maskOutputs && maskedOutputs.count(i.first) ? "" : i.second);
+        s += ','; printString(s, maskOutputs && outputs.count(i.first) ? "" : i.second);
         s += ')';
     }

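Both hunks above replace a separate "masked" serialization branch with a single loop that blanks the store path (or the corresponding environment value) inline when maskOutputs is set, which is what lets the derivation be hashed independently of its output paths. A small standalone sketch of that inline-masking idea, with simplified types rather than the real Derivation::unparse:

    #include <iostream>
    #include <map>
    #include <string>

    // Serialize (name, path) pairs, blanking the path when masking is requested.
    static std::string unparseOutputs(const std::map<std::string, std::string> & outputs, bool maskOutputs)
    {
        std::string s = "[";
        bool first = true;
        for (auto & [name, path] : outputs) {
            if (first) first = false; else s += ',';
            s += "(\"" + name + "\",\"" + (maskOutputs ? "" : path) + "\")";
        }
        return s + "]";
    }

    int main() {
        std::map<std::string, std::string> outputs = {
            {"out", "/nix/store/aaaa-example"},
            {"dev", "/nix/store/bbbb-example-dev"},
        };
        std::cout << unparseOutputs(outputs, false) << "\n";
        std::cout << unparseOutputs(outputs, true) << "\n";
    }
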
@@ -443,7 +443,7 @@ void LocalStore::findRuntimeRoots(Roots & roots, bool censor)
     // lsof is really slow on OS X. This actually causes the gc-concurrent.sh test to fail.
     // See: https://github.com/NixOS/nix/issues/3011
     // Because of this we disable lsof when running the tests.
-    if (getEnv("_NIX_TEST_NO_LSOF") == "") {
+    if (getEnv("_NIX_TEST_NO_LSOF") != "1") {
         try {
             std::regex lsofRegex(R"(^n(/.*)$)");
             auto lsofLines =

@@ -7,6 +7,7 @@
 #include <map>
 #include <thread>
 #include <dlfcn.h>
+#include <sys/utsname.h>


 namespace nix {

@@ -4,7 +4,7 @@
 namespace nix {

 NarInfo::NarInfo(const Store & store, const std::string & s, const std::string & whence)
-    : ValidPathInfo(StorePath::make((unsigned char *) "xxxxxxxxxxxxxxxxxxxx", "x")) // FIXME: hack
+    : ValidPathInfo(StorePath::dummy.clone()) // FIXME: hack
 {
     auto corrupt = [&]() {
         throw Error(format("NAR info file '%1%' is corrupt") % whence);

@@ -46,12 +46,15 @@ std::string_view StorePath::name() const
     return ffi_StorePath_name(*this);
 }

+StorePath StorePath::dummy(
+    StorePath::make(
+        (unsigned char *) "xxxxxxxxxxxxxxxxxxxx", "x"));
+
 StorePath Store::parseStorePath(std::string_view path) const
 {
     return StorePath::make(path, storeDir);
 }

-
 StorePathSet Store::parseStorePathSet(const PathSet & paths) const
 {
     StorePathSet res;

@@ -53,6 +53,8 @@ struct StorePath : rust::Value<3 * sizeof(void *) + 24, ffi_StorePath_drop>
     {
         return ffi_StorePath_hash_data(*this);
     }

+    static StorePath dummy;
+
 };

 typedef std::set<StorePath> StorePathSet;

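StorePath::dummy above follows the usual C++ pattern for a static data member: the declaration lives in the header, and exactly one translation unit provides the out-of-line definition (here built via StorePath::make). A generic sketch of that pattern, using a hypothetical Widget type:

    // widget.hh
    #include <string>

    struct Widget {
        std::string name;
        static Widget dummy;   // declaration only; no storage yet
    };

    // widget.cc: the single out-of-line definition that allocates the storage
    Widget Widget::dummy{"xxxxxxxxxxxxxxxxxxxx"};
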
@@ -53,7 +53,7 @@ static int _main(int argc, char * * argv)
 {
     HashType ht = htSHA256;
     std::vector<string> args;
-    bool printPath = getEnv("PRINT_PATH") != "";
+    bool printPath = getEnv("PRINT_PATH") == "1";
     bool fromExpr = false;
     string attrPath;
     bool unpack = false;

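Several hunks in this merge (_NIX_TEST_NO_SANDBOX, _NIX_TEST_NO_LSOF, PRINT_PATH) switch from treating "any non-empty value" as true to requiring the variable to be exactly "1". A hedged helper expressing that convention, using plain std::getenv and a hypothetical name rather than Nix's own getEnv wrapper:

    #include <cstdlib>
    #include <cstring>
    #include <iostream>

    // True only when the variable is set and equal to "1".
    static bool envFlagSet(const char * name)
    {
        const char * value = std::getenv(name);
        return value != nullptr && std::strcmp(value, "1") == 0;
    }

    int main() {
        std::cout << "PRINT_PATH set: " << envFlagSet("PRINT_PATH") << "\n";
    }
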
tests/lang/eval-okay-fromjson-escapes.exp (new file, 1 line)
@@ -0,0 +1 @@
+"quote \" reverse solidus \\ solidus / backspace formfeed newline \n carriage return \r horizontal tab \t 1 char unicode encoded backspace 1 char unicode encoded e with accent é 2 char unicode encoded s with caron š 3 char unicode encoded rightwards arrow →"

tests/lang/eval-okay-fromjson-escapes.nix (new file, 3 lines)
@@ -0,0 +1,3 @@
+# This string contains all supported escapes in a JSON string, per json.org
+# \b and \f are not supported by Nix
+builtins.fromJSON ''"quote \" reverse solidus \\ solidus \/ backspace \b formfeed \f newline \n carriage return \r horizontal tab \t 1 char unicode encoded backspace \u0008 1 char unicode encoded e with accent \u00e9 2 char unicode encoded s with caron \u0161 3 char unicode encoded rightwards arrow \u2192"''

@@ -16,6 +16,8 @@ mkDerivation {

   __structuredAttrs = true;

+  outputs = [ "out" "dev" ];
+
   buildCommand = ''
     set -x

@@ -30,8 +32,9 @@ mkDerivation {
     [[ -v nothing ]]
     [[ -z $nothing ]]

-    mkdir ''${outputs[out]}
+    mkdir ''${outputs[out]} ''${outputs[dev]}
     echo bar > $dest
+    echo foo > $dest2

     json=$(cat .attrs.json)
     [[ $json =~ '"narHash":"sha256:1r7yc43zqnzl5b0als5vnyp649gk17i37s7mj00xr8kc47rjcybk"' ]]

@@ -57,6 +60,7 @@ mkDerivation {
   nothing = null;

   dest = "${placeholder "out"}/foo";
+  dest2 = "${placeholder "dev"}/foo";

   "foo bar" = "BAD";
   "1foobar" = "BAD";

@@ -2,6 +2,7 @@ source common.sh

 clearStore

-outPath=$(nix-build structured-attrs.nix --no-out-link)
+nix-build structured-attrs.nix -A all -o $TEST_ROOT/result

-[[ $(cat $outPath/foo) = bar ]]
+[[ $(cat $TEST_ROOT/result/foo) = bar ]]
+[[ $(cat $TEST_ROOT/result-dev/foo) = foo ]]
