Source complete env in nix-shell with __structuredAttrs = true;
This is needed to push the adoption of structured attrs[1] forward. It is now checked whether a `__json` key exists in the environment map of the derivation to be opened in a `nix-shell`.

Derivations with structured attributes enabled also use a file named `.attrs.json` that contains every environment variable represented as JSON, which is useful for e.g. `exportReferencesGraph`[2]. To provide an environment similar to the build sandbox, `nix-shell` now writes a `.attrs.json` (mostly equal to the one in the build sandbox) to the current working directory and removes it via an exit hook when the shell is closed.

To avoid leaking internals of the build process into the `nix-shell`, the entire logic for generating the JSON and shell code for structured attrs was moved into the `ParsedDerivation` class.

[1] https://nixos.mayflower.consulting/blog/2020/01/20/structured-attrs/
[2] https://nixos.org/manual/nix/unstable/expressions/advanced-attributes.html#advanced-attributes
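As a rough illustration of the intended workflow, based on the test added in this commit (tests/structured-attrs-shell.nix); the file locations and the jq query come from that test, the invocation details and echoed message are assumptions and the command is meant to run inside the Nix test suite's environment:

    # Open a shell for a derivation with __structuredAttrs = true.
    # While the shell is open, .attrs.json is available in the current
    # working directory, just like inside the build sandbox.
    nix-shell structured-attrs-shell.nix --run '
      test -e .attrs.json &&                  # written when the shell starts
      jq ".my.list | length" < .attrs.json    # structured attrs as plain JSON (prints 3)
    '
    # After the shell exits, the exit hook has removed the file again:
    test -e .attrs.json || echo ".attrs.json was cleaned up"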
parent 4b23bf797a
commit 3b5429aec1
@@ -143,7 +143,6 @@ void DerivationGoal::work()
     (this->*state)();
 }
 
-
 void DerivationGoal::addWantedOutputs(const StringSet & outputs)
 {
     /* If we already want all outputs, there is nothing to do. */

@@ -1048,42 +1047,6 @@ HookReply DerivationGoal::tryBuildHook()
 }
 
 
-StorePathSet DerivationGoal::exportReferences(const StorePathSet & storePaths)
-{
-    StorePathSet paths;
-
-    for (auto & storePath : storePaths) {
-        if (!inputPaths.count(storePath))
-            throw BuildError("cannot export references of path '%s' because it is not in the input closure of the derivation", worker.store.printStorePath(storePath));
-
-        worker.store.computeFSClosure({storePath}, paths);
-    }
-
-    /* If there are derivations in the graph, then include their
-       outputs as well. This is useful if you want to do things
-       like passing all build-time dependencies of some path to a
-       derivation that builds a NixOS DVD image. */
-    auto paths2 = paths;
-
-    for (auto & j : paths2) {
-        if (j.isDerivation()) {
-            Derivation drv = worker.store.derivationFromPath(j);
-            for (auto & k : drv.outputsAndOptPaths(worker.store)) {
-                if (!k.second.second)
-                    /* FIXME: I am confused why we are calling
-                       `computeFSClosure` on the output path, rather than
-                       derivation itself. That doesn't seem right to me, so I
-                       won't try to implemented this for CA derivations. */
-                    throw UnimplementedError("exportReferences on CA derivations is not yet implemented");
-                worker.store.computeFSClosure(*k.second.second, paths);
-            }
-        }
-    }
-
-    return paths;
-}
-
-
 void DerivationGoal::registerOutputs()
 {
     /* When using a build hook, the build hook can register the output

@@ -518,7 +518,7 @@ void LocalDerivationGoal::startBuilder()
             /* Write closure info to <fileName>. */
             writeFile(tmpDir + "/" + fileName,
                 worker.store.makeValidityRegistration(
-                    exportReferences({storePath}), false, false));
+                    worker.store.exportReferences({storePath}, inputPaths), false, false));
         }
     }
 

@@ -1084,113 +1084,18 @@ void LocalDerivationGoal::initEnv()
 }
 
 
-static std::regex shVarName("[A-Za-z_][A-Za-z0-9_]*");
-
-
 void LocalDerivationGoal::writeStructuredAttrs()
 {
-    auto structuredAttrs = parsedDrv->getStructuredAttrs();
-    if (!structuredAttrs) return;
-
-    auto json = *structuredAttrs;
-
-    /* Add an "outputs" object containing the output paths. */
-    nlohmann::json outputs;
-    for (auto & i : drv->outputs) {
-        /* The placeholder must have a rewrite, so we use it to cover both the
-           cases where we know or don't know the output path ahead of time. */
-        outputs[i.first] = rewriteStrings(hashPlaceholder(i.first), inputRewrites);
-    }
-    json["outputs"] = outputs;
-
-    /* Handle exportReferencesGraph. */
-    auto e = json.find("exportReferencesGraph");
-    if (e != json.end() && e->is_object()) {
-        for (auto i = e->begin(); i != e->end(); ++i) {
-            std::ostringstream str;
-            {
-                JSONPlaceholder jsonRoot(str, true);
-                StorePathSet storePaths;
-                for (auto & p : *i)
-                    storePaths.insert(worker.store.parseStorePath(p.get<std::string>()));
-                worker.store.pathInfoToJSON(jsonRoot,
-                    exportReferences(storePaths), false, true);
-            }
-            json[i.key()] = nlohmann::json::parse(str.str()); // urgh
-        }
-    }
-
-    writeFile(tmpDir + "/.attrs.json", rewriteStrings(json.dump(), inputRewrites));
-    chownToBuilder(tmpDir + "/.attrs.json");
-
-    /* As a convenience to bash scripts, write a shell file that
-       maps all attributes that are representable in bash -
-       namely, strings, integers, nulls, Booleans, and arrays and
-       objects consisting entirely of those values. (So nested
-       arrays or objects are not supported.) */
-
-    auto handleSimpleType = [](const nlohmann::json & value) -> std::optional<std::string> {
-        if (value.is_string())
-            return shellEscape(value);
-
-        if (value.is_number()) {
-            auto f = value.get<float>();
-            if (std::ceil(f) == f)
-                return std::to_string(value.get<int>());
-        }
-
-        if (value.is_null())
-            return std::string("''");
-
-        if (value.is_boolean())
-            return value.get<bool>() ? std::string("1") : std::string("");
-
-        return {};
-    };
-
-    std::string jsonSh;
-
-    for (auto i = json.begin(); i != json.end(); ++i) {
-
-        if (!std::regex_match(i.key(), shVarName)) continue;
-
-        auto & value = i.value();
-
-        auto s = handleSimpleType(value);
-        if (s)
-            jsonSh += fmt("declare %s=%s\n", i.key(), *s);
-
-        else if (value.is_array()) {
-            std::string s2;
-            bool good = true;
-
-            for (auto i = value.begin(); i != value.end(); ++i) {
-                auto s3 = handleSimpleType(i.value());
-                if (!s3) { good = false; break; }
-                s2 += *s3; s2 += ' ';
-            }
-
-            if (good)
-                jsonSh += fmt("declare -a %s=(%s)\n", i.key(), s2);
-        }
-
-        else if (value.is_object()) {
-            std::string s2;
-            bool good = true;
-
-            for (auto i = value.begin(); i != value.end(); ++i) {
-                auto s3 = handleSimpleType(i.value());
-                if (!s3) { good = false; break; }
-                s2 += fmt("[%s]=%s ", shellEscape(i.key()), *s3);
-            }
-
-            if (good)
-                jsonSh += fmt("declare -A %s=(%s)\n", i.key(), s2);
-        }
-    }
-
-    writeFile(tmpDir + "/.attrs.sh", rewriteStrings(jsonSh, inputRewrites));
-    chownToBuilder(tmpDir + "/.attrs.sh");
+    if (auto structAttrs = parsedDrv->generateStructuredAttrs(inputRewrites, worker.store, inputPaths)) {
+        auto value = structAttrs.value();
+        auto jsonSh = value.first;
+        auto json = value.second;
+
+        writeFile(tmpDir + "/.attrs.sh", rewriteStrings(jsonSh, inputRewrites));
+        chownToBuilder(tmpDir + "/.attrs.sh");
+        writeFile(tmpDir + "/.attrs.json", rewriteStrings(json.dump(), inputRewrites));
+        chownToBuilder(tmpDir + "/.attrs.json");
+    }
 }
 
 

@@ -1,6 +1,8 @@
 #include "parsed-derivations.hh"
 
 #include <nlohmann/json.hpp>
+#include <regex>
+#include "json.hh"
 
 namespace nix {
 

@@ -121,4 +123,112 @@ bool ParsedDerivation::substitutesAllowed() const
     return getBoolAttr("allowSubstitutes", true);
 }
 
+static std::regex shVarName("[A-Za-z_][A-Za-z0-9_]*");
+std::optional<StructuredAttrsWithShellRC> ParsedDerivation::generateStructuredAttrs(
+    std::optional<StringMap> inputRewrites, Store & store, const StorePathSet & inputPaths)
+{
+    auto structuredAttrs = getStructuredAttrs();
+    if (!structuredAttrs) return std::nullopt;
+
+    auto json = *structuredAttrs;
+
+    /* Add an "outputs" object containing the output paths. */
+    nlohmann::json outputs;
+    for (auto & i : drv.outputs) {
+        if (inputRewrites) {
+            /* The placeholder must have a rewrite, so we use it to cover both the
+               cases where we know or don't know the output path ahead of time. */
+            outputs[i.first] = rewriteStrings(hashPlaceholder(i.first), inputRewrites.value());
+        } else {
+            /* This case is only relevant for the nix-shell */
+            outputs[i.first] = hashPlaceholder(i.first);
+        }
+    }
+    json["outputs"] = outputs;
+
+    /* Handle exportReferencesGraph. */
+    auto e = json.find("exportReferencesGraph");
+    if (e != json.end() && e->is_object()) {
+        for (auto i = e->begin(); i != e->end(); ++i) {
+            std::ostringstream str;
+            {
+                JSONPlaceholder jsonRoot(str, true);
+                StorePathSet storePaths;
+                for (auto & p : *i)
+                    storePaths.insert(store.parseStorePath(p.get<std::string>()));
+                store.pathInfoToJSON(jsonRoot,
+                    store.exportReferences(storePaths, inputPaths), false, true);
+            }
+            json[i.key()] = nlohmann::json::parse(str.str()); // urgh
+        }
+    }
+
+    /* As a convenience to bash scripts, write a shell file that
+       maps all attributes that are representable in bash -
+       namely, strings, integers, nulls, Booleans, and arrays and
+       objects consisting entirely of those values. (So nested
+       arrays or objects are not supported.) */
+
+    auto handleSimpleType = [](const nlohmann::json & value) -> std::optional<std::string> {
+        if (value.is_string())
+            return shellEscape(value);
+
+        if (value.is_number()) {
+            auto f = value.get<float>();
+            if (std::ceil(f) == f)
+                return std::to_string(value.get<int>());
+        }
+
+        if (value.is_null())
+            return std::string("''");
+
+        if (value.is_boolean())
+            return value.get<bool>() ? std::string("1") : std::string("");
+
+        return {};
+    };
+
+    std::string jsonSh;
+
+    for (auto i = json.begin(); i != json.end(); ++i) {
+
+        if (!std::regex_match(i.key(), shVarName)) continue;
+
+        auto & value = i.value();
+
+        auto s = handleSimpleType(value);
+        if (s)
+            jsonSh += fmt("declare %s=%s\n", i.key(), *s);
+
+        else if (value.is_array()) {
+            std::string s2;
+            bool good = true;
+
+            for (auto i = value.begin(); i != value.end(); ++i) {
+                auto s3 = handleSimpleType(i.value());
+                if (!s3) { good = false; break; }
+                s2 += *s3; s2 += ' ';
+            }
+
+            if (good)
+                jsonSh += fmt("declare -a %s=(%s)\n", i.key(), s2);
+        }
+
+        else if (value.is_object()) {
+            std::string s2;
+            bool good = true;
+
+            for (auto i = value.begin(); i != value.end(); ++i) {
+                auto s3 = handleSimpleType(i.value());
+                if (!s3) { good = false; break; }
+                s2 += fmt("[%s]=%s ", shellEscape(i.key()), *s3);
+            }
+
+            if (good)
+                jsonSh += fmt("declare -A %s=(%s)\n", i.key(), s2);
+        }
+    }
+
+    return std::make_pair(jsonSh, json);
+}
 }

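For a sense of what the shell half of the returned pair looks like, here is a rough sketch of a generated .attrs.sh for a derivation like the test added later in this commit; the values are illustrative and the "flags" attribute is hypothetical:

    # Simple strings become plain variables (shell-escaped):
    declare name='structured2'
    # Flat lists of simple values become indexed arrays (hypothetical attribute):
    declare -a flags=('-O2' '-g' )
    # Objects of simple values become associative arrays; "outputs" is the object
    # added by generateStructuredAttrs (placeholders or rewritten store paths):
    declare -A outputs=(['out']='<out path or placeholder>' ['dev']='<dev path or placeholder>' )
    # Nested values (e.g. the test's my.list) and keys that are not valid shell
    # names are skipped here; they remain available only in .attrs.json.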
@@ -6,6 +6,8 @@
 
 namespace nix {
 
+typedef std::pair<std::string, nlohmann::json> StructuredAttrsWithShellRC;
+
 class ParsedDerivation
 {
     StorePath drvPath;

@@ -36,6 +38,8 @@ public:
     bool willBuildLocally(Store & localStore) const;
 
     bool substitutesAllowed() const;
+
+    std::optional<StructuredAttrsWithShellRC> generateStructuredAttrs(std::optional<StringMap> inputRewrites, Store & store, const StorePathSet & inputPaths);
 };
 
 }

@@ -627,6 +627,42 @@ string Store::makeValidityRegistration(const StorePathSet & paths,
 }
 
 
+StorePathSet Store::exportReferences(const StorePathSet & storePaths, const StorePathSet & inputPaths)
+{
+    StorePathSet paths;
+
+    for (auto & storePath : storePaths) {
+        if (!inputPaths.count(storePath))
+            throw BuildError("cannot export references of path '%s' because it is not in the input closure of the derivation", printStorePath(storePath));
+
+        computeFSClosure({storePath}, paths);
+    }
+
+    /* If there are derivations in the graph, then include their
+       outputs as well. This is useful if you want to do things
+       like passing all build-time dependencies of some path to a
+       derivation that builds a NixOS DVD image. */
+    auto paths2 = paths;
+
+    for (auto & j : paths2) {
+        if (j.isDerivation()) {
+            Derivation drv = derivationFromPath(j);
+            for (auto & k : drv.outputsAndOptPaths(*this)) {
+                if (!k.second.second)
+                    /* FIXME: I am confused why we are calling
+                       `computeFSClosure` on the output path, rather than
+                       derivation itself. That doesn't seem right to me, so I
+                       won't try to implemented this for CA derivations. */
+                    throw UnimplementedError("exportReferences on CA derivations is not yet implemented");
+                computeFSClosure(*k.second.second, paths);
+            }
+        }
+    }
+
+    return paths;
+}
+
+
 void Store::pathInfoToJSON(JSONPlaceholder & jsonOut, const StorePathSet & storePaths,
     bool includeImpureInfo, bool showClosureSize,
     Base hashBase,

@@ -695,6 +695,8 @@ public:
 
     const Stats & getStats();
 
+    StorePathSet exportReferences(const StorePathSet & storePaths, const StorePathSet & inputPaths);
+
     /* Return the build log of the specified store path, if available,
        or null otherwise. */
    virtual std::shared_ptr<std::string> getBuildLog(const StorePath & path)

@@ -1,10 +1,15 @@
 #include <cstring>
 #include <fstream>
 #include <iostream>
+#include <filesystem>
 #include <regex>
 #include <sstream>
 #include <vector>
+#include <map>
 
+#include <nlohmann/json.hpp>
+
+#include "parsed-derivations.hh"
 #include "store-api.hh"
 #include "local-fs-store.hh"
 #include "globals.hh"

@@ -422,12 +427,41 @@ static void main_nix_build(int argc, char * * argv)
             } else
                 env[var.first] = var.second;
 
+        std::string structuredAttrsRC;
+        std::string exitCmd;
+
+        if (env.count("__json")) {
+            StorePathSet inputs;
+            for (auto & [depDrvPath, wantedDepOutputs] : drv.inputDrvs) {
+                auto outputs = store->queryPartialDerivationOutputMap(depDrvPath);
+                for (auto & i : wantedDepOutputs) {
+                    auto o = outputs.at(i);
+                    store->computeFSClosure(*o, inputs);
+                }
+            }
+
+            ParsedDerivation parsedDrv(
+                StorePath(store->parseStorePath(drvInfo.queryDrvPath())),
+                drv
+            );
+
+            if (auto structAttrs = parsedDrv.generateStructuredAttrs(std::nullopt, *store, inputs)) {
+                auto val = structAttrs.value();
+                structuredAttrsRC = val.first;
+                auto attrsJSON = std::filesystem::current_path().string() + "/.attrs.json";
+                writeFile(attrsJSON, val.second.dump());
+                exitCmd = "\n_rm_attrs_json() { rm -f " + attrsJSON + "; }"
+                    + "\nexitHooks+=(_rm_attrs_json)"
+                    + "\nfailureHooks+=(_rm_attrs_json)\n";
+            }
+        }
+
         /* Run a shell using the derivation's environment. For
            convenience, source $stdenv/setup to setup additional
            environment variables and shell functions. Also don't
            lose the current $PATH directories. */
         auto rcfile = (Path) tmpDir + "/rc";
-        writeFile(rcfile, fmt(
+        std::string rc = fmt(
                 R"(_nix_shell_clean_tmpdir() { rm -rf %1%; }; )"s +
                 (keepTmp ?
                     "trap _nix_shell_clean_tmpdir EXIT; "

@@ -436,8 +470,9 @@ static void main_nix_build(int argc, char * * argv)
                     "_nix_shell_clean_tmpdir; ") +
                 (pure ? "" : "[ -n \"$PS1\" ] && [ -e ~/.bashrc ] && source ~/.bashrc;") +
                 "%2%"
-                "dontAddDisableDepTrack=1; "
-                "[ -e $stdenv/setup ] && source $stdenv/setup; "
+                "dontAddDisableDepTrack=1;\n"
+                + structuredAttrsRC + exitCmd +
+                "\n[ -e $stdenv/setup ] && source $stdenv/setup; "
                 "%3%"
                 "PATH=%4%:\"$PATH\"; "
                 "SHELL=%5%; "

@@ -455,7 +490,9 @@ static void main_nix_build(int argc, char * * argv)
                 shellEscape(dirOf(*shell)),
                 shellEscape(*shell),
                 (getenv("TZ") ? (string("export TZ=") + shellEscape(getenv("TZ")) + "; ") : ""),
-                envCommand));
+                envCommand);
+            vomit("Sourcing nix-shell with file %s and contents:\n%s", rcfile, rc);
+            writeFile(rcfile, rc);
 
             Strings envStrs;
             for (auto & i : env)

tests/structured-attrs-shell.nix (new file, 19 lines)
@@ -0,0 +1,19 @@
+with import ./config.nix;
+let
+  dep = mkDerivation {
+    name = "dep";
+    buildCommand = ''
+      mkdir $out; echo bla > $out/bla
+    '';
+  };
+in
+mkDerivation {
+  name = "structured2";
+  __structuredAttrs = true;
+  outputs = [ "out" "dev" ];
+  my.list = [ "a" "b" "c" ];
+  exportReferencesGraph.refs = [ dep ];
+  buildCommand = ''
+    touch ''${outputs[out]}; touch ''${outputs[dev]}
+  '';
+}

@@ -8,3 +8,9 @@ nix-build structured-attrs.nix -A all -o $TEST_ROOT/result
 
 [[ $(cat $TEST_ROOT/result/foo) = bar ]]
 [[ $(cat $TEST_ROOT/result-dev/foo) = foo ]]
+
+export NIX_BUILD_SHELL=$SHELL
+[[ ! -e '.attrs.json' ]]
+env NIX_PATH=nixpkgs=shell.nix nix-shell structured-attrs-shell.nix \
+    --run 'test -e .attrs.json; test "3" = "$(jq ".my.list|length" < .attrs.json)"'
+[[ ! -e '.attrs.json' ]]