Merge branch 'structured-attrs-shell' of https://github.com/Ma27/nix
commit e06c272c12
@@ -143,7 +143,6 @@ void DerivationGoal::work()
     (this->*state)();
 }
 
-
 void DerivationGoal::addWantedOutputs(const StringSet & outputs)
 {
     /* If we already want all outputs, there is nothing to do. */
@@ -1074,42 +1073,6 @@ HookReply DerivationGoal::tryBuildHook()
 }
 
 
-StorePathSet DerivationGoal::exportReferences(const StorePathSet & storePaths)
-{
-    StorePathSet paths;
-
-    for (auto & storePath : storePaths) {
-        if (!inputPaths.count(storePath))
-            throw BuildError("cannot export references of path '%s' because it is not in the input closure of the derivation", worker.store.printStorePath(storePath));
-
-        worker.store.computeFSClosure({storePath}, paths);
-    }
-
-    /* If there are derivations in the graph, then include their
-       outputs as well. This is useful if you want to do things
-       like passing all build-time dependencies of some path to a
-       derivation that builds a NixOS DVD image. */
-    auto paths2 = paths;
-
-    for (auto & j : paths2) {
-        if (j.isDerivation()) {
-            Derivation drv = worker.store.derivationFromPath(j);
-            for (auto & k : drv.outputsAndOptPaths(worker.store)) {
-                if (!k.second.second)
-                    /* FIXME: I am confused why we are calling
-                       `computeFSClosure` on the output path, rather than
-                       derivation itself. That doesn't seem right to me, so I
-                       won't try to implemented this for CA derivations. */
-                    throw UnimplementedError("exportReferences on CA derivations is not yet implemented");
-                worker.store.computeFSClosure(*k.second.second, paths);
-            }
-        }
-    }
-
-    return paths;
-}
-
-
 void DerivationGoal::registerOutputs()
 {
     /* When using a build hook, the build hook can register the output
@@ -518,7 +518,7 @@ void LocalDerivationGoal::startBuilder()
             /* Write closure info to <fileName>. */
             writeFile(tmpDir + "/" + fileName,
                 worker.store.makeValidityRegistration(
-                    exportReferences({storePath}), false, false));
+                    worker.store.exportReferences({storePath}, inputPaths), false, false));
         }
     }
 
@@ -1084,113 +1084,28 @@ void LocalDerivationGoal::initEnv()
 }
 
 
-static std::regex shVarName("[A-Za-z_][A-Za-z0-9_]*");
-
-
 void LocalDerivationGoal::writeStructuredAttrs()
 {
-    auto structuredAttrs = parsedDrv->getStructuredAttrs();
-    if (!structuredAttrs) return;
-
-    auto json = *structuredAttrs;
-
-    /* Add an "outputs" object containing the output paths. */
-    nlohmann::json outputs;
-    for (auto & i : drv->outputs) {
-        /* The placeholder must have a rewrite, so we use it to cover both the
-           cases where we know or don't know the output path ahead of time. */
-        outputs[i.first] = rewriteStrings(hashPlaceholder(i.first), inputRewrites);
-    }
-    json["outputs"] = outputs;
-
-    /* Handle exportReferencesGraph. */
-    auto e = json.find("exportReferencesGraph");
-    if (e != json.end() && e->is_object()) {
-        for (auto i = e->begin(); i != e->end(); ++i) {
-            std::ostringstream str;
-            {
-                JSONPlaceholder jsonRoot(str, true);
-                StorePathSet storePaths;
-                for (auto & p : *i)
-                    storePaths.insert(worker.store.parseStorePath(p.get<std::string>()));
-                worker.store.pathInfoToJSON(jsonRoot,
-                    exportReferences(storePaths), false, true);
-            }
-            json[i.key()] = nlohmann::json::parse(str.str()); // urgh
-        }
-    }
-
-    writeFile(tmpDir + "/.attrs.json", rewriteStrings(json.dump(), inputRewrites));
-    chownToBuilder(tmpDir + "/.attrs.json");
-
-    /* As a convenience to bash scripts, write a shell file that
-       maps all attributes that are representable in bash -
-       namely, strings, integers, nulls, Booleans, and arrays and
-       objects consisting entirely of those values. (So nested
-       arrays or objects are not supported.) */
-
-    auto handleSimpleType = [](const nlohmann::json & value) -> std::optional<std::string> {
-        if (value.is_string())
-            return shellEscape(value);
-
-        if (value.is_number()) {
-            auto f = value.get<float>();
-            if (std::ceil(f) == f)
-                return std::to_string(value.get<int>());
-        }
-
-        if (value.is_null())
-            return std::string("''");
-
-        if (value.is_boolean())
-            return value.get<bool>() ? std::string("1") : std::string("");
-
-        return {};
-    };
-
-    std::string jsonSh;
-
-    for (auto i = json.begin(); i != json.end(); ++i) {
-
-        if (!std::regex_match(i.key(), shVarName)) continue;
-
-        auto & value = i.value();
-
-        auto s = handleSimpleType(value);
-        if (s)
-            jsonSh += fmt("declare %s=%s\n", i.key(), *s);
-
-        else if (value.is_array()) {
-            std::string s2;
-            bool good = true;
-
-            for (auto i = value.begin(); i != value.end(); ++i) {
-                auto s3 = handleSimpleType(i.value());
-                if (!s3) { good = false; break; }
-                s2 += *s3; s2 += ' ';
-            }
-
-            if (good)
-                jsonSh += fmt("declare -a %s=(%s)\n", i.key(), s2);
-        }
-
-        else if (value.is_object()) {
-            std::string s2;
-            bool good = true;
-
-            for (auto i = value.begin(); i != value.end(); ++i) {
-                auto s3 = handleSimpleType(i.value());
-                if (!s3) { good = false; break; }
-                s2 += fmt("[%s]=%s ", shellEscape(i.key()), *s3);
-            }
-
-            if (good)
-                jsonSh += fmt("declare -A %s=(%s)\n", i.key(), s2);
-        }
-    }
-
-    writeFile(tmpDir + "/.attrs.sh", rewriteStrings(jsonSh, inputRewrites));
-    chownToBuilder(tmpDir + "/.attrs.sh");
+    if (auto structAttrsJson = parsedDrv->prepareStructuredAttrs(worker.store, inputPaths)) {
+        auto json = structAttrsJson.value();
+        nlohmann::json rewritten;
+        for (auto & [i, v] : json["outputs"].get<nlohmann::json::object_t>()) {
+            /* The placeholder must have a rewrite, so we use it to cover both the
+               cases where we know or don't know the output path ahead of time. */
+            rewritten[i] = rewriteStrings(v, inputRewrites);
+        }
+
+        json["outputs"] = rewritten;
+
+        auto jsonSh = writeStructuredAttrsShell(json);
+
+        writeFile(tmpDir + "/.attrs.sh", rewriteStrings(jsonSh, inputRewrites));
+        chownToBuilder(tmpDir + "/.attrs.sh");
+        env["NIX_ATTRS_SH_FILE"] = tmpDir + "/.attrs.sh";
+        writeFile(tmpDir + "/.attrs.json", rewriteStrings(json.dump(), inputRewrites));
+        chownToBuilder(tmpDir + "/.attrs.json");
+        env["NIX_ATTRS_JSON_FILE"] = tmpDir + "/.attrs.json";
+    }
 }
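The new code above not only writes .attrs.sh and .attrs.json into the build's temporary directory as before, it also advertises them to the builder through NIX_ATTRS_SH_FILE and NIX_ATTRS_JSON_FILE. A minimal sketch of how a builder could consume the two files (illustrative only, not part of this commit; jq is assumed to be available in the build environment):

    if [ -n "${NIX_ATTRS_SH_FILE-}" ]; then
        # Simple attributes arrive as shell variables; arrays and flat objects
        # (such as "outputs") arrive as bash arrays / associative arrays.
        . "$NIX_ATTRS_SH_FILE"
        # The complete attribute set, including nested values, stays available as JSON.
        jq '.outputs' "$NIX_ATTRS_JSON_FILE"
    fi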
@@ -1,6 +1,8 @@
 #include "parsed-derivations.hh"
 
 #include <nlohmann/json.hpp>
+#include <regex>
+#include "json.hh"
 
 namespace nix {
 
@@ -123,4 +125,109 @@ bool ParsedDerivation::substitutesAllowed() const
     return getBoolAttr("allowSubstitutes", true);
 }
 
+static std::regex shVarName("[A-Za-z_][A-Za-z0-9_]*");
+
+std::optional<nlohmann::json> ParsedDerivation::prepareStructuredAttrs(Store & store, const StorePathSet & inputPaths)
+{
+    auto structuredAttrs = getStructuredAttrs();
+    if (!structuredAttrs) return std::nullopt;
+
+    auto json = *structuredAttrs;
+
+    /* Add an "outputs" object containing the output paths. */
+    nlohmann::json outputs;
+    for (auto & i : drv.outputs) {
+        outputs[i.first] = hashPlaceholder(i.first);
+    }
+    json["outputs"] = outputs;
+
+    /* Handle exportReferencesGraph. */
+    auto e = json.find("exportReferencesGraph");
+    if (e != json.end() && e->is_object()) {
+        for (auto i = e->begin(); i != e->end(); ++i) {
+            std::ostringstream str;
+            {
+                JSONPlaceholder jsonRoot(str, true);
+                StorePathSet storePaths;
+                for (auto & p : *i)
+                    storePaths.insert(store.parseStorePath(p.get<std::string>()));
+                store.pathInfoToJSON(jsonRoot,
+                    store.exportReferences(storePaths, inputPaths), false, true);
+            }
+            json[i.key()] = nlohmann::json::parse(str.str()); // urgh
+        }
+    }
+
+    return json;
+}
+
+/* As a convenience to bash scripts, write a shell file that
+   maps all attributes that are representable in bash -
+   namely, strings, integers, nulls, Booleans, and arrays and
+   objects consisting entirely of those values. (So nested
+   arrays or objects are not supported.) */
+std::string writeStructuredAttrsShell(nlohmann::json & json)
+{
+
+    auto handleSimpleType = [](const nlohmann::json & value) -> std::optional<std::string> {
+        if (value.is_string())
+            return shellEscape(value);
+
+        if (value.is_number()) {
+            auto f = value.get<float>();
+            if (std::ceil(f) == f)
+                return std::to_string(value.get<int>());
+        }
+
+        if (value.is_null())
+            return std::string("''");
+
+        if (value.is_boolean())
+            return value.get<bool>() ? std::string("1") : std::string("");
+
+        return {};
+    };
+
+    std::string jsonSh;
+
+    for (auto i = json.begin(); i != json.end(); ++i) {
+
+        if (!std::regex_match(i.key(), shVarName)) continue;
+
+        auto & value = i.value();
+
+        auto s = handleSimpleType(value);
+        if (s)
+            jsonSh += fmt("declare %s=%s\n", i.key(), *s);
+
+        else if (value.is_array()) {
+            std::string s2;
+            bool good = true;
+
+            for (auto i = value.begin(); i != value.end(); ++i) {
+                auto s3 = handleSimpleType(i.value());
+                if (!s3) { good = false; break; }
+                s2 += *s3; s2 += ' ';
+            }
+
+            if (good)
+                jsonSh += fmt("declare -a %s=(%s)\n", i.key(), s2);
+        }
+
+        else if (value.is_object()) {
+            std::string s2;
+            bool good = true;
+
+            for (auto i = value.begin(); i != value.end(); ++i) {
+                auto s3 = handleSimpleType(i.value());
+                if (!s3) { good = false; break; }
+                s2 += fmt("[%s]=%s ", shellEscape(i.key()), *s3);
+            }
+
+            if (good)
+                jsonSh += fmt("declare -A %s=(%s)\n", i.key(), s2);
+        }
+    }
+
+    return jsonSh;
+}
 }
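The comment above spells out the mapping writeStructuredAttrsShell performs. As a rough illustration (the attribute values and placeholder paths here are made up, not captured output), a derivation with a string, a boolean, a flat list and two outputs would produce a shell file along these lines:

    declare name='structured2'
    declare doCheck=1
    declare -a buildFlags=('-j4' '-O2' )
    declare -A outputs=(['out']='/1rz4...placeholder' ['dev']='/0c6r...placeholder' )

Nested arrays or objects are silently skipped, and keys that do not match the shVarName regex are filtered out.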
@@ -36,6 +36,10 @@ public:
     bool willBuildLocally(Store & localStore) const;
 
     bool substitutesAllowed() const;
+
+    std::optional<nlohmann::json> prepareStructuredAttrs(Store & store, const StorePathSet & inputPaths);
 };
 
+std::string writeStructuredAttrsShell(nlohmann::json & json);
+
 }
@@ -634,6 +634,42 @@ string Store::makeValidityRegistration(const StorePathSet & paths,
 }
 
 
+StorePathSet Store::exportReferences(const StorePathSet & storePaths, const StorePathSet & inputPaths)
+{
+    StorePathSet paths;
+
+    for (auto & storePath : storePaths) {
+        if (!inputPaths.count(storePath))
+            throw BuildError("cannot export references of path '%s' because it is not in the input closure of the derivation", printStorePath(storePath));
+
+        computeFSClosure({storePath}, paths);
+    }
+
+    /* If there are derivations in the graph, then include their
+       outputs as well. This is useful if you want to do things
+       like passing all build-time dependencies of some path to a
+       derivation that builds a NixOS DVD image. */
+    auto paths2 = paths;
+
+    for (auto & j : paths2) {
+        if (j.isDerivation()) {
+            Derivation drv = derivationFromPath(j);
+            for (auto & k : drv.outputsAndOptPaths(*this)) {
+                if (!k.second.second)
+                    /* FIXME: I am confused why we are calling
+                       `computeFSClosure` on the output path, rather than
+                       derivation itself. That doesn't seem right to me, so I
+                       won't try to implemented this for CA derivations. */
+                    throw UnimplementedError("exportReferences on CA derivations is not yet implemented");
+                computeFSClosure(*k.second.second, paths);
+            }
+        }
+    }
+
+    return paths;
+}
+
+
 void Store::pathInfoToJSON(JSONPlaceholder & jsonOut, const StorePathSet & storePaths,
     bool includeImpureInfo, bool showClosureSize,
     Base hashBase,
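The exportReferencesGraph data built with this helper ends up in the structured-attrs JSON (see the prepareStructuredAttrs hunk above) as a list of path-info objects per requested key. A hedged sketch of reading it back from a builder, assuming the exportReferencesGraph.refs attribute used by the new test below and jq available in the build environment:

    # Store paths of the exported closure, and the sum of their NAR sizes.
    jq -r '.refs[].path' "$NIX_ATTRS_JSON_FILE"
    jq '[.refs[].narSize] | add' "$NIX_ATTRS_JSON_FILE"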
@@ -697,6 +697,11 @@ public:
 
     const Stats & getStats();
 
+    /* Computes the full closure of of a set of store-paths for e.g.
+       derivations that need this information for `exportReferencesGraph`.
+     */
+    StorePathSet exportReferences(const StorePathSet & storePaths, const StorePathSet & inputPaths);
+
     /* Return the build log of the specified store path, if available,
        or null otherwise. */
     virtual std::shared_ptr<std::string> getBuildLog(const StorePath & path)
@@ -1,10 +1,15 @@
 #include <cstring>
 #include <fstream>
 #include <iostream>
+#include <filesystem>
 #include <regex>
 #include <sstream>
 #include <vector>
+#include <map>
 
+#include <nlohmann/json.hpp>
+
+#include "parsed-derivations.hh"
 #include "store-api.hh"
 #include "local-fs-store.hh"
 #include "globals.hh"
@@ -428,12 +433,45 @@ static void main_nix_build(int argc, char * * argv)
            } else
                env[var.first] = var.second;
 
+        std::string structuredAttrsRC;
+
+        if (env.count("__json")) {
+            StorePathSet inputs;
+            for (auto & [depDrvPath, wantedDepOutputs] : drv.inputDrvs) {
+                auto outputs = store->queryPartialDerivationOutputMap(depDrvPath);
+                for (auto & i : wantedDepOutputs) {
+                    auto o = outputs.at(i);
+                    store->computeFSClosure(*o, inputs);
+                }
+            }
+
+            ParsedDerivation parsedDrv(
+                StorePath(store->parseStorePath(drvInfo.queryDrvPath())),
+                drv
+            );
+
+            if (auto structAttrs = parsedDrv.prepareStructuredAttrs(*store, inputs)) {
+                auto json = structAttrs.value();
+                structuredAttrsRC = writeStructuredAttrsShell(json);
+
+                auto attrsJSON = (Path) tmpDir + "/.attrs.json";
+                writeFile(attrsJSON, json.dump());
+
+                auto attrsSH = (Path) tmpDir + "/.attrs.sh";
+                writeFile(attrsSH, structuredAttrsRC);
+
+                env["NIX_ATTRS_SH_FILE"] = attrsSH;
+                env["NIX_ATTRS_JSON_FILE"] = attrsJSON;
+                keepTmp = true;
+            }
+        }
+
         /* Run a shell using the derivation's environment. For
            convenience, source $stdenv/setup to setup additional
            environment variables and shell functions. Also don't
           lose the current $PATH directories. */
        auto rcfile = (Path) tmpDir + "/rc";
-        writeFile(rcfile, fmt(
+        std::string rc = fmt(
            R"(_nix_shell_clean_tmpdir() { rm -rf %1%; }; )"s +
            (keepTmp ?
                "trap _nix_shell_clean_tmpdir EXIT; "
@@ -442,8 +480,9 @@ static void main_nix_build(int argc, char * * argv)
                "_nix_shell_clean_tmpdir; ") +
            (pure ? "" : "[ -n \"$PS1\" ] && [ -e ~/.bashrc ] && source ~/.bashrc;") +
            "%2%"
-            "dontAddDisableDepTrack=1; "
-            "[ -e $stdenv/setup ] && source $stdenv/setup; "
+            "dontAddDisableDepTrack=1;\n"
+            + structuredAttrsRC +
+            "\n[ -e $stdenv/setup ] && source $stdenv/setup; "
            "%3%"
            "PATH=%4%:\"$PATH\"; "
            "SHELL=%5%; "
@@ -461,7 +500,9 @@ static void main_nix_build(int argc, char * * argv)
            shellEscape(dirOf(*shell)),
            shellEscape(*shell),
            (getenv("TZ") ? (string("export TZ=") + shellEscape(getenv("TZ")) + "; ") : ""),
-            envCommand));
+            envCommand);
+        vomit("Sourcing nix-shell with file %s and contents:\n%s", rcfile, rc);
+        writeFile(rcfile, rc);
 
        Strings envStrs;
        for (auto & i : env)
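With these changes nix-shell splices the output of writeStructuredAttrsShell into the generated rc file and sets keepTmp so that NIX_ATTRS_SH_FILE and NIX_ATTRS_JSON_FILE stay valid for the lifetime of the shell. A hedged usage sketch, mirroring the test added below (jq is assumed to be on PATH; the test additionally points NIX_PATH at shell.nix):

    nix-shell structured-attrs-shell.nix --run '
        test -f "$NIX_ATTRS_SH_FILE"
        test -f "$NIX_ATTRS_JSON_FILE"
        jq ".my.list | length" "$NIX_ATTRS_JSON_FILE"   # prints 3 for the test expression
    '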
@@ -147,6 +147,10 @@ struct BuildEnvironment
            return tokenizeString<Array>(str->value);
        else if (auto arr = std::get_if<Array>(&value)) {
            return *arr;
+        } else if (auto assoc = std::get_if<Associative>(&value)) {
+            Array assocKeys;
+            std::for_each(assoc->begin(), assoc->end(), [&](auto & n) { assocKeys.push_back(n.first); });
+            return assocKeys;
        }
        else
            throw Error("bash variable is not a string or array");
@@ -111,12 +111,20 @@ __escapeString() {
     printf '"%s"' "$__s"
 }
 
-# Dump the bash environment as JSON.
-for __output in $outputs; do
+# In case of `__structuredAttrs = true;` the list of outputs is an associative
+# array with a format like `outname => /nix/store/hash-drvname-outname`, so `__olist`
+# must contain the array's keys (hence `${!...[@]}`) in this case.
+if [ -e .attrs.sh ]; then
+    __olist="${!outputs[@]}"
+else
+    __olist=$outputs
+fi
+
+for __output in $__olist; do
     if [[ -z $__done ]]; then
         __dumpEnv > ${!__output}
         __done=1
     else
-        echo -n >> ${!__output}
+        echo -n >> "${!__output}"
     fi
 done
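To make the comment in this hunk concrete, here is an illustrative sketch with made-up store paths (not code from the commit): with structured attrs the sourced attrs file defines outputs as an associative array, so the output names must come from its keys rather than from word-splitting a plain string:

    declare -A outputs=([out]=/nix/store/aaaa-drv-out [dev]=/nix/store/aaaa-drv-dev)
    echo "${!outputs[@]}"   # -> the keys "out dev", which is what __olist needs
    echo "${outputs[@]}"    # -> the two store paths (the values)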
@@ -20,6 +20,15 @@ let pkgs = rec {
      for pkg in $buildInputs; do
        export PATH=$PATH:$pkg/bin
      done
+
+      # mimic behavior of stdenv for `$out` etc. for structured attrs.
+      if [ -n "''${NIX_ATTRS_SH_FILE}" ]; then
+        for o in "''${!outputs[@]}"; do
+          eval "''${o}=''${outputs[$o]}"
+          export "''${o}"
+        done
+      fi
+
      declare -a arr1=(1 2 "3 4" 5)
      declare -a arr2=(x $'\n' $'x\ny')
      fun() {
tests/structured-attrs-shell.nix (new file, 21 lines)
@@ -0,0 +1,21 @@
+with import ./config.nix;
+let
+  dep = mkDerivation {
+    name = "dep";
+    buildCommand = ''
+      mkdir $out; echo bla > $out/bla
+    '';
+  };
+  inherit (import ./shell.nix { inNixShell = true; }) stdenv;
+in
+mkDerivation {
+  name = "structured2";
+  __structuredAttrs = true;
+  inherit stdenv;
+  outputs = [ "out" "dev" ];
+  my.list = [ "a" "b" "c" ];
+  exportReferencesGraph.refs = [ dep ];
+  buildCommand = ''
+    touch ''${outputs[out]}; touch ''${outputs[dev]}
+  '';
+}
@@ -36,7 +36,7 @@ mkDerivation {
      echo bar > $dest
      echo foo > $dest2
 
-      json=$(cat .attrs.json)
+      json=$(cat $NIX_ATTRS_JSON_FILE)
      [[ $json =~ '"narHash":"sha256:1r7yc43zqnzl5b0als5vnyp649gk17i37s7mj00xr8kc47rjcybk"' ]]
      [[ $json =~ '"narSize":288' ]]
      [[ $json =~ '"closureSize":288' ]]
@@ -8,3 +8,12 @@ nix-build structured-attrs.nix -A all -o $TEST_ROOT/result
 
 [[ $(cat $TEST_ROOT/result/foo) = bar ]]
 [[ $(cat $TEST_ROOT/result-dev/foo) = foo ]]
+
+export NIX_BUILD_SHELL=$SHELL
+env NIX_PATH=nixpkgs=shell.nix nix-shell structured-attrs-shell.nix \
+  --run 'test -e .attrs.json; test "3" = "$(jq ".my.list|length" < $NIX_ATTRS_JSON_FILE)"'
+
+# `nix develop` is a slightly special way of dealing with environment vars, it parses
+# these from a shell-file exported from a derivation. This is to test especially `outputs`
+# (which is an associative array in thsi case) being fine.
+nix develop -f structured-attrs-shell.nix -c bash -c 'test -n "$out"'