forked from lix-project/lix

Merge branch 'master' into error-format

commit 216263c36f

.gitignore (vendored): 2 changes
@@ -47,7 +47,7 @@ perl/Makefile.config
/src/libexpr/nix.tbl

# /src/libstore/
/src/libstore/*.gen.hh
*.gen.*

/src/nix/nix
@@ -37,6 +37,7 @@ prefix = @prefix@
sandbox_shell = @sandbox_shell@
storedir = @storedir@
sysconfdir = @sysconfdir@
system = @system@
doc_generate = @doc_generate@
xmllint = @xmllint@
xsltproc = @xsltproc@
@@ -360,7 +360,6 @@ EOF
<arg choice='plain'><option>--print-roots</option></arg>
<arg choice='plain'><option>--print-live</option></arg>
<arg choice='plain'><option>--print-dead</option></arg>
<arg choice='plain'><option>--delete</option></arg>
</group>
<arg><option>--max-freed</option> <replaceable>bytes</replaceable></arg>
</cmdsynopsis>

@@ -407,14 +406,6 @@ the Nix store not reachable via file system references from a set of

</varlistentry>

<varlistentry><term><option>--delete</option></term>

<listitem><para>This operation performs an actual garbage
collection. All dead paths are removed from the
store. This is the default.</para></listitem>

</varlistentry>

</variablelist>

<para>By default, all unreachable paths are deleted. The following

@@ -444,10 +435,10 @@ and <link
linkend="conf-keep-derivations"><literal>keep-derivations</literal></link>
variables in the Nix configuration file.</para>

<para>With <option>--delete</option>, the collector prints the total
number of freed bytes when it finishes (or when it is interrupted).
With <option>--print-dead</option>, it prints the number of bytes that
would be freed.</para>
<para>By default, the collector prints the total number of freed bytes
when it finishes (or when it is interrupted). With
<option>--print-dead</option>, it prints the number of bytes that would
be freed.</para>

</refsection>

@@ -1148,7 +1139,7 @@ the information that Nix considers important. For instance,
timestamps are elided because all files in the Nix store have their
timestamp set to 0 anyway. Likewise, all permissions are left out
except for the execute bit, because all files in the Nix store have
644 or 755 permission.</para>
444 or 555 permission.</para>

<para>Also, a NAR archive is <emphasis>canonical</emphasis>, meaning
that “equal” paths always produce the same NAR archive. For instance,
local.mk (4 changes)

@@ -6,9 +6,11 @@ dist-files += configure config.h.in perl/configure
clean-files += Makefile.config

GLOBAL_CXXFLAGS += -I . -I src -I src/libutil -I src/libstore -I src/libmain -I src/libexpr -I src/nix -Wno-deprecated-declarations
GLOBAL_CXXFLAGS += -Wno-deprecated-declarations

$(foreach i, config.h $(call rwildcard, src/lib*, *.hh), \
$(eval $(call install-file-in, $(i), $(includedir)/nix, 0644)))

$(GCH) $(PCH): src/libutil/util.hh config.h

GCH_CXXFLAGS = -I src/libutil
@@ -8,14 +8,14 @@ GCH = $(buildprefix)precompiled-headers.h.gch
$(GCH): precompiled-headers.h
@rm -f $@
@mkdir -p "$(dir $@)"
$(trace-gen) $(CXX) -x c++-header -o $@ $< $(GLOBAL_CXXFLAGS)
$(trace-gen) $(CXX) -x c++-header -o $@ $< $(GLOBAL_CXXFLAGS) $(GCH_CXXFLAGS)

PCH = $(buildprefix)precompiled-headers.h.pch

$(PCH): precompiled-headers.h
@rm -f $@
@mkdir -p "$(dir $@)"
$(trace-gen) $(CXX) -x c++-header -o $@ $< $(GLOBAL_CXXFLAGS)
$(trace-gen) $(CXX) -x c++-header -o $@ $< $(GLOBAL_CXXFLAGS) $(GCH_CXXFLAGS)

clean-files += $(GCH) $(PCH)
release.nix (94 changes)

@@ -12,52 +12,64 @@ let
builtins.readFile ./.version
+ (if officialRelease then "" else "pre${toString nix.revCount}_${nix.shortRev}");

# Create a "vendor" directory that contains the crates listed in
# Cargo.lock. This allows Nix to be built without network access.
vendoredCrates' =
let
lockFile = builtins.fromTOML (builtins.readFile nix-rust/Cargo.lock);

files = map (pkg: import <nix/fetchurl.nix> {
url = "https://crates.io/api/v1/crates/${pkg.name}/${pkg.version}/download";
sha256 = lockFile.metadata."checksum ${pkg.name} ${pkg.version} (registry+https://github.com/rust-lang/crates.io-index)";
}) (builtins.filter (pkg: pkg.source or "" == "registry+https://github.com/rust-lang/crates.io-index") lockFile.package);

in pkgs.runCommand "cargo-vendor-dir" {}
''
mkdir -p $out/vendor

cat > $out/vendor/config <<EOF
[source.crates-io]
replace-with = "vendored-sources"

[source.vendored-sources]
directory = "vendor"
EOF

${toString (builtins.map (file: ''
mkdir $out/vendor/tmp
tar xvf ${file} -C $out/vendor/tmp
dir=$(echo $out/vendor/tmp/*)

# Add just enough metadata to keep Cargo happy.
printf '{"files":{},"package":"${file.outputHash}"}' > "$dir/.cargo-checksum.json"

# Clean up some cruft from the winapi crates. FIXME: find
# a way to remove winapi* from our dependencies.
if [[ $dir =~ /winapi ]]; then
find $dir -name "*.a" -print0 | xargs -0 rm -f --
fi

mv "$dir" $out/vendor/

rm -rf $out/vendor/tmp
'') files)}
'';

jobs = rec {

# Create a "vendor" directory that contains the crates listed in
# Cargo.lock. This allows Nix to be built without network access.
vendoredCrates =
let
lockFile = builtins.fromTOML (builtins.readFile nix-rust/Cargo.lock);

files = map (pkg: import <nix/fetchurl.nix> {
url = "https://crates.io/api/v1/crates/${pkg.name}/${pkg.version}/download";
sha256 = lockFile.metadata."checksum ${pkg.name} ${pkg.version} (registry+https://github.com/rust-lang/crates.io-index)";
}) (builtins.filter (pkg: pkg.source or "" == "registry+https://github.com/rust-lang/crates.io-index") lockFile.package);

in pkgs.runCommand "cargo-vendor-dir" {}
with pkgs;
runCommand "vendored-crates" {}
''
mkdir -p $out/vendor

cat > $out/vendor/config <<EOF
[source.crates-io]
replace-with = "vendored-sources"

[source.vendored-sources]
directory = "vendor"
EOF

${toString (builtins.map (file: ''
mkdir $out/vendor/tmp
tar xvf ${file} -C $out/vendor/tmp
dir=$(echo $out/vendor/tmp/*)

# Add just enough metadata to keep Cargo happy.
printf '{"files":{},"package":"${file.outputHash}"}' > "$dir/.cargo-checksum.json"

# Clean up some cruft from the winapi crates. FIXME: find
# a way to remove winapi* from our dependencies.
if [[ $dir =~ /winapi ]]; then
find $dir -name "*.a" -print0 | xargs -0 rm -f --
fi

mv "$dir" $out/vendor/

rm -rf $out/vendor/tmp
'') files)}
mkdir -p $out/nix-support
name=nix-vendored-crates-${version}
fn=$out/$name.tar.xz
tar cvfJ $fn -C ${vendoredCrates'} vendor \
--owner=0 --group=0 --mode=u+rw,uga+r \
--transform "s,vendor,$name,"
echo "file crates-tarball $fn" >> $out/nix-support/hydra-build-products
'';

build = pkgs.lib.genAttrs systems (system:

let pkgs = import nixpkgs { inherit system; }; in
@@ -89,7 +101,7 @@ let
patchelf --set-rpath $out/lib:${stdenv.cc.cc.lib}/lib $out/lib/libboost_thread.so.*
''}

ln -sfn ${vendoredCrates}/vendor/ nix-rust/vendor
ln -sfn ${vendoredCrates'}/vendor/ nix-rust/vendor

(cd perl; autoreconf --install --force --verbose)
'';
@@ -13,12 +13,12 @@ set -o pipefail
# however tracking which bits came from which would be impossible.

readonly ESC='\033[0m'
readonly BOLD='\033[38;1m'
readonly BLUE='\033[38;34m'
readonly BLUE_UL='\033[38;4;34m'
readonly GREEN='\033[38;32m'
readonly GREEN_UL='\033[38;4;32m'
readonly RED='\033[38;31m'
readonly BOLD='\033[1m'
readonly BLUE='\033[34m'
readonly BLUE_UL='\033[4;34m'
readonly GREEN='\033[32m'
readonly GREEN_UL='\033[4;32m'
readonly RED='\033[31m'

readonly NIX_USER_COUNT="32"
readonly NIX_BUILD_GROUP_ID="30000"
@@ -567,7 +567,7 @@ install_from_extracted_nix() {
cd "$EXTRACTED_NIX_PATH"

_sudo "to copy the basic Nix files to the new store at $NIX_ROOT/store" \
rsync -rlpt ./store/* "$NIX_ROOT/store/"
rsync -rlpt --chmod=-w ./store/* "$NIX_ROOT/store/"

if [ -d "$NIX_INSTALLED_NIX" ]; then
echo " Alright! We have our first nix at $NIX_INSTALLED_NIX"
@@ -36,6 +36,7 @@ tarball="$tmpDir/$(basename "$tmpDir/nix-@nixVersion@-$system.tar.xz")"
require_util curl "download the binary tarball"
require_util tar "unpack the binary tarball"
require_util xz "unpack the binary tarball"

echo "downloading Nix @nixVersion@ binary tarball for $system from '$url' to '$tmpDir'..."
curl -L "$url" -o "$tarball" || oops "failed to download '$url'"
@@ -17,7 +17,7 @@
#include "store-api.hh"
#include "derivations.hh"
#include "local-store.hh"
#include "legacy.hh"
#include "../nix/legacy.hh"

using namespace nix;
using std::cin;
@@ -32,15 +32,13 @@ static Strings parseAttrPath(const string & s)
}

Value * findAlongAttrPath(EvalState & state, const string & attrPath,
std::pair<Value *, Pos> findAlongAttrPath(EvalState & state, const string & attrPath,
Bindings & autoArgs, Value & vIn)
{
Strings tokens = parseAttrPath(attrPath);

Error attrError =
Error(format("attribute selection path '%1%' does not match expression") % attrPath);

Value * v = &vIn;
Pos pos = noPos;

for (auto & attr : tokens) {

@@ -70,8 +68,9 @@ Value * findAlongAttrPath(EvalState & state, const string & attrPath,
Bindings::iterator a = v->attrs->find(state.symbols.create(attr));
if (a == v->attrs->end())
throw Error(format("attribute '%1%' in selection path '%2%' not found") % attr % attrPath);
throw AttrPathNotFound("attribute '%1%' in selection path '%2%' not found", attr, attrPath);
v = &*a->value;
pos = *a->pos;
}

else if (apType == apIndex) {

@@ -82,14 +81,15 @@ Value * findAlongAttrPath(EvalState & state, const string & attrPath,
% attrPath % showType(*v));

if (attrIndex >= v->listSize())
throw Error(format("list index %1% in selection path '%2%' is out of range") % attrIndex % attrPath);
throw AttrPathNotFound("list index %1% in selection path '%2%' is out of range", attrIndex, attrPath);

v = v->listElems()[attrIndex];
pos = noPos;
}

}

return v;
return {v, pos};
}

@@ -98,9 +98,9 @@ Pos findDerivationFilename(EvalState & state, Value & v, std::string what)
Value * v2;
try {
auto dummyArgs = state.allocBindings(0);
v2 = findAlongAttrPath(state, "meta.position", *dummyArgs, v);
v2 = findAlongAttrPath(state, "meta.position", *dummyArgs, v).first;
} catch (Error &) {
throw Error("package '%s' has no source location information", what);
throw NoPositionInfo("package '%s' has no source location information", what);
}

// FIXME: is it possible to extract the Pos object instead of doing this

@@ -7,7 +7,10 @@
namespace nix {

Value * findAlongAttrPath(EvalState & state, const string & attrPath,
MakeError(AttrPathNotFound, Error);
MakeError(NoPositionInfo, Error);

std::pair<Value *, Pos> findAlongAttrPath(EvalState & state, const string & attrPath,
Bindings & autoArgs, Value & vIn);

/* Heuristic to find the filename and lineno or a nix value. */
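The pair-returning signature above changes every caller. A minimal sketch of the new calling convention, assuming an initialized EvalState named state, a root Value vRoot, and empty auto-arguments (all names illustrative):

    // findAlongAttrPath() now reports where the selected value was defined.
    auto [v, pos] = findAlongAttrPath(state, "foo.bar", *state.allocBindings(0), vRoot);
    state.forceValue(*v);   // evaluate the selected attribute
    // 'pos' holds the source position of the attribute, or noPos when the
    // last path component was a list index.

Callers that only need the value, such as nix-build and nix-env later in this diff, simply take .first.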
@@ -43,6 +43,12 @@ Value * EvalState::allocAttr(Value & vAttrs, const Symbol & name)
}

Value * EvalState::allocAttr(Value & vAttrs, const std::string & name)
{
return allocAttr(vAttrs, symbols.create(name));
}

void Bindings::sort()
{
std::sort(begin(), end());

@@ -272,6 +272,7 @@ public:
Env & allocEnv(size_t size);

Value * allocAttr(Value & vAttrs, const Symbol & name);
Value * allocAttr(Value & vAttrs, const std::string & name);

Bindings * allocBindings(size_t capacity);
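A short sketch of the convenience overload added above; it assumes an EvalState named state and an attribute-set Value vAttrs that was created with enough capacity (illustrative only):

    // Before: callers had to intern the symbol themselves.
    Value * v1 = state.allocAttr(vAttrs, state.symbols.create("outPath"));
    // After: the std::string overload calls symbols.create() internally.
    Value * v2 = state.allocAttr(vAttrs, "outPath");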
@@ -367,7 +368,7 @@ struct EvalSettings : Config
"Prefixes of URIs that builtin functions such as fetchurl and fetchGit are allowed to fetch."};

Setting<bool> traceFunctionCalls{this, false, "trace-function-calls",
"Emit log messages for each function entry and exit at the 'vomit' log level (-vvvv)"};
"Emit log messages for each function entry and exit at the 'vomit' log level (-vvvv)."};
};

extern EvalSettings evalSettings;
@@ -1,4 +1,5 @@
#include "function-trace.hh"
#include "logging.hh"

namespace nix {
@@ -6,6 +6,8 @@ libexpr_DIR := $(d)
libexpr_SOURCES := $(wildcard $(d)/*.cc) $(wildcard $(d)/primops/*.cc) $(d)/lexer-tab.cc $(d)/parser-tab.cc

libexpr_CXXFLAGS += -I src/libutil -I src/libstore -I src/libmain -I src/libexpr

libexpr_LIBS = libutil libstore libnixrust

libexpr_LDFLAGS =
@@ -1,7 +1,7 @@
#include "primops.hh"
#include "eval-inline.hh"

#include "cpptoml/cpptoml.h"
#include "../../cpptoml/cpptoml.h"

namespace nix {
@@ -6,6 +6,8 @@ libmain_DIR := $(d)
libmain_SOURCES := $(wildcard $(d)/*.cc)

libmain_CXXFLAGS += -I src/libutil -I src/libstore

libmain_LDFLAGS = $(OPENSSL_LIBS)

libmain_LIBS = libstore libutil
@@ -6,6 +6,7 @@
#include "archive.hh"
#include "affinity.hh"
#include "builtins.hh"
#include "builtins/buildenv.hh"
#include "download.hh"
#include "finally.hh"
#include "compression.hh"

@@ -1397,7 +1398,7 @@ void DerivationGoal::tryToBuild()
few seconds and then retry this goal. */
PathSet lockFiles;
for (auto & outPath : drv->outputPaths())
lockFiles.insert(worker.store.toRealPath(worker.store.printStorePath(outPath)));
lockFiles.insert(worker.store.Store::toRealPath(outPath));

if (!outputLocks.lockPaths(lockFiles, "", false)) {
worker.waitForAWhile(shared_from_this());

@@ -1428,7 +1429,7 @@ void DerivationGoal::tryToBuild()
for (auto & i : drv->outputs) {
if (worker.store.isValidPath(i.second.path)) continue;
debug("removing invalid path '%s'", worker.store.printStorePath(i.second.path));
deletePath(worker.store.toRealPath(worker.store.printStorePath(i.second.path)));
deletePath(worker.store.Store::toRealPath(i.second.path));
}

/* Don't do a remote build if the derivation has the attribute

@@ -1685,7 +1686,7 @@ void DerivationGoal::buildDone()
/* Delete unused redirected outputs (when doing hash rewriting). */
for (auto & i : redirectedOutputs)
deletePath(worker.store.toRealPath(worker.store.printStorePath(i.second)));
deletePath(worker.store.Store::toRealPath(i.second));

/* Delete the chroot (if we were using one). */
autoDelChroot.reset(); /* this runs the destructor */

@@ -1904,7 +1905,7 @@ void DerivationGoal::startBuilder()
concatStringsSep(", ", parsedDrv->getRequiredSystemFeatures()),
worker.store.printStorePath(drvPath),
settings.thisSystem,
concatStringsSep(", ", settings.systemFeatures));
concatStringsSep<StringSet>(", ", settings.systemFeatures));

if (drv->isBuiltin())
preloadNSS();

@@ -2071,7 +2072,7 @@ void DerivationGoal::startBuilder()
environment using bind-mounts. We put it in the Nix store
to ensure that we can create hard-links to non-directory
inputs in the fake Nix store in the chroot (see below). */
chrootRootDir = worker.store.toRealPath(worker.store.printStorePath(drvPath)) + ".chroot";
chrootRootDir = worker.store.Store::toRealPath(drvPath) + ".chroot";
deletePath(chrootRootDir);

/* Clean up the chroot directory automatically. */

@@ -2464,7 +2465,7 @@ void DerivationGoal::initTmpDir() {
auto hash = hashString(htSHA256, i.first);
string fn = ".attr-" + hash.to_string(Base32, false);
Path p = tmpDir + "/" + fn;
writeFile(p, i.second);
writeFile(p, rewriteStrings(i.second, inputRewrites));
chownToBuilder(p);
env[i.first + "Path"] = tmpDirInSandbox + "/" + fn;
}

@@ -2550,7 +2551,7 @@ static std::regex shVarName("[A-Za-z_][A-Za-z0-9_]*");
void DerivationGoal::writeStructuredAttrs()
{
auto & structuredAttrs = parsedDrv->getStructuredAttrs();
auto structuredAttrs = parsedDrv->getStructuredAttrs();
if (!structuredAttrs) return;

auto json = *structuredAttrs;

@@ -2916,7 +2917,7 @@ void DerivationGoal::addDependency(const StorePath & path)
#if __linux__

Path source = worker.store.toRealPath(worker.store.printStorePath(path));
Path source = worker.store.Store::toRealPath(path);
Path target = chrootRootDir + worker.store.printStorePath(path);
debug("bind-mounting %s -> %s", target, source);

@@ -3578,7 +3579,7 @@ void DerivationGoal::registerOutputs()
if (needsHashRewrite()) {
auto r = redirectedOutputs.find(i.second.path);
if (r != redirectedOutputs.end()) {
auto redirected = worker.store.toRealPath(worker.store.printStorePath(r->second));
auto redirected = worker.store.Store::toRealPath(r->second);
if (buildMode == bmRepair
&& redirectedBadOutputs.count(i.second.path)
&& pathExists(redirected))

@@ -3671,7 +3672,7 @@ void DerivationGoal::registerOutputs()
BuildError("hash mismatch in fixed-output derivation '%s':\n wanted: %s\n got: %s",
worker.store.printStorePath(dest), h.to_string(SRI), h2.to_string(SRI)));

Path actualDest = worker.store.toRealPath(worker.store.printStorePath(dest));
Path actualDest = worker.store.Store::toRealPath(dest);

if (worker.store.isValidPath(dest))
std::rethrow_exception(delayedException);
|
@ -6,7 +6,6 @@ namespace nix {
|
|||
|
||||
// TODO: make pluggable.
|
||||
void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData);
|
||||
void builtinBuildenv(const BasicDerivation & drv);
|
||||
void builtinUnpackChannel(const BasicDerivation & drv);
|
||||
|
||||
}
|
||||
|
|
|
@@ -1,4 +1,4 @@
#include "builtins.hh"
#include "buildenv.hh"

#include <sys/stat.h>
#include <sys/types.h>

@@ -7,16 +7,14 @@
namespace nix {

typedef std::map<Path,int> Priorities;

// FIXME: change into local variables.

static Priorities priorities;

static unsigned long symlinks;
struct State
{
std::map<Path, int> priorities;
unsigned long symlinks = 0;
};

/* For each activated package, create symlinks */
static void createLinks(const Path & srcDir, const Path & dstDir, int priority)
static void createLinks(State & state, const Path & srcDir, const Path & dstDir, int priority)
{
DirEntries srcFiles;

@@ -67,7 +65,7 @@ static void createLinks(const Path & srcDir, const Path & dstDir, int priority)
auto res = lstat(dstFile.c_str(), &dstSt);
if (res == 0) {
if (S_ISDIR(dstSt.st_mode)) {
createLinks(srcFile, dstFile, priority);
createLinks(state, srcFile, dstFile, priority);
continue;
} else if (S_ISLNK(dstSt.st_mode)) {
auto target = canonPath(dstFile, true);

@@ -77,8 +75,8 @@ static void createLinks(const Path & srcDir, const Path & dstDir, int priority)
throw SysError(format("unlinking '%1%'") % dstFile);
if (mkdir(dstFile.c_str(), 0755) == -1)
throw SysError(format("creating directory '%1%'"));
createLinks(target, dstFile, priorities[dstFile]);
createLinks(srcFile, dstFile, priority);
createLinks(state, target, dstFile, state.priorities[dstFile]);
createLinks(state, srcFile, dstFile, priority);
continue;
}
} else if (errno != ENOENT)

@@ -90,7 +88,7 @@ static void createLinks(const Path & srcDir, const Path & dstDir, int priority)
auto res = lstat(dstFile.c_str(), &dstSt);
if (res == 0) {
if (S_ISLNK(dstSt.st_mode)) {
auto prevPriority = priorities[dstFile];
auto prevPriority = state.priorities[dstFile];
if (prevPriority == priority)
throw Error(
"packages '%1%' and '%2%' have the same priority %3%; "

@@ -109,67 +107,30 @@ static void createLinks(const Path & srcDir, const Path & dstDir, int priority)
}

createSymlink(srcFile, dstFile);
priorities[dstFile] = priority;
symlinks++;
state.priorities[dstFile] = priority;
state.symlinks++;
}
}

typedef std::set<Path> FileProp;

static FileProp done;
static FileProp postponed = FileProp{};

static Path out;

static void addPkg(const Path & pkgDir, int priority)
void buildProfile(const Path & out, Packages && pkgs)
{
if (!done.insert(pkgDir).second) return;
createLinks(pkgDir, out, priority);
State state;

try {
for (const auto & p : tokenizeString<std::vector<string>>(
readFile(pkgDir + "/nix-support/propagated-user-env-packages"), " \n"))
if (!done.count(p))
postponed.insert(p);
} catch (SysError & e) {
if (e.errNo != ENOENT && e.errNo != ENOTDIR) throw;
}
}
std::set<Path> done, postponed;

struct Package {
Path path;
bool active;
int priority;
Package(Path path, bool active, int priority) : path{path}, active{active}, priority{priority} {}
};
auto addPkg = [&](const Path & pkgDir, int priority) {
if (!done.insert(pkgDir).second) return;
createLinks(state, pkgDir, out, priority);

typedef std::vector<Package> Packages;

void builtinBuildenv(const BasicDerivation & drv)
{
auto getAttr = [&](const string & name) {
auto i = drv.env.find(name);
if (i == drv.env.end()) throw Error("attribute '%s' missing", name);
return i->second;
};

out = getAttr("out");
createDirs(out);

/* Convert the stuff we get from the environment back into a
* coherent data type. */
Packages pkgs;
auto derivations = tokenizeString<Strings>(getAttr("derivations"));
while (!derivations.empty()) {
/* !!! We're trusting the caller to structure derivations env var correctly */
auto active = derivations.front(); derivations.pop_front();
auto priority = stoi(derivations.front()); derivations.pop_front();
auto outputs = stoi(derivations.front()); derivations.pop_front();
for (auto n = 0; n < outputs; n++) {
auto path = derivations.front(); derivations.pop_front();
pkgs.emplace_back(path, active != "false", priority);
try {
for (const auto & p : tokenizeString<std::vector<string>>(
readFile(pkgDir + "/nix-support/propagated-user-env-packages"), " \n"))
if (!done.count(p))
postponed.insert(p);
} catch (SysError & e) {
if (e.errNo != ENOENT && e.errNo != ENOTDIR) throw;
}
}
};

/* Symlink to the packages that have been installed explicitly by the
* user. Process in priority order to reduce unnecessary

@@ -189,13 +150,42 @@ void builtinBuildenv(const BasicDerivation & drv)
*/
auto priorityCounter = 1000;
while (!postponed.empty()) {
auto pkgDirs = postponed;
postponed = FileProp{};
std::set<Path> pkgDirs;
postponed.swap(pkgDirs);
for (const auto & pkgDir : pkgDirs)
addPkg(pkgDir, priorityCounter++);
}

printError("created %d symlinks in user environment", symlinks);
debug("created %d symlinks in user environment", state.symlinks);
}

void builtinBuildenv(const BasicDerivation & drv)
{
auto getAttr = [&](const string & name) {
auto i = drv.env.find(name);
if (i == drv.env.end()) throw Error("attribute '%s' missing", name);
return i->second;
};

Path out = getAttr("out");
createDirs(out);

/* Convert the stuff we get from the environment back into a
* coherent data type. */
Packages pkgs;
auto derivations = tokenizeString<Strings>(getAttr("derivations"));
while (!derivations.empty()) {
/* !!! We're trusting the caller to structure derivations env var correctly */
auto active = derivations.front(); derivations.pop_front();
auto priority = stoi(derivations.front()); derivations.pop_front();
auto outputs = stoi(derivations.front()); derivations.pop_front();
for (auto n = 0; n < outputs; n++) {
auto path = derivations.front(); derivations.pop_front();
pkgs.emplace_back(path, active != "false", priority);
}
}

buildProfile(out, std::move(pkgs));

createSymlink(getAttr("manifest"), out + "/manifest.nix");
}
src/libstore/builtins/buildenv.hh (new file, 21 lines)

@@ -0,0 +1,21 @@
#pragma once

#include "derivations.hh"
#include "store-api.hh"

namespace nix {

struct Package {
Path path;
bool active;
int priority;
Package(Path path, bool active, int priority) : path{path}, active{active}, priority{priority} {}
};

typedef std::vector<Package> Packages;

void buildProfile(const Path & out, Packages && pkgs);

void builtinBuildenv(const BasicDerivation & drv);

}
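A minimal sketch of the interface the new header exposes, usable outside the builtin builder; the output path and store paths below are placeholders, not values from this commit:

    #include "builtins/buildenv.hh"

    using namespace nix;

    void makeProfile(const Path & out)
    {
        Packages pkgs;
        // path, active flag, and the priority used to resolve file collisions
        pkgs.emplace_back("/nix/store/aaaa-hello-2.10", true, 5);
        pkgs.emplace_back("/nix/store/bbbb-cowsay-3.03", true, 10);
        createDirs(out);
        buildProfile(out, std::move(pkgs));   // symlinks both packages into 'out'
    }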
@@ -65,7 +65,7 @@ bool BasicDerivation::isBuiltin() const

StorePath writeDerivation(ref<Store> store,
const Derivation & drv, const string & name, RepairFlag repair)
const Derivation & drv, std::string_view name, RepairFlag repair)
{
auto references = cloneStorePathSet(drv.inputSrcs);
for (auto & i : drv.inputDrvs)

@@ -73,8 +73,8 @@ StorePath writeDerivation(ref<Store> store,
/* Note that the outputs of a derivation are *not* references
(that can be missing (of course) and should not necessarily be
held during a garbage collection). */
string suffix = name + drvExtension;
string contents = drv.unparse(*store, false);
auto suffix = std::string(name) + drvExtension;
auto contents = drv.unparse(*store, false);
return settings.readOnlyMode
? store->computeStorePathForText(suffix, contents, references)
: store->addTextToStore(suffix, contents, references, repair);

@@ -79,7 +79,7 @@ class Store;

/* Write a derivation to the Nix store, and return its path. */
StorePath writeDerivation(ref<Store> store,
const Derivation & drv, const string & name, RepairFlag repair = NoRepair);
const Derivation & drv, std::string_view name, RepairFlag repair = NoRepair);

/* Read a derivation from a file. */
Derivation readDerivation(const Store & store, const Path & drvPath);
@@ -390,6 +390,7 @@ struct CurlDownloader : public Downloader
case CURLE_SSL_CACERT_BADFILE:
case CURLE_TOO_MANY_REDIRECTS:
case CURLE_WRITE_ERROR:
case CURLE_UNSUPPORTED_PROTOCOL:
err = Misc;
break;
default: // Shut up warnings
@@ -20,13 +20,6 @@ namespace nix {
must be deleted and recreated on startup.) */
#define DEFAULT_SOCKET_PATH "/daemon-socket/socket"

/* chroot-like behavior from Apple's sandbox */
#if __APPLE__
#define DEFAULT_ALLOWED_IMPURE_PREFIXES "/System/Library /usr/lib /dev /bin/sh"
#else
#define DEFAULT_ALLOWED_IMPURE_PREFIXES ""
#endif

Settings settings;

static GlobalConfig::Register r1(&settings);

@@ -68,7 +61,12 @@ Settings::Settings()
sandboxPaths = tokenizeString<StringSet>("/bin/sh=" SANDBOX_SHELL);
#endif

allowedImpureHostPrefixes = tokenizeString<StringSet>(DEFAULT_ALLOWED_IMPURE_PREFIXES);

/* chroot-like behavior from Apple's sandbox */
#if __APPLE__
sandboxPaths = tokenizeString<StringSet>("/System/Library/Frameworks /System/Library/PrivateFrameworks /bin/sh /bin/bash /private/tmp /private/var/tmp /usr/lib");
allowedImpureHostPrefixes = tokenizeString<StringSet>("/System/Library /usr/lib /dev /bin/sh");
#endif
}

void loadConfFile()
@@ -311,12 +311,7 @@ public:
Setting<bool> printMissing{this, true, "print-missing",
"Whether to print what paths need to be built or downloaded."};

Setting<std::string> preBuildHook{this,
#if __APPLE__
nixLibexecDir + "/nix/resolve-system-dependencies",
#else
"",
#endif
Setting<std::string> preBuildHook{this, "",
"pre-build-hook",
"A program to run just before a build to set derivation-specific build settings."};
@@ -163,10 +163,11 @@ static RegisterStoreImplementation regStore([](
const std::string & uri, const Store::Params & params)
-> std::shared_ptr<Store>
{
static bool forceHttp = getEnv("_NIX_FORCE_HTTP") == "1";
if (std::string(uri, 0, 7) != "http://" &&
std::string(uri, 0, 8) != "https://" &&
(getEnv("_NIX_FORCE_HTTP_BINARY_CACHE_STORE") != "1" || std::string(uri, 0, 7) != "file://")
) return 0;
(!forceHttp || std::string(uri, 0, 7) != "file://"))
return 0;
auto store = std::make_shared<HttpBinaryCacheStore>(params, uri);
store->init();
return store;
@@ -298,9 +298,7 @@ void LocalStore::openDB(State & state, bool create)
/* Open the Nix database. */
string dbPath = dbDir + "/db.sqlite";
auto & db(state.db);
if (sqlite3_open_v2(dbPath.c_str(), &db.db,
SQLITE_OPEN_READWRITE | (create ? SQLITE_OPEN_CREATE : 0), 0) != SQLITE_OK)
throw Error(format("cannot open Nix database '%1%'") % dbPath);
state.db = SQLite(dbPath, create);

#ifdef __CYGWIN__
/* The cygwin version of sqlite3 has a patch which calls

@@ -312,11 +310,6 @@ void LocalStore::openDB(State & state, bool create)
SetDllDirectoryW(L"");
#endif

if (sqlite3_busy_timeout(db, 60 * 60 * 1000) != SQLITE_OK)
throwSQLiteError(db, "setting timeout");

db.exec("pragma foreign_keys = 1");

/* !!! check whether sqlite has been built with foreign key
support */

@@ -350,7 +343,7 @@ void LocalStore::openDB(State & state, bool create)

/* Initialise the database schema, if necessary. */
if (create) {
const char * schema =
static const char schema[] =
#include "schema.sql.gen.hh"
;
db.exec(schema);

@@ -1275,7 +1268,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
else
hashSink = std::make_unique<HashModuloSink>(info->narHash.type, storePathToHash(printStorePath(info->path)));

dumpPath(toRealPath(printStorePath(i)), *hashSink);
dumpPath(Store::toRealPath(i), *hashSink);
auto current = hashSink->finish();

if (info->narHash != nullHash && info->narHash != current.first) {
@@ -31,7 +31,8 @@ ifeq ($(HAVE_SECCOMP), 1)
libstore_LDFLAGS += -lseccomp
endif

libstore_CXXFLAGS = \
libstore_CXXFLAGS += \
-I src/libutil -I src/libstore \
-DNIX_PREFIX=\"$(prefix)\" \
-DNIX_STORE_DIR=\"$(storedir)\" \
-DNIX_DATA_DIR=\"$(datadir)\" \
@@ -78,12 +78,7 @@ public:
state->db = SQLite(dbPath);

if (sqlite3_busy_timeout(state->db, 60 * 60 * 1000) != SQLITE_OK)
throwSQLiteError(state->db, "setting timeout");

// We can always reproduce the cache.
state->db.exec("pragma synchronous = off");
state->db.exec("pragma main.journal_mode = truncate");
state->db.isCache();

state->db.exec(schema);
@@ -10,7 +10,7 @@ class NarInfoDiskCache
public:
typedef enum { oValid, oInvalid, oUnknown } Outcome;

virtual ~NarInfoDiskCache() { };
virtual ~NarInfoDiskCache() { }

virtual void createCache(const std::string & uri, const Path & storeDir,
bool wantMassQuery, int priority) = 0;
@@ -1,5 +1,7 @@
#include "parsed-derivations.hh"

#include <nlohmann/json.hpp>

namespace nix {

ParsedDerivation::ParsedDerivation(StorePath && drvPath, BasicDerivation & drv)

@@ -9,13 +11,15 @@ ParsedDerivation::ParsedDerivation(StorePath && drvPath, BasicDerivation & drv)
auto jsonAttr = drv.env.find("__json");
if (jsonAttr != drv.env.end()) {
try {
structuredAttrs = nlohmann::json::parse(jsonAttr->second);
structuredAttrs = std::make_unique<nlohmann::json>(nlohmann::json::parse(jsonAttr->second));
} catch (std::exception & e) {
throw Error("cannot process __json attribute of '%s': %s", drvPath.to_string(), e.what());
}
}
}

ParsedDerivation::~ParsedDerivation() { }

std::optional<std::string> ParsedDerivation::getStringAttr(const std::string & name) const
{
if (structuredAttrs) {
@@ -1,6 +1,6 @@
#include "derivations.hh"

#include <nlohmann/json.hpp>
#include <nlohmann/json_fwd.hpp>

namespace nix {

@@ -8,15 +8,17 @@ class ParsedDerivation
{
StorePath drvPath;
BasicDerivation & drv;
std::optional<nlohmann::json> structuredAttrs;
std::unique_ptr<nlohmann::json> structuredAttrs;

public:

ParsedDerivation(StorePath && drvPath, BasicDerivation & drv);

const std::optional<nlohmann::json> & getStructuredAttrs() const
~ParsedDerivation();

const nlohmann::json * getStructuredAttrs() const
{
return structuredAttrs;
return structuredAttrs.get();
}

std::optional<std::string> getStringAttr(const std::string & name) const;
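With the header now forward-declaring nlohmann::json (json_fwd.hpp) and handing out a raw pointer, translation units that include parsed-derivations.hh no longer pull in the full JSON library. A sketch of the adapted call site, mirroring writeStructuredAttrs() earlier in this diff (parsedDrv is assumed to point at a valid ParsedDerivation):

    const nlohmann::json * structuredAttrs = parsedDrv->getStructuredAttrs();
    if (!structuredAttrs) return;    // derivation has no __json attribute
    auto json = *structuredAttrs;    // copy only where a mutable copy is needed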
@@ -256,4 +256,22 @@ string optimisticLockProfile(const Path & profile)
}

Path getDefaultProfile()
{
Path profileLink = getHome() + "/.nix-profile";
try {
if (!pathExists(profileLink)) {
replaceSymlink(
getuid() == 0
? settings.nixStateDir + "/profiles/default"
: fmt("%s/profiles/per-user/%s/profile", settings.nixStateDir, getUserName()),
profileLink);
}
return absPath(readLink(profileLink), dirOf(profileLink));
} catch (Error &) {
return profileLink;
}
}

}
@@ -64,4 +64,8 @@ void lockProfile(PathLocks & lock, const Path & profile);
rebuilt. */
string optimisticLockProfile(const Path & profile);

/* Resolve ~/.nix-profile. If ~/.nix-profile doesn't exist yet, create
it. */
Path getDefaultProfile();

}
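A small usage sketch of the helper declared above, matching how nix-env and the new MixDefaultProfile use it later in this diff:

    #include "profiles.hh"

    // Resolves ~/.nix-profile, creating the symlink on first use; on error it
    // falls back to returning the link path itself.
    Path profile = getDefaultProfile();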
@@ -71,6 +71,12 @@
(literal "/dev/zero")
(subpath "/dev/fd"))

; Allow pseudo-terminals.
(allow file*
(literal "/dev/ptmx")
(regex #"^/dev/pty[a-z]+")
(regex #"^/dev/ttys[0-9]+"))

; Does nothing, but reduces build noise.
(allow file* (literal "/dev/dtracehelper"))

@@ -85,3 +91,7 @@
(literal "/etc")
(literal "/var")
(literal "/private/var/tmp"))

; This is used by /bin/sh on macOS 10.15 and later.
(allow file*
(literal "/private/var/select/sh"))
@@ -25,11 +25,16 @@ namespace nix {
throw SQLiteError("%s: %s (in '%s')", fs.s, sqlite3_errstr(exterr), path);
}

SQLite::SQLite(const Path & path)
SQLite::SQLite(const Path & path, bool create)
{
if (sqlite3_open_v2(path.c_str(), &db,
SQLITE_OPEN_READWRITE | SQLITE_OPEN_CREATE, 0) != SQLITE_OK)
SQLITE_OPEN_READWRITE | (create ? SQLITE_OPEN_CREATE : 0), 0) != SQLITE_OK)
throw Error(format("cannot open SQLite database '%s'") % path);

if (sqlite3_busy_timeout(db, 60 * 60 * 1000) != SQLITE_OK)
throwSQLiteError(db, "setting timeout");

exec("pragma foreign_keys = 1");
}

SQLite::~SQLite()

@@ -42,6 +47,12 @@ SQLite::~SQLite()
}
}

void SQLite::isCache()
{
exec("pragma synchronous = off");
exec("pragma main.journal_mode = truncate");
}

void SQLite::exec(const std::string & stmt)
{
retrySQLite<void>([&]() {

@@ -94,6 +105,16 @@ SQLiteStmt::Use & SQLiteStmt::Use::operator () (const std::string & value, bool
return *this;
}

SQLiteStmt::Use & SQLiteStmt::Use::operator () (const unsigned char * data, size_t len, bool notNull)
{
if (notNull) {
if (sqlite3_bind_blob(stmt, curArg++, data, len, SQLITE_TRANSIENT) != SQLITE_OK)
throwSQLiteError(stmt.db, "binding argument");
} else
bind();
return *this;
}

SQLiteStmt::Use & SQLiteStmt::Use::operator () (int64_t value, bool notNull)
{
if (notNull) {
@@ -5,8 +5,8 @@

#include "types.hh"

class sqlite3;
class sqlite3_stmt;
struct sqlite3;
struct sqlite3_stmt;

namespace nix {

@@ -15,13 +15,16 @@ struct SQLite
{
sqlite3 * db = 0;
SQLite() { }
SQLite(const Path & path);
SQLite(const Path & path, bool create = true);
SQLite(const SQLite & from) = delete;
SQLite& operator = (const SQLite & from) = delete;
SQLite& operator = (SQLite && from) { db = from.db; from.db = 0; return *this; }
~SQLite();
operator sqlite3 * () { return db; }

/* Disable synchronous mode, set truncate journal mode. */
void isCache();

void exec(const std::string & stmt);
};

@@ -52,6 +55,7 @@ struct SQLiteStmt

/* Bind the next parameter. */
Use & operator () (const std::string & value, bool notNull = true);
Use & operator () (const unsigned char * data, size_t len, bool notNull = true);
Use & operator () (int64_t value, bool notNull = true);
Use & bind(); // null
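A minimal sketch of the revised wrapper API, assuming a writable location (the database path is illustrative):

    #include "sqlite.hh"

    using namespace nix;

    // Open (and by default create) a database; the constructor now also sets
    // the one-hour busy timeout and enables foreign keys itself.
    SQLite db("/tmp/example.sqlite");

    // Open an existing database only, as LocalStore::openDB() now does:
    //   SQLite db(dbPath, false);

    // For a rebuildable cache, trade durability for speed, as the
    // NAR-info disk cache does:
    db.isCache();   // pragma synchronous = off; main.journal_mode = truncate

    db.exec("create table if not exists Example (x integer)");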
|
@ -441,7 +441,9 @@ string Store::makeValidityRegistration(const StorePathSet & paths,
|
|||
|
||||
|
||||
void Store::pathInfoToJSON(JSONPlaceholder & jsonOut, const StorePathSet & storePaths,
|
||||
bool includeImpureInfo, bool showClosureSize, AllowInvalidFlag allowInvalid)
|
||||
bool includeImpureInfo, bool showClosureSize,
|
||||
Base hashBase,
|
||||
AllowInvalidFlag allowInvalid)
|
||||
{
|
||||
auto jsonList = jsonOut.list();
|
||||
|
||||
|
@ -453,7 +455,7 @@ void Store::pathInfoToJSON(JSONPlaceholder & jsonOut, const StorePathSet & store
|
|||
auto info = queryPathInfo(storePath);
|
||||
|
||||
jsonPath
|
||||
.attr("narHash", info->narHash.to_string())
|
||||
.attr("narHash", info->narHash.to_string(hashBase))
|
||||
.attr("narSize", info->narSize);
|
||||
|
||||
{
|
||||
|
@ -741,12 +743,7 @@ std::string Store::showPaths(const StorePathSet & paths)
|
|||
|
||||
string showPaths(const PathSet & paths)
|
||||
{
|
||||
string s;
|
||||
for (auto & i : paths) {
|
||||
if (s.size() != 0) s += ", ";
|
||||
s += "'" + i + "'";
|
||||
}
|
||||
return s;
|
||||
return concatStringsSep(", ", quoteStrings(paths));
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -561,6 +561,7 @@ public:
|
|||
each path is included. */
|
||||
void pathInfoToJSON(JSONPlaceholder & jsonOut, const StorePathSet & storePaths,
|
||||
bool includeImpureInfo, bool showClosureSize,
|
||||
Base hashBase = Base32,
|
||||
AllowInvalidFlag allowInvalid = DisallowInvalid);
|
||||
|
||||
/* Return the size of the closure of the specified path, that is,
|
||||
|
@@ -676,6 +677,11 @@ public:
return storePath;
}

Path toRealPath(const StorePath & storePath)
{
return toRealPath(printStorePath(storePath));
}

virtual void createUser(const std::string & userName, uid_t userId)
{ }
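The overload above is what lets the build.cc and local-store.cc call sites earlier in this diff collapse toRealPath(printStorePath(p)) into one call. A sketch, assuming a Store reference named store and a StorePath named path (illustrative):

    // Before:
    Path real = store.toRealPath(store.printStorePath(path));
    // After: the StorePath overload does the printStorePath() itself.
    Path real2 = store.toRealPath(path);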
@@ -80,6 +80,18 @@ struct Hash
or base-64. By default, this is prefixed by the hash type
(e.g. "sha256:"). */
std::string to_string(Base base = Base32, bool includeType = true) const;

std::string gitRev() const
{
assert(type == htSHA1);
return to_string(Base16, false);
}

std::string gitShortRev() const
{
assert(type == htSHA1);
return std::string(to_string(Base16, false), 0, 7);
}
};
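A quick sketch of the two accessors added above; the hash value is a hypothetical commit id and must be SHA-1, as the assertions require:

    Hash rev("aa32767cd139d7d2e9cb5a06b4976d8bbd2a56bd", htSHA1);
    std::string full = rev.gitRev();        // 40-character hex, no "sha1:" prefix
    std::string abbrev = rev.gitShortRev(); // first 7 hex characters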
@@ -97,10 +97,10 @@ void replaceEnv(std::map<std::string, std::string> newEnv)
}

Path absPath(Path path, Path dir)
Path absPath(Path path, std::optional<Path> dir)
{
if (path[0] != '/') {
if (dir == "") {
if (!dir) {
#ifdef __GNU__
/* GNU (aka. GNU/Hurd) doesn't have any limitation on path
lengths and doesn't define `PATH_MAX'. */

@@ -116,7 +116,7 @@ Path absPath(Path path, Path dir)
free(buf);
#endif
}
path = dir + "/" + path;
path = *dir + "/" + path;
}
return canonPath(path);
}
@@ -478,6 +478,17 @@ Path createTempDir(const Path & tmpRoot, const Path & prefix,
}

std::pair<AutoCloseFD, Path> createTempFile(const Path & prefix)
{
Path tmpl(getEnv("TMPDIR").value_or("/tmp") + "/" + prefix + ".XXXXXX");
// Strictly speaking, this is UB, but who cares...
AutoCloseFD fd(mkstemp((char *) tmpl.c_str()));
if (!fd)
throw SysError("creating temporary file '%s'", tmpl);
return {std::move(fd), tmpl};
}

std::string getUserName()
{
auto pw = getpwuid(geteuid());
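A minimal sketch of the new helper above (the prefix is illustrative):

    // Creates $TMPDIR/example.XXXXXX via mkstemp() and hands back both the
    // open descriptor and the chosen path.
    auto [fd, path] = createTempFile("example");
    writeFull(fd.get(), "hello\n");
    // 'fd' closes automatically when it goes out of scope (AutoCloseFD).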
@@ -1205,28 +1216,6 @@ template StringSet tokenizeString(std::string_view s, const string & separators)
template vector<string> tokenizeString(std::string_view s, const string & separators);

string concatStringsSep(const string & sep, const Strings & ss)
{
string s;
for (auto & i : ss) {
if (s.size() != 0) s += sep;
s += i;
}
return s;
}

string concatStringsSep(const string & sep, const StringSet & ss)
{
string s;
for (auto & i : ss) {
if (s.size() != 0) s += sep;
s += i;
}
return s;
}

string chomp(const string & s)
{
size_t i = s.find_last_not_of(" \n\r\t");
@@ -48,7 +48,7 @@ void clearEnv();
/* Return an absolutized path, resolving paths relative to the
specified directory, or the current directory otherwise. The path
is also canonicalised. */
Path absPath(Path path, Path dir = "");
Path absPath(Path path, std::optional<Path> dir = {});

/* Canonicalise a path by removing all `.' or `..' components and
double or trailing slashes. Optionally resolves all symlink
@@ -124,10 +124,6 @@ void deletePath(const Path & path);

void deletePath(const Path & path, unsigned long long & bytesFreed);

/* Create a temporary directory. */
Path createTempDir(const Path & tmpRoot = "", const Path & prefix = "nix",
bool includePid = true, bool useGlobalCounter = true, mode_t mode = 0755);

std::string getUserName();

/* Return $HOME or the user's home directory from /etc/passwd. */
@@ -207,6 +203,14 @@ public:
};

/* Create a temporary directory. */
Path createTempDir(const Path & tmpRoot = "", const Path & prefix = "nix",
bool includePid = true, bool useGlobalCounter = true, mode_t mode = 0755);

/* Create a temporary file, returning a file handle and its path. */
std::pair<AutoCloseFD, Path> createTempFile(const Path & prefix = "nix");

class Pipe
{
public:
@@ -347,8 +351,26 @@ template<class C> C tokenizeString(std::string_view s, const string & separators

/* Concatenate the given strings with a separator between the
elements. */
string concatStringsSep(const string & sep, const Strings & ss);
string concatStringsSep(const string & sep, const StringSet & ss);
template<class C>
string concatStringsSep(const string & sep, const C & ss)
{
string s;
for (auto & i : ss) {
if (s.size() != 0) s += sep;
s += i;
}
return s;
}

/* Add quotes around a collection of strings. */
template<class C> Strings quoteStrings(const C & c)
{
Strings res;
for (auto & s : c)
res.push_back("'" + s + "'");
return res;
}

/* Remove trailing whitespace from a string. */
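A quick sketch of the templated helpers above; Strings and StringSet come from types.hh (the values are illustrative):

    Strings list{"a", "b", "c"};
    StringSet names{"foo", "bar"};

    // One template now covers Strings, StringSet and any other string container.
    string s1 = concatStringsSep(" ", list);                    // "a b c"
    string s2 = concatStringsSep(", ", names);                  // "bar, foo" (set order)
    // showPaths() in store-api.cc composes the two helpers:
    string s3 = concatStringsSep(", ", quoteStrings(names));    // "'bar', 'foo'"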
@@ -426,6 +448,13 @@ void ignoreException();

/* Tree formatting. */
constexpr char treeConn[] = "├───";
constexpr char treeLast[] = "└───";
constexpr char treeLine[] = "│ ";
constexpr char treeNull[] = " ";

/* Truncate a string to 'width' printable characters. If 'filterAll'
is true, all ANSI escape sequences are filtered out. Otherwise,
some escape sequences (such as colour setting) are copied but not
@@ -444,10 +473,11 @@ string base64Decode(const string & s);
/* Get a value for the specified key from an associate container, or a
default value if the key doesn't exist. */
template <class T>
std::optional<std::string> get(const T & map, const std::string & key)
std::optional<typename T::mapped_type> get(const T & map, const typename T::key_type & key)
{
auto i = map.find(key);
return i == map.end() ? std::optional<std::string>() : i->second;
if (i == map.end()) return {};
return std::optional<typename T::mapped_type>(i->second);
}
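A sketch of the generalized lookup helper; the map is illustrative:

    std::map<std::string, int> m{{"answer", 42}};

    // The result type now follows the container's mapped_type instead of
    // being fixed to std::string.
    std::optional<int> v = get(m, "answer");    // contains 42
    std::optional<int> w = get(m, "missing");   // std::nullopt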
@@ -551,4 +581,31 @@ extern PathFilter defaultPathFilter;
AutoCloseFD createUnixDomainSocket(const Path & path, mode_t mode);

// A Rust/Python-like enumerate() iterator adapter.
// Borrowed from http://reedbeta.com/blog/python-like-enumerate-in-cpp17.
template <typename T,
typename TIter = decltype(std::begin(std::declval<T>())),
typename = decltype(std::end(std::declval<T>()))>
constexpr auto enumerate(T && iterable)
{
struct iterator
{
size_t i;
TIter iter;
bool operator != (const iterator & other) const { return iter != other.iter; }
void operator ++ () { ++i; ++iter; }
auto operator * () const { return std::tie(i, *iter); }
};

struct iterable_wrapper
{
T iterable;
auto begin() { return iterator{ 0, std::begin(iterable) }; }
auto end() { return iterator{ 0, std::end(iterable) }; }
};

return iterable_wrapper{ std::forward<T>(iterable) };
}

}
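A short usage sketch of the enumerate() adapter, in the style of the printTree() rewrite in nix-store.cc later in this diff:

    std::vector<std::string> sorted{"a", "b", "c"};
    for (const auto & [n, s] : enumerate(sorted)) {
        bool last = n + 1 == sorted.size();
        // n is the zero-based index, s a reference to the element;
        // 'last' distinguishes the final entry, e.g. to pick treeLast over treeConn.
    }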
|
@ -16,7 +16,7 @@
|
|||
#include "get-drvs.hh"
|
||||
#include "common-eval-args.hh"
|
||||
#include "attr-path.hh"
|
||||
#include "legacy.hh"
|
||||
#include "../nix/legacy.hh"
|
||||
|
||||
using namespace nix;
|
||||
using namespace std::string_literals;
|
||||
|
@ -314,7 +314,7 @@ static void _main(int argc, char * * argv)
|
|||
state->eval(e, vRoot);
|
||||
|
||||
for (auto & i : attrPaths) {
|
||||
Value & v(*findAlongAttrPath(*state, i, *autoArgs, vRoot));
|
||||
Value & v(*findAlongAttrPath(*state, i, *autoArgs, vRoot).first);
|
||||
state->forceValue(v);
|
||||
getDerivations(*state, v, "", *autoArgs, drvs, false);
|
||||
}
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
#include "globals.hh"
|
||||
#include "download.hh"
|
||||
#include "store-api.hh"
|
||||
#include "legacy.hh"
|
||||
#include "../nix/legacy.hh"
|
||||
|
||||
#include <fcntl.h>
|
||||
#include <regex>
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
#include "profiles.hh"
|
||||
#include "shared.hh"
|
||||
#include "globals.hh"
|
||||
#include "legacy.hh"
|
||||
#include "../nix/legacy.hh"
|
||||
|
||||
#include <iostream>
|
||||
#include <cerrno>
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
#include "shared.hh"
|
||||
#include "store-api.hh"
|
||||
#include "legacy.hh"
|
||||
#include "../nix/legacy.hh"
|
||||
|
||||
using namespace nix;
|
||||
|
||||
|
|
|
@ -6,7 +6,7 @@
|
|||
#include "globals.hh"
|
||||
#include "derivations.hh"
|
||||
#include "finally.hh"
|
||||
#include "legacy.hh"
|
||||
#include "../nix/legacy.hh"
|
||||
#include "daemon.hh"
|
||||
|
||||
#include <algorithm>
|
||||
|
|
|
@ -13,7 +13,7 @@
|
|||
#include "json.hh"
|
||||
#include "value-to-json.hh"
|
||||
#include "xml-writer.hh"
|
||||
#include "legacy.hh"
|
||||
#include "../nix/legacy.hh"
|
||||
|
||||
#include <cerrno>
|
||||
#include <ctime>
|
||||
|
@ -178,7 +178,7 @@ static void loadDerivations(EvalState & state, Path nixExprPath,
|
|||
Value vRoot;
|
||||
loadSourceExpr(state, nixExprPath, vRoot);
|
||||
|
||||
Value & v(*findAlongAttrPath(state, pathPrefix, autoArgs, vRoot));
|
||||
Value & v(*findAlongAttrPath(state, pathPrefix, autoArgs, vRoot).first);
|
||||
|
||||
getDerivations(state, v, pathPrefix, autoArgs, elems, true);
|
||||
|
||||
|
@ -408,7 +408,7 @@ static void queryInstSources(EvalState & state,
|
|||
Value vRoot;
|
||||
loadSourceExpr(state, instSource.nixExprPath, vRoot);
|
||||
for (auto & i : args) {
|
||||
Value & v(*findAlongAttrPath(state, i, *instSource.autoArgs, vRoot));
|
||||
Value & v(*findAlongAttrPath(state, i, *instSource.autoArgs, vRoot).first);
|
||||
getDerivations(state, v, "", *instSource.autoArgs, elems, true);
|
||||
}
|
||||
break;
|
||||
|
@ -1428,21 +1428,8 @@ static int _main(int argc, char * * argv)
|
|||
if (globals.profile == "")
|
||||
globals.profile = getEnv("NIX_PROFILE").value_or("");
|
||||
|
||||
if (globals.profile == "") {
|
||||
Path profileLink = getHome() + "/.nix-profile";
|
||||
try {
|
||||
if (!pathExists(profileLink)) {
|
||||
replaceSymlink(
|
||||
getuid() == 0
|
||||
? settings.nixStateDir + "/profiles/default"
|
||||
: fmt("%s/profiles/per-user/%s/profile", settings.nixStateDir, getUserName()),
|
||||
profileLink);
|
||||
}
|
||||
globals.profile = absPath(readLink(profileLink), dirOf(profileLink));
|
||||
} catch (Error &) {
|
||||
globals.profile = profileLink;
|
||||
}
|
||||
}
|
||||
if (globals.profile == "")
|
||||
globals.profile = getDefaultProfile();
|
||||
|
||||
op(globals, opFlags, opArgs);
|
||||
|
||||
|
|
|
@ -15,6 +15,8 @@ namespace nix {
|
|||
DrvInfos queryInstalled(EvalState & state, const Path & userEnv)
|
||||
{
|
||||
DrvInfos elems;
|
||||
if (pathExists(userEnv + "/manifest.json"))
|
||||
throw Error("profile '%s' is incompatible with 'nix-env'; please use 'nix profile' instead", userEnv);
|
||||
Path manifestFile = userEnv + "/manifest.nix";
|
||||
if (pathExists(manifestFile)) {
|
||||
Value v;
|
||||
|
|
|
@ -9,7 +9,7 @@
|
|||
#include "util.hh"
|
||||
#include "store-api.hh"
|
||||
#include "common-eval-args.hh"
|
||||
#include "legacy.hh"
|
||||
#include "../nix/legacy.hh"
|
||||
|
||||
#include <map>
|
||||
#include <iostream>
|
||||
|
@ -39,7 +39,7 @@ void processExpr(EvalState & state, const Strings & attrPaths,
|
|||
state.eval(e, vRoot);
|
||||
|
||||
for (auto & i : attrPaths) {
|
||||
Value & v(*findAlongAttrPath(state, i, autoArgs, vRoot));
|
||||
Value & v(*findAlongAttrPath(state, i, autoArgs, vRoot).first);
|
||||
state.forceValue(v);
|
||||
|
||||
PathSet context;
|
||||
|
|
|
@ -6,9 +6,9 @@
|
|||
#include "eval-inline.hh"
|
||||
#include "common-eval-args.hh"
|
||||
#include "attr-path.hh"
|
||||
#include "legacy.hh"
|
||||
#include "finally.hh"
|
||||
#include "progress-bar.hh"
|
||||
#include "../nix/legacy.hh"
|
||||
#include "../nix/progress-bar.hh"
|
||||
#include "tarfile.hh"
|
||||
|
||||
#include <iostream>
|
||||
|
@ -120,7 +120,7 @@ static int _main(int argc, char * * argv)
|
|||
Path path = resolveExprPath(lookupFileArg(*state, args.empty() ? "." : args[0]));
|
||||
Value vRoot;
|
||||
state->evalFile(path, vRoot);
|
||||
Value & v(*findAlongAttrPath(*state, attrPath, autoArgs, vRoot));
|
||||
Value & v(*findAlongAttrPath(*state, attrPath, autoArgs, vRoot).first);
|
||||
state->forceAttrs(v);
|
||||
|
||||
/* Extract the URI. */
|
||||
|
|
|
@ -9,7 +9,7 @@
|
|||
#include "util.hh"
|
||||
#include "worker-protocol.hh"
|
||||
#include "graphml.hh"
|
||||
#include "legacy.hh"
|
||||
#include "../nix/legacy.hh"
|
||||
|
||||
#include <iostream>
|
||||
#include <algorithm>
|
||||
|
@ -229,12 +229,6 @@ static StorePathSet maybeUseOutputs(const StorePath & storePath, bool useOutput,
|
|||
/* Some code to print a tree representation of a derivation dependency
|
||||
graph. Topological sorting is used to keep the tree relatively
|
||||
flat. */
|
||||
|
||||
const string treeConn = "+---";
|
||||
const string treeLine = "| ";
|
||||
const string treeNull = " ";
|
||||
|
||||
|
||||
static void printTree(const StorePath & path,
|
||||
const string & firstPad, const string & tailPad, StorePathSet & done)
|
||||
{
|
||||
|
@ -254,10 +248,11 @@ static void printTree(const StorePath & path,
|
|||
auto sorted = store->topoSortPaths(info->references);
|
||||
reverse(sorted.begin(), sorted.end());
|
||||
|
||||
for (auto i = sorted.begin(); i != sorted.end(); ++i) {
|
||||
auto j = i; ++j;
|
||||
printTree(*i, tailPad + treeConn,
|
||||
j == sorted.end() ? tailPad + treeNull : tailPad + treeLine,
|
||||
for (const auto &[n, i] : enumerate(sorted)) {
|
||||
bool last = n + 1 == sorted.size();
|
||||
printTree(i,
|
||||
tailPad + (last ? treeLast : treeConn),
|
||||
tailPad + (last ? treeNull : treeLine),
|
||||
done);
|
||||
}
|
||||
}
|
||||
|
@ -577,7 +572,6 @@ static void opGC(Strings opFlags, Strings opArgs)
|
|||
if (*i == "--print-roots") printRoots = true;
|
||||
else if (*i == "--print-live") options.action = GCOptions::gcReturnLive;
|
||||
else if (*i == "--print-dead") options.action = GCOptions::gcReturnDead;
|
||||
else if (*i == "--delete") options.action = GCOptions::gcDeleteDead;
|
||||
else if (*i == "--max-freed") {
|
||||
long long maxFreed = getIntArg<long long>(*i, i, opFlags.end(), true);
|
||||
options.maxFreed = maxFreed >= 0 ? maxFreed : 0;
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
|
||||
using namespace nix;
|
||||
|
||||
struct CmdBuild : MixDryRun, InstallablesCommand
|
||||
struct CmdBuild : InstallablesCommand, MixDryRun, MixProfile
|
||||
{
|
||||
Path outLink = "result";
|
||||
|
||||
|
@ -40,6 +40,10 @@ struct CmdBuild : MixDryRun, InstallablesCommand
|
|||
"To build the build.x86_64-linux attribute from release.nix:",
|
||||
"nix build -f release.nix build.x86_64-linux"
|
||||
},
|
||||
Example{
|
||||
"To make a profile point at GNU Hello:",
|
||||
"nix build --profile /tmp/profile nixpkgs.hello"
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
|
@ -49,18 +53,19 @@ struct CmdBuild : MixDryRun, InstallablesCommand
|
|||
|
||||
if (dryRun) return;
|
||||
|
||||
for (size_t i = 0; i < buildables.size(); ++i) {
|
||||
auto & b(buildables[i]);
|
||||
|
||||
if (outLink != "")
|
||||
for (auto & output : b.outputs)
|
||||
if (outLink != "") {
|
||||
for (size_t i = 0; i < buildables.size(); ++i) {
|
||||
for (auto & output : buildables[i].outputs)
|
||||
if (auto store2 = store.dynamic_pointer_cast<LocalFSStore>()) {
|
||||
std::string symlink = outLink;
|
||||
if (i) symlink += fmt("-%d", i);
|
||||
if (output.first != "out") symlink += fmt("-%s", output.first);
|
||||
store2->addPermRoot(output.second, absPath(symlink), true);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
updateProfile(buildables);
|
||||
}
|
||||
};
|
||||
|
||||
|
|
|
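As a side note, the out-link naming scheme implemented above works as follows: the first buildable gets the plain link name, subsequent ones get a numeric suffix, and any output other than "out" additionally gets the output name appended. A standalone sketch of just that naming logic (the sample data is made up):

    #include <iostream>
    #include <string>
    #include <vector>

    int main()
    {
        std::string outLink = "result";
        // Each inner vector stands for the output names of one buildable.
        std::vector<std::vector<std::string>> buildables = {{"out"}, {"out", "dev"}};

        for (size_t i = 0; i < buildables.size(); ++i)
            for (const auto & output : buildables[i]) {
                std::string symlink = outLink;
                if (i) symlink += "-" + std::to_string(i);
                if (output != "out") symlink += "-" + output;
                std::cout << symlink << "\n"; // result, result-1, result-1-dev
            }
    }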
@@ -2,6 +2,9 @@
#include "store-api.hh"
#include "derivations.hh"
#include "nixexpr.hh"
#include "profiles.hh"

extern char * * environ;

namespace nix {

@@ -96,4 +99,95 @@ Strings editorFor(const Pos & pos)
return args;
}

MixProfile::MixProfile()
{
mkFlag()
.longName("profile")
.description("profile to update")
.labels({"path"})
.dest(&profile);
}

void MixProfile::updateProfile(const StorePath & storePath)
{
if (!profile) return;
auto store = getStore().dynamic_pointer_cast<LocalFSStore>();
if (!store) throw Error("'--profile' is not supported for this Nix store");
auto profile2 = absPath(*profile);
switchLink(profile2,
createGeneration(
ref<LocalFSStore>(store),
profile2, store->printStorePath(storePath)));
}

void MixProfile::updateProfile(const Buildables & buildables)
{
if (!profile) return;

std::optional<StorePath> result;

for (auto & buildable : buildables) {
for (auto & output : buildable.outputs) {
if (result)
throw Error("'--profile' requires that the arguments produce a single store path, but there are multiple");
result = output.second.clone();
}
}

if (!result)
throw Error("'--profile' requires that the arguments produce a single store path, but there are none");

updateProfile(*result);
}

MixDefaultProfile::MixDefaultProfile()
{
profile = getDefaultProfile();
}

MixEnvironment::MixEnvironment() : ignoreEnvironment(false) {
mkFlag()
.longName("ignore-environment")
.shortName('i')
.description("clear the entire environment (except those specified with --keep)")
.set(&ignoreEnvironment, true);

mkFlag()
.longName("keep")
.shortName('k')
.description("keep specified environment variable")
.arity(1)
.labels({"name"})
.handler([&](std::vector<std::string> ss) { keep.insert(ss.front()); });

mkFlag()
.longName("unset")
.shortName('u')
.description("unset specified environment variable")
.arity(1)
.labels({"name"})
.handler([&](std::vector<std::string> ss) { unset.insert(ss.front()); });
}

void MixEnvironment::setEnviron() {
if (ignoreEnvironment) {
if (!unset.empty())
throw UsageError("--unset does not make sense with --ignore-environment");

for (const auto & var : keep) {
auto val = getenv(var.c_str());
if (val) stringsEnv.emplace_back(fmt("%s=%s", var.c_str(), val));
}

vectorEnv = stringsToCharPtrs(stringsEnv);
environ = vectorEnv.data();
} else {
if (!keep.empty())
throw UsageError("--keep does not make sense without --ignore-environment");

for (const auto & var : unset)
unsetenv(var.c_str());
}
}

}
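The --ignore-environment/--keep handling in setEnviron() amounts to rebuilding environ so that it contains only the kept variables. A standalone, simplified illustration of that idea (the kept variable names are just an example):

    #include <cstdlib>
    #include <set>
    #include <string>
    #include <vector>
    #include <unistd.h>

    extern char * * environ;

    int main()
    {
        std::set<std::string> keep = {"HOME", "PATH"};

        // Collect "NAME=value" strings for the variables to keep.
        std::vector<std::string> stringsEnv;
        for (const auto & var : keep)
            if (const char * val = getenv(var.c_str()))
                stringsEnv.push_back(var + "=" + val);

        // Point environ at a char* array built from those strings (nullptr-terminated).
        std::vector<char *> vectorEnv;
        for (auto & s : stringsEnv) vectorEnv.push_back(s.data());
        vectorEnv.push_back(nullptr);
        environ = vectorEnv.data();

        execlp("env", "env", (char *) nullptr); // should print only HOME and PATH
        return 1;
    }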
@@ -1,19 +1,15 @@
#pragma once

#include "installables.hh"
#include "args.hh"
#include "common-eval-args.hh"
#include "path.hh"
#include "eval.hh"

namespace nix {

extern std::string programPath;

struct Value;
class Bindings;
class EvalState;
struct Pos;
class Store;

/* A command that requires a Nix store. */
struct StoreCommand : virtual Command
{

@@ -27,34 +23,7 @@ private:
std::shared_ptr<Store> _store;
};

struct Buildable
{
std::optional<StorePath> drvPath;
std::map<std::string, StorePath> outputs;
};

typedef std::vector<Buildable> Buildables;

struct Installable
{
virtual ~Installable() { }

virtual std::string what() = 0;

virtual Buildables toBuildables()
{
throw Error("argument '%s' cannot be built", what());
}

Buildable toBuildable();

virtual Value * toValue(EvalState & state)
{
throw Error("argument '%s' cannot be evaluated", what());
}
};

struct SourceExprCommand : virtual Args, StoreCommand, MixEvalArgs
struct SourceExprCommand : virtual StoreCommand, MixEvalArgs
{
Path file;
@@ -189,4 +158,36 @@ std::set<StorePath> toDerivations(ref<Store> store,
filename:lineno. */
Strings editorFor(const Pos & pos);

struct MixProfile : virtual StoreCommand
{
std::optional<Path> profile;

MixProfile();

/* If 'profile' is set, make it point at 'storePath'. */
void updateProfile(const StorePath & storePath);

/* If 'profile' is set, make it point at the store path produced
by 'buildables'. */
void updateProfile(const Buildables & buildables);
};

struct MixDefaultProfile : MixProfile
{
MixDefaultProfile();
};

struct MixEnvironment : virtual Args {

StringSet keep, unset;
Strings stringsEnv;
std::vector<char*> vectorEnv;
bool ignoreEnvironment;

MixEnvironment();

/* Modify global environ based on ignoreEnvironment, keep, and unset. It's expected that exec will be called before this class goes out of scope, otherwise environ will become invalid. */
void setEnviron();
};

}
@@ -29,9 +29,15 @@ struct CmdEdit : InstallableCommand
{
auto state = getEvalState();

auto v = installable->toValue(*state);
auto [v, pos] = installable->toValue(*state);

Pos pos = findDerivationFilename(*state, *v, installable->what());
try {
pos = findDerivationFilename(*state, *v, installable->what());
} catch (NoPositionInfo &) {
}

if (pos == noPos)
throw Error("cannot find position information for '%s", installable->what());

stopProgressBar();

@@ -52,7 +52,7 @@ struct CmdEval : MixJSON, InstallableCommand

auto state = getEvalState();

auto v = installable->toValue(*state);
auto v = installable->toValue(*state).first;
PathSet context;

stopProgressBar();
@@ -109,6 +109,11 @@ struct InstallableStorePath : Installable
bs.push_back(std::move(b));
return bs;
}

std::optional<StorePath> getStorePath() override
{
return storePath.clone();
}
};

struct InstallableValue : Installable

@@ -121,7 +126,7 @@ struct InstallableValue : Installable
{
auto state = cmd.getEvalState();

auto v = toValue(*state);
auto v = toValue(*state).first;

Bindings & autoArgs = *cmd.getAutoArgs(*state);

@@ -169,11 +174,11 @@ struct InstallableExpr : InstallableValue

std::string what() override { return text; }

Value * toValue(EvalState & state) override
std::pair<Value *, Pos> toValue(EvalState & state) override
{
auto v = state.allocValue();
state.eval(state.parseExprFromString(text, absPath(".")), *v);
return v;
return {v, noPos};
}
};

@@ -187,16 +192,16 @@ struct InstallableAttrPath : InstallableValue

std::string what() override { return attrPath; }

Value * toValue(EvalState & state) override
std::pair<Value *, Pos> toValue(EvalState & state) override
{
auto source = cmd.getSourceExpr(state);

Bindings & autoArgs = *cmd.getAutoArgs(state);

Value * v = findAlongAttrPath(state, attrPath, autoArgs, *source);
auto v = findAlongAttrPath(state, attrPath, autoArgs, *source).first;
state.forceValue(*v);

return v;
return {v, noPos};
}
};
src/nix/installables.hh (new file, 45 lines)
@@ -0,0 +1,45 @@
#pragma once

#include "util.hh"
#include "path.hh"
#include "eval.hh"

#include <optional>

namespace nix {

struct Buildable
{
std::optional<StorePath> drvPath;
std::map<std::string, StorePath> outputs;
};

typedef std::vector<Buildable> Buildables;

struct Installable
{
virtual ~Installable() { }

virtual std::string what() = 0;

virtual Buildables toBuildables()
{
throw Error("argument '%s' cannot be built", what());
}

Buildable toBuildable();

virtual std::pair<Value *, Pos> toValue(EvalState & state)
{
throw Error("argument '%s' cannot be evaluated", what());
}

/* Return a value only if this installable is a store path or a
symlink to it. */
virtual std::optional<StorePath> getStorePath()
{
return {};
}
};

}
@@ -15,6 +15,8 @@ nix_SOURCES := \
$(wildcard src/nix-prefetch-url/*.cc) \
$(wildcard src/nix-store/*.cc) \

nix_CXXFLAGS += -I src/libutil -I src/libstore -I src/libexpr -I src/libmain

nix_LIBS = libexpr libmain libstore libutil libnixrust

nix_LDFLAGS = -pthread $(SODIUM_LIBS) $(EDITLINE_LIBS) $(BOOST_LDFLAGS) -lboost_context -lboost_thread -lboost_system
@@ -55,6 +55,7 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs
{
bool printBuildLogs = false;
bool useNet = true;
bool refresh = false;

NixArgs() : MultiCommand(*RegisterCommand::commands), MixCommonArgs("nix")
{

@@ -92,6 +93,11 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs
.longName("no-net")
.description("disable substituters and consider all previously downloaded files up-to-date")
.handler([&]() { useNet = false; });

mkFlag()
.longName("refresh")
.description("consider all previously downloaded files out-of-date")
.handler([&]() { refresh = true; });
}

void printFlags(std::ostream & out) override

@@ -176,6 +182,9 @@ void mainWrapped(int argc, char * * argv)
downloadSettings.connectTimeout = 1;
}

if (args.refresh)
settings.tarballTtl = 0;

args.command->prepare();
args.command->run();
}
@@ -89,7 +89,7 @@ struct CmdPathInfo : StorePathsCommand, MixJSON
store->pathInfoToJSON(jsonRoot,
// FIXME: preserve order?
storePathsToSet(storePaths),
true, showClosureSize, AllowInvalid);
true, showClosureSize, SRI, AllowInvalid);
}

else {
src/nix/run.cc (120 lines changed)
@@ -8,6 +8,7 @@
#include "fs-accessor.hh"
#include "progress-bar.hh"
#include "affinity.hh"
#include "eval.hh"

#if __linux__
#include <sys/mount.h>

@@ -19,11 +20,46 @@ using namespace nix;

std::string chrootHelperName = "__run_in_chroot";

struct CmdRun : InstallablesCommand
struct RunCommon : virtual Command
{
void runProgram(ref<Store> store,
const std::string & program,
const Strings & args)
{
stopProgressBar();

restoreSignals();

restoreAffinity();

/* If this is a diverted store (i.e. its "logical" location
(typically /nix/store) differs from its "physical" location
(e.g. /home/eelco/nix/store), then run the command in a
chroot. For non-root users, this requires running it in new
mount and user namespaces. Unfortunately,
unshare(CLONE_NEWUSER) doesn't work in a multithreaded
program (which "nix" is), so we exec() a single-threaded
helper program (chrootHelper() below) to do the work. */
auto store2 = store.dynamic_pointer_cast<LocalStore>();

if (store2 && store->storeDir != store2->realStoreDir) {
Strings helperArgs = { chrootHelperName, store->storeDir, store2->realStoreDir, program };
for (auto & arg : args) helperArgs.push_back(arg);

execv(readLink("/proc/self/exe").c_str(), stringsToCharPtrs(helperArgs).data());

throw SysError("could not execute chroot helper");
}

execvp(program.c_str(), stringsToCharPtrs(args).data());

throw SysError("unable to execute '%s'", program);
}
};

struct CmdRun : InstallablesCommand, RunCommon, MixEnvironment
{
std::vector<std::string> command = { "bash" };
StringSet keep, unset;
bool ignoreEnvironment = false;

CmdRun()
{
@@ -37,28 +73,6 @@ struct CmdRun : InstallablesCommand
if (ss.empty()) throw UsageError("--command requires at least one argument");
command = ss;
});

mkFlag()
.longName("ignore-environment")
.shortName('i')
.description("clear the entire environment (except those specified with --keep)")
.set(&ignoreEnvironment, true);

mkFlag()
.longName("keep")
.shortName('k')
.description("keep specified environment variable")
.arity(1)
.labels({"name"})
.handler([&](std::vector<std::string> ss) { keep.insert(ss.front()); });

mkFlag()
.longName("unset")
.shortName('u')
.description("unset specified environment variable")
.arity(1)
.labels({"name"})
.handler([&](std::vector<std::string> ss) { unset.insert(ss.front()); });
}

std::string description() override

@@ -94,35 +108,13 @@ struct CmdRun : InstallablesCommand

auto accessor = store->getFSAccessor();

if (ignoreEnvironment) {

if (!unset.empty())
throw UsageError("--unset does not make sense with --ignore-environment");

std::map<std::string, std::string> kept;
for (auto & var : keep) {
auto s = getenv(var.c_str());
if (s) kept[var] = s;
}

clearEnv();

for (auto & var : kept)
setenv(var.first.c_str(), var.second.c_str(), 1);

} else {

if (!keep.empty())
throw UsageError("--keep does not make sense without --ignore-environment");

for (auto & var : unset)
unsetenv(var.c_str());
}

std::unordered_set<StorePath> done;
std::queue<StorePath> todo;
for (auto & path : outPaths) todo.push(path.clone());

setEnviron();

auto unixPath = tokenizeString<Strings>(getEnv("PATH").value_or(""), ":");

while (!todo.empty()) {
@@ -142,38 +134,10 @@ struct CmdRun : InstallablesCommand

setenv("PATH", concatStringsSep(":", unixPath).c_str(), 1);

std::string cmd = *command.begin();
Strings args;
for (auto & arg : command) args.push_back(arg);

stopProgressBar();

restoreSignals();

restoreAffinity();

/* If this is a diverted store (i.e. its "logical" location
(typically /nix/store) differs from its "physical" location
(e.g. /home/eelco/nix/store), then run the command in a
chroot. For non-root users, this requires running it in new
mount and user namespaces. Unfortunately,
unshare(CLONE_NEWUSER) doesn't work in a multithreaded
program (which "nix" is), so we exec() a single-threaded
helper program (chrootHelper() below) to do the work. */
auto store2 = store.dynamic_pointer_cast<LocalStore>();

if (store2 && store->storeDir != store2->realStoreDir) {
Strings helperArgs = { chrootHelperName, store->storeDir, store2->realStoreDir, cmd };
for (auto & arg : args) helperArgs.push_back(arg);

execv(readLink("/proc/self/exe").c_str(), stringsToCharPtrs(helperArgs).data());

throw SysError("could not execute chroot helper");
}

execvp(cmd.c_str(), stringsToCharPtrs(args).data());

throw SysError("unable to exec '%s'", cmd);
runProgram(store, *command.begin(), args);
}
};
src/nix/shell.cc (new file, 319 lines)
@@ -0,0 +1,319 @@
#include "eval.hh"
#include "command.hh"
#include "common-args.hh"
#include "shared.hh"
#include "store-api.hh"
#include "derivations.hh"
#include "affinity.hh"
#include "progress-bar.hh"

#include <regex>

using namespace nix;

struct Var
{
bool exported;
std::string value; // quoted string or array
};

struct BuildEnvironment
{
std::map<std::string, Var> env;
std::string bashFunctions;
};

BuildEnvironment readEnvironment(const Path & path)
{
BuildEnvironment res;

std::set<std::string> exported;

debug("reading environment file '%s'", path);

auto file = readFile(path);

auto pos = file.cbegin();

static std::string varNameRegex =
R"re((?:[a-zA-Z_][a-zA-Z0-9_]*))re";

static std::regex declareRegex(
"^declare -x (" + varNameRegex + ")" +
R"re((?:="((?:[^"\\]|\\.)*)")?\n)re");

static std::string simpleStringRegex =
R"re((?:[a-zA-Z0-9_/:\.\-\+=]*))re";

static std::string quotedStringRegex =
R"re((?:\$?'(?:[^'\\]|\\[abeEfnrtv\\'"?])*'))re";

static std::string arrayRegex =
R"re((?:\(( *\[[^\]]+\]="(?:[^"\\]|\\.)*")*\)))re";

static std::regex varRegex(
"^(" + varNameRegex + ")=(" + simpleStringRegex + "|" + quotedStringRegex + "|" + arrayRegex + ")\n");

static std::regex functionRegex(
"^" + varNameRegex + " \\(\\) *\n");

while (pos != file.end()) {

std::smatch match;

if (std::regex_search(pos, file.cend(), match, declareRegex)) {
pos = match[0].second;
exported.insert(match[1]);
}

else if (std::regex_search(pos, file.cend(), match, varRegex)) {
pos = match[0].second;
res.env.insert({match[1], Var { (bool) exported.count(match[1]), match[2] }});
}

else if (std::regex_search(pos, file.cend(), match, functionRegex)) {
res.bashFunctions = std::string(pos, file.cend());
break;
}

else throw Error("shell environment '%s' has unexpected line '%s'",
path, file.substr(pos - file.cbegin(), 60));
}

return res;
}

/* Given an existing derivation, return the shell environment as
initialised by stdenv's setup script. We do this by building a
modified derivation with the same dependencies and nearly the same
initial environment variables, that just writes the resulting
environment to a file and exits. */
StorePath getDerivationEnvironment(ref<Store> store, Derivation drv)
{
auto builder = baseNameOf(drv.builder);
if (builder != "bash")
throw Error("'nix dev-shell' only works on derivations that use 'bash' as their builder");

drv.args = {
"-c",
"set -e; "
"export IN_NIX_SHELL=impure; "
"export dontAddDisableDepTrack=1; "
"if [[ -n $stdenv ]]; then "
" source $stdenv/setup; "
"fi; "
"export > $out; "
"set >> $out "};

/* Remove derivation checks. */
drv.env.erase("allowedReferences");
drv.env.erase("allowedRequisites");
drv.env.erase("disallowedReferences");
drv.env.erase("disallowedRequisites");

// FIXME: handle structured attrs

/* Rehash and write the derivation. FIXME: would be nice to use
'buildDerivation', but that's privileged. */
auto drvName = drv.env["name"] + "-env";
for (auto & output : drv.outputs)
drv.env.erase(output.first);
drv.env["out"] = "";
drv.env["outputs"] = "out";
Hash h = hashDerivationModulo(*store, drv, true);
auto shellOutPath = store->makeOutputPath("out", h, drvName);
drv.outputs.insert_or_assign("out", DerivationOutput(shellOutPath.clone(), "", ""));
drv.env["out"] = store->printStorePath(shellOutPath);
auto shellDrvPath2 = writeDerivation(store, drv, drvName);

/* Build the derivation. */
store->buildPaths({shellDrvPath2});

assert(store->isValidPath(shellOutPath));

return shellOutPath;
}

struct Common : InstallableCommand, MixProfile
{
std::set<string> ignoreVars{
"BASHOPTS",
"EUID",
"HOME", // FIXME: don't ignore in pure mode?
"NIX_BUILD_TOP",
"NIX_ENFORCE_PURITY",
"NIX_LOG_FD",
"PPID",
"PWD",
"SHELLOPTS",
"SHLVL",
"SSL_CERT_FILE", // FIXME: only want to ignore /no-cert-file.crt
"TEMP",
"TEMPDIR",
"TERM",
"TMP",
"TMPDIR",
"TZ",
"UID",
};

void makeRcScript(const BuildEnvironment & buildEnvironment, std::ostream & out)
{
out << "nix_saved_PATH=\"$PATH\"\n";

for (auto & i : buildEnvironment.env) {
if (!ignoreVars.count(i.first) && !hasPrefix(i.first, "BASH_")) {
out << fmt("%s=%s\n", i.first, i.second.value);
if (i.second.exported)
out << fmt("export %s\n", i.first);
}
}

out << "PATH=\"$PATH:$nix_saved_PATH\"\n";

out << buildEnvironment.bashFunctions << "\n";

// FIXME: set outputs

out << "export NIX_BUILD_TOP=\"$(mktemp -d --tmpdir nix-shell.XXXXXX)\"\n";
for (auto & i : {"TMP", "TMPDIR", "TEMP", "TEMPDIR"})
out << fmt("export %s=\"$NIX_BUILD_TOP\"\n", i);

out << "eval \"$shellHook\"\n";
}

StorePath getShellOutPath(ref<Store> store)
{
auto path = installable->getStorePath();
if (path && hasSuffix(path->to_string(), "-env"))
return path->clone();
else {
auto drvs = toDerivations(store, {installable});

if (drvs.size() != 1)
throw Error("'%s' needs to evaluate to a single derivation, but it evaluated to %d derivations",
installable->what(), drvs.size());

auto & drvPath = *drvs.begin();

return getDerivationEnvironment(store, store->derivationFromPath(drvPath));
}
}

BuildEnvironment getBuildEnvironment(ref<Store> store)
{
auto shellOutPath = getShellOutPath(store);

updateProfile(shellOutPath);

return readEnvironment(store->printStorePath(shellOutPath));
}
};

struct CmdDevShell : Common, MixEnvironment
{
std::vector<std::string> command;

CmdDevShell()
{
mkFlag()
.longName("command")
.shortName('c')
.description("command and arguments to be executed insted of an interactive shell")
.labels({"command", "args"})
.arity(ArityAny)
.handler([&](std::vector<std::string> ss) {
if (ss.empty()) throw UsageError("--command requires at least one argument");
command = ss;
});
}

std::string description() override
{
return "run a bash shell that provides the build environment of a derivation";
}

Examples examples() override
{
return {
Example{
"To get the build environment of GNU hello:",
"nix dev-shell nixpkgs.hello"
},
Example{
"To store the build environment in a profile:",
"nix dev-shell --profile /tmp/my-shell nixpkgs.hello"
},
Example{
"To use a build environment previously recorded in a profile:",
"nix dev-shell /tmp/my-shell"
},
};
}

void run(ref<Store> store) override
{
auto buildEnvironment = getBuildEnvironment(store);

auto [rcFileFd, rcFilePath] = createTempFile("nix-shell");

std::ostringstream ss;
makeRcScript(buildEnvironment, ss);

ss << fmt("rm -f '%s'\n", rcFilePath);

if (!command.empty()) {
std::vector<std::string> args;
for (auto s : command)
args.push_back(shellEscape(s));
ss << fmt("exec %s\n", concatStringsSep(" ", args));
}

writeFull(rcFileFd.get(), ss.str());

stopProgressBar();

auto shell = getEnv("SHELL").value_or("bash");

setEnviron();

auto args = Strings{std::string(baseNameOf(shell)), "--rcfile", rcFilePath};

restoreAffinity();
restoreSignals();

execvp(shell.c_str(), stringsToCharPtrs(args).data());

throw SysError("executing shell '%s'", shell);
}
};

struct CmdPrintDevEnv : Common
{
std::string description() override
{
return "print shell code that can be sourced by bash to reproduce the build environment of a derivation";
}

Examples examples() override
{
return {
Example{
"To apply the build environment of GNU hello to the current shell:",
". <(nix print-dev-env nixpkgs.hello)"
},
};
}

void run(ref<Store> store) override
{
auto buildEnvironment = getBuildEnvironment(store);

stopProgressBar();

makeRcScript(buildEnvironment, std::cout);
}
};

static auto r1 = registerCommand<CmdPrintDevEnv>("print-dev-env");
static auto r2 = registerCommand<CmdDevShell>("dev-shell");
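readEnvironment() above parses the file produced by the builder's `export > $out; set >> $out`, i.e. `declare -x NAME="value"` lines followed by plain assignments and bash function definitions. A small standalone demo of the `declare -x` matching, built the same way as the regex above (the sample input is made up):

    #include <iostream>
    #include <regex>
    #include <string>

    int main()
    {
        std::string file =
            "declare -x builder=\"/nix/store/abc-bash/bin/bash\"\n"
            "declare -x out=\"/nix/store/xyz-hello-env\"\n";

        std::string varNameRegex = R"re((?:[a-zA-Z_][a-zA-Z0-9_]*))re";
        std::regex declareRegex(
            "^declare -x (" + varNameRegex + ")" +
            R"re((?:="((?:[^"\\]|\\.)*)")?\n)re");

        auto pos = file.cbegin();
        std::smatch match;
        while (std::regex_search(pos, file.cend(), match, declareRegex)) {
            std::cout << "exported: " << match[1] << " = " << match[2] << "\n";
            pos = match[0].second; // continue after the matched line
        }
    }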
@@ -145,7 +145,7 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand
auto v = state->allocValue();
state->eval(state->parseExprFromString(*res.data, "/no-such-path"), *v);
Bindings & bindings(*state->allocBindings(0));
auto v2 = findAlongAttrPath(*state, settings.thisSystem, bindings, *v);
auto v2 = findAlongAttrPath(*state, settings.thisSystem, bindings, *v).first;

return store->parseStorePath(state->forceString(*v2));
}
@@ -143,11 +143,6 @@ struct CmdWhyDepends : SourceExprCommand
and `dependency`. */
std::function<void(Node &, const string &, const string &)> printNode;

const string treeConn = "╠═══";
const string treeLast = "╚═══";
const string treeLine = "║ ";
const string treeNull = " ";

struct BailOut { };

printNode = [&](Node & node, const string & firstPad, const string & tailPad) {

@@ -157,7 +152,7 @@ struct CmdWhyDepends : SourceExprCommand
std::cout << fmt("%s%s%s%s" ANSI_NORMAL "\n",
firstPad,
node.visited ? "\e[38;5;244m" : "",
firstPad != "" ? "=> " : "",
firstPad != "" ? "→ " : "",
pathS);

if (node.path == dependencyPath && !all
@@ -6,6 +6,8 @@ resolve-system-dependencies_DIR := $(d)

resolve-system-dependencies_INSTALL_DIR := $(libexecdir)/nix

resolve-system-dependencies_CXXFLAGS += -I src/libutil -I src/libstore -I src/libmain

resolve-system-dependencies_LIBS := libstore libmain libutil libnixrust

resolve-system-dependencies_SOURCES := $(d)/resolve-system-dependencies.cc

@@ -48,7 +48,7 @@ basicTests

# Test HttpBinaryCacheStore.
export _NIX_FORCE_HTTP_BINARY_CACHE_STORE=1
export _NIX_FORCE_HTTP=1
basicTests
@@ -126,7 +126,7 @@ badKey="$(cat $TEST_ROOT/pk2)"
res=($(nix-store --generate-binary-cache-key foo.nixos.org-1 $TEST_ROOT/sk3 $TEST_ROOT/pk3))
otherKey="$(cat $TEST_ROOT/pk3)"

_NIX_FORCE_HTTP_BINARY_CACHE_STORE= nix copy --to file://$cacheDir?secret-key=$TEST_ROOT/sk1 $outPath
_NIX_FORCE_HTTP= nix copy --to file://$cacheDir?secret-key=$TEST_ROOT/sk1 $outPath

# Downloading should fail if we don't provide a key.

@@ -3,7 +3,7 @@ rec {

path = "@coreutils@";

system = builtins.currentSystem;
system = "@system@";

shared = builtins.getEnv "_NIX_TEST_SHARED";
@@ -6,7 +6,7 @@ drvPath=$(nix-instantiate dependencies.nix)

echo "derivation is $drvPath"

nix-store -q --tree "$drvPath" | grep ' +---.*builder1.sh'
nix-store -q --tree "$drvPath" | grep '───.*builder1.sh'

# Test Graphviz graph generation.
nix-store -q --graph "$drvPath" > $TEST_ROOT/graph

@@ -24,7 +24,7 @@ if test -n "$dot"; then
$dot < $TEST_ROOT/graph
fi

nix-store -q --tree "$outPath" | grep '+---.*dependencies-input-2'
nix-store -q --tree "$outPath" | grep '───.*dependencies-input-2'

echo "output path is $outPath"
@@ -9,6 +9,8 @@ clearStore

repo=$TEST_ROOT/git

export _NIX_FORCE_HTTP=1

rm -rf $repo ${repo}-tmp $TEST_HOME/.cache/nix/gitv2

git init $repo

@@ -7,3 +7,5 @@ libplugintest_SOURCES := $(d)/plugintest.cc
libplugintest_ALLOW_UNDEFINED := 1

libplugintest_EXCLUDE_FROM_LIBRARY_LIST := 1

libplugintest_CXXFLAGS := -I src/libutil -I src/libexpr