diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 17a79dc97..b2b1f07fb 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -20,8 +20,7 @@ jobs:
name: '${{ env.CACHIX_NAME }}'
signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}'
authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'
- #- run: nix flake check
- - run: nix-build -A checks.$(if [[ `uname` = Linux ]]; then echo x86_64-linux; else echo x86_64-darwin; fi)
+ - run: nix-build -A checks.$(nix-instantiate --eval -E '(builtins.currentSystem)')
check_cachix:
name: Cachix secret present for installer tests
runs-on: ubuntu-latest
diff --git a/.gitignore b/.gitignore
index 2e14561fe..5f402dbc3 100644
--- a/.gitignore
+++ b/.gitignore
@@ -76,7 +76,6 @@ perl/Makefile.config
# /tests/
/tests/test-tmp
/tests/common.sh
-/tests/dummy
/tests/result*
/tests/restricted-innocent
/tests/shell
diff --git a/Makefile b/Makefile
index b7f0e79db..dd259e5cd 100644
--- a/Makefile
+++ b/Makefile
@@ -12,6 +12,7 @@ makefiles = \
src/resolve-system-dependencies/local.mk \
scripts/local.mk \
misc/bash/local.mk \
+ misc/fish/local.mk \
misc/zsh/local.mk \
misc/systemd/local.mk \
misc/launchd/local.mk \
diff --git a/Makefile.config.in b/Makefile.config.in
index fd92365eb..c8c4446b4 100644
--- a/Makefile.config.in
+++ b/Makefile.config.in
@@ -1,3 +1,4 @@
+HOST_OS = @host_os@
AR = @AR@
BDW_GC_LIBS = @BDW_GC_LIBS@
BOOST_LDFLAGS = @BOOST_LDFLAGS@
diff --git a/boehmgc-coroutine-sp-fallback.diff b/boehmgc-coroutine-sp-fallback.diff
new file mode 100644
index 000000000..fa8dd0325
--- /dev/null
+++ b/boehmgc-coroutine-sp-fallback.diff
@@ -0,0 +1,42 @@
+diff --git a/pthread_stop_world.c b/pthread_stop_world.c
+index 1cee6a0b..46c3acd9 100644
+--- a/pthread_stop_world.c
++++ b/pthread_stop_world.c
+@@ -674,6 +674,8 @@ GC_INNER void GC_push_all_stacks(void)
+ struct GC_traced_stack_sect_s *traced_stack_sect;
+ pthread_t self = pthread_self();
+ word total_size = 0;
++ size_t stack_limit;
++ pthread_attr_t pattr;
+
+ if (!EXPECT(GC_thr_initialized, TRUE))
+ GC_thr_init();
+@@ -723,6 +725,28 @@ GC_INNER void GC_push_all_stacks(void)
+ hi = p->altstack + p->altstack_size;
+ /* FIXME: Need to scan the normal stack too, but how ? */
+ /* FIXME: Assume stack grows down */
++ } else {
++ if (pthread_getattr_np(p->id, &pattr)) {
++ ABORT("GC_push_all_stacks: pthread_getattr_np failed!");
++ }
++ if (pthread_attr_getstacksize(&pattr, &stack_limit)) {
++ ABORT("GC_push_all_stacks: pthread_attr_getstacksize failed!");
++ }
++ // When a thread goes into a coroutine, we lose its original sp until
++ // control flow returns to the thread.
++ // While in the coroutine, the sp points outside the thread stack,
++ // so we can detect this and push the entire thread stack instead,
++ // as an approximation.
++ // We assume that the coroutine has similarly added its entire stack.
++ // This could be made accurate by cooperating with the application
++ // via new functions and/or callbacks.
++ #ifndef STACK_GROWS_UP
++ if (lo >= hi || lo < hi - stack_limit) { // sp outside stack
++ lo = hi - stack_limit;
++ }
++ #else
++ #error "STACK_GROWS_UP not supported in boost_coroutine2 (as of june 2021), so we don't support it in Nix."
++ #endif
+ }
+ GC_push_all_stack_sections(lo, hi, traced_stack_sect);
+ # ifdef STACK_GROWS_UP
diff --git a/configure.ac b/configure.ac
index 03b123366..6e563eec3 100644
--- a/configure.ac
+++ b/configure.ac
@@ -231,7 +231,7 @@ AC_SUBST(HAVE_SECCOMP, [$have_seccomp])
# Look for aws-cpp-sdk-s3.
AC_LANG_PUSH(C++)
AC_CHECK_HEADERS([aws/s3/S3Client.h],
- [AC_DEFINE([ENABLE_S3], [1], [Whether to enable S3 support via aws-sdk-cpp.]) enable_s3=1],
+ [AC_DEFINE([ENABLE_S3], [1], [Whether to enable S3 support via aws-sdk-cpp.]) enable_s3=1],
[AC_DEFINE([ENABLE_S3], [0], [Whether to enable S3 support via aws-sdk-cpp.]) enable_s3=])
AC_SUBST(ENABLE_S3, [$enable_s3])
AC_LANG_POP(C++)
diff --git a/doc/manual/generate-builtins.nix b/doc/manual/generate-builtins.nix
index 416a7fdba..92c7b1a31 100644
--- a/doc/manual/generate-builtins.nix
+++ b/doc/manual/generate-builtins.nix
@@ -6,9 +6,11 @@ builtins:
concatStrings (map
(name:
let builtin = builtins.${name}; in
- " - `builtins.${name}` " + concatStringsSep " " (map (s: "*${s}*") builtin.args)
- + " \n\n"
- + concatStrings (map (s: " ${s}\n") (splitLines builtin.doc)) + "\n\n"
+      "<dt id=\"builtins-${name}\"><a href=\"#builtins-${name}\"><code>${name} "
+      + concatStringsSep " " (map (s: "<var>${s}</var>") builtin.args)
+      + "</code></a></dt>"
+      + "<dd>\n\n"
+      + builtin.doc
+      + "\n\n</dd>"
)
(attrNames builtins))
-
diff --git a/doc/manual/local.mk b/doc/manual/local.mk
index 271529b38..e25157af8 100644
--- a/doc/manual/local.mk
+++ b/doc/manual/local.mk
@@ -64,6 +64,7 @@ $(d)/conf-file.json: $(bindir)/nix
$(d)/src/expressions/builtins.md: $(d)/builtins.json $(d)/generate-builtins.nix $(d)/src/expressions/builtins-prefix.md $(bindir)/nix
@cat doc/manual/src/expressions/builtins-prefix.md > $@.tmp
$(trace-gen) $(nix-eval) --expr 'import doc/manual/generate-builtins.nix (builtins.fromJSON (builtins.readFile $<))' >> $@.tmp
+ @cat doc/manual/src/expressions/builtins-suffix.md >> $@.tmp
@mv $@.tmp $@
$(d)/builtins.json: $(bindir)/nix
diff --git a/doc/manual/src/command-ref/env-common.md b/doc/manual/src/command-ref/env-common.md
index b709ca9d1..6e2403461 100644
--- a/doc/manual/src/command-ref/env-common.md
+++ b/doc/manual/src/command-ref/env-common.md
@@ -10,35 +10,39 @@ Most Nix commands interpret the following environment variables:
A colon-separated list of directories used to look up Nix
expressions enclosed in angle brackets (i.e., `<path>`). For
instance, the value
-
+
/home/eelco/Dev:/etc/nixos
-
+
will cause Nix to look for paths relative to `/home/eelco/Dev` and
`/etc/nixos`, in this order. It is also possible to match paths
against a prefix. For example, the value
-
+
nixpkgs=/home/eelco/Dev/nixpkgs-branch:/etc/nixos
-
+
will cause Nix to search for `<nixpkgs/path>` in
`/home/eelco/Dev/nixpkgs-branch/path` and `/etc/nixos/nixpkgs/path`.
-
+
If a path in the Nix search path starts with `http://` or
`https://`, it is interpreted as the URL of a tarball that will be
downloaded and unpacked to a temporary location. The tarball must
consist of a single top-level directory. For example, setting
`NIX_PATH` to
-
- nixpkgs=https://github.com/NixOS/nixpkgs/archive/nixos-15.09.tar.gz
-
- tells Nix to download the latest revision in the Nixpkgs/NixOS 15.09
- channel.
-
- A following shorthand can be used to refer to the official channels:
-
- nixpkgs=channel:nixos-15.09
-
- The search path can be extended using the `-I` option, which takes
- precedence over `NIX_PATH`.
+
+ nixpkgs=https://github.com/NixOS/nixpkgs/archive/master.tar.gz
+
+ tells Nix to download and use the current contents of the
+ `master` branch in the `nixpkgs` repository.
+
+ The URLs of the tarballs from the official nixos.org channels (see
+ [the manual for `nix-channel`](nix-channel.md)) can be abbreviated
+   as `channel:<channel-name>`. For instance, the following two
+ values of `NIX_PATH` are equivalent:
+
+ nixpkgs=channel:nixos-21.05
+ nixpkgs=https://nixos.org/channels/nixos-21.05/nixexprs.tar.xz
+
+ The Nix search path can also be extended using the `-I` option to
+ many Nix commands, which takes precedence over `NIX_PATH`.
- `NIX_IGNORE_SYMLINK_STORE`\
Normally, the Nix store directory (typically `/nix/store`) is not
@@ -50,7 +54,7 @@ Most Nix commands interpret the following environment variables:
builds are deployed to machines where `/nix/store` resolves
differently. If you are sure that you’re not going to do that, you
can set `NIX_IGNORE_SYMLINK_STORE` to `1`.
-
+
Note that if you’re symlinking the Nix store so that you can put it
on another file system than the root file system, on Linux you’re
better off using `bind` mount points, e.g.,
@@ -59,7 +63,7 @@ Most Nix commands interpret the following environment variables:
$ mkdir /nix
$ mount -o bind /mnt/otherdisk/nix /nix
```
-
+
Consult the mount 8 manual page for details.
- `NIX_STORE_DIR`\
diff --git a/doc/manual/src/expressions/builtins-prefix.md b/doc/manual/src/expressions/builtins-prefix.md
index c16b2805f..87127de2a 100644
--- a/doc/manual/src/expressions/builtins-prefix.md
+++ b/doc/manual/src/expressions/builtins-prefix.md
@@ -9,7 +9,8 @@ scope. Instead, you can access them through the `builtins` built-in
value, which is a set that contains all built-in functions and values.
For instance, `derivation` is also available as `builtins.derivation`.
- - `derivation` *attrs*; `builtins.derivation` *attrs*\
-
- `derivation` is described in [its own section](derivations.md).
-
+<dl>
+  <dt><code>derivation <var>attrs</var></code>;
+  <code>builtins.derivation <var>attrs</var></code></dt>
+  <dd><p><var>derivation</var> is described in
+  <a href="derivations.md">its own section</a>.</p></dd>
diff --git a/doc/manual/src/expressions/builtins-suffix.md b/doc/manual/src/expressions/builtins-suffix.md
new file mode 100644
index 000000000..a74db2857
--- /dev/null
+++ b/doc/manual/src/expressions/builtins-suffix.md
@@ -0,0 +1 @@
+</dl>
diff --git a/flake.lock b/flake.lock
index 8aad22957..5fc969d7b 100644
--- a/flake.lock
+++ b/flake.lock
@@ -19,11 +19,11 @@
},
"nixpkgs": {
"locked": {
- "lastModified": 1622593737,
- "narHash": "sha256-9loxFJg85AbzJrSkU4pE/divZ1+zOxDy2FSjlrufCB8=",
+ "lastModified": 1624862269,
+ "narHash": "sha256-JFcsh2+7QtfKdJFoPibLFPLgIW6Ycnv8Bts9a7RYme0=",
"owner": "NixOS",
"repo": "nixpkgs",
- "rev": "bb8a5e54845012ed1375ffd5f317d2fdf434b20e",
+ "rev": "f77036342e2b690c61c97202bf48f2ce13acc022",
"type": "github"
},
"original": {
diff --git a/flake.nix b/flake.nix
index 466145366..5430d3904 100644
--- a/flake.nix
+++ b/flake.nix
@@ -20,6 +20,8 @@
linuxSystems = linux64BitSystems ++ [ "i686-linux" ];
systems = linuxSystems ++ [ "x86_64-darwin" "aarch64-darwin" ];
+ crossSystems = [ "armv6l-linux" "armv7l-linux" ];
+
forAllSystems = f: nixpkgs.lib.genAttrs systems (system: f system);
# Memoize nixpkgs for different platforms for efficiency.
@@ -79,7 +81,7 @@
buildPackages.mercurial
buildPackages.jq
]
- ++ lib.optionals stdenv.isLinux [(pkgs.util-linuxMinimal or pkgs.utillinuxMinimal)];
+ ++ lib.optionals stdenv.hostPlatform.isLinux [(buildPackages.util-linuxMinimal or buildPackages.utillinuxMinimal)];
buildDeps =
[ curl
@@ -93,7 +95,7 @@
]
++ lib.optionals stdenv.isLinux [libseccomp]
++ lib.optional (stdenv.isLinux || stdenv.isDarwin) libsodium
- ++ lib.optional stdenv.isx86_64 libcpuid;
+ ++ lib.optional stdenv.hostPlatform.isx86_64 libcpuid;
awsDeps = lib.optional (stdenv.isLinux || stdenv.isDarwin)
(aws-sdk-cpp.override {
@@ -102,7 +104,13 @@
});
propagatedDeps =
- [ (boehmgc.override { enableLargeConfig = true; })
+ [ ((boehmgc.override {
+ enableLargeConfig = true;
+ }).overrideAttrs(o: {
+ patches = (o.patches or []) ++ [
+ ./boehmgc-coroutine-sp-fallback.diff
+ ];
+ }))
];
perlDeps =
@@ -133,10 +141,11 @@
substitute ${./scripts/install.in} $out/install \
${pkgs.lib.concatMapStrings
- (system:
- '' \
- --replace '@tarballHash_${system}@' $(nix --experimental-features nix-command hash-file --base16 --type sha256 ${self.hydraJobs.binaryTarball.${system}}/*.tar.xz) \
- --replace '@tarballPath_${system}@' $(tarballPath ${self.hydraJobs.binaryTarball.${system}}/*.tar.xz) \
+ (system: let
+ tarball = if builtins.elem system crossSystems then self.hydraJobs.binaryTarballCross.x86_64-linux.${system} else self.hydraJobs.binaryTarball.${system};
+ in '' \
+ --replace '@tarballHash_${system}@' $(nix --experimental-features nix-command hash-file --base16 --type sha256 ${tarball}/*.tar.xz) \
+ --replace '@tarballPath_${system}@' $(tarballPath ${tarball}/*.tar.xz) \
''
)
systems
@@ -174,6 +183,77 @@
};
+ binaryTarball = buildPackages: nix: pkgs: let
+ inherit (pkgs) cacert;
+ installerClosureInfo = buildPackages.closureInfo { rootPaths = [ nix cacert ]; };
+ in
+
+ buildPackages.runCommand "nix-binary-tarball-${version}"
+ { #nativeBuildInputs = lib.optional (system != "aarch64-linux") shellcheck;
+ meta.description = "Distribution-independent Nix bootstrap binaries for ${pkgs.system}";
+ }
+ ''
+ cp ${installerClosureInfo}/registration $TMPDIR/reginfo
+ cp ${./scripts/create-darwin-volume.sh} $TMPDIR/create-darwin-volume.sh
+ substitute ${./scripts/install-nix-from-closure.sh} $TMPDIR/install \
+ --subst-var-by nix ${nix} \
+ --subst-var-by cacert ${cacert}
+
+ substitute ${./scripts/install-darwin-multi-user.sh} $TMPDIR/install-darwin-multi-user.sh \
+ --subst-var-by nix ${nix} \
+ --subst-var-by cacert ${cacert}
+ substitute ${./scripts/install-systemd-multi-user.sh} $TMPDIR/install-systemd-multi-user.sh \
+ --subst-var-by nix ${nix} \
+ --subst-var-by cacert ${cacert}
+ substitute ${./scripts/install-multi-user.sh} $TMPDIR/install-multi-user \
+ --subst-var-by nix ${nix} \
+ --subst-var-by cacert ${cacert}
+
+ if type -p shellcheck; then
+ # SC1090: Don't worry about not being able to find
+ # $nix/etc/profile.d/nix.sh
+ shellcheck --exclude SC1090 $TMPDIR/install
+ shellcheck $TMPDIR/create-darwin-volume.sh
+ shellcheck $TMPDIR/install-darwin-multi-user.sh
+ shellcheck $TMPDIR/install-systemd-multi-user.sh
+
+ # SC1091: Don't panic about not being able to source
+ # /etc/profile
+ # SC2002: Ignore "useless cat" "error", when loading
+ # .reginfo, as the cat is a much cleaner
+ # implementation, even though it is "useless"
+ # SC2116: Allow ROOT_HOME=$(echo ~root) for resolving
+ # root's home directory
+ shellcheck --external-sources \
+ --exclude SC1091,SC2002,SC2116 $TMPDIR/install-multi-user
+ fi
+
+ chmod +x $TMPDIR/install
+ chmod +x $TMPDIR/create-darwin-volume.sh
+ chmod +x $TMPDIR/install-darwin-multi-user.sh
+ chmod +x $TMPDIR/install-systemd-multi-user.sh
+ chmod +x $TMPDIR/install-multi-user
+ dir=nix-${version}-${pkgs.system}
+ fn=$out/$dir.tar.xz
+ mkdir -p $out/nix-support
+ echo "file binary-dist $fn" >> $out/nix-support/hydra-build-products
+ tar cvfJ $fn \
+ --owner=0 --group=0 --mode=u+rw,uga+r \
+ --absolute-names \
+ --hard-dereference \
+ --transform "s,$TMPDIR/install,$dir/install," \
+ --transform "s,$TMPDIR/create-darwin-volume.sh,$dir/create-darwin-volume.sh," \
+ --transform "s,$TMPDIR/reginfo,$dir/.reginfo," \
+ --transform "s,$NIX_STORE,$dir/store,S" \
+ $TMPDIR/install \
+ $TMPDIR/create-darwin-volume.sh \
+ $TMPDIR/install-darwin-multi-user.sh \
+ $TMPDIR/install-systemd-multi-user.sh \
+ $TMPDIR/install-multi-user \
+ $TMPDIR/reginfo \
+ $(cat ${installerClosureInfo}/store-paths)
+ '';
+
in {
# A Nixpkgs overlay that overrides the 'nix' and
@@ -285,7 +365,7 @@
outputs = [ "out" "bin" "dev" ];
- nativeBuildInputs = [ which ];
+ nativeBuildInputs = [ buildPackages.which ];
configurePhase = ''
${if (stdenv.isDarwin && stdenv.isAarch64) then "echo \"HAVE_SANDBOX_INIT=false\" > configure.local" else ""}
@@ -304,92 +384,33 @@
buildStatic = nixpkgs.lib.genAttrs linux64BitSystems (system: self.packages.${system}.nix-static);
+ buildCross = nixpkgs.lib.genAttrs crossSystems (crossSystem:
+ nixpkgs.lib.genAttrs ["x86_64-linux"] (system: self.packages.${system}."nix-${crossSystem}"));
+
# Perl bindings for various platforms.
perlBindings = nixpkgs.lib.genAttrs systems (system: self.packages.${system}.nix.perl-bindings);
# Binary tarball for various platforms, containing a Nix store
# with the closure of 'nix' package, and the second half of
# the installation script.
- binaryTarball = nixpkgs.lib.genAttrs systems (system:
+ binaryTarball = nixpkgs.lib.genAttrs systems (system: binaryTarball nixpkgsFor.${system} nixpkgsFor.${system}.nix nixpkgsFor.${system});
- with nixpkgsFor.${system};
-
- let
- installerClosureInfo = closureInfo { rootPaths = [ nix cacert ]; };
- in
-
- runCommand "nix-binary-tarball-${version}"
- { #nativeBuildInputs = lib.optional (system != "aarch64-linux") shellcheck;
- meta.description = "Distribution-independent Nix bootstrap binaries for ${system}";
- }
- ''
- cp ${installerClosureInfo}/registration $TMPDIR/reginfo
- cp ${./scripts/create-darwin-volume.sh} $TMPDIR/create-darwin-volume.sh
- substitute ${./scripts/install-nix-from-closure.sh} $TMPDIR/install \
- --subst-var-by nix ${nix} \
- --subst-var-by cacert ${cacert}
-
- substitute ${./scripts/install-darwin-multi-user.sh} $TMPDIR/install-darwin-multi-user.sh \
- --subst-var-by nix ${nix} \
- --subst-var-by cacert ${cacert}
- substitute ${./scripts/install-systemd-multi-user.sh} $TMPDIR/install-systemd-multi-user.sh \
- --subst-var-by nix ${nix} \
- --subst-var-by cacert ${cacert}
- substitute ${./scripts/install-multi-user.sh} $TMPDIR/install-multi-user \
- --subst-var-by nix ${nix} \
- --subst-var-by cacert ${cacert}
-
- if type -p shellcheck; then
- # SC1090: Don't worry about not being able to find
- # $nix/etc/profile.d/nix.sh
- shellcheck --exclude SC1090 $TMPDIR/install
- shellcheck $TMPDIR/create-darwin-volume.sh
- shellcheck $TMPDIR/install-darwin-multi-user.sh
- shellcheck $TMPDIR/install-systemd-multi-user.sh
-
- # SC1091: Don't panic about not being able to source
- # /etc/profile
- # SC2002: Ignore "useless cat" "error", when loading
- # .reginfo, as the cat is a much cleaner
- # implementation, even though it is "useless"
- # SC2116: Allow ROOT_HOME=$(echo ~root) for resolving
- # root's home directory
- shellcheck --external-sources \
- --exclude SC1091,SC2002,SC2116 $TMPDIR/install-multi-user
- fi
-
- chmod +x $TMPDIR/install
- chmod +x $TMPDIR/create-darwin-volume.sh
- chmod +x $TMPDIR/install-darwin-multi-user.sh
- chmod +x $TMPDIR/install-systemd-multi-user.sh
- chmod +x $TMPDIR/install-multi-user
- dir=nix-${version}-${system}
- fn=$out/$dir.tar.xz
- mkdir -p $out/nix-support
- echo "file binary-dist $fn" >> $out/nix-support/hydra-build-products
- tar cvfJ $fn \
- --owner=0 --group=0 --mode=u+rw,uga+r \
- --absolute-names \
- --hard-dereference \
- --transform "s,$TMPDIR/install,$dir/install," \
- --transform "s,$TMPDIR/create-darwin-volume.sh,$dir/create-darwin-volume.sh," \
- --transform "s,$TMPDIR/reginfo,$dir/.reginfo," \
- --transform "s,$NIX_STORE,$dir/store,S" \
- $TMPDIR/install \
- $TMPDIR/create-darwin-volume.sh \
- $TMPDIR/install-darwin-multi-user.sh \
- $TMPDIR/install-systemd-multi-user.sh \
- $TMPDIR/install-multi-user \
- $TMPDIR/reginfo \
- $(cat ${installerClosureInfo}/store-paths)
- '');
+ binaryTarballCross = nixpkgs.lib.genAttrs ["x86_64-linux"] (system: builtins.listToAttrs (map (crossSystem: {
+ name = crossSystem;
+ value = let
+ nixpkgsCross = import nixpkgs {
+ inherit system crossSystem;
+ overlays = [ self.overlay ];
+ };
+ in binaryTarball nixpkgsFor.${system} self.packages.${system}."nix-${crossSystem}" nixpkgsCross;
+ }) crossSystems));
# The first half of the installation script. This is uploaded
# to https://nixos.org/nix/install. It downloads the binary
# tarball for the user's system and calls the second half of the
# installation script.
- installerScript = installScriptFor [ "x86_64-linux" "i686-linux" "aarch64-linux" "x86_64-darwin" "aarch64-darwin" ];
- installerScriptForGHA = installScriptFor [ "x86_64-linux" "x86_64-darwin" ];
+ installerScript = installScriptFor [ "x86_64-linux" "i686-linux" "aarch64-linux" "x86_64-darwin" "aarch64-darwin" "armv6l-linux" "armv7l-linux" ];
+ installerScriptForGHA = installScriptFor [ "x86_64-linux" "x86_64-darwin" "armv6l-linux" "armv7l-linux"];
# Line coverage analysis.
coverage =
@@ -483,11 +504,14 @@
# `NIX_DAEMON_SOCKET_PATH` which is required for the tests to work
# againstLatestStable = testNixVersions pkgs pkgs.nix pkgs.nixStable;
} "touch $out";
- });
+ } // (if system == "x86_64-linux" then (builtins.listToAttrs (map (crossSystem: {
+ name = "binaryTarball-${crossSystem}";
+ value = self.hydraJobs.binaryTarballCross.${system}.${crossSystem};
+ }) crossSystems)) else {}));
packages = forAllSystems (system: {
inherit (nixpkgsFor.${system}) nix;
- } // nixpkgs.lib.optionalAttrs (builtins.elem system linux64BitSystems) {
+ } // (nixpkgs.lib.optionalAttrs (builtins.elem system linux64BitSystems) {
nix-static = let
nixpkgs = nixpkgsFor.${system}.pkgsStatic;
in with commonDeps nixpkgs; nixpkgs.stdenv.mkDerivation {
@@ -525,8 +549,49 @@
stripAllList = ["bin"];
strictDeps = true;
+
+ hardeningDisable = [ "pie" ];
};
- });
+ } // builtins.listToAttrs (map (crossSystem: {
+ name = "nix-${crossSystem}";
+ value = let
+ nixpkgsCross = import nixpkgs {
+ inherit system crossSystem;
+ overlays = [ self.overlay ];
+ };
+ in with commonDeps nixpkgsCross; nixpkgsCross.stdenv.mkDerivation {
+ name = "nix-${version}";
+
+ src = self;
+
+ VERSION_SUFFIX = versionSuffix;
+
+ outputs = [ "out" "dev" "doc" ];
+
+ nativeBuildInputs = nativeBuildDeps;
+ buildInputs = buildDeps ++ propagatedDeps;
+
+ configureFlags = [ "--sysconfdir=/etc" "--disable-doc-gen" ];
+
+ enableParallelBuilding = true;
+
+ makeFlags = "profiledir=$(out)/etc/profile.d";
+
+ doCheck = true;
+
+ installFlags = "sysconfdir=$(out)/etc";
+
+ postInstall = ''
+ mkdir -p $doc/nix-support
+ echo "doc manual $doc/share/doc/nix/manual" >> $doc/nix-support/hydra-build-products
+ mkdir -p $out/nix-support
+ echo "file binary-dist $out/bin/nix" >> $out/nix-support/hydra-build-products
+ '';
+
+ doInstallCheck = true;
+ installCheckFlags = "sysconfdir=$(out)/etc";
+ };
+ }) crossSystems)));
defaultPackage = forAllSystems (system: self.packages.${system}.nix);
diff --git a/maintainers/upload-release.pl b/maintainers/upload-release.pl
index c2933300f..3a8d27499 100755
--- a/maintainers/upload-release.pl
+++ b/maintainers/upload-release.pl
@@ -110,6 +110,9 @@ downloadFile("binaryTarball.i686-linux", "1");
downloadFile("binaryTarball.x86_64-linux", "1");
downloadFile("binaryTarball.aarch64-linux", "1");
downloadFile("binaryTarball.x86_64-darwin", "1");
+downloadFile("binaryTarball.aarch64-darwin", "1");
+downloadFile("binaryTarballCross.x86_64-linux.armv6l-linux", "1");
+downloadFile("binaryTarballCross.x86_64-linux.armv7l-linux", "1");
downloadFile("installerScript", "1");
for my $fn (glob "$tmpDir/*") {
@@ -153,6 +156,7 @@ write_file("$nixpkgsDir/nixos/modules/installer/tools/nix-fallback-paths.nix",
" i686-linux = \"" . getStorePath("build.i686-linux") . "\";\n" .
" aarch64-linux = \"" . getStorePath("build.aarch64-linux") . "\";\n" .
" x86_64-darwin = \"" . getStorePath("build.x86_64-darwin") . "\";\n" .
+ " aarch64-darwin = \"" . getStorePath("build.aarch64-darwin") . "\";\n" .
"}\n");
system("cd $nixpkgsDir && git commit -a -m 'nix-fallback-paths.nix: Update to $version'") == 0 or die;
diff --git a/misc/fish/completion.fish b/misc/fish/completion.fish
new file mode 100644
index 000000000..bedbefaf8
--- /dev/null
+++ b/misc/fish/completion.fish
@@ -0,0 +1,37 @@
+function _nix_complete
+ # Get the current command up to a cursor.
+ # - Behaves correctly even with pipes and nested in commands like env.
+ # - TODO: Returns the command verbatim (does not interpolate variables).
+ # That might not be optimal for arguments like -f.
+ set -l nix_args (commandline --current-process --tokenize --cut-at-cursor)
+ # --cut-at-cursor with --tokenize removes the current token so we need to add it separately.
+ # https://github.com/fish-shell/fish-shell/issues/7375
+ # Can be an empty string.
+ set -l current_token (commandline --current-token --cut-at-cursor)
+
+ # Nix wants the index of the argv item to complete but the $nix_args variable
+ # also contains the program name (argv[0]) so we would need to subtract 1.
+ # But the variable also misses the current token so it cancels out.
+ set -l nix_arg_to_complete (count $nix_args)
+
+ env NIX_GET_COMPLETIONS=$nix_arg_to_complete $nix_args $current_token
+end
+
+function _nix_accepts_files
+ set -l response (_nix_complete)
+ # First line is either filenames or no-filenames.
+ test $response[1] = 'filenames'
+end
+
+function _nix
+ set -l response (_nix_complete)
+    # Skip the first line since it is handled by _nix_accepts_files.
+ # Tail lines each contain a command followed by a tab character and, optionally, a description.
+ # This is also the format fish expects.
+ string collect -- $response[2..-1]
+end
+
+# Disable file path completion if paths do not belong in the current context.
+complete --command nix --condition 'not _nix_accepts_files' --no-files
+
+complete --command nix --arguments '(_nix)'
diff --git a/misc/fish/local.mk b/misc/fish/local.mk
new file mode 100644
index 000000000..ece899fc3
--- /dev/null
+++ b/misc/fish/local.mk
@@ -0,0 +1 @@
+$(eval $(call install-file-as, $(d)/completion.fish, $(datarootdir)/fish/vendor_completions.d/nix.fish, 0644))
diff --git a/misc/launchd/local.mk b/misc/launchd/local.mk
index 0ba722efb..a39188fe6 100644
--- a/misc/launchd/local.mk
+++ b/misc/launchd/local.mk
@@ -1,4 +1,4 @@
-ifeq ($(OS), Darwin)
+ifdef HOST_DARWIN
$(eval $(call install-data-in, $(d)/org.nixos.nix-daemon.plist, $(prefix)/Library/LaunchDaemons))
diff --git a/misc/systemd/local.mk b/misc/systemd/local.mk
index 785db52a4..1fa037485 100644
--- a/misc/systemd/local.mk
+++ b/misc/systemd/local.mk
@@ -1,4 +1,4 @@
-ifeq ($(OS), Linux)
+ifdef HOST_LINUX
$(foreach n, nix-daemon.socket nix-daemon.service, $(eval $(call install-file-in, $(d)/$(n), $(prefix)/lib/systemd/system, 0644)))
diff --git a/misc/upstart/local.mk b/misc/upstart/local.mk
index 5071676dc..2fbfb29b9 100644
--- a/misc/upstart/local.mk
+++ b/misc/upstart/local.mk
@@ -1,4 +1,4 @@
-ifeq ($(OS), Linux)
+ifdef HOST_LINUX
$(foreach n, nix-daemon.conf, $(eval $(call install-file-in, $(d)/$(n), $(sysconfdir)/init, 0644)))
diff --git a/misc/zsh/completion.zsh b/misc/zsh/completion.zsh
index d4df6447e..a902e37dc 100644
--- a/misc/zsh/completion.zsh
+++ b/misc/zsh/completion.zsh
@@ -1,3 +1,5 @@
+#compdef nix
+
function _nix() {
local ifs_bk="$IFS"
local input=("${(Q)words[@]}")
@@ -18,4 +20,4 @@ function _nix() {
_describe 'nix' suggestions
}
-compdef _nix nix
+_nix "$@"
diff --git a/mk/lib.mk b/mk/lib.mk
index 975102531..92f0983d5 100644
--- a/mk/lib.mk
+++ b/mk/lib.mk
@@ -10,8 +10,25 @@ bin-scripts :=
noinst-scripts :=
man-pages :=
install-tests :=
-OS = $(shell uname -s)
+ifdef HOST_OS
+ HOST_KERNEL = $(firstword $(subst -, ,$(HOST_OS)))
+ ifeq ($(HOST_KERNEL), cygwin)
+ HOST_CYGWIN = 1
+ endif
+ ifeq ($(patsubst darwin%,,$(HOST_KERNEL)),)
+ HOST_DARWIN = 1
+ endif
+ ifeq ($(patsubst freebsd%,,$(HOST_KERNEL)),)
+ HOST_FREEBSD = 1
+ endif
+ ifeq ($(HOST_KERNEL), linux)
+ HOST_LINUX = 1
+ endif
+ ifeq ($(patsubst solaris%,,$(HOST_KERNEL)),)
+ HOST_SOLARIS = 1
+ endif
+endif
# Hack to define a literal space.
space :=
@@ -50,16 +67,16 @@ endif
BUILD_SHARED_LIBS ?= 1
ifeq ($(BUILD_SHARED_LIBS), 1)
- ifeq (CYGWIN,$(findstring CYGWIN,$(OS)))
+ ifdef HOST_CYGWIN
GLOBAL_CFLAGS += -U__STRICT_ANSI__ -D_GNU_SOURCE
GLOBAL_CXXFLAGS += -U__STRICT_ANSI__ -D_GNU_SOURCE
else
GLOBAL_CFLAGS += -fPIC
GLOBAL_CXXFLAGS += -fPIC
endif
- ifneq ($(OS), Darwin)
- ifneq ($(OS), SunOS)
- ifneq ($(OS), FreeBSD)
+ ifndef HOST_DARWIN
+ ifndef HOST_SOLARIS
+ ifndef HOST_FREEBSD
GLOBAL_LDFLAGS += -Wl,--no-copy-dt-needed-entries
endif
endif
diff --git a/mk/libraries.mk b/mk/libraries.mk
index 7c0e4f100..07bd54dab 100644
--- a/mk/libraries.mk
+++ b/mk/libraries.mk
@@ -1,9 +1,9 @@
libs-list :=
-ifeq ($(OS), Darwin)
+ifdef HOST_DARWIN
SO_EXT = dylib
else
- ifeq (CYGWIN,$(findstring CYGWIN,$(OS)))
+ ifdef HOST_CYGWIN
SO_EXT = dll
else
SO_EXT = so
@@ -59,7 +59,7 @@ define build-library
$(1)_OBJS := $$(addprefix $(buildprefix), $$(addsuffix .o, $$(basename $$(_srcs))))
_libs := $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_PATH))
- ifeq (CYGWIN,$(findstring CYGWIN,$(OS)))
+ ifdef HOST_CYGWIN
$(1)_INSTALL_DIR ?= $$(bindir)
else
$(1)_INSTALL_DIR ?= $$(libdir)
@@ -73,18 +73,18 @@ define build-library
ifeq ($(BUILD_SHARED_LIBS), 1)
ifdef $(1)_ALLOW_UNDEFINED
- ifeq ($(OS), Darwin)
+ ifdef HOST_DARWIN
$(1)_LDFLAGS += -undefined suppress -flat_namespace
endif
else
- ifneq ($(OS), Darwin)
- ifneq (CYGWIN,$(findstring CYGWIN,$(OS)))
+ ifndef HOST_DARWIN
+ ifndef HOST_CYGWIN
$(1)_LDFLAGS += -Wl,-z,defs
endif
endif
endif
- ifneq ($(OS), Darwin)
+ ifndef HOST_DARWIN
$(1)_LDFLAGS += -Wl,-soname=$$($(1)_NAME).$(SO_EXT)
endif
@@ -93,7 +93,7 @@ define build-library
$$($(1)_PATH): $$($(1)_OBJS) $$(_libs) | $$(_d)/
$$(trace-ld) $(CXX) -o $$(abspath $$@) -shared $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$($(1)_LDFLAGS_PROPAGATED) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE)) $$($(1)_LDFLAGS_UNINSTALLED)
- ifneq ($(OS), Darwin)
+ ifndef HOST_DARWIN
$(1)_LDFLAGS_USE += -Wl,-rpath,$$(abspath $$(_d))
endif
$(1)_LDFLAGS_USE += -L$$(_d) -l$$(patsubst lib%,%,$$(strip $$($(1)_NAME)))
@@ -108,7 +108,7 @@ define build-library
$$(trace-ld) $(CXX) -o $$@ -shared $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$($(1)_LDFLAGS_PROPAGATED) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE_INSTALLED))
$(1)_LDFLAGS_USE_INSTALLED += -L$$(DESTDIR)$$($(1)_INSTALL_DIR) -l$$(patsubst lib%,%,$$(strip $$($(1)_NAME)))
- ifneq ($(OS), Darwin)
+ ifndef HOST_DARWIN
ifeq ($(SET_RPATH_TO_LIBS), 1)
$(1)_LDFLAGS_USE_INSTALLED += -Wl,-rpath,$$($(1)_INSTALL_DIR)
else
diff --git a/nix-rust/local.mk b/nix-rust/local.mk
index 9650cdf93..538244594 100644
--- a/nix-rust/local.mk
+++ b/nix-rust/local.mk
@@ -11,12 +11,12 @@ libnixrust_INSTALL_PATH := $(libdir)/libnixrust.$(SO_EXT)
libnixrust_LDFLAGS_USE := -L$(d)/target/$(RUST_DIR) -lnixrust
libnixrust_LDFLAGS_USE_INSTALLED := -L$(libdir) -lnixrust
-ifeq ($(OS), Linux)
+ifdef HOST_LINUX
libnixrust_LDFLAGS_USE += -ldl
libnixrust_LDFLAGS_USE_INSTALLED += -ldl
endif
-ifeq ($(OS), Darwin)
+ifdef HOST_DARWIN
libnixrust_BUILD_FLAGS = NIX_LDFLAGS="-undefined dynamic_lookup"
else
libnixrust_LDFLAGS_USE += -Wl,-rpath,$(abspath $(d)/target/$(RUST_DIR))
@@ -31,7 +31,7 @@ $(libnixrust_PATH): $(call rwildcard, $(d)/src, *.rs) $(d)/Cargo.toml
$(libnixrust_INSTALL_PATH): $(libnixrust_PATH)
$(target-gen) cp $^ $@
-ifeq ($(OS), Darwin)
+ifdef HOST_DARWIN
install_name_tool -id $@ $@
endif
@@ -40,7 +40,7 @@ clean: clean-rust
clean-rust:
$(suppress) rm -rfv nix-rust/target
-ifneq ($(OS), Darwin)
+ifndef HOST_DARWIN
check: rust-tests
rust-tests:
diff --git a/perl/Makefile.config.in b/perl/Makefile.config.in
index eccfbd9f6..d856de3ad 100644
--- a/perl/Makefile.config.in
+++ b/perl/Makefile.config.in
@@ -1,3 +1,4 @@
+HOST_OS = @host_os@
CC = @CC@
CFLAGS = @CFLAGS@
CXX = @CXX@
diff --git a/perl/configure.ac b/perl/configure.ac
index 85183c005..eb65ac17b 100644
--- a/perl/configure.ac
+++ b/perl/configure.ac
@@ -7,6 +7,8 @@ CXXFLAGS=
AC_PROG_CC
AC_PROG_CXX
+AC_CANONICAL_HOST
+
# Use 64-bit file system calls so that we can support files > 2 GiB.
AC_SYS_LARGEFILE
diff --git a/perl/local.mk b/perl/local.mk
index b13d4c0d6..0eae651d8 100644
--- a/perl/local.mk
+++ b/perl/local.mk
@@ -28,7 +28,7 @@ Store_CXXFLAGS = \
Store_LDFLAGS := $(SODIUM_LIBS) $(NIX_LIBS)
-ifeq (CYGWIN,$(findstring CYGWIN,$(OS)))
+ifdef HOST_CYGWIN
archlib = $(shell perl -E 'use Config; print $$Config{archlib};')
libperl = $(shell perl -E 'use Config; print $$Config{libperl};')
Store_LDFLAGS += $(shell find ${archlib} -name ${libperl})
diff --git a/scripts/install.in b/scripts/install.in
index 39016d161..ffc1f2785 100755
--- a/scripts/install.in
+++ b/scripts/install.in
@@ -40,13 +40,23 @@ case "$(uname -s).$(uname -m)" in
path=@tarballPath_aarch64-linux@
system=aarch64-linux
;;
+ Linux.armv6l_linux)
+ hash=@tarballHash_armv6l-linux@
+ path=@tarballPath_armv6l-linux@
+ system=armv6l-linux
+ ;;
+ Linux.armv7l_linux)
+ hash=@tarballHash_armv7l-linux@
+ path=@tarballPath_armv7l-linux@
+ system=armv7l-linux
+ ;;
Darwin.x86_64)
hash=@tarballHash_x86_64-darwin@
path=@tarballPath_x86_64-darwin@
system=x86_64-darwin
;;
Darwin.arm64|Darwin.aarch64)
- hash=@binaryTarball_aarch64-darwin@
+ hash=@tarballHash_aarch64-darwin@
path=@tarballPath_aarch64-darwin@
system=aarch64-darwin
;;
diff --git a/src/build-remote/build-remote.cc b/src/build-remote/build-remote.cc
index 57f2cd32d..0904f2ce9 100644
--- a/src/build-remote/build-remote.cc
+++ b/src/build-remote/build-remote.cc
@@ -277,7 +277,16 @@ connected:
auto drv = store->readDerivation(*drvPath);
auto outputHashes = staticOutputHashes(*store, drv);
- drv.inputSrcs = store->parseStorePathSet(inputs);
+
+    // Hijack the input paths of the derivation to include all the paths
+ // that come from the `inputDrvs` set.
+ // We don’t do that for the derivations whose `inputDrvs` is empty
+ // because
+ // 1. It’s not needed
+ // 2. Changing the `inputSrcs` set changes the associated output ids,
+    //    which breaks CA derivations
+ if (!drv.inputDrvs.empty())
+ drv.inputSrcs = store->parseStorePathSet(inputs);
auto result = sshStore->buildDerivation(*drvPath, drv);
diff --git a/src/libcmd/command.cc b/src/libcmd/command.cc
index 569c4b9e4..6777b23be 100644
--- a/src/libcmd/command.cc
+++ b/src/libcmd/command.cc
@@ -188,7 +188,7 @@ void MixProfile::updateProfile(const BuiltPaths & buildables)
}
if (result.size() != 1)
- throw Error("'--profile' requires that the arguments produce a single store path, but there are %d", result.size());
+ throw UsageError("'--profile' requires that the arguments produce a single store path, but there are %d", result.size());
updateProfile(result[0]);
}
diff --git a/src/libcmd/installables.cc b/src/libcmd/installables.cc
index fe52912cf..658b415f3 100644
--- a/src/libcmd/installables.cc
+++ b/src/libcmd/installables.cc
@@ -171,14 +171,50 @@ Strings SourceExprCommand::getDefaultFlakeAttrPathPrefixes()
void SourceExprCommand::completeInstallable(std::string_view prefix)
{
- if (file) return; // FIXME
+ if (file) {
+ evalSettings.pureEval = false;
+ auto state = getEvalState();
+ Expr *e = state->parseExprFromFile(
+ resolveExprPath(state->checkSourcePath(lookupFileArg(*state, *file)))
+ );
- completeFlakeRefWithFragment(
- getEvalState(),
- lockFlags,
- getDefaultFlakeAttrPathPrefixes(),
- getDefaultFlakeAttrPaths(),
- prefix);
+ Value root;
+ state->eval(e, root);
+
+ auto autoArgs = getAutoArgs(*state);
+
+ std::string prefix_ = std::string(prefix);
+ auto sep = prefix_.rfind('.');
+ std::string searchWord;
+ if (sep != std::string::npos) {
+ searchWord = prefix_.substr(sep, std::string::npos);
+ prefix_ = prefix_.substr(0, sep);
+ } else {
+ searchWord = prefix_;
+ prefix_ = "";
+ }
+
+ Value &v1(*findAlongAttrPath(*state, prefix_, *autoArgs, root).first);
+ state->forceValue(v1);
+ Value v2;
+ state->autoCallFunction(*autoArgs, v1, v2);
+
+ if (v2.type() == nAttrs) {
+ for (auto & i : *v2.attrs) {
+ std::string name = i.name;
+ if (name.find(searchWord) == 0) {
+ completions->add(i.name);
+ }
+ }
+ }
+ } else {
+ completeFlakeRefWithFragment(
+ getEvalState(),
+ lockFlags,
+ getDefaultFlakeAttrPathPrefixes(),
+ getDefaultFlakeAttrPaths(),
+ prefix);
+ }
}
void completeFlakeRefWithFragment(
@@ -573,10 +609,10 @@ InstallableFlake::getCursors(EvalState & state)
std::shared_ptr<flake::LockedFlake> InstallableFlake::getLockedFlake() const
{
+ flake::LockFlags lockFlagsApplyConfig = lockFlags;
+ lockFlagsApplyConfig.applyNixConfig = true;
if (!_lockedFlake) {
-        _lockedFlake = std::make_shared<flake::LockedFlake>(lockFlake(*state, flakeRef, lockFlags));
- _lockedFlake->flake.config.apply();
- // FIXME: send new config to the daemon.
+        _lockedFlake = std::make_shared<flake::LockedFlake>(lockFlake(*state, flakeRef, lockFlagsApplyConfig));
}
return _lockedFlake;
}
diff --git a/src/libexpr/attr-path.cc b/src/libexpr/attr-path.cc
index 9dd557205..867eb13a5 100644
--- a/src/libexpr/attr-path.cc
+++ b/src/libexpr/attr-path.cc
@@ -19,7 +19,7 @@ static Strings parseAttrPath(std::string_view s)
++i;
while (1) {
if (i == s.end())
- throw Error("missing closing quote in selection path '%1%'", s);
+ throw ParseError("missing closing quote in selection path '%1%'", s);
if (*i == '"') break;
cur.push_back(*i++);
}
@@ -116,14 +116,14 @@ Pos findDerivationFilename(EvalState & state, Value & v, std::string what)
auto colon = pos.rfind(':');
if (colon == std::string::npos)
- throw Error("cannot parse meta.position attribute '%s'", pos);
+ throw ParseError("cannot parse meta.position attribute '%s'", pos);
std::string filename(pos, 0, colon);
unsigned int lineno;
try {
lineno = std::stoi(std::string(pos, colon + 1));
} catch (std::invalid_argument & e) {
- throw Error("cannot parse line number '%s'", pos);
+ throw ParseError("cannot parse line number '%s'", pos);
}
Symbol file = state.symbols.create(filename);
diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc
index ef9f8efca..c3206a577 100644
--- a/src/libexpr/eval.cc
+++ b/src/libexpr/eval.cc
@@ -64,7 +64,11 @@ static char * dupStringWithLen(const char * s, size_t size)
RootValue allocRootValue(Value * v)
{
+#if HAVE_BOEHMGC
    return std::allocate_shared<Value *>(traceable_allocator<Value *>(), v);
+#else
+    return std::make_shared<Value *>(v);
+#endif
}
@@ -233,22 +237,34 @@ static void * oomHandler(size_t requested)
}
class BoehmGCStackAllocator : public StackAllocator {
- boost::coroutines2::protected_fixedsize_stack stack {
- // We allocate 8 MB, the default max stack size on NixOS.
- // A smaller stack might be quicker to allocate but reduces the stack
- // depth available for source filter expressions etc.
-        std::max(boost::context::stack_traits::default_size(), static_cast<std::size_t>(8 * 1024 * 1024))
+ boost::coroutines2::protected_fixedsize_stack stack {
+ // We allocate 8 MB, the default max stack size on NixOS.
+ // A smaller stack might be quicker to allocate but reduces the stack
+ // depth available for source filter expressions etc.
+        std::max(boost::context::stack_traits::default_size(), static_cast<std::size_t>(8 * 1024 * 1024))
};
+ // This is specific to boost::coroutines2::protected_fixedsize_stack.
+ // The stack protection page is included in sctx.size, so we have to
+ // subtract one page size from the stack size.
+ std::size_t pfss_usable_stack_size(boost::context::stack_context &sctx) {
+ return sctx.size - boost::context::stack_traits::page_size();
+ }
+
public:
boost::context::stack_context allocate() override {
auto sctx = stack.allocate();
-        GC_add_roots(static_cast<char *>(sctx.sp) - sctx.size, sctx.sp);
+
+ // Stacks generally start at a high address and grow to lower addresses.
+ // Architectures that do the opposite are rare; in fact so rare that
+ // boost_routine does not implement it.
+ // So we subtract the stack size.
+        GC_add_roots(static_cast<char *>(sctx.sp) - pfss_usable_stack_size(sctx), sctx.sp);
return sctx;
}
void deallocate(boost::context::stack_context sctx) override {
-        GC_remove_roots(static_cast<char *>(sctx.sp) - sctx.size, sctx.sp);
+        GC_remove_roots(static_cast<char *>(sctx.sp) - pfss_usable_stack_size(sctx), sctx.sp);
stack.deallocate(sctx);
}
@@ -908,7 +924,7 @@ void EvalState::evalFile(const Path & path_, Value & v, bool mustBeTrivial)
// computation.
if (mustBeTrivial &&
        !(dynamic_cast<ExprAttrs *>(e)))
- throw Error("file '%s' must be an attribute set", path);
+ throw EvalError("file '%s' must be an attribute set", path);
eval(e, v);
} catch (Error & e) {
addErrorTrace(e, "while evaluating the file '%1%':", path2);
diff --git a/src/libexpr/flake/flake.cc b/src/libexpr/flake/flake.cc
index 2e94490d4..e266bc36d 100644
--- a/src/libexpr/flake/flake.cc
+++ b/src/libexpr/flake/flake.cc
@@ -298,6 +298,11 @@ LockedFlake lockFlake(
auto flake = getFlake(state, topRef, lockFlags.useRegistries, flakeCache);
+ if (lockFlags.applyNixConfig) {
+ flake.config.apply();
+ // FIXME: send new config to the daemon.
+ }
+
try {
// FIXME: symlink attack
@@ -359,7 +364,12 @@ LockedFlake lockFlake(
ancestors? */
auto i = overrides.find(inputPath);
bool hasOverride = i != overrides.end();
- if (hasOverride) overridesUsed.insert(inputPath);
+ if (hasOverride) {
+ overridesUsed.insert(inputPath);
+ // Respect the “flakeness” of the input even if we
+ // override it
+ i->second.isFlake = input2.isFlake;
+ }
auto & input = hasOverride ? i->second : input2;
/* Resolve 'follows' later (since it may refer to an input
diff --git a/src/libexpr/flake/flake.hh b/src/libexpr/flake/flake.hh
index d17d5e183..4479e95db 100644
--- a/src/libexpr/flake/flake.hh
+++ b/src/libexpr/flake/flake.hh
@@ -104,6 +104,10 @@ struct LockFlags
references like 'nixpkgs'. */
bool useRegistries = true;
+ /* Whether to apply flake's nixConfig attribute to the configuration */
+
+ bool applyNixConfig = false;
+
/* Whether mutable flake references (i.e. those without a Git
revision or similar) without a corresponding lock are
allowed. Mutable flake references with a lock are always
diff --git a/src/libexpr/local.mk b/src/libexpr/local.mk
index c40abfb78..1aed8e152 100644
--- a/src/libexpr/local.mk
+++ b/src/libexpr/local.mk
@@ -16,7 +16,7 @@ libexpr_CXXFLAGS += -I src/libutil -I src/libstore -I src/libfetchers -I src/lib
libexpr_LIBS = libutil libstore libfetchers
libexpr_LDFLAGS = -lboost_context
-ifeq ($(OS), Linux)
+ifdef HOST_LINUX
libexpr_LDFLAGS += -ldl
endif
diff --git a/src/libexpr/primops/fetchMercurial.cc b/src/libexpr/primops/fetchMercurial.cc
index 4830ebec3..3f88ccb91 100644
--- a/src/libexpr/primops/fetchMercurial.cc
+++ b/src/libexpr/primops/fetchMercurial.cc
@@ -62,6 +62,7 @@ static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * ar
fetchers::Attrs attrs;
attrs.insert_or_assign("type", "hg");
attrs.insert_or_assign("url", url.find("://") != std::string::npos ? url : "file://" + url);
+ attrs.insert_or_assign("name", name);
if (ref) attrs.insert_or_assign("ref", *ref);
if (rev) attrs.insert_or_assign("rev", rev->gitRev());
auto input = fetchers::Input::fromAttrs(std::move(attrs));
diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc
index b8b99d4fa..730db84ed 100644
--- a/src/libexpr/primops/fetchTree.cc
+++ b/src/libexpr/primops/fetchTree.cc
@@ -7,6 +7,7 @@
#include
#include
+#include <regex>
namespace nix {
@@ -60,10 +61,19 @@ void emitTreeAttrs(
v.attrs->sort();
}
-std::string fixURI(std::string uri, EvalState &state)
+std::string fixURI(std::string uri, EvalState &state, const std::string & defaultScheme = "file")
{
state.checkURI(uri);
- return uri.find("://") != std::string::npos ? uri : "file://" + uri;
+ return uri.find("://") != std::string::npos ? uri : defaultScheme + "://" + uri;
+}
+
+std::string fixURIForGit(std::string uri, EvalState & state)
+{
+ static std::regex scp_uri("([^/].*)@(.*):(.*)");
+ if (uri[0] != '/' && std::regex_match(uri, scp_uri))
+ return fixURI(std::regex_replace(uri, scp_uri, "$1@$2/$3"), state, "ssh");
+ else
+ return fixURI(uri, state);
}
void addURI(EvalState &state, fetchers::Attrs &attrs, Symbol name, std::string v)
@@ -72,13 +82,18 @@ void addURI(EvalState &state, fetchers::Attrs &attrs, Symbol name, std::string v
attrs.emplace(name, n == "url" ? fixURI(v, state) : v);
}
+struct FetchTreeParams {
+ bool emptyRevFallback = false;
+ bool allowNameArgument = false;
+};
+
static void fetchTree(
EvalState &state,
const Pos &pos,
Value **args,
Value &v,
    const std::optional<std::string> type,
- bool emptyRevFallback = false
+ const FetchTreeParams & params = FetchTreeParams{}
) {
fetchers::Input input;
PathSet context;
@@ -119,17 +134,25 @@ static void fetchTree(
.errPos = pos
});
+ if (!params.allowNameArgument)
+ if (auto nameIter = attrs.find("name"); nameIter != attrs.end())
+ throw Error({
+ .msg = hintfmt("attribute 'name' isn’t supported in call to 'fetchTree'"),
+ .errPos = pos
+ });
+
+
input = fetchers::Input::fromAttrs(std::move(attrs));
} else {
- auto url = fixURI(state.coerceToString(pos, *args[0], context, false, false), state);
+ auto url = state.coerceToString(pos, *args[0], context, false, false);
if (type == "git") {
fetchers::Attrs attrs;
attrs.emplace("type", "git");
- attrs.emplace("url", url);
+ attrs.emplace("url", fixURIForGit(url, state));
input = fetchers::Input::fromAttrs(std::move(attrs));
} else {
- input = fetchers::Input::fromURL(url);
+ input = fetchers::Input::fromURL(fixURI(url, state));
}
}
@@ -144,13 +167,13 @@ static void fetchTree(
if (state.allowedPaths)
state.allowedPaths->insert(tree.actualPath);
- emitTreeAttrs(state, tree, input2, v, emptyRevFallback);
+ emitTreeAttrs(state, tree, input2, v, params.emptyRevFallback);
}
static void prim_fetchTree(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
settings.requireExperimentalFeature("flakes");
- fetchTree(state, pos, args, v, std::nullopt);
+ fetchTree(state, pos, args, v, std::nullopt, FetchTreeParams { .allowNameArgument = false });
}
// FIXME: document
@@ -292,7 +315,7 @@ static RegisterPrimOp primop_fetchTarball({
static void prim_fetchGit(EvalState &state, const Pos &pos, Value **args, Value &v)
{
- fetchTree(state, pos, args, v, "git", true);
+ fetchTree(state, pos, args, v, "git", FetchTreeParams { .emptyRevFallback = true, .allowNameArgument = true });
}
static RegisterPrimOp primop_fetchGit({
diff --git a/src/libfetchers/fetchers.cc b/src/libfetchers/fetchers.cc
index 916e0a8e8..e158d914b 100644
--- a/src/libfetchers/fetchers.cc
+++ b/src/libfetchers/fetchers.cc
@@ -200,12 +200,17 @@ void Input::markChangedFile(
return scheme->markChangedFile(*this, file, commitMsg);
}
+std::string Input::getName() const
+{
+ return maybeGetStrAttr(attrs, "name").value_or("source");
+}
+
StorePath Input::computeStorePath(Store & store) const
{
auto narHash = getNarHash();
if (!narHash)
throw Error("cannot compute store path for mutable input '%s'", to_string());
- return store.makeFixedOutputPath(FileIngestionMethod::Recursive, *narHash, "source");
+ return store.makeFixedOutputPath(FileIngestionMethod::Recursive, *narHash, getName());
}
std::string Input::getType() const
diff --git a/src/libfetchers/fetchers.hh b/src/libfetchers/fetchers.hh
index a72cfafa4..c839cf23b 100644
--- a/src/libfetchers/fetchers.hh
+++ b/src/libfetchers/fetchers.hh
@@ -81,6 +81,8 @@ public:
std::string_view file,
        std::optional<std::string> commitMsg) const;
+ std::string getName() const;
+
StorePath computeStorePath(Store & store) const;
// Convenience functions for common attributes.
diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc
index d8e0dbe0a..bc1930138 100644
--- a/src/libfetchers/git.cc
+++ b/src/libfetchers/git.cc
@@ -60,7 +60,7 @@ struct GitInputScheme : InputScheme
if (maybeGetStrAttr(attrs, "type") != "git") return {};
for (auto & [name, value] : attrs)
- if (name != "type" && name != "url" && name != "ref" && name != "rev" && name != "shallow" && name != "submodules" && name != "lastModified" && name != "revCount" && name != "narHash" && name != "allRefs")
+ if (name != "type" && name != "url" && name != "ref" && name != "rev" && name != "shallow" && name != "submodules" && name != "lastModified" && name != "revCount" && name != "narHash" && name != "allRefs" && name != "name")
throw Error("unsupported Git input attribute '%s'", name);
parseURL(getStrAttr(attrs, "url"));
@@ -167,10 +167,10 @@ struct GitInputScheme : InputScheme
    std::pair<Tree, Input> fetch(ref<Store> store, const Input & _input) override
{
- auto name = "source";
-
Input input(_input);
+ std::string name = input.getName();
+
bool shallow = maybeGetBoolAttr(input.attrs, "shallow").value_or(false);
bool submodules = maybeGetBoolAttr(input.attrs, "submodules").value_or(false);
bool allRefs = maybeGetBoolAttr(input.attrs, "allRefs").value_or(false);
@@ -270,7 +270,7 @@ struct GitInputScheme : InputScheme
return files.count(file);
};
- auto storePath = store->addToStore("source", actualUrl, FileIngestionMethod::Recursive, htSHA256, filter);
+ auto storePath = store->addToStore(input.getName(), actualUrl, FileIngestionMethod::Recursive, htSHA256, filter);
// FIXME: maybe we should use the timestamp of the last
// modified dirty file?
diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc
index 8352ef02d..298c05f9a 100644
--- a/src/libfetchers/github.cc
+++ b/src/libfetchers/github.cc
@@ -207,7 +207,7 @@ struct GitArchiveInputScheme : InputScheme
auto url = getDownloadUrl(input);
- auto [tree, lastModified] = downloadTarball(store, url.url, "source", true, url.headers);
+ auto [tree, lastModified] = downloadTarball(store, url.url, input.getName(), true, url.headers);
input.attrs.insert_or_assign("lastModified", uint64_t(lastModified));
diff --git a/src/libfetchers/mercurial.cc b/src/libfetchers/mercurial.cc
index 0eb401e10..efb4ee8db 100644
--- a/src/libfetchers/mercurial.cc
+++ b/src/libfetchers/mercurial.cc
@@ -74,7 +74,7 @@ struct MercurialInputScheme : InputScheme
if (maybeGetStrAttr(attrs, "type") != "hg") return {};
for (auto & [name, value] : attrs)
- if (name != "type" && name != "url" && name != "ref" && name != "rev" && name != "revCount" && name != "narHash")
+ if (name != "type" && name != "url" && name != "ref" && name != "rev" && name != "revCount" && name != "narHash" && name != "name")
throw Error("unsupported Mercurial input attribute '%s'", name);
parseURL(getStrAttr(attrs, "url"));
@@ -147,10 +147,10 @@ struct MercurialInputScheme : InputScheme
    std::pair<Tree, Input> fetch(ref<Store> store, const Input & _input) override
{
- auto name = "source";
-
Input input(_input);
+ auto name = input.getName();
+
auto [isLocal, actualUrl_] = getActualUrl(input);
auto actualUrl = actualUrl_; // work around clang bug
@@ -193,7 +193,7 @@ struct MercurialInputScheme : InputScheme
return files.count(file);
};
- auto storePath = store->addToStore("source", actualUrl, FileIngestionMethod::Recursive, htSHA256, filter);
+ auto storePath = store->addToStore(input.getName(), actualUrl, FileIngestionMethod::Recursive, htSHA256, filter);
return {
Tree(store->toRealPath(storePath), std::move(storePath)),
diff --git a/src/libfetchers/registry.cc b/src/libfetchers/registry.cc
index 74376adc0..f35359d4b 100644
--- a/src/libfetchers/registry.cc
+++ b/src/libfetchers/registry.cc
std::shared_ptr<Registry> getUserRegistry()
return userRegistry;
}
+std::shared_ptr<Registry> getCustomRegistry(const Path & p)
+{
+ static auto customRegistry =
+ Registry::read(p, Registry::Custom);
+ return customRegistry;
+}
+
static std::shared_ptr<Registry> flagRegistry =
    std::make_shared<Registry>(Registry::Flag);
diff --git a/src/libfetchers/registry.hh b/src/libfetchers/registry.hh
index 1077af020..260a2c460 100644
--- a/src/libfetchers/registry.hh
+++ b/src/libfetchers/registry.hh
@@ -14,6 +14,7 @@ struct Registry
User = 1,
System = 2,
Global = 3,
+ Custom = 4,
};
RegistryType type;
@@ -48,6 +49,8 @@ typedef std::vector<std::shared_ptr<Registry>> Registries;
std::shared_ptr<Registry> getUserRegistry();
+std::shared_ptr<Registry> getCustomRegistry(const Path & p);
+
Path getUserRegistryPath();
Registries getRegistries(ref store);
diff --git a/src/libfetchers/tarball.cc b/src/libfetchers/tarball.cc
index 257465bae..031ccc5f7 100644
--- a/src/libfetchers/tarball.cc
+++ b/src/libfetchers/tarball.cc
@@ -196,7 +196,7 @@ struct TarballInputScheme : InputScheme
if (maybeGetStrAttr(attrs, "type") != "tarball") return {};
for (auto & [name, value] : attrs)
- if (name != "type" && name != "url" && /* name != "hash" && */ name != "narHash")
+ if (name != "type" && name != "url" && /* name != "hash" && */ name != "narHash" && name != "name")
throw Error("unsupported tarball input attribute '%s'", name);
Input input;
@@ -226,7 +226,7 @@ struct TarballInputScheme : InputScheme
    std::pair<Tree, Input> fetch(ref<Store> store, const Input & input) override
{
- auto tree = downloadTarball(store, getStrAttr(input.attrs, "url"), "source", false).first;
+ auto tree = downloadTarball(store, getStrAttr(input.attrs, "url"), input.getName(), false).first;
return {std::move(tree), input};
}
};
diff --git a/src/libmain/progress-bar.cc b/src/libmain/progress-bar.cc
index 15354549a..b2a6e2a82 100644
--- a/src/libmain/progress-bar.cc
+++ b/src/libmain/progress-bar.cc
@@ -484,7 +484,7 @@ Logger * makeProgressBar(bool printBuildLogs)
{
return new ProgressBar(
printBuildLogs,
- isatty(STDERR_FILENO) && getEnv("TERM").value_or("dumb") != "dumb"
+ shouldANSI()
);
}
diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc
index 4c49f3cab..3cbcf5199 100644
--- a/src/libstore/build/derivation-goal.cc
+++ b/src/libstore/build/derivation-goal.cc
@@ -738,6 +738,63 @@ void DerivationGoal::cleanupPostOutputsRegisteredModeNonCheck()
{
}
+void runPostBuildHook(
+ Store & store,
+ Logger & logger,
+ const StorePath & drvPath,
+ StorePathSet outputPaths
+)
+{
+ auto hook = settings.postBuildHook;
+ if (hook == "")
+ return;
+
+ Activity act(logger, lvlInfo, actPostBuildHook,
+ fmt("running post-build-hook '%s'", settings.postBuildHook),
+ Logger::Fields{store.printStorePath(drvPath)});
+ PushActivity pact(act.id);
+    std::map<std::string, std::string> hookEnvironment = getEnv();
+
+ hookEnvironment.emplace("DRV_PATH", store.printStorePath(drvPath));
+ hookEnvironment.emplace("OUT_PATHS", chomp(concatStringsSep(" ", store.printStorePathSet(outputPaths))));
+
+ RunOptions opts(settings.postBuildHook, {});
+ opts.environment = hookEnvironment;
+
+ struct LogSink : Sink {
+ Activity & act;
+ std::string currentLine;
+
+ LogSink(Activity & act) : act(act) { }
+
+ void operator() (std::string_view data) override {
+ for (auto c : data) {
+ if (c == '\n') {
+ flushLine();
+ } else {
+ currentLine += c;
+ }
+ }
+ }
+
+ void flushLine() {
+ act.result(resPostBuildLogLine, currentLine);
+ currentLine.clear();
+ }
+
+ ~LogSink() {
+ if (currentLine != "") {
+ currentLine += '\n';
+ flushLine();
+ }
+ }
+ };
+ LogSink sink(act);
+
+ opts.standardOut = &sink;
+ opts.mergeStderrToStdout = true;
+ runProgram2(opts);
+}
void DerivationGoal::buildDone()
{
@@ -803,57 +860,15 @@ void DerivationGoal::buildDone()
being valid. */
registerOutputs();
- if (settings.postBuildHook != "") {
- Activity act(*logger, lvlInfo, actPostBuildHook,
- fmt("running post-build-hook '%s'", settings.postBuildHook),
- Logger::Fields{worker.store.printStorePath(drvPath)});
- PushActivity pact(act.id);
- StorePathSet outputPaths;
- for (auto i : drv->outputs) {
- outputPaths.insert(finalOutputs.at(i.first));
- }
-        std::map<std::string, std::string> hookEnvironment = getEnv();
-
- hookEnvironment.emplace("DRV_PATH", worker.store.printStorePath(drvPath));
- hookEnvironment.emplace("OUT_PATHS", chomp(concatStringsSep(" ", worker.store.printStorePathSet(outputPaths))));
-
- RunOptions opts(settings.postBuildHook, {});
- opts.environment = hookEnvironment;
-
- struct LogSink : Sink {
- Activity & act;
- std::string currentLine;
-
- LogSink(Activity & act) : act(act) { }
-
- void operator() (std::string_view data) override {
- for (auto c : data) {
- if (c == '\n') {
- flushLine();
- } else {
- currentLine += c;
- }
- }
- }
-
- void flushLine() {
- act.result(resPostBuildLogLine, currentLine);
- currentLine.clear();
- }
-
- ~LogSink() {
- if (currentLine != "") {
- currentLine += '\n';
- flushLine();
- }
- }
- };
- LogSink sink(act);
-
- opts.standardOut = &sink;
- opts.mergeStderrToStdout = true;
- runProgram2(opts);
- }
+ StorePathSet outputPaths;
+ for (auto & [_, path] : finalOutputs)
+ outputPaths.insert(path);
+ runPostBuildHook(
+ worker.store,
+ *logger,
+ drvPath,
+ outputPaths
+ );
if (buildMode == bmCheck) {
cleanupPostOutputsRegisteredModeCheck();
@@ -909,6 +924,8 @@ void DerivationGoal::resolvedFinished() {
auto resolvedHashes = staticOutputHashes(worker.store, *resolvedDrv);
+ StorePathSet outputPaths;
+
// `wantedOutputs` might be empty, which means “all the outputs”
auto realWantedOutputs = wantedOutputs;
if (realWantedOutputs.empty())
@@ -926,8 +943,10 @@ void DerivationGoal::resolvedFinished() {
auto newRealisation = *realisation;
newRealisation.id = DrvOutput{initialOutputs.at(wantedOutput).outputHash, wantedOutput};
newRealisation.signatures.clear();
+ newRealisation.dependentRealisations = drvOutputReferences(worker.store, *drv, realisation->outPath);
signRealisation(newRealisation);
worker.store.registerDrvOutput(newRealisation);
+ outputPaths.insert(realisation->outPath);
} else {
// If we don't have a realisation, then it must mean that something
// failed when building the resolved drv
@@ -935,6 +954,13 @@ void DerivationGoal::resolvedFinished() {
}
}
+ runPostBuildHook(
+ worker.store,
+ *logger,
+ drvPath,
+ outputPaths
+ );
+
// This is potentially a bit fishy in terms of error reporting. Not sure
// how to do it in a cleaner way
amDone(nrFailed == 0 ? ecSuccess : ecFailed, ex);
diff --git a/src/libstore/build/drv-output-substitution-goal.cc b/src/libstore/build/drv-output-substitution-goal.cc
index a5ac4c49d..be270d079 100644
--- a/src/libstore/build/drv-output-substitution-goal.cc
+++ b/src/libstore/build/drv-output-substitution-goal.cc
@@ -17,6 +17,13 @@ DrvOutputSubstitutionGoal::DrvOutputSubstitutionGoal(const DrvOutput& id, Worker
void DrvOutputSubstitutionGoal::init()
{
trace("init");
+
+ /* If the derivation already exists, we’re done */
+ if (worker.store.queryRealisation(id)) {
+ amDone(ecSuccess);
+ return;
+ }
+
subs = settings.useSubstitutes ? getDefaultSubstituters() : std::list[>();
tryNext();
}
@@ -53,6 +60,26 @@ void DrvOutputSubstitutionGoal::tryNext()
return;
}
+ for (const auto & [depId, depPath] : outputInfo->dependentRealisations) {
+ if (depId != id) {
+ if (auto localOutputInfo = worker.store.queryRealisation(depId);
+ localOutputInfo && localOutputInfo->outPath != depPath) {
+ warn(
+ "substituter '%s' has an incompatible realisation for '%s', ignoring.\n"
+ "Local: %s\n"
+ "Remote: %s",
+ sub->getUri(),
+ depId.to_string(),
+ worker.store.printStorePath(localOutputInfo->outPath),
+ worker.store.printStorePath(depPath)
+ );
+ tryNext();
+ return;
+ }
+ addWaitee(worker.makeDrvOutputSubstitutionGoal(depId));
+ }
+ }
+
addWaitee(worker.makePathSubstitutionGoal(outputInfo->outPath));
if (waitees.empty()) outPathValid();
diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc
index 259134eba..02f902666 100644
--- a/src/libstore/build/local-derivation-goal.cc
+++ b/src/libstore/build/local-derivation-goal.cc
@@ -1248,13 +1248,18 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual Lo
    std::optional<const Realisation> queryRealisation(const DrvOutput & id) override
// XXX: This should probably be allowed if the realisation corresponds to
// an allowed derivation
- { throw Error("queryRealisation"); }
+ {
+ if (!goal.isAllowed(id))
+ throw InvalidPath("cannot query an unknown output id '%s' in recursive Nix", id.to_string());
+ return next->queryRealisation(id);
+ }
    void buildPaths(const std::vector<DerivedPath> & paths, BuildMode buildMode) override
{
if (buildMode != bmNormal) throw Error("unsupported build mode");
StorePathSet newPaths;
+        std::set<Realisation> newRealisations;
for (auto & req : paths) {
if (!goal.isAllowed(req))
@@ -1267,16 +1272,28 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual Lo
            auto p = std::get_if<DerivedPath::Built>(&path);
if (!p) continue;
auto & bfd = *p;
+ auto drv = readDerivation(bfd.drvPath);
+ auto drvHashes = staticOutputHashes(*this, drv);
auto outputs = next->queryDerivationOutputMap(bfd.drvPath);
for (auto & [outputName, outputPath] : outputs)
- if (wantOutput(outputName, bfd.outputs))
+ if (wantOutput(outputName, bfd.outputs)) {
newPaths.insert(outputPath);
+ if (settings.isExperimentalFeatureEnabled("ca-derivations")) {
+ auto thisRealisation = next->queryRealisation(
+ DrvOutput{drvHashes.at(outputName), outputName}
+ );
+ assert(thisRealisation);
+ newRealisations.insert(*thisRealisation);
+ }
+ }
}
StorePathSet closure;
next->computeFSClosure(newPaths, closure);
for (auto & path : closure)
goal.addDependency(path);
+ for (auto & real : Realisation::closure(*next, newRealisations))
+ goal.addedDrvOutputs.insert(real.id);
}
BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv,
@@ -2379,6 +2396,7 @@ void LocalDerivationGoal::registerOutputs()
floating CA derivations and hash-mismatching fixed-output
derivations. */
PathLocks dynamicOutputLock;
+ dynamicOutputLock.setDeletion(true);
auto optFixedPath = output.path(worker.store, drv->name, outputName);
if (!optFixedPath ||
worker.store.printStorePath(*optFixedPath) != finalDestPath)
diff --git a/src/libstore/build/local-derivation-goal.hh b/src/libstore/build/local-derivation-goal.hh
index d30be2351..088a57209 100644
--- a/src/libstore/build/local-derivation-goal.hh
+++ b/src/libstore/build/local-derivation-goal.hh
@@ -108,6 +108,9 @@ struct LocalDerivationGoal : public DerivationGoal
/* Paths that were added via recursive Nix calls. */
StorePathSet addedPaths;
+ /* Realisations that were added via recursive Nix calls. */
+    std::set<DrvOutput> addedDrvOutputs;
+
/* Recursive Nix calls are only allowed to build or realize paths
in the original input closure or added via a recursive Nix call
(so e.g. you can't do 'nix-store -r /nix/store/' where
@@ -116,6 +119,11 @@ struct LocalDerivationGoal : public DerivationGoal
{
return inputPaths.count(path) || addedPaths.count(path);
}
+ bool isAllowed(const DrvOutput & id)
+ {
+ return addedDrvOutputs.count(id);
+ }
+
bool isAllowed(const DerivedPath & req);
friend struct RestrictedStore;
diff --git a/src/libstore/ca-specific-schema.sql b/src/libstore/ca-specific-schema.sql
index 20ee046a1..08af0cc1f 100644
--- a/src/libstore/ca-specific-schema.sql
+++ b/src/libstore/ca-specific-schema.sql
@@ -3,10 +3,19 @@
-- is enabled
create table if not exists Realisations (
+ id integer primary key autoincrement not null,
drvPath text not null,
outputName text not null, -- symbolic output id, usually "out"
outputPath integer not null,
signatures text, -- space-separated list
- primary key (drvPath, outputName),
foreign key (outputPath) references ValidPaths(id) on delete cascade
);
+
+create index if not exists IndexRealisations on Realisations(drvPath, outputName);
+
+create table if not exists RealisationsRefs (
+ referrer integer not null,
+ realisationReference integer,
+ foreign key (referrer) references Realisations(id) on delete cascade,
+ foreign key (realisationReference) references Realisations(id) on delete restrict
+);
diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc
index 28c175a51..d7c7f8e1d 100644
--- a/src/libstore/local-store.cc
+++ b/src/libstore/local-store.cc
@@ -53,12 +53,15 @@ struct LocalStore::State::Stmts {
SQLiteStmt InvalidatePath;
SQLiteStmt AddDerivationOutput;
SQLiteStmt RegisterRealisedOutput;
+ SQLiteStmt UpdateRealisedOutput;
SQLiteStmt QueryValidDerivers;
SQLiteStmt QueryDerivationOutputs;
SQLiteStmt QueryRealisedOutput;
SQLiteStmt QueryAllRealisedOutputs;
SQLiteStmt QueryPathFromHashPart;
SQLiteStmt QueryValidPaths;
+ SQLiteStmt QueryRealisationReferences;
+ SQLiteStmt AddRealisationReference;
};
int getSchema(Path schemaPath)
@@ -76,7 +79,7 @@ int getSchema(Path schemaPath)
void migrateCASchema(SQLite& db, Path schemaPath, AutoCloseFD& lockFd)
{
- const int nixCASchemaVersion = 1;
+ const int nixCASchemaVersion = 2;
int curCASchema = getSchema(schemaPath);
if (curCASchema != nixCASchemaVersion) {
if (curCASchema > nixCASchemaVersion) {
@@ -94,7 +97,39 @@ void migrateCASchema(SQLite& db, Path schemaPath, AutoCloseFD& lockFd)
#include "ca-specific-schema.sql.gen.hh"
;
db.exec(schema);
+ curCASchema = nixCASchemaVersion;
}
+
+ if (curCASchema < 2) {
+ SQLiteTxn txn(db);
+ // Ugly little sql dance to add a new `id` column and make it the primary key
+ db.exec(R"(
+ create table Realisations2 (
+ id integer primary key autoincrement not null,
+ drvPath text not null,
+ outputName text not null, -- symbolic output id, usually "out"
+ outputPath integer not null,
+ signatures text, -- space-separated list
+ foreign key (outputPath) references ValidPaths(id) on delete cascade
+ );
+ insert into Realisations2 (drvPath, outputName, outputPath, signatures)
+ select drvPath, outputName, outputPath, signatures from Realisations;
+ drop table Realisations;
+ alter table Realisations2 rename to Realisations;
+ )");
+ db.exec(R"(
+ create index if not exists IndexRealisations on Realisations(drvPath, outputName);
+
+ create table if not exists RealisationsRefs (
+ referrer integer not null,
+ realisationReference integer,
+ foreign key (referrer) references Realisations(id) on delete cascade,
+ foreign key (realisationReference) references Realisations(id) on delete restrict
+ );
+ )");
+ txn.commit();
+ }
+
writeFile(schemaPath, fmt("%d", nixCASchemaVersion));
lockFile(lockFd.get(), ltRead, true);
}
@@ -311,9 +346,18 @@ LocalStore::LocalStore(const Params & params)
values (?, ?, (select id from ValidPaths where path = ?), ?)
;
)");
+ state->stmts->UpdateRealisedOutput.create(state->db,
+ R"(
+ update Realisations
+ set signatures = ?
+ where
+ drvPath = ? and
+ outputName = ?
+ ;
+ )");
state->stmts->QueryRealisedOutput.create(state->db,
R"(
- select Output.path, Realisations.signatures from Realisations
+ select Realisations.id, Output.path, Realisations.signatures from Realisations
inner join ValidPaths as Output on Output.id = Realisations.outputPath
where drvPath = ? and outputName = ?
;
@@ -325,6 +369,19 @@ LocalStore::LocalStore(const Params & params)
where drvPath = ?
;
)");
+ state->stmts->QueryRealisationReferences.create(state->db,
+ R"(
+ select drvPath, outputName from Realisations
+ join RealisationsRefs on realisationReference = Realisations.id
+ where referrer = ?;
+ )");
+ state->stmts->AddRealisationReference.create(state->db,
+ R"(
+ insert or replace into RealisationsRefs (referrer, realisationReference)
+ values (
+ ?,
+ (select id from Realisations where drvPath = ? and outputName = ?));
+ )");
}
}
@@ -661,14 +718,54 @@ void LocalStore::registerDrvOutput(const Realisation & info, CheckSigsFlag check
void LocalStore::registerDrvOutput(const Realisation & info)
{
settings.requireExperimentalFeature("ca-derivations");
- auto state(_state.lock());
    retrySQLite<void>([&]() {
- state->stmts->RegisterRealisedOutput.use()
- (info.id.strHash())
- (info.id.outputName)
- (printStorePath(info.outPath))
- (concatStringsSep(" ", info.signatures))
- .exec();
+ auto state(_state.lock());
+ if (auto oldR = queryRealisation_(*state, info.id)) {
+ if (info.isCompatibleWith(*oldR)) {
+ auto combinedSignatures = oldR->signatures;
+ combinedSignatures.insert(info.signatures.begin(),
+ info.signatures.end());
+ state->stmts->UpdateRealisedOutput.use()
+ (concatStringsSep(" ", combinedSignatures))
+ (info.id.strHash())
+ (info.id.outputName)
+ .exec();
+ } else {
+ throw Error("Trying to register a realisation of '%s', but we already "
+ "have another one locally.\n"
+ "Local: %s\n"
+ "Remote: %s",
+ info.id.to_string(),
+ printStorePath(oldR->outPath),
+ printStorePath(info.outPath)
+ );
+ }
+ } else {
+ state->stmts->RegisterRealisedOutput.use()
+ (info.id.strHash())
+ (info.id.outputName)
+ (printStorePath(info.outPath))
+ (concatStringsSep(" ", info.signatures))
+ .exec();
+ }
+ uint64_t myId = state->db.getLastInsertedRowId();
+ for (auto & [outputId, depPath] : info.dependentRealisations) {
+ auto localRealisation = queryRealisationCore_(*state, outputId);
+ if (!localRealisation)
+ throw Error("unable to register the derivation '%s' as it "
+ "depends on the non existent '%s'",
+ info.id.to_string(), outputId.to_string());
+ if (localRealisation->second.outPath != depPath)
+ throw Error("unable to register the derivation '%s' as it "
+ "depends on a realisation of '%s' that doesn’t"
+ "match what we have locally",
+ info.id.to_string(), outputId.to_string());
+ state->stmts->AddRealisationReference.use()
+ (myId)
+ (outputId.strHash())
+ (outputId.outputName)
+ .exec();
+ }
});
}
@@ -1679,22 +1776,68 @@ void LocalStore::createUser(const std::string & userName, uid_t userId)
}
}
-std::optional<const Realisation> LocalStore::queryRealisation(
-    const DrvOutput& id) {
-    typedef std::optional<const Realisation> Ret;
-    return retrySQLite<Ret>([&]() -> Ret {
- auto state(_state.lock());
- auto use(state->stmts->QueryRealisedOutput.use()(id.strHash())(
- id.outputName));
- if (!use.next())
- return std::nullopt;
- auto outputPath = parseStorePath(use.getStr(0));
-        auto signatures = tokenizeString<StringSet>(use.getStr(1));
- return Ret{Realisation{
- .id = id, .outPath = outputPath, .signatures = signatures}};
- });
+std::optional<std::pair<int64_t, Realisation>> LocalStore::queryRealisationCore_(
+ LocalStore::State & state,
+ const DrvOutput & id)
+{
+ auto useQueryRealisedOutput(
+ state.stmts->QueryRealisedOutput.use()
+ (id.strHash())
+ (id.outputName));
+ if (!useQueryRealisedOutput.next())
+ return std::nullopt;
+ auto realisationDbId = useQueryRealisedOutput.getInt(0);
+ auto outputPath = parseStorePath(useQueryRealisedOutput.getStr(1));
+ auto signatures =
+        tokenizeString<StringSet>(useQueryRealisedOutput.getStr(2));
+
+ return {{
+ realisationDbId,
+ Realisation{
+ .id = id,
+ .outPath = outputPath,
+ .signatures = signatures,
+ }
+ }};
}
+std::optional<const Realisation> LocalStore::queryRealisation_(
+ LocalStore::State & state,
+ const DrvOutput & id)
+{
+ auto maybeCore = queryRealisationCore_(state, id);
+ if (!maybeCore)
+ return std::nullopt;
+ auto [realisationDbId, res] = *maybeCore;
+
+    std::map<DrvOutput, StorePath> dependentRealisations;
+ auto useRealisationRefs(
+ state.stmts->QueryRealisationReferences.use()
+ (realisationDbId));
+ while (useRealisationRefs.next()) {
+ auto depId = DrvOutput {
+ Hash::parseAnyPrefixed(useRealisationRefs.getStr(0)),
+ useRealisationRefs.getStr(1),
+ };
+ auto dependentRealisation = queryRealisationCore_(state, depId);
+ assert(dependentRealisation); // Enforced by the db schema
+ auto outputPath = dependentRealisation->second.outPath;
+ dependentRealisations.insert({depId, outputPath});
+ }
+
+ res.dependentRealisations = dependentRealisations;
+
+ return { res };
+}
+
+std::optional<const Realisation>
+LocalStore::queryRealisation(const DrvOutput & id)
+{
+    return retrySQLite<std::optional<const Realisation>>([&]() {
+ auto state(_state.lock());
+ return queryRealisation_(*state, id);
+ });
+}
FixedOutputHash LocalStore::hashCAPath(
const FileIngestionMethod & method, const HashType & hashType,
diff --git a/src/libstore/local-store.hh b/src/libstore/local-store.hh
index 15c7fc306..a01d48c4b 100644
--- a/src/libstore/local-store.hh
+++ b/src/libstore/local-store.hh
@@ -203,6 +203,8 @@ public:
void registerDrvOutput(const Realisation & info, CheckSigsFlag checkSigs) override;
void cacheDrvOutputMapping(State & state, const uint64_t deriver, const string & outputName, const StorePath & output);
+    std::optional<const Realisation> queryRealisation_(State & state, const DrvOutput & id);
+    std::optional<std::pair<int64_t, Realisation>> queryRealisationCore_(State & state, const DrvOutput & id);
    std::optional<const Realisation> queryRealisation(const DrvOutput&) override;
private:
diff --git a/src/libstore/local.mk b/src/libstore/local.mk
index b6652984c..2fc334a82 100644
--- a/src/libstore/local.mk
+++ b/src/libstore/local.mk
@@ -9,11 +9,11 @@ libstore_SOURCES := $(wildcard $(d)/*.cc $(d)/builtins/*.cc $(d)/build/*.cc)
libstore_LIBS = libutil
libstore_LDFLAGS = $(SQLITE3_LIBS) -lbz2 $(LIBCURL_LIBS) $(SODIUM_LIBS) -pthread
-ifeq ($(OS), Linux)
+ifdef HOST_LINUX
libstore_LDFLAGS += -ldl
endif
-ifeq ($(OS), Darwin)
+ifdef HOST_DARWIN
libstore_FILES = sandbox-defaults.sb sandbox-minimal.sb sandbox-network.sb
endif
@@ -23,7 +23,7 @@ ifeq ($(ENABLE_S3), 1)
libstore_LDFLAGS += -laws-cpp-sdk-transfer -laws-cpp-sdk-s3 -laws-cpp-sdk-core
endif
-ifeq ($(OS), SunOS)
+ifdef HOST_SOLARIS
libstore_LDFLAGS += -lsocket
endif
diff --git a/src/libstore/machines.cc b/src/libstore/machines.cc
index b42e5e434..9843ccf04 100644
--- a/src/libstore/machines.cc
+++ b/src/libstore/machines.cc
@@ -16,13 +16,18 @@ Machine::Machine(decltype(storeUri) storeUri,
decltype(mandatoryFeatures) mandatoryFeatures,
decltype(sshPublicHostKey) sshPublicHostKey) :
storeUri(
- // Backwards compatibility: if the URI is a hostname,
- // prepend ssh://.
+ // Backwards compatibility: if the URI is schemeless, is not a path,
+ // and is not one of the special store connection words, prepend
+ // ssh://.
storeUri.find("://") != std::string::npos
- || hasPrefix(storeUri, "local")
- || hasPrefix(storeUri, "remote")
- || hasPrefix(storeUri, "auto")
- || hasPrefix(storeUri, "/")
+ || storeUri.find("/") != std::string::npos
+ || storeUri == "auto"
+ || storeUri == "daemon"
+ || storeUri == "local"
+ || hasPrefix(storeUri, "auto?")
+ || hasPrefix(storeUri, "daemon?")
+ || hasPrefix(storeUri, "local?")
+ || hasPrefix(storeUri, "?")
? storeUri
: "ssh://" + storeUri),
systemTypes(systemTypes),
diff --git a/src/libstore/misc.cc b/src/libstore/misc.cc
index bc5fd968c..b4929b445 100644
--- a/src/libstore/misc.cc
+++ b/src/libstore/misc.cc
@@ -29,9 +29,9 @@ void Store::computeFSClosure(const StorePathSet & startPaths,
res.insert(i);
if (includeDerivers && path.isDerivation())
- for (auto& i : queryDerivationOutputs(path))
- if (isValidPath(i) && queryPathInfo(i)->deriver == path)
- res.insert(i);
+ for (auto& [_, maybeOutPath] : queryPartialDerivationOutputMap(path))
+ if (maybeOutPath && isValidPath(*maybeOutPath))
+ res.insert(*maybeOutPath);
return res;
};
else
@@ -44,9 +44,9 @@ void Store::computeFSClosure(const StorePathSet & startPaths,
res.insert(ref);
if (includeOutputs && path.isDerivation())
- for (auto& i : queryDerivationOutputs(path))
- if (isValidPath(i))
- res.insert(i);
+ for (auto& [_, maybeOutPath] : queryPartialDerivationOutputMap(path))
+ if (maybeOutPath && isValidPath(*maybeOutPath))
+ res.insert(*maybeOutPath);
if (includeDerivers && info->deriver && isValidPath(*info->deriver))
res.insert(*info->deriver);
@@ -254,5 +254,44 @@ StorePaths Store::topoSortPaths(const StorePathSet & paths)
}});
}
+std::map<DrvOutput, StorePath> drvOutputReferences(
+    const std::set<Realisation> & inputRealisations,
+    const StorePathSet & pathReferences)
+{
+    std::map<DrvOutput, StorePath> res;
+ for (const auto & input : inputRealisations) {
+ if (pathReferences.count(input.outPath)) {
+ res.insert({input.id, input.outPath});
+ }
+ }
+
+ return res;
+}
+
+std::map<DrvOutput, StorePath> drvOutputReferences(
+    Store & store,
+    const Derivation & drv,
+    const StorePath & outputPath)
+{
+    std::set<Realisation> inputRealisations;
+
+ for (const auto& [inputDrv, outputNames] : drv.inputDrvs) {
+ auto outputHashes =
+ staticOutputHashes(store, store.readDerivation(inputDrv));
+ for (const auto& outputName : outputNames) {
+ auto thisRealisation = store.queryRealisation(
+ DrvOutput{outputHashes.at(outputName), outputName});
+ if (!thisRealisation)
+ throw Error(
+ "output '%s' of derivation '%s' isn’t built", outputName,
+ store.printStorePath(inputDrv));
+ inputRealisations.insert(*thisRealisation);
+ }
+ }
+
+ auto info = store.queryPathInfo(outputPath);
+
+ return drvOutputReferences(Realisation::closure(store, inputRealisations), info->references);
+}
}
diff --git a/src/libstore/parsed-derivations.cc b/src/libstore/parsed-derivations.cc
index ee6bbb045..345235d66 100644
--- a/src/libstore/parsed-derivations.cc
+++ b/src/libstore/parsed-derivations.cc
@@ -93,6 +93,8 @@ StringSet ParsedDerivation::getRequiredSystemFeatures() const
StringSet res;
for (auto & i : getStringsAttr("requiredSystemFeatures").value_or(Strings()))
res.insert(i);
+ if (!derivationHasKnownOutputPaths(drv.type()))
+ res.insert("ca-derivations");
return res;
}
diff --git a/src/libstore/realisation.cc b/src/libstore/realisation.cc
index 638065547..eadec594c 100644
--- a/src/libstore/realisation.cc
+++ b/src/libstore/realisation.cc
@@ -1,5 +1,6 @@
#include "realisation.hh"
#include "store-api.hh"
+#include "closure.hh"
#include <nlohmann/json.hpp>
namespace nix {
@@ -21,11 +22,52 @@ std::string DrvOutput::to_string() const {
return strHash() + "!" + outputName;
}
+std::set<Realisation> Realisation::closure(Store & store, const std::set<Realisation> & startOutputs)
+{
+    std::set<Realisation> res;
+ Realisation::closure(store, startOutputs, res);
+ return res;
+}
+
+void Realisation::closure(Store & store, const std::set<Realisation> & startOutputs, std::set<Realisation> & res)
+{
+    auto getDeps = [&](const Realisation& current) -> std::set<Realisation> {
+        std::set<Realisation> res;
+ for (auto& [currentDep, _] : current.dependentRealisations) {
+ if (auto currentRealisation = store.queryRealisation(currentDep))
+ res.insert(*currentRealisation);
+ else
+ throw Error(
+ "Unrealised derivation '%s'", currentDep.to_string());
+ }
+ return res;
+ };
+
+    computeClosure<Realisation>(
+        startOutputs, res,
+        [&](const Realisation& current,
+            std::function<void(std::promise<std::set<Realisation>>&)>
+                processEdges) {
+            std::promise<std::set<Realisation>> promise;
+ try {
+ auto res = getDeps(current);
+ promise.set_value(res);
+ } catch (...) {
+ promise.set_exception(std::current_exception());
+ }
+ return processEdges(promise);
+ });
+}
+
nlohmann::json Realisation::toJSON() const {
+ auto jsonDependentRealisations = nlohmann::json::object();
+ for (auto & [depId, depOutPath] : dependentRealisations)
+ jsonDependentRealisations.emplace(depId.to_string(), depOutPath.to_string());
return nlohmann::json{
{"id", id.to_string()},
{"outPath", outPath.to_string()},
{"signatures", signatures},
+ {"dependentRealisations", jsonDependentRealisations},
};
}
@@ -51,10 +93,16 @@ Realisation Realisation::fromJSON(
if (auto signaturesIterator = json.find("signatures"); signaturesIterator != json.end())
signatures.insert(signaturesIterator->begin(), signaturesIterator->end());
+    std::map<DrvOutput, StorePath> dependentRealisations;
+    if (auto jsonDependencies = json.find("dependentRealisations"); jsonDependencies != json.end())
+        for (auto & [jsonDepId, jsonDepOutPath] : jsonDependencies->get<std::map<std::string, std::string>>())
+ dependentRealisations.insert({DrvOutput::parse(jsonDepId), StorePath(jsonDepOutPath)});
+
return Realisation{
.id = DrvOutput::parse(getField("id")),
.outPath = StorePath(getField("outPath")),
.signatures = signatures,
+ .dependentRealisations = dependentRealisations,
};
}
@@ -92,6 +140,16 @@ StorePath RealisedPath::path() const {
return std::visit([](auto && arg) { return arg.getPath(); }, raw);
}
+bool Realisation::isCompatibleWith(const Realisation & other) const
+{
+ assert (id == other.id);
+ if (outPath == other.outPath) {
+ assert(dependentRealisations == other.dependentRealisations);
+ return true;
+ }
+ return false;
+}
+
void RealisedPath::closure(
Store& store,
const RealisedPath::Set& startPaths,
diff --git a/src/libstore/realisation.hh b/src/libstore/realisation.hh
index f5049c9e9..05d2bc44f 100644
--- a/src/libstore/realisation.hh
+++ b/src/libstore/realisation.hh
@@ -28,6 +28,14 @@ struct Realisation {
StringSet signatures;
+ /**
+ * The realisations that are required for the current one to be valid.
+ *
+ * When importing this realisation, the store will first check that all its
+ * dependencies exist, and map to the correct output path
+ */
+    std::map<DrvOutput, StorePath> dependentRealisations;
+
nlohmann::json toJSON() const;
static Realisation fromJSON(const nlohmann::json& json, const std::string& whence);
@@ -36,6 +44,11 @@ struct Realisation {
bool checkSignature(const PublicKeys & publicKeys, const std::string & sig) const;
size_t checkSignatures(const PublicKeys & publicKeys) const;
+    static std::set<Realisation> closure(Store &, const std::set<Realisation> &);
+    static void closure(Store &, const std::set<Realisation> &, std::set<Realisation> & res);
+
+ bool isCompatibleWith(const Realisation & other) const;
+
StorePath getPath() const { return outPath; }
GENERATE_CMP(Realisation, me->id, me->outPath);
diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc
index cfdfb5269..18b9f3256 100644
--- a/src/libstore/store-api.cc
+++ b/src/libstore/store-api.cc
@@ -337,6 +337,13 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
return info;
}
+StringSet StoreConfig::getDefaultSystemFeatures()
+{
+ auto res = settings.systemFeatures.get();
+ if (settings.isExperimentalFeatureEnabled("ca-derivations"))
+ res.insert("ca-derivations");
+ return res;
+}
Store::Store(const Params & params)
: StoreConfig(params)
@@ -816,20 +823,39 @@ std::map<StorePath, StorePath> copyPaths(ref<Store> srcStore, ref<Store> dstStor
RepairFlag repair, CheckSigsFlag checkSigs, SubstituteFlag substitute)
{
StorePathSet storePaths;
-    std::set<Realisation> realisations;
+    std::set<Realisation> toplevelRealisations;
    for (auto & path : paths) {
        storePaths.insert(path.path());
        if (auto realisation = std::get_if<Realisation>(&path.raw)) {
settings.requireExperimentalFeature("ca-derivations");
- realisations.insert(*realisation);
+ toplevelRealisations.insert(*realisation);
}
}
auto pathsMap = copyPaths(srcStore, dstStore, storePaths, repair, checkSigs, substitute);
+
+ ThreadPool pool;
+
try {
- for (auto & realisation : realisations) {
- dstStore->registerDrvOutput(realisation, checkSigs);
- }
- } catch (MissingExperimentalFeature & e) {
+ // Copy the realisation closure
+        processGraph<Realisation>(
+            pool, Realisation::closure(*srcStore, toplevelRealisations),
+            [&](const Realisation& current) -> std::set<Realisation> {
+                std::set<Realisation> children;
+ for (const auto& [drvOutput, _] : current.dependentRealisations) {
+ auto currentChild = srcStore->queryRealisation(drvOutput);
+ if (!currentChild)
+ throw Error(
+ "Incomplete realisation closure: '%s' is a "
+ "dependency of '%s' but isn’t registered",
+ drvOutput.to_string(), current.id.to_string());
+ children.insert(*currentChild);
+ }
+ return children;
+ },
+ [&](const Realisation& current) -> void {
+ dstStore->registerDrvOutput(current, checkSigs);
+ });
+ } catch (MissingExperimentalFeature& e) {
        // Don't fail if the remote doesn't support CA derivations as it might
// not be within our control to change that, and we might still want
// to at least copy the output paths.
diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh
index 7415cb54c..1b7645488 100644
--- a/src/libstore/store-api.hh
+++ b/src/libstore/store-api.hh
@@ -180,6 +180,8 @@ struct StoreConfig : public Config
StoreConfig() = delete;
+ StringSet getDefaultSystemFeatures();
+
virtual ~StoreConfig() { }
virtual const std::string name() = 0;
@@ -196,7 +198,7 @@ struct StoreConfig : public Config
    Setting<bool> wantMassQuery{this, false, "want-mass-query", "whether this substituter can be queried efficiently for path validity"};
-    Setting<StringSet> systemFeatures{this, settings.systemFeatures,
+    Setting<StringSet> systemFeatures{this, getDefaultSystemFeatures(),
"system-features",
"Optional features that the system this store builds on implements (like \"kvm\")."};
@@ -869,4 +871,9 @@ std::pair<std::string, Store::Params> splitUriAndParams(const std::string & uri)
std::optional<ContentAddress> getDerivationCA(const BasicDerivation & drv);
+std::map<DrvOutput, StorePath> drvOutputReferences(
+ Store & store,
+ const Derivation & drv,
+ const StorePath & outputPath);
+
}
diff --git a/src/libutil/comparator.hh b/src/libutil/comparator.hh
index 0315dc506..eecd5b819 100644
--- a/src/libutil/comparator.hh
+++ b/src/libutil/comparator.hh
@@ -25,6 +25,8 @@
}
#define GENERATE_EQUAL(args...) GENERATE_ONE_CMP(==, args)
#define GENERATE_LEQ(args...) GENERATE_ONE_CMP(<, args)
+#define GENERATE_NEQ(args...) GENERATE_ONE_CMP(!=, args)
#define GENERATE_CMP(args...) \
GENERATE_EQUAL(args) \
- GENERATE_LEQ(args)
+ GENERATE_LEQ(args) \
+ GENERATE_NEQ(args)
diff --git a/src/libutil/logging.cc b/src/libutil/logging.cc
index d2e801175..6b9b850ca 100644
--- a/src/libutil/logging.cc
+++ b/src/libutil/logging.cc
@@ -46,7 +46,7 @@ public:
: printBuildLogs(printBuildLogs)
{
systemd = getEnv("IN_SYSTEMD") == "1";
- tty = isatty(STDERR_FILENO);
+ tty = shouldANSI();
}
bool isVerbose() override {
diff --git a/src/libutil/url.cc b/src/libutil/url.cc
index c1bab866c..f6232d255 100644
--- a/src/libutil/url.cc
+++ b/src/libutil/url.cc
@@ -32,7 +32,7 @@ ParsedURL parseURL(const std::string & url)
auto isFile = scheme.find("file") != std::string::npos;
if (authority && *authority != "" && isFile)
- throw Error("file:// URL '%s' has unexpected authority '%s'",
+ throw BadURL("file:// URL '%s' has unexpected authority '%s'",
url, *authority);
if (isFile && path.empty())
diff --git a/src/libutil/util.cc b/src/libutil/util.cc
index 7e57fd7ca..ee9f17228 100644
--- a/src/libutil/util.cc
+++ b/src/libutil/util.cc
@@ -1372,6 +1372,12 @@ void ignoreException()
}
}
+bool shouldANSI()
+{
+ return isatty(STDERR_FILENO)
+ && getEnv("TERM").value_or("dumb") != "dumb"
+ && !getEnv("NO_COLOR").has_value();
+}
std::string filterANSIEscapes(const std::string & s, bool filterAll, unsigned int width)
{
diff --git a/src/libutil/util.hh b/src/libutil/util.hh
index f84d0fb31..a8dd4bd47 100644
--- a/src/libutil/util.hh
+++ b/src/libutil/util.hh
@@ -482,6 +482,9 @@ constexpr char treeLast[] = "└───";
constexpr char treeLine[] = "│ ";
constexpr char treeNull[] = " ";
+/* Determine whether ANSI escape sequences are appropriate for the
+ present output. */
+bool shouldANSI();
/* Truncate a string to 'width' printable characters. If 'filterAll'
is true, all ANSI escape sequences are filtered out. Otherwise,
diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc
index 519cbd5bb..29be46cf5 100755
--- a/src/nix-build/nix-build.cc
+++ b/src/nix-build/nix-build.cc
@@ -392,6 +392,12 @@ static void main_nix_build(int argc, char * * argv)
if (dryRun) return;
+ if (settings.isExperimentalFeatureEnabled("ca-derivations")) {
+ auto resolvedDrv = drv.tryResolve(*store);
+ assert(resolvedDrv && "Successfully resolved the derivation");
+ drv = *resolvedDrv;
+ }
+
// Set the environment.
auto env = getEnv();
diff --git a/src/nix/develop.cc b/src/nix/develop.cc
index ee782c4ec..26f53db09 100644
--- a/src/nix/develop.cc
+++ b/src/nix/develop.cc
@@ -8,7 +8,7 @@
#include "affinity.hh"
#include "progress-bar.hh"
-#include <regex>
+#include <nlohmann/json.hpp>
using namespace nix;
@@ -25,94 +25,142 @@ static DevelopSettings developSettings;
static GlobalConfig::Register rDevelopSettings(&developSettings);
-struct Var
-{
- bool exported = true;
- bool associative = false;
- std::string quoted; // quoted string or array
-};
-
struct BuildEnvironment
{
-    std::map<std::string, Var> env;
- std::string bashFunctions;
-};
+ struct String
+ {
+ bool exported;
+ std::string value;
-BuildEnvironment readEnvironment(const Path & path)
-{
- BuildEnvironment res;
+ bool operator == (const String & other) const
+ {
+ return exported == other.exported && value == other.value;
+ }
+ };
-    std::set<std::string> exported;
+    using Array = std::vector<std::string>;
-    debug("reading environment file '%s'", path);
+    using Associative = std::map<std::string, std::string>;
-    auto file = readFile(path);
+    using Value = std::variant<String, Array, Associative>;
-    auto pos = file.cbegin();
+    std::map<std::string, Value> vars;
+    std::map<std::string, std::string> bashFunctions;
- static std::string varNameRegex =
- R"re((?:[a-zA-Z_][a-zA-Z0-9_]*))re";
+ static BuildEnvironment fromJSON(std::string_view in)
+ {
+ BuildEnvironment res;
- static std::string simpleStringRegex =
- R"re((?:[a-zA-Z0-9_/:\.\-\+=]*))re";
+        std::set<std::string> exported;
- static std::string dquotedStringRegex =
- R"re((?:\$?"(?:[^"\\]|\\[$`"\\\n])*"))re";
+ auto json = nlohmann::json::parse(in);
- static std::string squotedStringRegex =
- R"re((?:\$?(?:'(?:[^'\\]|\\[abeEfnrtv\\'"?])*'|\\')+))re";
-
- static std::string indexedArrayRegex =
- R"re((?:\(( *\[[0-9]+\]="(?:[^"\\]|\\.)*")*\)))re";
-
- static std::regex declareRegex(
- "^declare -a?x (" + varNameRegex + ")(=(" +
- dquotedStringRegex + "|" + indexedArrayRegex + "))?\n");
-
- static std::regex varRegex(
- "^(" + varNameRegex + ")=(" + simpleStringRegex + "|" + squotedStringRegex + "|" + indexedArrayRegex + ")\n");
-
- /* Note: we distinguish between an indexed and associative array
- using the space before the closing parenthesis. Will
- undoubtedly regret this some day. */
- static std::regex assocArrayRegex(
- "^(" + varNameRegex + ")=" + R"re((?:\(( *\[[^\]]+\]="(?:[^"\\]|\\.)*")* *\)))re" + "\n");
-
- static std::regex functionRegex(
- "^" + varNameRegex + " \\(\\) *\n");
-
- while (pos != file.end()) {
-
- std::smatch match;
-
- if (std::regex_search(pos, file.cend(), match, declareRegex, std::regex_constants::match_continuous)) {
- pos = match[0].second;
- exported.insert(match[1]);
+ for (auto & [name, info] : json["variables"].items()) {
+ std::string type = info["type"];
+ if (type == "var" || type == "exported")
+ res.vars.insert({name, BuildEnvironment::String { .exported = type == "exported", .value = info["value"] }});
+ else if (type == "array")
+ res.vars.insert({name, (Array) info["value"]});
+ else if (type == "associative")
+ res.vars.insert({name, (Associative) info["value"]});
}
- else if (std::regex_search(pos, file.cend(), match, varRegex, std::regex_constants::match_continuous)) {
- pos = match[0].second;
- res.env.insert({match[1], Var { .exported = exported.count(match[1]) > 0, .quoted = match[2] }});
+ for (auto & [name, def] : json["bashFunctions"].items()) {
+ res.bashFunctions.insert({name, def});
}
- else if (std::regex_search(pos, file.cend(), match, assocArrayRegex, std::regex_constants::match_continuous)) {
- pos = match[0].second;
- res.env.insert({match[1], Var { .associative = true, .quoted = match[2] }});
- }
-
- else if (std::regex_search(pos, file.cend(), match, functionRegex, std::regex_constants::match_continuous)) {
- res.bashFunctions = std::string(pos, file.cend());
- break;
- }
-
- else throw Error("shell environment '%s' has unexpected line '%s'",
- path, file.substr(pos - file.cbegin(), 60));
+ return res;
}
- res.env.erase("__output");
+ std::string toJSON() const
+ {
+ auto res = nlohmann::json::object();
- return res;
-}
+ auto vars2 = nlohmann::json::object();
+ for (auto & [name, value] : vars) {
+ auto info = nlohmann::json::object();
+            if (auto str = std::get_if<String>(&value)) {
+                info["type"] = str->exported ? "exported" : "var";
+                info["value"] = str->value;
+            }
+            else if (auto arr = std::get_if<Array>(&value)) {
+                info["type"] = "array";
+                info["value"] = *arr;
+            }
+            else if (auto arr = std::get_if<Associative>(&value)) {
+ info["type"] = "associative";
+ info["value"] = *arr;
+ }
+ vars2[name] = std::move(info);
+ }
+ res["variables"] = std::move(vars2);
+
+ res["bashFunctions"] = bashFunctions;
+
+ auto json = res.dump();
+
+ assert(BuildEnvironment::fromJSON(json) == *this);
+
+ return json;
+ }
+
+    void toBash(std::ostream & out, const std::set<std::string> & ignoreVars) const
+ {
+ for (auto & [name, value] : vars) {
+ if (!ignoreVars.count(name)) {
+                if (auto str = std::get_if<String>(&value)) {
+                    out << fmt("%s=%s\n", name, shellEscape(str->value));
+                    if (str->exported)
+                        out << fmt("export %s\n", name);
+                }
+                else if (auto arr = std::get_if<Array>(&value)) {
+                    out << "declare -a " << name << "=(";
+                    for (auto & s : *arr)
+                        out << shellEscape(s) << " ";
+                    out << ")\n";
+                }
+                else if (auto arr = std::get_if<Associative>(&value)) {
+ out << "declare -A " << name << "=(";
+ for (auto & [n, v] : *arr)
+ out << "[" << shellEscape(n) << "]=" << shellEscape(v) << " ";
+ out << ")\n";
+ }
+ }
+ }
+
+ for (auto & [name, def] : bashFunctions) {
+ out << name << " ()\n{\n" << def << "}\n";
+ }
+ }
+
+ static std::string getString(const Value & value)
+ {
+        if (auto str = std::get_if<String>(&value))
+ return str->value;
+ else
+ throw Error("bash variable is not a string");
+ }
+
+ static Array getStrings(const Value & value)
+ {
+        if (auto str = std::get_if<String>(&value))
+            return tokenizeString<Array>(str->value);
+        else if (auto arr = std::get_if<Array>(&value)) {
+            return *arr;
+        } else if (auto assoc = std::get_if<Associative>(&value)) {
+ Array assocKeys;
+ std::for_each(assoc->begin(), assoc->end(), [&](auto & n) { assocKeys.push_back(n.first); });
+ return assocKeys;
+ }
+ else
+ throw Error("bash variable is not a string or array");
+ }
+
+ bool operator == (const BuildEnvironment & other) const
+ {
+ return vars == other.vars && bashFunctions == other.bashFunctions;
+ }
+};
const static std::string getEnvSh =
#include "get-env.sh.gen.hh"
@@ -144,17 +192,26 @@ StorePath getDerivationEnvironment(ref<Store> store, const StorePath & drvPath)
/* Rehash and write the derivation. FIXME: would be nice to use
'buildDerivation', but that's privileged. */
drv.name += "-env";
- for (auto & output : drv.outputs) {
- output.second = { .output = DerivationOutputInputAddressed { .path = StorePath::dummy } };
- drv.env[output.first] = "";
- }
drv.inputSrcs.insert(std::move(getEnvShPath));
- Hash h = std::get<0>(hashDerivationModulo(*store, drv, true));
+ if (settings.isExperimentalFeatureEnabled("ca-derivations")) {
+ for (auto & output : drv.outputs) {
+ output.second = {
+ .output = DerivationOutputDeferred{},
+ };
+ drv.env[output.first] = hashPlaceholder(output.first);
+ }
+ } else {
+ for (auto & output : drv.outputs) {
+ output.second = { .output = DerivationOutputInputAddressed { .path = StorePath::dummy } };
+ drv.env[output.first] = "";
+ }
+ Hash h = std::get<0>(hashDerivationModulo(*store, drv, true));
- for (auto & output : drv.outputs) {
- auto outPath = store->makeOutputPath(output.first, h, drv.name);
- output.second = { .output = DerivationOutputInputAddressed { .path = outPath } };
- drv.env[output.first] = store->printStorePath(outPath);
+ for (auto & output : drv.outputs) {
+ auto outPath = store->makeOutputPath(output.first, h, drv.name);
+ output.second = { .output = DerivationOutputInputAddressed { .path = outPath } };
+ drv.env[output.first] = store->printStorePath(outPath);
+ }
}
auto shellDrvPath = writeDerivation(*store, drv);
@@ -162,8 +219,7 @@ StorePath getDerivationEnvironment(ref<Store> store, const StorePath & drvPath)
/* Build the derivation. */
store->buildPaths({DerivedPath::Built{shellDrvPath}});
- for (auto & [_0, outputAndOptPath] : drv.outputsAndOptPaths(*store)) {
- auto & [_1, optPath] = outputAndOptPath;
+ for (auto & [_0, optPath] : store->queryPartialDerivationOutputMap(shellDrvPath)) {
assert(optPath);
auto & outPath = *optPath;
assert(store->isValidPath(outPath));
@@ -177,19 +233,15 @@ StorePath getDerivationEnvironment(ref<Store> store, const StorePath & drvPath)
struct Common : InstallableCommand, MixProfile
{
-    std::set<string> ignoreVars{
+    std::set<std::string> ignoreVars{
"BASHOPTS",
- "EUID",
"HOME", // FIXME: don't ignore in pure mode?
- "HOSTNAME",
"NIX_BUILD_TOP",
"NIX_ENFORCE_PURITY",
"NIX_LOG_FD",
"NIX_REMOTE",
"PPID",
- "PWD",
"SHELLOPTS",
- "SHLVL",
"SSL_CERT_FILE", // FIXME: only want to ignore /no-cert-file.crt
"TEMP",
"TEMPDIR",
@@ -225,22 +277,10 @@ struct Common : InstallableCommand, MixProfile
out << "nix_saved_PATH=\"$PATH\"\n";
- for (auto & i : buildEnvironment.env) {
- if (!ignoreVars.count(i.first) && !hasPrefix(i.first, "BASH_")) {
- if (i.second.associative)
- out << fmt("declare -A %s=(%s)\n", i.first, i.second.quoted);
- else {
- out << fmt("%s=%s\n", i.first, i.second.quoted);
- if (i.second.exported)
- out << fmt("export %s\n", i.first);
- }
- }
- }
+ buildEnvironment.toBash(out, ignoreVars);
out << "PATH=\"$PATH:$nix_saved_PATH\"\n";
- out << buildEnvironment.bashFunctions << "\n";
-
out << "export NIX_BUILD_TOP=\"$(mktemp -d -t nix-shell.XXXXXX)\"\n";
for (auto & i : {"TMP", "TMPDIR", "TEMP", "TEMPDIR"})
out << fmt("export %s=\"$NIX_BUILD_TOP\"\n", i);
@@ -250,24 +290,16 @@ struct Common : InstallableCommand, MixProfile
auto script = out.str();
/* Substitute occurrences of output paths. */
- auto outputs = buildEnvironment.env.find("outputs");
- assert(outputs != buildEnvironment.env.end());
+ auto outputs = buildEnvironment.vars.find("outputs");
+ assert(outputs != buildEnvironment.vars.end());
// FIXME: properly unquote 'outputs'.
StringMap rewrites;
-        for (auto & outputName : tokenizeString<std::vector<std::string>>(replaceStrings(outputs->second.quoted, "'", ""))) {
- // Hacky way to obtain the key of an associate array. This is needed for strctured attrs where
- // `outputs` is an associative array. If the regex isn't matched, the non-structured-attrs behavior will
- // be used.
- std::regex ptrn(R"re(\[([A-z0-9]+)\]=.*)re");
- std::smatch match;
- if (std::regex_match(outputName, match, ptrn)) {
- outputName = match[1];
- }
- auto from = buildEnvironment.env.find(outputName);
- assert(from != buildEnvironment.env.end());
+ for (auto & outputName : BuildEnvironment::getStrings(outputs->second)) {
+ auto from = buildEnvironment.vars.find(outputName);
+ assert(from != buildEnvironment.vars.end());
// FIXME: unquote
- rewrites.insert({from->second.quoted, outputsDir + "/" + outputName});
+ rewrites.insert({BuildEnvironment::getString(from->second), outputsDir + "/" + outputName});
}
/* Substitute redirects. */
@@ -321,7 +353,9 @@ struct Common : InstallableCommand, MixProfile
updateProfile(shellOutPath);
- return {readEnvironment(strPath), strPath};
+ debug("reading environment file '%s'", strPath);
+
+ return {BuildEnvironment::fromJSON(readFile(strPath)), strPath};
}
};
@@ -443,13 +477,17 @@ struct CmdDevelop : Common, MixEnvironment
try {
auto state = getEvalState();
+ auto nixpkgsLockFlags = lockFlags;
+ nixpkgsLockFlags.inputOverrides = {};
+ nixpkgsLockFlags.inputUpdates = {};
+
            auto bashInstallable = std::make_shared<InstallableFlake>(
this,
state,
installable->nixpkgsFlakeRef(),
Strings{"bashInteractive"},
Strings{"legacyPackages." + settings.thisSystem.get() + "."},
- lockFlags);
+ nixpkgsLockFlags);
shell = state->store->printStorePath(
toStorePath(state->store, Realise::Outputs, OperateOn::Output, bashInstallable)) + "/bin/bash";
@@ -470,7 +508,7 @@ struct CmdDevelop : Common, MixEnvironment
}
};
-struct CmdPrintDevEnv : Common
+struct CmdPrintDevEnv : Common, MixJSON
{
std::string description() override
{
@@ -492,7 +530,10 @@ struct CmdPrintDevEnv : Common
stopProgressBar();
- std::cout << makeRcScript(store, buildEnvironment);
+ logger->writeToStdout(
+ json
+ ? buildEnvironment.toJSON()
+ : makeRcScript(store, buildEnvironment));
}
};
diff --git a/src/nix/flake.cc b/src/nix/flake.cc
index 64fcfc000..9055b07eb 100644
--- a/src/nix/flake.cc
+++ b/src/nix/flake.cc
@@ -84,6 +84,7 @@ struct CmdFlakeUpdate : FlakeCommand
lockFlags.recreateLockFile = true;
lockFlags.writeLockFile = true;
+ lockFlags.applyNixConfig = true;
lockFlake();
}
@@ -114,6 +115,7 @@ struct CmdFlakeLock : FlakeCommand
settings.tarballTtl = 0;
lockFlags.writeLockFile = true;
+ lockFlags.applyNixConfig = true;
lockFlake();
}
@@ -270,6 +272,8 @@ struct CmdFlakeCheck : FlakeCommand
settings.readOnlyMode = !build;
auto state = getEvalState();
+
+ lockFlags.applyNixConfig = true;
auto flake = lockFlake();
bool hasErrors = false;
diff --git a/src/nix/get-env.sh b/src/nix/get-env.sh
index 431440516..42c806450 100644
--- a/src/nix/get-env.sh
+++ b/src/nix/get-env.sh
@@ -8,6 +8,109 @@ if [[ -n $stdenv ]]; then
source $stdenv/setup
fi
+# Better to use compgen, but stdenv bash doesn't have it.
+__vars="$(declare -p)"
+__functions="$(declare -F)"
+
+__dumpEnv() {
+ printf '{\n'
+
+ printf ' "bashFunctions": {\n'
+ local __first=1
+ while read __line; do
+ if ! [[ $__line =~ ^declare\ -f\ (.*) ]]; then continue; fi
+ __fun_name="${BASH_REMATCH[1]}"
+ __fun_body="$(type $__fun_name)"
+ if [[ $__fun_body =~ \{(.*)\} ]]; then
+ if [[ -z $__first ]]; then printf ',\n'; else __first=; fi
+ __fun_body="${BASH_REMATCH[1]}"
+ printf " "
+ __escapeString "$__fun_name"
+ printf ':'
+ __escapeString "$__fun_body"
+ else
+ printf "Cannot parse definition of function '%s'.\n" "$__fun_name" >&2
+ return 1
+ fi
+ done < <(printf "%s\n" "$__functions")
+ printf '\n },\n'
+
+ printf ' "variables": {\n'
+ local __first=1
+ while read __line; do
+ if ! [[ $__line =~ ^declare\ (-[^ ])\ ([^=]*) ]]; then continue; fi
+ local type="${BASH_REMATCH[1]}"
+ local __var_name="${BASH_REMATCH[2]}"
+
+ if [[ $__var_name =~ ^BASH_ || \
+ $__var_name = _ || \
+ $__var_name = DIRSTACK || \
+ $__var_name = EUID || \
+ $__var_name = FUNCNAME || \
+ $__var_name = HISTCMD || \
+ $__var_name = HOSTNAME || \
+ $__var_name = GROUPS || \
+ $__var_name = PIPESTATUS || \
+ $__var_name = PWD || \
+ $__var_name = RANDOM || \
+ $__var_name = SHLVL || \
+ $__var_name = SECONDS \
+ ]]; then continue; fi
+
+ if [[ -z $__first ]]; then printf ',\n'; else __first=; fi
+
+ printf " "
+ __escapeString "$__var_name"
+ printf ': {'
+
+ # FIXME: handle -i, -r, -n.
+ if [[ $type == -x ]]; then
+ printf '"type": "exported", "value": '
+ __escapeString "${!__var_name}"
+ elif [[ $type == -- ]]; then
+ printf '"type": "var", "value": '
+ __escapeString "${!__var_name}"
+ elif [[ $type == -a ]]; then
+ printf '"type": "array", "value": ['
+ local __first2=1
+ __var_name="$__var_name[@]"
+ for __i in "${!__var_name}"; do
+ if [[ -z $__first2 ]]; then printf ', '; else __first2=; fi
+ __escapeString "$__i"
+ printf ' '
+ done
+ printf ']'
+ elif [[ $type == -A ]]; then
+ printf '"type": "associative", "value": {\n'
+ local __first2=1
+ declare -n __var_name2="$__var_name"
+ for __i in "${!__var_name2[@]}"; do
+ if [[ -z $__first2 ]]; then printf ',\n'; else __first2=; fi
+ printf " "
+ __escapeString "$__i"
+ printf ": "
+ __escapeString "${__var_name2[$__i]}"
+ done
+ printf '\n }'
+ else
+ printf '"type": "unknown"'
+ fi
+
+ printf "}"
+ done < <(printf "%s\n" "$__vars")
+ printf '\n }\n}'
+}
+
+__escapeString() {
+ local __s="$1"
+ __s="${__s//\\/\\\\}"
+ __s="${__s//\"/\\\"}"
+ __s="${__s//$'\n'/\\n}"
+ __s="${__s//$'\r'/\\r}"
+ __s="${__s//$'\t'/\\t}"
+ printf '"%s"' "$__s"
+}
+
# In case of `__structuredAttrs = true;` the list of outputs is an associative
# array with a format like `outname => /nix/store/hash-drvname-outname`, so `__olist`
# must contain the array's keys (hence `${!...[@]}`) in this case.
@@ -19,8 +122,7 @@ fi
for __output in $__olist; do
if [[ -z $__done ]]; then
- export > "${!__output}"
- set >> "${!__output}"
+ __dumpEnv > ${!__output}
__done=1
else
echo -n >> "${!__output}"
diff --git a/src/nix/print-dev-env.md b/src/nix/print-dev-env.md
index b80252acf..2aad491de 100644
--- a/src/nix/print-dev-env.md
+++ b/src/nix/print-dev-env.md
@@ -8,12 +8,43 @@ R""(
# . <(nix print-dev-env nixpkgs#hello)
```
+* Get the build environment in JSON format:
+
+ ```console
+ # nix print-dev-env nixpkgs#hello --json
+ ```
+
+ The output will look like this:
+
+ ```json
+ {
+ "bashFunctions": {
+ "buildPhase": " \n runHook preBuild;\n...",
+ ...
+ },
+ "variables": {
+ "src": {
+ "type": "exported",
+ "value": "/nix/store/3x7dwzq014bblazs7kq20p9hyzz0qh8g-hello-2.10.tar.gz"
+ },
+ "postUnpackHooks": {
+ "type": "array",
+ "value": ["_updateSourceDateEpochFromSourceRoot"]
+ },
+ ...
+ }
+ }
+ ```
+
# Description
-This command prints a shell script that can be sourced by `b`ash and
-that sets the environment variables and shell functions defined by the
-build process of *installable*. This allows you to get a similar build
+This command prints a shell script that can be sourced by `bash` and
+that sets the variables and shell functions defined by the build
+process of *installable*. This allows you to get a similar build
environment in your current shell rather than in a subshell (as with
`nix develop`).
+With `--json`, the output is a JSON serialisation of the variables and
+functions defined by the build process.
+
)""
diff --git a/src/nix/profile-remove.md b/src/nix/profile-remove.md
index dcf825da9..ba85441d8 100644
--- a/src/nix/profile-remove.md
+++ b/src/nix/profile-remove.md
@@ -15,6 +15,7 @@ R""(
```
* Remove all packages:
+
```console
# nix profile remove '.*'
```
diff --git a/src/nix/registry-add.md b/src/nix/registry-add.md
index 80a31996a..a947fa0b3 100644
--- a/src/nix/registry-add.md
+++ b/src/nix/registry-add.md
@@ -21,6 +21,13 @@ R""(
# nix registry add nixpkgs/nixos-20.03 ~/Dev/nixpkgs
```
+* Add `nixpkgs` pointing to `github:nixos/nixpkgs` to your custom flake
+ registry:
+
+ ```console
+  # nix registry add --registry ./custom-flake-registry.json nixpkgs github:nixos/nixpkgs
+ ```
+
# Description
This command adds an entry to the user registry that maps flake
diff --git a/src/nix/registry-pin.md b/src/nix/registry-pin.md
index 6e97e003e..ebc0e3eff 100644
--- a/src/nix/registry-pin.md
+++ b/src/nix/registry-pin.md
@@ -24,6 +24,13 @@ R""(
…
```
+* Pin `nixpkgs` in a custom registry to its most recent Git revision:
+
+ ```console
+ # nix registry pin --registry ./custom-flake-registry.json nixpkgs
+ ```
+
+
# Description
This command adds an entry to the user registry that maps flake
diff --git a/src/nix/registry-remove.md b/src/nix/registry-remove.md
index 4c0eb4947..eecd4c6e7 100644
--- a/src/nix/registry-remove.md
+++ b/src/nix/registry-remove.md
@@ -8,6 +8,12 @@ R""(
# nix registry remove nixpkgs
```
+* Remove the entry `nixpkgs` from a custom registry:
+
+ ```console
+ # nix registry remove --registry ./custom-flake-registry.json nixpkgs
+ ```
+
# Description
This command removes from the user registry any entry for flake
diff --git a/src/nix/registry.cc b/src/nix/registry.cc
index f9719600f..6a92576c7 100644
--- a/src/nix/registry.cc
+++ b/src/nix/registry.cc
@@ -10,6 +10,46 @@
using namespace nix;
using namespace nix::flake;
+
+class RegistryCommand : virtual Args
+{
+ std::string registry_path;
+
+    std::shared_ptr<fetchers::Registry> registry;
+
+public:
+
+ RegistryCommand()
+ {
+ addFlag({
+ .longName = "registry",
+ .description = "The registry to operate on.",
+ .labels = {"registry"},
            .handler = {&registry_path},
+ });
+ }
+
+    std::shared_ptr<fetchers::Registry> getRegistry()
+ {
+ if (registry) return registry;
+ if (registry_path.empty()) {
+ registry = fetchers::getUserRegistry();
+ } else {
+ registry = fetchers::getCustomRegistry(registry_path);
+ }
+ return registry;
+ }
+
+ Path getRegistryPath()
+ {
+ if (registry_path.empty()) {
+ return fetchers::getUserRegistryPath();
+ } else {
+ return registry_path;
+ }
+ }
+};
+
struct CmdRegistryList : StoreCommand
{
std::string description() override
@@ -45,7 +85,7 @@ struct CmdRegistryList : StoreCommand
}
};
-struct CmdRegistryAdd : MixEvalArgs, Command
+struct CmdRegistryAdd : MixEvalArgs, Command, RegistryCommand
{
std::string fromUrl, toUrl;
@@ -71,16 +111,16 @@ struct CmdRegistryAdd : MixEvalArgs, Command
{
auto fromRef = parseFlakeRef(fromUrl);
auto toRef = parseFlakeRef(toUrl);
+ auto registry = getRegistry();
fetchers::Attrs extraAttrs;
if (toRef.subdir != "") extraAttrs["dir"] = toRef.subdir;
- auto userRegistry = fetchers::getUserRegistry();
- userRegistry->remove(fromRef.input);
- userRegistry->add(fromRef.input, toRef.input, extraAttrs);
- userRegistry->write(fetchers::getUserRegistryPath());
+ registry->remove(fromRef.input);
+ registry->add(fromRef.input, toRef.input, extraAttrs);
+ registry->write(getRegistryPath());
}
};
-struct CmdRegistryRemove : virtual Args, MixEvalArgs, Command
+struct CmdRegistryRemove : RegistryCommand, Command
{
std::string url;
@@ -103,19 +143,21 @@ struct CmdRegistryRemove : virtual Args, MixEvalArgs, Command
void run() override
{
- auto userRegistry = fetchers::getUserRegistry();
- userRegistry->remove(parseFlakeRef(url).input);
- userRegistry->write(fetchers::getUserRegistryPath());
+ auto registry = getRegistry();
+ registry->remove(parseFlakeRef(url).input);
+ registry->write(getRegistryPath());
}
};
-struct CmdRegistryPin : virtual Args, EvalCommand
+struct CmdRegistryPin : RegistryCommand, EvalCommand
{
std::string url;
+ std::string locked;
+
std::string description() override
{
- return "pin a flake to its current version in user flake registry";
+ return "pin a flake to its current version or to the current version of a flake URL";
}
std::string doc() override
@@ -128,18 +170,31 @@ struct CmdRegistryPin : virtual Args, EvalCommand
CmdRegistryPin()
{
expectArg("url", &url);
+
+ expectArgs({
+ .label = "locked",
+ .optional = true,
+ .handler = {&locked},
+ .completer = {[&](size_t, std::string_view prefix) {
+ completeFlakeRef(getStore(), prefix);
+ }}
+ });
}
    void run(nix::ref<Store> store) override
{
+ if (locked.empty()) {
+ locked = url;
+ }
+ auto registry = getRegistry();
auto ref = parseFlakeRef(url);
- auto userRegistry = fetchers::getUserRegistry();
- userRegistry->remove(ref.input);
- auto [tree, resolved] = ref.resolve(store).input.fetch(store);
+ auto locked_ref = parseFlakeRef(locked);
+ registry->remove(ref.input);
+ auto [tree, resolved] = locked_ref.resolve(store).input.fetch(store);
fetchers::Attrs extraAttrs;
if (ref.subdir != "") extraAttrs["dir"] = ref.subdir;
- userRegistry->add(ref.input, resolved, extraAttrs);
- userRegistry->write(fetchers::getUserRegistryPath());
+ registry->add(ref.input, resolved, extraAttrs);
+ registry->write(getRegistryPath());
}
};
diff --git a/src/nix/repl.cc b/src/nix/repl.cc
index eed79c332..0275feae7 100644
--- a/src/nix/repl.cc
+++ b/src/nix/repl.cc
@@ -104,6 +104,26 @@ NixRepl::~NixRepl()
write_history(historyFile.c_str());
}
+string runNix(Path program, const Strings & args,
+    const std::optional<std::string> & input = {})
+{
+ auto experimentalFeatures = concatStringsSep(" ", settings.experimentalFeatures.get());
+ auto nixConf = getEnv("NIX_CONFIG").value_or("");
+ nixConf.append("\nexperimental-features = " + experimentalFeatures);
+ auto subprocessEnv = getEnv();
+ subprocessEnv["NIX_CONFIG"] = nixConf;
+ RunOptions opts(settings.nixBinDir+ "/" + program, args);
+ opts.input = input;
+ opts.environment = subprocessEnv;
+
+ auto res = runProgram(opts);
+
+ if (!statusOk(res.first))
+ throw ExecError(res.first, fmt("program '%1%' %2%", program, statusToString(res.first)));
+
+ return res.second;
+}
+
static NixRepl * curRepl; // ugly
static char * completionCallback(char * s, int *match) {
@@ -463,7 +483,7 @@ bool NixRepl::processLine(string line)
state->callFunction(f, v, result, Pos());
StorePath drvPath = getDerivationPath(result);
- runProgram(settings.nixBinDir + "/nix-shell", true, {state->store->printStorePath(drvPath)});
+ runNix("nix-shell", {state->store->printStorePath(drvPath)});
}
else if (command == ":b" || command == ":i" || command == ":s") {
@@ -477,7 +497,7 @@ bool NixRepl::processLine(string line)
but doing it in a child makes it easier to recover from
problems / SIGINT. */
try {
- runProgram(settings.nixBinDir + "/nix", true, {"build", "--no-link", drvPathRaw});
+ runNix("nix", {"build", "--no-link", drvPathRaw});
auto drv = state->store->readDerivation(drvPath);
std::cout << std::endl << "this derivation produced the following outputs:" << std::endl;
for (auto & i : drv.outputsAndOptPaths(*state->store))
@@ -485,9 +505,9 @@ bool NixRepl::processLine(string line)
} catch (ExecError &) {
}
} else if (command == ":i") {
- runProgram(settings.nixBinDir + "/nix-env", true, {"-i", drvPathRaw});
+ runNix("nix-env", {"-i", drvPathRaw});
} else {
- runProgram(settings.nixBinDir + "/nix-shell", true, {drvPathRaw});
+ runNix("nix-shell", {drvPathRaw});
}
}
diff --git a/src/resolve-system-dependencies/local.mk b/src/resolve-system-dependencies/local.mk
index 054ae01cb..fc48a8417 100644
--- a/src/resolve-system-dependencies/local.mk
+++ b/src/resolve-system-dependencies/local.mk
@@ -1,4 +1,4 @@
-ifeq ($(OS), Darwin)
+ifdef HOST_DARWIN
programs += resolve-system-dependencies
endif
diff --git a/tests/add.sh b/tests/add.sh
index e26e05843..5c3eed793 100644
--- a/tests/add.sh
+++ b/tests/add.sh
@@ -9,7 +9,7 @@ echo $path2
if test "$path1" != "$path2"; then
echo "nix-store --add and --add-fixed mismatch"
exit 1
-fi
+fi
path3=$(nix-store --add-fixed sha256 ./dummy)
echo $path3
diff --git a/tests/build-remote-content-addressed-floating.sh b/tests/build-remote-content-addressed-floating.sh
index 7447d92bd..13ef47d2d 100644
--- a/tests/build-remote-content-addressed-floating.sh
+++ b/tests/build-remote-content-addressed-floating.sh
@@ -4,4 +4,6 @@ file=build-hook-ca-floating.nix
sed -i 's/experimental-features .*/& ca-derivations/' "$NIX_CONF_DIR"/nix.conf
+CONTENT_ADDRESSED=true
+
source build-remote.sh
diff --git a/tests/build-remote.sh b/tests/build-remote.sh
index 70f82e939..27d85a83d 100644
--- a/tests/build-remote.sh
+++ b/tests/build-remote.sh
@@ -6,12 +6,17 @@ unset NIX_STATE_DIR
function join_by { local d=$1; shift; echo -n "$1"; shift; printf "%s" "${@/#/$d}"; }
+EXTRA_SYSTEM_FEATURES=()
+if [[ -n "$CONTENT_ADDRESSED" ]]; then
+ EXTRA_SYSTEM_FEATURES=("ca-derivations")
+fi
+
builders=(
# system-features will automatically be added to the outer URL, but not inner
# remote-store URL.
- "ssh://localhost?remote-store=$TEST_ROOT/machine1?system-features=foo - - 1 1 foo"
- "$TEST_ROOT/machine2 - - 1 1 bar"
- "ssh-ng://localhost?remote-store=$TEST_ROOT/machine3?system-features=baz - - 1 1 baz"
+ "ssh://localhost?remote-store=$TEST_ROOT/machine1?system-features=$(join_by "%20" foo ${EXTRA_SYSTEM_FEATURES[@]}) - - 1 1 $(join_by "," foo ${EXTRA_SYSTEM_FEATURES[@]})"
+ "$TEST_ROOT/machine2 - - 1 1 $(join_by "," bar ${EXTRA_SYSTEM_FEATURES[@]})"
+ "ssh-ng://localhost?remote-store=$TEST_ROOT/machine3?system-features=$(join_by "%20" baz ${EXTRA_SYSTEM_FEATURES[@]}) - - 1 1 $(join_by "," baz ${EXTRA_SYSTEM_FEATURES[@]})"
)
chmod -R +w $TEST_ROOT/machine* || true
diff --git a/tests/build.sh b/tests/build.sh
index ce9d6602c..c77f620f7 100644
--- a/tests/build.sh
+++ b/tests/build.sh
@@ -1,7 +1,7 @@
source common.sh
expectedJSONRegex='\[\{"drvPath":".*multiple-outputs-a.drv","outputs":\{"first":".*multiple-outputs-a-first","second":".*multiple-outputs-a-second"}},\{"drvPath":".*multiple-outputs-b.drv","outputs":\{"out":".*multiple-outputs-b"}}]'
-nix build -f multiple-outputs.nix --json a.all b.all | jq --exit-status '
+nix build -f multiple-outputs.nix --json a.all b.all --no-link | jq --exit-status '
(.[0] |
(.drvPath | match(".*multiple-outputs-a.drv")) and
(.outputs.first | match(".*multiple-outputs-a-first")) and
@@ -10,10 +10,10 @@ nix build -f multiple-outputs.nix --json a.all b.all | jq --exit-status '
(.drvPath | match(".*multiple-outputs-b.drv")) and
(.outputs.out | match(".*multiple-outputs-b")))
'
-
testNormalization () {
clearStore
- outPath=$(nix-build ./simple.nix)
+ outPath=$(nix-build ./simple.nix --no-out-link)
test "$(stat -c %Y $outPath)" -eq 1
}
+
testNormalization
diff --git a/tests/ca/duplicate-realisation-in-closure.sh b/tests/ca/duplicate-realisation-in-closure.sh
new file mode 100644
index 000000000..ca9099641
--- /dev/null
+++ b/tests/ca/duplicate-realisation-in-closure.sh
@@ -0,0 +1,26 @@
+source ./common.sh
+
+sed -i 's/experimental-features .*/& ca-derivations ca-references/' "$NIX_CONF_DIR"/nix.conf
+
+export REMOTE_STORE_DIR="$TEST_ROOT/remote_store"
+export REMOTE_STORE="file://$REMOTE_STORE_DIR"
+
+rm -rf $REMOTE_STORE_DIR
+clearStore
+
+# Build dep1 and push that to the binary cache.
+# This entails building (and pushing) current-time.
+nix copy --to "$REMOTE_STORE" -f nondeterministic.nix dep1
+clearStore
+sleep 2 # To make sure that `$(date)` will be different
+# Build dep2.
+# As we’ve cleared the cache, we’ll have to rebuild current-time. And because
+# the current time isn’t the same as before, this will yield a new (different)
+# realisation
+nix build -f nondeterministic.nix dep2 --no-link
+
+# Build something that depends both on dep1 and dep2.
+# If everything goes right, we should rebuild dep2 rather than fetch it from
+# the cache (because that would mean duplicating `current-time` in the closure),
+# and have `dep1 == dep2`.
+nix build --substituters "$REMOTE_STORE" -f nondeterministic.nix toplevel --no-require-sigs --no-link
diff --git a/tests/ca/gc.sh b/tests/ca/gc.sh
new file mode 100755
index 000000000..e4f9857d6
--- /dev/null
+++ b/tests/ca/gc.sh
@@ -0,0 +1,12 @@
+#!/usr/bin/env bash
+
+# Ensure that garbage collection works properly with ca derivations
+
+source common.sh
+
+sed -i 's/experimental-features .*/& ca-derivations ca-references/' "$NIX_CONF_DIR"/nix.conf
+
+export NIX_TESTS_CA_BY_DEFAULT=1
+
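+# Re-run the standard gc test from the parent directory; NIX_TESTS_CA_BY_DEFAULT
+# makes config.nix turn every test derivation into a content-addressed one.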
+cd ..
+source gc.sh
diff --git a/tests/ca/nix-shell.sh b/tests/ca/nix-shell.sh
new file mode 100755
index 000000000..7f1a3a73e
--- /dev/null
+++ b/tests/ca/nix-shell.sh
@@ -0,0 +1,10 @@
+#!/usr/bin/env bash
+
+source common.sh
+
+sed -i 's/experimental-features .*/& ca-derivations ca-references nix-command flakes/' "$NIX_CONF_DIR"/nix.conf
+
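+# nix-shell.sh picks up CONTENT_ADDRESSED and wraps nix-shell / nix develop
+# with `--arg contentAddressed true`.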
+CONTENT_ADDRESSED=true
+cd ..
+source ./nix-shell.sh
+
diff --git a/tests/ca/nondeterministic.nix b/tests/ca/nondeterministic.nix
new file mode 100644
index 000000000..d6d099a3e
--- /dev/null
+++ b/tests/ca/nondeterministic.nix
@@ -0,0 +1,35 @@
+with import ./config.nix;
+
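+# Helper to build content-addressed derivations.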
+let mkCADerivation = args: mkDerivation ({
+ __contentAddressed = true;
+ outputHashMode = "recursive";
+ outputHashAlgo = "sha256";
+} // args);
+in
+
+rec {
+ currentTime = mkCADerivation {
+ name = "current-time";
+ buildCommand = ''
+ mkdir $out
+ echo $(date) > $out/current-time
+ '';
+ };
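+  # `seed` only serves to make dep1 and dep2 distinct derivations; their outputs
+  # are identical since both just contain the path of currentTime.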
+ dep = seed: mkCADerivation {
+ name = "dep";
+ inherit seed;
+ buildCommand = ''
+ echo ${currentTime} > $out
+ '';
+ };
+ dep1 = dep 1;
+ dep2 = dep 2;
+ toplevel = mkCADerivation {
+ name = "toplevel";
+ buildCommand = ''
+ test ${dep1} == ${dep2}
+ touch $out
+ '';
+ };
+}
+
diff --git a/tests/ca/post-hook.sh b/tests/ca/post-hook.sh
new file mode 100755
index 000000000..4b8da4cd8
--- /dev/null
+++ b/tests/ca/post-hook.sh
@@ -0,0 +1,11 @@
+#!/usr/bin/env bash
+
+source common.sh
+
+sed -i 's/experimental-features .*/& ca-derivations ca-references nix-command flakes/' "$NIX_CONF_DIR"/nix.conf
+
+export NIX_TESTS_CA_BY_DEFAULT=1
+cd ..
+source ./post-hook.sh
+
+
diff --git a/tests/ca/recursive.sh b/tests/ca/recursive.sh
new file mode 100755
index 000000000..d9281d91f
--- /dev/null
+++ b/tests/ca/recursive.sh
@@ -0,0 +1,11 @@
+#!/usr/bin/env bash
+
+source common.sh
+
+sed -i 's/experimental-features .*/& ca-derivations ca-references nix-command flakes/' "$NIX_CONF_DIR"/nix.conf
+
+export NIX_TESTS_CA_BY_DEFAULT=1
+cd ..
+source ./recursive.sh
+
+
diff --git a/tests/ca/substitute.sh b/tests/ca/substitute.sh
index dfc4ea68e..c80feaacf 100644
--- a/tests/ca/substitute.sh
+++ b/tests/ca/substitute.sh
@@ -17,11 +17,15 @@ buildDrvs () {
# Populate the remote cache
clearStore
-buildDrvs --post-build-hook ../push-to-store.sh
+nix copy --to $REMOTE_STORE --file ./content-addressed.nix
# Restart the build on an empty store, ensuring that we don't build anything
# locally (everything must be substituted)
clearStore
-buildDrvs --substitute --substituters $REMOTE_STORE --no-require-sigs -j0
+buildDrvs --substitute --substituters $REMOTE_STORE --no-require-sigs -j0 transitivelyDependentCA
+# Check that the thing we’ve just substituted has its realisation stored
+nix realisation info --file ./content-addressed.nix transitivelyDependentCA
+# Check that its dependencies have their realisations stored as well
+nix realisation info --file ./content-addressed.nix dependentCA rootCA
# Same thing, but
# 1. With non-ca derivations
diff --git a/tests/check.nix b/tests/check.nix
index bca04fdaf..ec455ae2d 100644
--- a/tests/check.nix
+++ b/tests/check.nix
@@ -44,7 +44,7 @@ with import ./config.nix;
};
hashmismatch = import <nix/fetchurl.nix> {
- url = "file://" + toString ./dummy;
+ url = "file://" + builtins.getEnv "TMPDIR" + "/dummy";
sha256 = "0mdqa9w1p6cmli6976v4wi0sw9r4p5prkj7lzfd1877wk11c9c73";
};
diff --git a/tests/check.sh b/tests/check.sh
index 5f4997e28..d26d4d8fc 100644
--- a/tests/check.sh
+++ b/tests/check.sh
@@ -74,12 +74,13 @@ nix-build check.nix -A fetchurl --no-out-link --check
nix-build check.nix -A fetchurl --no-out-link --repair
[[ $(cat $path) != foo ]]
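+# hashmismatch in check.nix now fetches file://$TMPDIR/dummy, so create it first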
+echo 'Hello World' > $TMPDIR/dummy
nix-build check.nix -A hashmismatch --no-out-link || status=$?
[ "$status" = "102" ]
-echo -n > ./dummy
+echo -n > $TMPDIR/dummy
nix-build check.nix -A hashmismatch --no-out-link
-echo 'Hello World' > ./dummy
+echo 'Hello World' > $TMPDIR/dummy
nix-build check.nix -A hashmismatch --no-out-link --check || status=$?
[ "$status" = "102" ]
diff --git a/tests/config.nix.in b/tests/config.nix.in
index a57a8c596..7facbdcbc 100644
--- a/tests/config.nix.in
+++ b/tests/config.nix.in
@@ -1,3 +1,12 @@
+let
+ contentAddressedByDefault = builtins.getEnv "NIX_TESTS_CA_BY_DEFAULT" == "1";
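+  # Arguments merged into every mkDerivation call when CA-by-default is requested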
+ caArgs = if contentAddressedByDefault then {
+ __contentAddressed = true;
+ outputHashMode = "recursive";
+ outputHashAlgo = "sha256";
+ } else {};
+in
+
rec {
shell = "@bash@";
@@ -13,6 +22,6 @@ rec {
builder = shell;
args = ["-e" args.builder or (builtins.toFile "builder-${args.name}.sh" "if [ -e .attrs.sh ]; then source .attrs.sh; fi; eval \"$buildCommand\"")];
PATH = path;
- } // removeAttrs args ["builder" "meta"])
+ } // caArgs // removeAttrs args ["builder" "meta"])
// { meta = args.meta or {}; };
}
diff --git a/tests/dummy b/tests/dummy
new file mode 100644
index 000000000..557db03de
--- /dev/null
+++ b/tests/dummy
@@ -0,0 +1 @@
+Hello World
diff --git a/tests/fetchGit.sh b/tests/fetchGit.sh
index 88744ee7f..89294d8d2 100644
--- a/tests/fetchGit.sh
+++ b/tests/fetchGit.sh
@@ -189,3 +189,7 @@ path8=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = \"file://$rep
rev4=$(git -C $repo rev-parse HEAD)
rev4_nix=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = \"file://$repo\"; ref = \"HEAD\"; }).rev")
[[ $rev4 = $rev4_nix ]]
+
+# Passing a `name` argument should be reflected in the output path
+path9=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = \"file://$repo\"; ref = \"HEAD\"; name = \"foo\"; }).outPath")
+[[ $path9 =~ -foo$ ]]
diff --git a/tests/fetchMercurial.sh b/tests/fetchMercurial.sh
index d8a4e09d2..726840664 100644
--- a/tests/fetchMercurial.sh
+++ b/tests/fetchMercurial.sh
@@ -94,3 +94,8 @@ hg commit --cwd $repo -m 'Bla3'
path4=$(nix eval --impure --refresh --raw --expr "(builtins.fetchMercurial file://$repo).outPath")
[[ $path2 = $path4 ]]
+
+echo paris > $repo/hello
+# Passing a `name` argument should be reflected in the output path
+path5=$(nix eval -vvvvv --impure --refresh --raw --expr "(builtins.fetchMercurial { url = \"file://$repo\"; name = \"foo\"; } ).outPath")
+[[ $path5 =~ -foo$ ]]
diff --git a/tests/flakes.sh b/tests/flakes.sh
index 9764e1a6c..a4e657980 100644
--- a/tests/flakes.sh
+++ b/tests/flakes.sh
@@ -90,76 +90,14 @@ EOF
git -C $nonFlakeDir add README.md
git -C $nonFlakeDir commit -m 'Initial'
-cat > $registry < ./dummy
diff --git a/tests/local.mk b/tests/local.mk
index 663b8c8bc..cbdf4efdb 100644
--- a/tests/local.mk
+++ b/tests/local.mk
@@ -2,6 +2,7 @@ nix_tests = \
hash.sh lang.sh add.sh simple.sh dependencies.sh \
config.sh \
gc.sh \
+ ca/gc.sh \
gc-concurrent.sh \
gc-auto.sh \
referrers.sh user-envs.sh logging.sh nix-build.sh misc.sh fixed.sh \
@@ -38,6 +39,7 @@ nix_tests = \
search.sh \
nix-copy-ssh.sh \
post-hook.sh \
+ ca/post-hook.sh \
function-trace.sh \
recursive.sh \
describe-stores.sh \
@@ -46,9 +48,12 @@ nix_tests = \
compute-levels.sh \
ca/build.sh \
ca/build-with-garbage-path.sh \
+ ca/duplicate-realisation-in-closure.sh \
ca/substitute.sh \
ca/signatures.sh \
+ ca/nix-shell.sh \
ca/nix-run.sh \
+ ca/recursive.sh \
ca/nix-copy.sh
# parallel.sh
diff --git a/tests/nix-shell.sh b/tests/nix-shell.sh
index 4775bafb9..f60102f9c 100644
--- a/tests/nix-shell.sh
+++ b/tests/nix-shell.sh
@@ -2,6 +2,20 @@ source common.sh
clearStore
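+# When CONTENT_ADDRESSED is set (by tests/ca/nix-shell.sh), make nix-shell and
+# nix develop build content-addressed derivations.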
+if [[ -n ${CONTENT_ADDRESSED:-} ]]; then
+ nix-shell () {
+ command nix-shell --arg contentAddressed true "$@"
+ }
+
+ nix_develop() {
+ nix develop --arg contentAddressed true "$@"
+ }
+else
+ nix_develop() {
+ nix develop "$@"
+ }
+fi
+
# Test nix-shell -A
export IMPURE_VAR=foo
export SELECTED_IMPURE_VAR=baz
@@ -41,7 +55,7 @@ output=$(NIX_PATH=nixpkgs=shell.nix nix-shell --pure -p foo bar --run 'echo "$(f
[ "$output" = "foo bar" ]
# Test nix-shell shebang mode
-sed -e "s|@ENV_PROG@|$(type -p env)|" shell.shebang.sh > $TEST_ROOT/shell.shebang.sh
+sed -e "s|@ENV_PROG@|$(type -P env)|" shell.shebang.sh > $TEST_ROOT/shell.shebang.sh
chmod a+rx $TEST_ROOT/shell.shebang.sh
output=$($TEST_ROOT/shell.shebang.sh abc def)
@@ -49,7 +63,7 @@ output=$($TEST_ROOT/shell.shebang.sh abc def)
# Test nix-shell shebang mode again with metacharacters in the filename.
# First word of filename is chosen to not match any file in the test root.
-sed -e "s|@ENV_PROG@|$(type -p env)|" shell.shebang.sh > $TEST_ROOT/spaced\ \\\'\"shell.shebang.sh
+sed -e "s|@ENV_PROG@|$(type -P env)|" shell.shebang.sh > $TEST_ROOT/spaced\ \\\'\"shell.shebang.sh
chmod a+rx $TEST_ROOT/spaced\ \\\'\"shell.shebang.sh
output=$($TEST_ROOT/spaced\ \\\'\"shell.shebang.sh abc def)
@@ -58,7 +72,7 @@ output=$($TEST_ROOT/spaced\ \\\'\"shell.shebang.sh abc def)
# Test nix-shell shebang mode for ruby
# This uses a fake interpreter that returns the arguments passed
# This, in turn, verifies the `rc` script is valid and the `load()` script (given using `-e`) is as expected.
-sed -e "s|@SHELL_PROG@|$(type -p nix-shell)|" shell.shebang.rb > $TEST_ROOT/shell.shebang.rb
+sed -e "s|@SHELL_PROG@|$(type -P nix-shell)|" shell.shebang.rb > $TEST_ROOT/shell.shebang.rb
chmod a+rx $TEST_ROOT/shell.shebang.rb
output=$($TEST_ROOT/shell.shebang.rb abc ruby)
@@ -66,21 +80,27 @@ output=$($TEST_ROOT/shell.shebang.rb abc ruby)
# Test nix-shell shebang mode for ruby again with metacharacters in the filename.
# Note: fake interpreter only space-separates args without adding escapes to its output.
-sed -e "s|@SHELL_PROG@|$(type -p nix-shell)|" shell.shebang.rb > $TEST_ROOT/spaced\ \\\'\"shell.shebang.rb
+sed -e "s|@SHELL_PROG@|$(type -P nix-shell)|" shell.shebang.rb > $TEST_ROOT/spaced\ \\\'\"shell.shebang.rb
chmod a+rx $TEST_ROOT/spaced\ \\\'\"shell.shebang.rb
output=$($TEST_ROOT/spaced\ \\\'\"shell.shebang.rb abc ruby)
[ "$output" = '-e load(ARGV.shift) -- '"$TEST_ROOT"'/spaced \'\''"shell.shebang.rb abc ruby' ]
# Test 'nix develop'.
-nix develop -f shell.nix shellDrv -c bash -c '[[ -n $stdenv ]]'
+nix_develop -f shell.nix shellDrv -c bash -c '[[ -n $stdenv ]]'
# Ensure `nix develop -c` preserves stdin
echo foo | nix develop -f shell.nix shellDrv -c cat | grep -q foo
# Ensure `nix develop -c` actually executes the command if stdout isn't a terminal
-nix develop -f shell.nix shellDrv -c echo foo |& grep -q foo
+nix_develop -f shell.nix shellDrv -c echo foo |& grep -q foo
# Test 'nix print-dev-env'.
+[[ $(nix print-dev-env -f shell.nix shellDrv --json | jq -r .variables.arr1.value[2]) = '3 4' ]]
+
source <(nix print-dev-env -f shell.nix shellDrv)
[[ -n $stdenv ]]
+[[ ${arr1[2]} = "3 4" ]]
+[[ ${arr2[1]} = $'\n' ]]
+[[ ${arr2[2]} = $'x\ny' ]]
+[[ $(fun) = blabla ]]
diff --git a/tests/post-hook.sh b/tests/post-hook.sh
index aa3e6a574..238a8f826 100644
--- a/tests/post-hook.sh
+++ b/tests/post-hook.sh
@@ -4,7 +4,7 @@ clearStore
rm -f $TEST_ROOT/result
-export REMOTE_STORE=$TEST_ROOT/remote_store
+export REMOTE_STORE=file:$TEST_ROOT/remote_store
# Build the dependencies and push them to the remote store
nix-build -o $TEST_ROOT/result dependencies.nix --post-build-hook $PWD/push-to-store.sh
diff --git a/tests/recursive.sh b/tests/recursive.sh
index a55b061b5..b6740877d 100644
--- a/tests/recursive.sh
+++ b/tests/recursive.sh
@@ -9,9 +9,9 @@ rm -f $TEST_ROOT/result
export unreachable=$(nix store add-path ./recursive.sh)
-NIX_BIN_DIR=$(dirname $(type -p nix)) nix --experimental-features 'nix-command recursive-nix' build -o $TEST_ROOT/result -L --impure --expr '
+NIX_BIN_DIR=$(dirname $(type -p nix)) nix --extra-experimental-features 'nix-command recursive-nix' build -o $TEST_ROOT/result -L --impure --expr '
with import ./config.nix;
- mkDerivation {
+ mkDerivation rec {
name = "recursive";
dummy = builtins.toFile "dummy" "bla bla";
SHELL = shell;
@@ -19,11 +19,13 @@ NIX_BIN_DIR=$(dirname $(type -p nix)) nix --experimental-features 'nix-command r
# Note: this is a string without context.
unreachable = builtins.getEnv "unreachable";
+ NIX_TESTS_CA_BY_DEFAULT = builtins.getEnv "NIX_TESTS_CA_BY_DEFAULT";
+
requiredSystemFeatures = [ "recursive-nix" ];
buildCommand = '\'\''
mkdir $out
- opts="--experimental-features nix-command"
+ opts="--experimental-features nix-command ${if (NIX_TESTS_CA_BY_DEFAULT == "1") then "--extra-experimental-features ca-derivations" else ""}"
PATH=${builtins.getEnv "NIX_BIN_DIR"}:$PATH
@@ -46,16 +48,15 @@ NIX_BIN_DIR=$(dirname $(type -p nix)) nix --experimental-features 'nix-command r
# Add it to our closure.
ln -s $foobar $out/foobar
- [[ $(nix $opts path-info --all | wc -l) -eq 3 ]]
+ [[ $(nix $opts path-info --all | wc -l) -eq 4 ]]
# Build a derivation.
nix $opts build -L --impure --expr '\''
- derivation {
+ with import ${./config.nix};
+ mkDerivation {
name = "inner1";
- builder = builtins.getEnv "SHELL";
- system = builtins.getEnv "system";
+ buildCommand = "echo $fnord blaat > $out";
fnord = builtins.toFile "fnord" "fnord";
- args = [ "-c" "echo $fnord blaat > $out" ];
}
'\''
diff --git a/tests/shell.nix b/tests/shell.nix
index 4581fa40a..64817ed5c 100644
--- a/tests/shell.nix
+++ b/tests/shell.nix
@@ -1,6 +1,18 @@
-{ inNixShell ? false }:
+{ inNixShell ? false, contentAddressed ? false }:
-with import ./config.nix;
+let cfg = import ./config.nix; in
+with cfg;
+
+let
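+  # Optionally make every derivation in this file content-addressed
+  # (used via tests/ca/nix-shell.sh).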
+ mkDerivation =
+ if contentAddressed then
+ args: cfg.mkDerivation ({
+ __contentAddressed = true;
+ outputHashMode = "recursive";
+ outputHashAlgo = "sha256";
+ } // args)
+ else cfg.mkDerivation;
+in
let pkgs = rec {
setupSh = builtins.toFile "setup" ''
@@ -16,6 +28,12 @@ let pkgs = rec {
export "''${o}"
done
fi
+
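+    # Arrays and a function, used by nix-shell.sh to check that
+    # 'nix print-dev-env' re-exports them faithfully.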
+ declare -a arr1=(1 2 "3 4" 5)
+ declare -a arr2=(x $'\n' $'x\ny')
+ fun() {
+ echo blabla
+ }
'';
stdenv = mkDerivation {
@@ -30,6 +48,8 @@ let pkgs = rec {
name = "shellDrv";
builder = "/does/not/exist";
VAR_FROM_NIX = "bar";
+ ASCII_PERCENT = "%";
+ ASCII_AT = "@";
TEST_inNixShell = if inNixShell then "true" else "false";
inherit stdenv;
outputs = ["dev" "out"];
diff --git a/tests/tarball.sh b/tests/tarball.sh
index d53ec8cd9..1301922a5 100644
--- a/tests/tarball.sh
+++ b/tests/tarball.sh
@@ -40,6 +40,11 @@ test_tarball() {
(! nix-instantiate --eval -E '<fnord/xyzzy> 1' -I fnord=file://no-such-tarball$ext)
nix-instantiate --eval -E '<fnord/config.nix>' -I fnord=file://no-such-tarball$ext -I fnord=.
+
+ # Ensure that the `name` attribute isn’t accepted as that would mess
+ # with the content-addressing
+ (! nix-instantiate --eval -E "fetchTree { type = \"tarball\"; url = file://$tarball; narHash = \"$hash\"; name = \"foo\"; }")
+
}
test_tarball '' cat