Merge branch 'master' into ignore-try

commit cdcc349072
Author: Ben Burdette
Date: 2022-07-11 11:29:22 -06:00 (committed by GitHub)
Signature: no known key found for this signature in database (GPG key ID: 4AEE18F83AFDEB23)
83 changed files with 843 additions and 291 deletions


@@ -4,6 +4,8 @@ on:
 pull_request:
 push:
+permissions: read-all
 jobs:
 tests:
@@ -28,6 +30,8 @@ jobs:
 - run: nix --experimental-features 'nix-command flakes' flake check -L
 check_cachix:
+permissions:
+  contents: none
 name: Cachix secret present for installer tests
 runs-on: ubuntu-latest
 outputs:
@@ -88,7 +92,7 @@ jobs:
 fetch-depth: 0
 - uses: cachix/install-nix-action@v17
 - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
-- run: echo NIX_VERSION="$(nix-instantiate --eval -E '(import ./default.nix).defaultPackage.${builtins.currentSystem}.version' | tr -d \")" >> $GITHUB_ENV
+- run: echo NIX_VERSION="$(nix --experimental-features 'nix-command flakes' eval .\#default.version | tr -d \")" >> $GITHUB_ENV
 - uses: cachix/cachix-action@v10
 if: needs.check_cachix.outputs.secret == 'true'
 with:


@@ -1,8 +1,12 @@
 name: Hydra status
+permissions: read-all
 on:
 schedule:
 - cron: "12,42 * * * *"
 workflow_dispatch:
 jobs:
 check_hydra_status:
 name: Check Hydra status


@@ -1,4 +1,3 @@
-HOST_OS = @host_os@
 AR = @AR@
 BDW_GC_LIBS = @BDW_GC_LIBS@
 BOOST_LDFLAGS = @BOOST_LDFLAGS@
@@ -13,13 +12,14 @@ ENABLE_S3 = @ENABLE_S3@
 GTEST_LIBS = @GTEST_LIBS@
 HAVE_LIBCPUID = @HAVE_LIBCPUID@
 HAVE_SECCOMP = @HAVE_SECCOMP@
+HOST_OS = @host_os@
 LDFLAGS = @LDFLAGS@
 LIBARCHIVE_LIBS = @LIBARCHIVE_LIBS@
 LIBBROTLI_LIBS = @LIBBROTLI_LIBS@
 LIBCURL_LIBS = @LIBCURL_LIBS@
+LIBSECCOMP_LIBS = @LIBSECCOMP_LIBS@
 LOWDOWN_LIBS = @LOWDOWN_LIBS@
 OPENSSL_LIBS = @OPENSSL_LIBS@
-LIBSECCOMP_LIBS = @LIBSECCOMP_LIBS@
 PACKAGE_NAME = @PACKAGE_NAME@
 PACKAGE_VERSION = @PACKAGE_VERSION@
 SHELL = @bash@
@@ -31,6 +31,7 @@ datadir = @datadir@
 datarootdir = @datarootdir@
 doc_generate = @doc_generate@
 docdir = @docdir@
+embedded_sandbox_shell = @embedded_sandbox_shell@
 exec_prefix = @exec_prefix@
 includedir = @includedir@
 libdir = @libdir@


@@ -320,6 +320,14 @@ if test ${cross_compiling:-no} = no && ! test -z ${sandbox_shell+x}; then
 fi
 fi
+AC_ARG_ENABLE(embedded-sandbox-shell, AS_HELP_STRING([--enable-embedded-sandbox-shell],[include the sandbox shell in the Nix binary [default=no]]),
+embedded_sandbox_shell=$enableval, embedded_sandbox_shell=no)
+AC_SUBST(embedded_sandbox_shell)
+if test "$embedded_sandbox_shell" = yes; then
+AC_DEFINE(HAVE_EMBEDDED_SANDBOX_SHELL, 1, [Include the sandbox shell in the Nix binary.])
+fi
 # Expand all variables in config.status.
 test "$prefix" = NONE && prefix=$ac_default_prefix
 test "$exec_prefix" = NONE && exec_prefix='${prefix}'


@@ -12,6 +12,12 @@
 [`--dry-run`]
 [{`--out-link` | `-o`} *outlink*]
+# Disambiguation
+
+This man page describes the command `nix-build`, which is distinct from `nix
+build`. For documentation on the latter, run `nix build --help` or see `man
+nix3-build`.
+
 # Description
 The `nix-build` command builds the derivations described by the Nix


@@ -31,7 +31,7 @@ subcommand to be performed. These are documented below.
 Several commands, such as `nix-env -q` and `nix-env -i`, take a list of
 arguments that specify the packages on which to operate. These are
 extended regular expressions that must match the entire name of the
-package. (For details on regular expressions, see regex7.) The match is
+package. (For details on regular expressions, see **regex**(7).) The match is
 case-sensitive. The regular expression can optionally be followed by a
 dash and a version number; if omitted, any version of the package will
 match. Here are some examples:
@@ -412,7 +412,7 @@ The upgrade operation determines whether a derivation `y` is an upgrade
 of a derivation `x` by looking at their respective `name` attributes.
 The names (e.g., `gcc-3.3.1` are split into two parts: the package name
 (`gcc`), and the version (`3.3.1`). The version part starts after the
-first dash not followed by a letter. `x` is considered an upgrade of `y`
+first dash not followed by a letter. `y` is considered an upgrade of `x`
 if their package names match, and the version of `y` is higher than that
 of `x`.
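
Editor's note, not part of the diff: the name/version split described in this man-page paragraph is the same rule exposed by the Nix builtin `builtins.parseDrvName`, which makes for a quick way to check how a given name is split:

```
# Editor's illustration of the split rule quoted above.
builtins.parseDrvName "gcc-3.3.1"
# => { name = "gcc"; version = "3.3.1"; }

builtins.parseDrvName "nix-repl-2.10.0pre"
# the first dash is followed by a letter, so it stays in the package name
# => { name = "nix-repl"; version = "2.10.0pre"; }
```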


@@ -15,6 +15,12 @@
 [`--keep` *name*]
 {{`--packages` | `-p`} {*packages* | *expressions*} … | [*path*]}
+# Disambiguation
+
+This man page describes the command `nix-shell`, which is distinct from `nix
+shell`. For documentation on the latter, run `nix shell --help` or see `man
+nix3-shell`.
+
 # Description
 The command `nix-shell` will build the dependencies of the specified


@@ -1,4 +1,31 @@
 # Release X.Y (202?-??-??)
+* `nix repl` now takes installables on the command line, unifying the usage
+  with other commands that use `--file` and `--expr`. Primary breaking change
+  is for the common usage of `nix repl '<nixpkgs>'` which can be recovered with
+  `nix repl --file '<nixpkgs>'` or `nix repl --expr 'import <nixpkgs>{}'`.
+  This is currently guarded by the `repl-flake` experimental feature.
+* A new function `builtins.traceVerbose` is available. It is similar
+  to `builtins.trace` if the `trace-verbose` setting is set to true,
+  and it is a no-op otherwise.
+* `nix search` has a new flag `--exclude` to filter out packages.
+* On Linux, if `/nix` doesn't exist and cannot be created and you're
+  not running as root, Nix will automatically use
+  `~/.local/share/nix/root` as a chroot store. This enables non-root
+  users to download the statically linked Nix binary and have it work
+  out of the box, e.g.
+  ```
+  # ~/nix run nixpkgs#hello
+  warning: '/nix' does not exists, so Nix will use '/home/ubuntu/.local/share/nix/root' as a chroot store
+  Hello, world!
+  ```
+* `flake-registry.json` is now fetched from `channels.nixos.org`.
 * Nix can now be built with LTO by passing `--enable-lto` to `configure`.
   LTO is currently only supported when building with GCC.
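
Editor's note, not part of the release notes: a minimal sketch of the `builtins.traceVerbose` entry above, assuming the `trace-verbose` setting defaults to false and is exposed as a `--trace-verbose` flag, as Nix settings generally are.

```
# traceVerbose.nix -- hypothetical file name, editor's example
let
  x = builtins.traceVerbose "evaluating x" 42;
in
  x + 1
```

With the setting left at its default this evaluates to `43` silently; with `trace-verbose` enabled it additionally prints `trace: evaluating x`, exactly like `builtins.trace` would.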


@@ -4,6 +4,8 @@
 , tag ? "latest"
 , channelName ? "nixpkgs"
 , channelURL ? "https://nixos.org/channels/nixpkgs-unstable"
+, extraPkgs ? []
+, maxLayers ? 100
 }:
 let
 defaultPkgs = with pkgs; [
@@ -23,7 +25,7 @@ let
 iana-etc
 git
 openssh
-];
+] ++ extraPkgs;
 users = {
@@ -229,7 +231,7 @@ let
 in
 pkgs.dockerTools.buildLayeredImageWithNixDb {
-inherit name tag;
+inherit name tag maxLayers;
 contents = [ baseSystem ];


@@ -18,16 +18,16 @@
 },
 "nixpkgs": {
 "locked": {
-"lastModified": 1645296114,
-"narHash": "sha256-y53N7TyIkXsjMpOG7RhvqJFGDacLs9HlyHeSTBioqYU=",
+"lastModified": 1653988320,
+"narHash": "sha256-ZaqFFsSDipZ6KVqriwM34T739+KLYJvNmCWzErjAg7c=",
 "owner": "NixOS",
 "repo": "nixpkgs",
-"rev": "530a53dcbc9437363471167a5e4762c5fcfa34a1",
+"rev": "2fa57ed190fd6c7c746319444f34b5917666e5c1",
 "type": "github"
 },
 "original": {
 "owner": "NixOS",
-"ref": "nixos-21.05-small",
+"ref": "nixos-22.05-small",
 "repo": "nixpkgs",
 "type": "github"
 }

flake.nix

@@ -1,7 +1,7 @@
 {
 description = "The purely functional package manager";
-inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-21.05-small";
+inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-22.05-small";
 inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2";
 inputs.lowdown-src = { url = "github:kristapsdz/lowdown"; flake = false; };
@@ -36,7 +36,7 @@
 )
 );
-forAllStdenvs = stdenvs: f: nixpkgs.lib.genAttrs stdenvs (stdenv: f stdenv);
+forAllStdenvs = f: nixpkgs.lib.genAttrs stdenvs (stdenv: f stdenv);
 # Memoize nixpkgs for different platforms for efficiency.
 nixpkgsFor =
@@ -88,7 +88,6 @@
 "LDFLAGS=-fuse-ld=gold"
 ];
 nativeBuildDeps =
 [
 buildPackages.bison
@@ -314,6 +313,7 @@
 for LIB in $out/lib/*.dylib; do
 chmod u+w $LIB
 install_name_tool -id $LIB $LIB
+install_name_tool -delete_rpath ${boost}/lib/ $LIB || true
 done
 install_name_tool -change ${boost}/lib/libboost_system.dylib $out/lib/libboost_system.dylib $out/lib/libboost_thread.dylib
 ''}
@@ -370,10 +370,10 @@
 ++ lib.optional (currentStdenv.isLinux || currentStdenv.isDarwin) libsodium
 ++ lib.optional currentStdenv.isDarwin darwin.apple_sdk.frameworks.Security;
-configureFlags = ''
---with-dbi=${perlPackages.DBI}/${pkgs.perl.libPrefix}
---with-dbd-sqlite=${perlPackages.DBDSQLite}/${pkgs.perl.libPrefix}
-'';
+configureFlags = [
+"--with-dbi=${perlPackages.DBI}/${pkgs.perl.libPrefix}"
+"--with-dbd-sqlite=${perlPackages.DBDSQLite}/${pkgs.perl.libPrefix}"
+];
 enableParallelBuilding = true;
@@ -405,7 +405,7 @@
 # A Nixpkgs overlay that overrides the 'nix' and
 # 'nix.perl-bindings' packages.
-overlay = overlayFor (p: p.stdenv);
+overlays.default = overlayFor (p: p.stdenv);
 hydraJobs = {
@@ -430,7 +430,7 @@
 value = let
 nixpkgsCross = import nixpkgs {
 inherit system crossSystem;
-overlays = [ self.overlay ];
+overlays = [ self.overlays.default ];
 };
 in binaryTarball nixpkgsFor.${system} self.packages.${system}."nix-${crossSystem}" nixpkgsCross;
 }) crossSystems));
@@ -476,31 +476,31 @@
 tests.remoteBuilds = import ./tests/remote-builds.nix {
 system = "x86_64-linux";
 inherit nixpkgs;
-inherit (self) overlay;
+overlay = self.overlays.default;
 };
 tests.nix-copy-closure = import ./tests/nix-copy-closure.nix {
 system = "x86_64-linux";
 inherit nixpkgs;
-inherit (self) overlay;
+overlay = self.overlays.default;
 };
 tests.nssPreload = (import ./tests/nss-preload.nix rec {
 system = "x86_64-linux";
 inherit nixpkgs;
-inherit (self) overlay;
+overlay = self.overlays.default;
 });
 tests.githubFlakes = (import ./tests/github-flakes.nix rec {
 system = "x86_64-linux";
 inherit nixpkgs;
-inherit (self) overlay;
+overlay = self.overlays.default;
 });
 tests.sourcehutFlakes = (import ./tests/sourcehut-flakes.nix rec {
 system = "x86_64-linux";
 inherit nixpkgs;
-inherit (self) overlay;
+overlay = self.overlays.default;
 });
 tests.setuid = nixpkgs.lib.genAttrs
@@ -508,7 +508,7 @@
 (system:
 import ./tests/setuid.nix rec {
 inherit nixpkgs system;
-inherit (self) overlay;
+overlay = self.overlays.default;
 });
 # Make sure that nix-env still produces the exact same result
@@ -553,8 +553,9 @@
 dockerImage = self.hydraJobs.dockerImage.${system};
 });
-packages = forAllSystems (system: {
+packages = forAllSystems (system: rec {
 inherit (nixpkgsFor.${system}) nix;
+default = nix;
 } // (nixpkgs.lib.optionalAttrs (builtins.elem system linux64BitSystems) {
 nix-static = let
 nixpkgs = nixpkgsFor.${system}.pkgsStatic;
@@ -570,14 +571,24 @@
 nativeBuildInputs = nativeBuildDeps;
 buildInputs = buildDeps ++ propagatedDeps;
-configureFlags = [ "--sysconfdir=/etc" ];
+# Work around pkgsStatic disabling all tests.
+# Remove in NixOS 22.11, see https://github.com/NixOS/nixpkgs/pull/140271.
+preHook =
+''
+doCheck=1
+doInstallCheck=1
+'';
+configureFlags =
+configureFlags ++
+[ "--sysconfdir=/etc"
+"--enable-embedded-sandbox-shell"
+];
 enableParallelBuilding = true;
 makeFlags = "profiledir=$(out)/etc/profile.d";
-doCheck = true;
 installFlags = "sysconfdir=$(out)/etc";
 postInstall = ''
@@ -587,7 +598,6 @@
 echo "file binary-dist $out/bin/nix" >> $out/nix-support/hydra-build-products
 '';
-doInstallCheck = true;
 installCheckFlags = "sysconfdir=$(out)/etc";
 stripAllList = ["bin"];
@@ -596,6 +606,7 @@
 hardeningDisable = [ "pie" ];
 };
+
 dockerImage =
 let
 pkgs = nixpkgsFor.${system};
@@ -610,12 +621,14 @@
 ln -s ${image} $image
 echo "file binary-dist $image" >> $out/nix-support/hydra-build-products
 '';
-} // builtins.listToAttrs (map (crossSystem: {
+}
+// builtins.listToAttrs (map (crossSystem: {
 name = "nix-${crossSystem}";
 value = let
 nixpkgsCross = import nixpkgs {
 inherit system crossSystem;
-overlays = [ self.overlay ];
+overlays = [ self.overlays.default ];
 };
 in with commonDeps nixpkgsCross; nixpkgsCross.stdenv.mkDerivation {
 name = "nix-${version}";
@@ -649,44 +662,45 @@
 doInstallCheck = true;
 installCheckFlags = "sysconfdir=$(out)/etc";
 };
-}) crossSystems)) // (builtins.listToAttrs (map (stdenvName:
+}) (if system == "x86_64-linux" then crossSystems else [])))
+// (builtins.listToAttrs (map (stdenvName:
 nixpkgsFor.${system}.lib.nameValuePair
 "nix-${stdenvName}"
 nixpkgsFor.${system}."${stdenvName}Packages".nix
 ) stdenvs)));
-defaultPackage = forAllSystems (system: self.packages.${system}.nix);
-devShell = forAllSystems (system: self.devShells.${system}.stdenvPackages);
-devShells = forAllSystemsAndStdenvs (system: stdenv:
-with nixpkgsFor.${system};
-with commonDeps pkgs;
-nixpkgsFor.${system}.${stdenv}.mkDerivation {
-name = "nix";
-outputs = [ "out" "dev" "doc" ];
-nativeBuildInputs = nativeBuildDeps;
-buildInputs = buildDeps ++ propagatedDeps ++ awsDeps;
-inherit configureFlags;
-enableParallelBuilding = true;
-installFlags = "sysconfdir=$(out)/etc";
-shellHook =
-''
-PATH=$prefix/bin:$PATH
-unset PYTHONPATH
-export MANPATH=$out/share/man:$MANPATH
-# Make bash completion work.
-XDG_DATA_DIRS+=:$out/share
-'';
-});
+devShells = forAllSystems (system:
+forAllStdenvs (stdenv:
+with nixpkgsFor.${system};
+with commonDeps pkgs;
+nixpkgsFor.${system}.${stdenv}.mkDerivation {
+name = "nix";
+outputs = [ "out" "dev" "doc" ];
+nativeBuildInputs = nativeBuildDeps;
+buildInputs = buildDeps ++ propagatedDeps ++ awsDeps;
+inherit configureFlags;
+enableParallelBuilding = true;
+installFlags = "sysconfdir=$(out)/etc";
+shellHook =
+''
+PATH=$prefix/bin:$PATH
+unset PYTHONPATH
+export MANPATH=$out/share/man:$MANPATH
+# Make bash completion work.
+XDG_DATA_DIRS+=:$out/share
+'';
+}
+)
+// { default = self.devShells.${system}.stdenv; }
+);
 };
 }


@@ -125,7 +125,7 @@ define build-library
 $(1)_PATH := $$(_d)/$$($(1)_NAME).a
 $$($(1)_PATH): $$($(1)_OBJS) | $$(_d)/
-+$$(trace-ld) $(LD) -Ur -o $$(_d)/$$($(1)_NAME).o $$?
++$$(trace-ld) $(LD) -Ur -o $$(_d)/$$($(1)_NAME).o $$^
 $$(trace-ar) $(AR) crs $$@ $$(_d)/$$($(1)_NAME).o
 $(1)_LDFLAGS_USE += $$($(1)_PATH) $$($(1)_LDFLAGS)


@@ -442,8 +442,9 @@ add_nix_vol_fstab_line() {
 local escaped_mountpoint="${NIX_ROOT/ /'\\\'040}"
 shift
-# wrap `ex` to work around a problem with vim plugins breaking exit codes
-# (see github.com/NixOS/nix/issues/5468)
+# wrap `ex` to work around problems w/ vim features breaking exit codes
+# - plugins (see github.com/NixOS/nix/issues/5468): -u NONE
+# - swap file: -n
 #
 # the first draft used `--noplugin`, but github.com/NixOS/nix/issues/6462
 # suggests we need the less-semantic `-u NONE`
@@ -456,7 +457,7 @@ add_nix_vol_fstab_line() {
 # minver 10.12.6 seems to have released with vim 7.4
 cat > "$SCRATCH/ex_cleanroom_wrapper" <<EOF
 #!/bin/sh
-/usr/bin/ex -u NONE "\$@"
+/usr/bin/ex -u NONE -n "\$@"
 EOF
 chmod 755 "$SCRATCH/ex_cleanroom_wrapper"
@@ -650,9 +651,9 @@ EOF
 task "Configuring /etc/synthetic.conf to make a mount-point at $NIX_ROOT" >&2
 # technically /etc/synthetic.d/nix is supported in Big Sur+
 # but handling both takes even more code...
-# Note: `-u NONE` disables vim plugins/rc; see note on --clean earlier
+# See earlier note; `-u NONE` disables vim plugins/rc, `-n` skips swapfile
 _sudo "to add Nix to /etc/synthetic.conf" \
-/usr/bin/ex -u NONE /etc/synthetic.conf <<EOF
+/usr/bin/ex -u NONE -n /etc/synthetic.conf <<EOF
 :a
 ${NIX_ROOT:1}
 .
@@ -820,8 +821,8 @@ setup_volume_daemon() {
 local volume_uuid="$2"
 if ! test_voldaemon; then
 task "Configuring LaunchDaemon to mount '$NIX_VOLUME_LABEL'" >&2
-# Note: `-u NONE` disables vim plugins/rc; see note on --clean earlier
-_sudo "to install the Nix volume mounter" /usr/bin/ex -u NONE "$NIX_VOLUME_MOUNTD_DEST" <<EOF
+# See earlier note; `-u NONE` disables vim plugins/rc, `-n` skips swapfile
+_sudo "to install the Nix volume mounter" /usr/bin/ex -u NONE -n "$NIX_VOLUME_MOUNTD_DEST" <<EOF
 :a
 $(generate_mount_daemon "$cmd_type" "$volume_uuid")
 .


@@ -638,6 +638,17 @@ place_channel_configuration() {
 fi
 }
+check_selinux() {
+if command -v getenforce > /dev/null 2>&1; then
+if ! [ "$(getenforce)" = "Disabled" ]; then
+failure <<EOF
+Nix does not work with selinux enabled yet!
+see https://github.com/NixOS/nix/issues/2374
+EOF
+fi
+fi
+}
 welcome_to_nix() {
 ok "Welcome to the Multi-User Nix Installation"
@@ -866,6 +877,8 @@ when I need to.
 EOF
 fi
+check_selinux
+
 if [ "$(uname -s)" = "Darwin" ]; then
 # shellcheck source=./install-darwin-multi-user.sh
 . "$EXTRACTED_NIX_PATH/install-darwin-multi-user.sh"


@@ -117,12 +117,13 @@ struct InstallablesCommand : virtual Args, SourceExprCommand
 InstallablesCommand();
 void prepare() override;
+Installables load();
 virtual bool useDefaultInstallables() { return true; }
 std::optional<FlakeRef> getFlakeRefForCompletion() override;
-private:
+protected:
 std::vector<std::string> _installables;
 };


@@ -146,7 +146,8 @@ SourceExprCommand::SourceExprCommand(bool supportReadOnlyMode)
 .shortName = 'f',
 .description =
 "Interpret installables as attribute paths relative to the Nix expression stored in *file*. "
-"If *file* is the character -, then a Nix expression will be read from standard input.",
+"If *file* is the character -, then a Nix expression will be read from standard input. "
+"Implies `--impure`.",
 .category = installablesCategory,
 .labels = {"file"},
 .handler = {&file},
@@ -919,6 +920,9 @@ std::vector<std::pair<std::shared_ptr<Installable>, BuiltPath>> Installable::bui
 break;
 case Realise::Outputs: {
+if (settings.printMissing)
+printMissing(store, pathsToBuild, lvlInfo);
+
 for (auto & buildResult : store->buildPathsWithResults(pathsToBuild, bMode, evalStore)) {
 if (!buildResult.success())
 buildResult.rethrow();
@@ -1032,11 +1036,16 @@ InstallablesCommand::InstallablesCommand()
 void InstallablesCommand::prepare()
 {
+installables = load();
+}
+
+Installables InstallablesCommand::load() {
+Installables installables;
+
 if (_installables.empty() && useDefaultInstallables())
 // FIXME: commands like "nix profile install" should not have a
 // default, probably.
 _installables.push_back(".");
-installables = parseInstallables(getStore(), _installables);
+return parseInstallables(getStore(), _installables);
 }
 std::optional<FlakeRef> InstallablesCommand::getFlakeRefForCompletion()


@@ -132,6 +132,8 @@ struct Installable
 const std::vector<std::shared_ptr<Installable>> & installables);
 };
+typedef std::vector<std::shared_ptr<Installable>> Installables;
+
 struct InstallableValue : Installable
 {
 ref<EvalState> state;


@@ -22,6 +22,7 @@ extern "C" {
 #include "ansicolor.hh"
 #include "shared.hh"
 #include "eval.hh"
+#include "eval-cache.hh"
 #include "eval-inline.hh"
 #include "attr-path.hh"
 #include "store-api.hh"
@@ -54,6 +55,8 @@ struct NixRepl
 size_t debugTraceIndex;
 Strings loadedFiles;
+typedef std::vector<std::pair<Value*,std::string>> AnnotatedValues;
+std::function<AnnotatedValues()> getValues;
+
 const static int envSize = 32768;
 std::shared_ptr<StaticEnv> staticEnv;
@@ -63,13 +66,15 @@ struct NixRepl
 const Path historyFile;
-NixRepl(ref<EvalState> state);
+NixRepl(const Strings & searchPath, nix::ref<Store> store,ref<EvalState> state,
+std::function<AnnotatedValues()> getValues);
 ~NixRepl();
-void mainLoop(const std::vector<std::string> & files);
+void mainLoop();
 StringSet completePrefix(const std::string & prefix);
 bool getLine(std::string & input, const std::string & prompt);
 StorePath getDerivationPath(Value & v);
 bool processLine(std::string line);
 void loadFile(const Path & path);
 void loadFlake(const std::string & flakeRef);
 void initEnv();
@@ -96,9 +101,11 @@ std::string removeWhitespace(std::string s)
 }
-NixRepl::NixRepl(ref<EvalState> state)
+NixRepl::NixRepl(const Strings & searchPath, nix::ref<Store> store, ref<EvalState> state,
+std::function<NixRepl::AnnotatedValues()> getValues)
 : state(state)
 , debugTraceIndex(0)
+, getValues(getValues)
 , staticEnv(new StaticEnv(false, state->staticBaseEnv.get()))
 , historyFile(getDataDir() + "/nix/repl-history")
 {
@@ -111,23 +118,20 @@ NixRepl::~NixRepl()
 write_history(historyFile.c_str());
 }
-std::string runNix(Path program, const Strings & args,
+void runNix(Path program, const Strings & args,
 const std::optional<std::string> & input = {})
 {
 auto subprocessEnv = getEnv();
 subprocessEnv["NIX_CONFIG"] = globalConfig.toKeyValue();
-auto res = runProgram(RunOptions {
+runProgram2(RunOptions {
 .program = settings.nixBinDir+ "/" + program,
 .args = args,
 .environment = subprocessEnv,
 .input = input,
 });
-if (!statusOk(res.first))
-throw ExecError(res.first, "program '%1%' %2%", program, statusToString(res.first));
-return res.second;
+return;
 }
 static NixRepl * curRepl; // ugly
@@ -228,18 +232,12 @@ static std::ostream & showDebugTrace(std::ostream & out, const PosTable & positi
 return out;
 }
-void NixRepl::mainLoop(const std::vector<std::string> & files)
+void NixRepl::mainLoop()
 {
 std::string error = ANSI_RED "error:" ANSI_NORMAL " ";
 notice("Welcome to Nix " + nixVersion + ". Type :? for help.\n");
-if (!files.empty()) {
-for (auto & i : files)
-loadedFiles.push_back(i);
-}
-
 loadFiles();
-if (!loadedFiles.empty()) notice("");
 // Allow nix-repl specific settings in .inputrc
 rl_readline_name = "nix-repl";
@@ -749,7 +747,6 @@ bool NixRepl::processLine(std::string line)
 return true;
 }
-
 void NixRepl::loadFile(const Path & path)
 {
 loadedFiles.remove(path);
@@ -809,13 +806,15 @@ void NixRepl::loadFiles()
 Strings old = loadedFiles;
 loadedFiles.clear();
-bool first = true;
 for (auto & i : old) {
-if (!first) notice("");
-first = false;
 notice("Loading '%1%'...", i);
 loadFile(i);
 }
+
+for (auto & [i, what] : getValues()) {
+notice("Loading installable '%1%'...", what);
+addAttrsToScope(*i);
+}
 }
@@ -1015,7 +1014,17 @@ void runRepl(
 ref<EvalState>evalState,
 const ValMap & extraEnv)
 {
-auto repl = std::make_unique<NixRepl>(evalState);
+auto getValues = [&]()->NixRepl::AnnotatedValues{
+NixRepl::AnnotatedValues values;
+return values;
+};
+const Strings & searchPath = {};
+auto repl = std::make_unique<NixRepl>(
+searchPath,
+openStore(),
+evalState,
+getValues
+);
 repl->initEnv();
@@ -1023,20 +1032,40 @@ void runRepl(
 for (auto & [name, value] : extraEnv)
 repl->addVarToScope(repl->state->symbols.create(name), *value);
-repl->mainLoop({});
+repl->mainLoop();
 }
-struct CmdRepl : StoreCommand, MixEvalArgs
+struct CmdRepl : InstallablesCommand
 {
-std::vector<std::string> files;
-
-CmdRepl()
-{
-expectArgs({
-.label = "files",
-.handler = {&files},
-.completer = completePath
-});
+CmdRepl(){
+evalSettings.pureEval = false;
+}
+
+void prepare()
+{
+if (!settings.isExperimentalFeatureEnabled(Xp::ReplFlake) && !(file) && this->_installables.size() >= 1) {
+warn("future versions of Nix will require using `--file` to load a file");
+if (this->_installables.size() > 1)
+warn("more than one input file is not currently supported");
+auto filePath = this->_installables[0].data();
+file = std::optional(filePath);
+_installables.front() = _installables.back();
+_installables.pop_back();
+}
+installables = InstallablesCommand::load();
+}
+
+std::vector<std::string> files;
+
+Strings getDefaultFlakeAttrPaths() override
+{
+return {""};
+}
+
+virtual bool useDefaultInstallables() override
+{
+return file.has_value() or expr.has_value();
+}
+
+bool forceImpureByDefault() override
+{
+return true;
+}
 }
 std::string description() override
@@ -1053,14 +1082,37 @@ struct CmdRepl : StoreCommand, MixEvalArgs
 void run(ref<Store> store) override
 {
-evalSettings.pureEval = false;
-
-auto evalState = make_ref<EvalState>(searchPath, store);
-
-auto repl = std::make_unique<NixRepl>(evalState);
+auto state = getEvalState();
+auto getValues = [&]()->NixRepl::AnnotatedValues{
+auto installables = load();
+NixRepl::AnnotatedValues values;
+for (auto & installable: installables){
+auto what = installable->what();
+if (file){
+auto [val, pos] = installable->toValue(*state);
+auto what = installable->what();
+state->forceValue(*val, pos);
+auto autoArgs = getAutoArgs(*state);
+auto valPost = state->allocValue();
+state->autoCallFunction(*autoArgs, *val, *valPost);
+state->forceValue(*valPost, pos);
+values.push_back( {valPost, what });
+} else {
+auto [val, pos] = installable->toValue(*state);
+values.push_back( {val, what} );
+}
+}
+return values;
+};
+auto repl = std::make_unique<NixRepl>(
+searchPath,
+openStore(),
+state,
+getValues
+);
 repl->autoArgs = getAutoArgs(*repl->state);
 repl->initEnv();
-repl->mainLoop(files);
+repl->mainLoop();
 }
 };


@@ -282,7 +282,7 @@ struct AttrDb
 auto queryAttribute(state->queryAttribute.use()(key.first)(symbols[key.second]));
 if (!queryAttribute.next()) return {};
-auto rowId = (AttrType) queryAttribute.getInt(0);
+auto rowId = (AttrId) queryAttribute.getInt(0);
 auto type = (AttrType) queryAttribute.getInt(1);
 switch (type) {


@@ -151,7 +151,7 @@ public:
 if (debugRepl)
 runDebugRepl(&error, env, expr);
-throw error;
+throw std::move(error);
 }
 template<class E>
@@ -166,7 +166,7 @@ public:
 runDebugRepl(&e, last.env, last.expr);
 }
-throw e;
+throw std::move(e);
 }
@@ -654,6 +654,8 @@ struct EvalSettings : Config
 debug mode (using the --debugger flag). By default the debugger will pause on all exceptions.
 )"};
+Setting<bool> traceVerbose{this, false, "trace-verbose",
+"Whether `builtins.traceVerbose` should trace its first argument when evaluated."};
 };
 extern EvalSettings evalSettings;


@@ -513,6 +513,15 @@ LockedFlake lockFlake(
 if (!lockFlags.allowMutable && !input.ref->input.isLocked())
 throw Error("cannot update flake input '%s' in pure mode", inputPathS);
+/* Note: in case of an --override-input, we use
+the *original* ref (input2.ref) for the
+"original" field, rather than the
+override. This ensures that the override isn't
+nuked the next time we update the lock
+file. That is, overrides are sticky unless you
+use --no-write-lock-file. */
+auto ref = input2.ref ? *input2.ref : *input.ref;
+
 if (input.isFlake) {
 Path localPath = parentPath;
 FlakeRef localRef = *input.ref;
@@ -524,15 +533,7 @@ LockedFlake lockFlake(
 auto inputFlake = getFlake(state, localRef, useRegistries, flakeCache, inputPath);
-/* Note: in case of an --override-input, we use
-the *original* ref (input2.ref) for the
-"original" field, rather than the
-override. This ensures that the override isn't
-nuked the next time we update the lock
-file. That is, overrides are sticky unless you
-use --no-write-lock-file. */
-auto childNode = std::make_shared<LockedNode>(
-inputFlake.lockedRef, input2.ref ? *input2.ref : *input.ref);
+auto childNode = std::make_shared<LockedNode>(inputFlake.lockedRef, ref);
 node->inputs.insert_or_assign(id, childNode);
@@ -560,7 +561,7 @@ LockedFlake lockFlake(
 auto [sourceInfo, resolvedRef, lockedRef] = fetchOrSubstituteTree(
 state, *input.ref, useRegistries, flakeCache);
 node->inputs.insert_or_assign(id,
-std::make_shared<LockedNode>(lockedRef, *input.ref, false));
+std::make_shared<LockedNode>(lockedRef, ref, false));
 }
 }


@@ -150,16 +150,16 @@ struct Expr
 };
 #define COMMON_METHODS \
-void show(const SymbolTable & symbols, std::ostream & str) const; \
-void eval(EvalState & state, Env & env, Value & v); \
-void bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env);
+void show(const SymbolTable & symbols, std::ostream & str) const override; \
+void eval(EvalState & state, Env & env, Value & v) override; \
+void bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env) override;
 struct ExprInt : Expr
 {
 NixInt n;
 Value v;
 ExprInt(NixInt n) : n(n) { v.mkInt(n); };
-Value * maybeThunk(EvalState & state, Env & env);
+Value * maybeThunk(EvalState & state, Env & env) override;
 COMMON_METHODS
 };
@@ -168,7 +168,7 @@ struct ExprFloat : Expr
 NixFloat nf;
 Value v;
 ExprFloat(NixFloat nf) : nf(nf) { v.mkFloat(nf); };
-Value * maybeThunk(EvalState & state, Env & env);
+Value * maybeThunk(EvalState & state, Env & env) override;
 COMMON_METHODS
 };
@@ -177,7 +177,7 @@ struct ExprString : Expr
 std::string s;
 Value v;
 ExprString(std::string s) : s(std::move(s)) { v.mkString(this->s.data()); };
-Value * maybeThunk(EvalState & state, Env & env);
+Value * maybeThunk(EvalState & state, Env & env) override;
 COMMON_METHODS
 };
@@ -186,7 +186,7 @@ struct ExprPath : Expr
 std::string s;
 Value v;
 ExprPath(std::string s) : s(std::move(s)) { v.mkPath(this->s.c_str()); };
-Value * maybeThunk(EvalState & state, Env & env);
+Value * maybeThunk(EvalState & state, Env & env) override;
 COMMON_METHODS
 };
@@ -213,7 +213,7 @@ struct ExprVar : Expr
 ExprVar(Symbol name) : name(name) { };
 ExprVar(const PosIdx & pos, Symbol name) : pos(pos), name(name) { };
-Value * maybeThunk(EvalState & state, Env & env);
+Value * maybeThunk(EvalState & state, Env & env) override;
 PosIdx getPos() const override { return pos; }
 COMMON_METHODS
 };
@@ -326,7 +326,7 @@ struct ExprLambda : Expr
 : pos(pos), formals(formals), body(body)
 {
 }
-void setName(Symbol name);
+void setName(Symbol name) override;
 std::string showNamePos(const EvalState & state) const;
 inline bool hasFormals() const { return formals != nullptr; }
 PosIdx getPos() const override { return pos; }
@@ -395,15 +395,15 @@ struct ExprOpNot : Expr
 Expr * e1, * e2; \
 name(Expr * e1, Expr * e2) : e1(e1), e2(e2) { }; \
 name(const PosIdx & pos, Expr * e1, Expr * e2) : pos(pos), e1(e1), e2(e2) { }; \
-void show(const SymbolTable & symbols, std::ostream & str) const \
+void show(const SymbolTable & symbols, std::ostream & str) const override \
 { \
 str << "("; e1->show(symbols, str); str << " " s " "; e2->show(symbols, str); str << ")"; \
 } \
-void bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env) \
+void bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env) override \
 { \
 e1->bindVars(es, env); e2->bindVars(es, env); \
 } \
-void eval(EvalState & state, Env & env, Value & v); \
+void eval(EvalState & state, Env & env, Value & v) override; \
 PosIdx getPos() const override { return pos; } \
 };


@@ -520,6 +520,12 @@ path_start
 $$ = new ExprPath(path);
 }
 | HPATH {
+if (evalSettings.pureEval) {
+throw Error(
+"the path '%s' can not be resolved in pure mode",
+std::string_view($1.p, $1.l)
+);
+}
 Path path(getHome() + std::string($1.p + 1, $1.l - 1));
 $$ = new ExprPath(path);
 }


@@ -987,6 +987,15 @@ static RegisterPrimOp primop_trace({
 });
+/* Takes two arguments and evaluates to the second one. Used as the
+ * builtins.traceVerbose implementation when --trace-verbose is not enabled
+ */
+static void prim_second(EvalState & state, const PosIdx pos, Value * * args, Value & v)
+{
+state.forceValue(*args[1], pos);
+v = *args[1];
+}
+
 /*************************************************************
 * Derivations
 *************************************************************/
@@ -3943,6 +3952,18 @@ void EvalState::createBaseEnv()
 addPrimOp("__exec", 1, prim_exec);
 }
+addPrimOp({
+.fun = evalSettings.traceVerbose ? prim_trace : prim_second,
+.arity = 2,
+.name = "__traceVerbose",
+.args = { "e1", "e2" },
+.doc = R"(
+Evaluate *e1* and print its abstract syntax representation on standard
+error if `--trace-verbose` is enabled. Then return *e2*. This function
+is useful for debugging.
+)",
+});
+
 /* Add a value containing the current Nix expression search path. */
 mkList(v, searchPath.size());
 int n = 0;


@@ -364,6 +364,10 @@ static RegisterPrimOp primop_fetchGit({
 A Boolean parameter that specifies whether submodules should be
 checked out. Defaults to `false`.
+- shallow\
+  A Boolean parameter that specifies whether fetching a shallow clone
+  is allowed. Defaults to `false`.
+
 - allRefs\
 Whether to fetch all refs of the repository. With this argument being
 true, it's possible to load a `rev` from *any* `ref` (by default only
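
Editor's note, not part of the diff: a minimal sketch of the newly documented `shallow` argument; the repository URL and ref below are placeholders, not taken from the commit.

```
# Editor's illustration; URL and ref are made up.
builtins.fetchGit {
  url = "https://example.org/big-repo.git";
  ref = "main";
  shallow = true;  # per the new doc above: allow a shallow clone
}
```

Without `shallow = true`, fetching a shallow repository is rejected, which matches the reworded error message further down in this commit.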


@@ -540,22 +540,22 @@ namespace nix {
 ASSERT_THAT(v, IsStringEq(output));
 }
-#define CASE(input, output) (std::make_tuple(std::string_view("builtins.toString " #input), std::string_view(output)))
+#define CASE(input, output) (std::make_tuple(std::string_view("builtins.toString " input), std::string_view(output)))
 INSTANTIATE_TEST_SUITE_P(
 toString,
 ToStringPrimOpTest,
 testing::Values(
-CASE("foo", "foo"),
-CASE(1, "1"),
-CASE([1 2 3], "1 2 3"),
-CASE(.123, "0.123000"),
-CASE(true, "1"),
-CASE(false, ""),
-CASE(null, ""),
-CASE({ v = "bar"; __toString = self: self.v; }, "bar"),
-CASE({ v = "bar"; __toString = self: self.v; outPath = "foo"; }, "bar"),
-CASE({ outPath = "foo"; }, "foo"),
-CASE(./test, "/test")
+CASE(R"("foo")", "foo"),
+CASE(R"(1)", "1"),
+CASE(R"([1 2 3])", "1 2 3"),
+CASE(R"(.123)", "0.123000"),
+CASE(R"(true)", "1"),
+CASE(R"(false)", ""),
+CASE(R"(null)", ""),
+CASE(R"({ v = "bar"; __toString = self: self.v; })", "bar"),
+CASE(R"({ v = "bar"; __toString = self: self.v; outPath = "foo"; })", "bar"),
+CASE(R"({ outPath = "foo"; })", "foo"),
+CASE(R"(./test)", "/test")
 )
 );
 #undef CASE


@@ -70,7 +70,7 @@ struct FetchSettings : public Config
 Setting<bool> warnDirty{this, true, "warn-dirty",
 "Whether to warn about dirty Git/Mercurial trees."};
-Setting<std::string> flakeRegistry{this, "https://github.com/NixOS/flake-registry/raw/master/flake-registry.json", "flake-registry",
+Setting<std::string> flakeRegistry{this, "https://channels.nixos.org/flake-registry.json", "flake-registry",
 "Path or URI of the global flake registry."};
 Setting<bool> useRegistries{this, true, "use-registries",


@@ -85,8 +85,9 @@ std::optional<std::string> readHead(const Path & path)
 bool storeCachedHead(const std::string& actualUrl, const std::string& headRef)
 {
 Path cacheDir = getCachePath(actualUrl);
+auto gitDir = ".";
 try {
-runProgram("git", true, { "-C", cacheDir, "symbolic-ref", "--", "HEAD", headRef });
+runProgram("git", true, { "-C", cacheDir, "--git-dir", gitDir, "symbolic-ref", "--", "HEAD", headRef });
 } catch (ExecError &e) {
 if (!WIFEXITED(e.status)) throw;
 return false;
@@ -182,7 +183,7 @@ WorkdirInfo getWorkdirInfo(const Input & input, const Path & workdir)
 if (hasHead) {
 // Using git diff is preferrable over lower-level operations here,
 // because its conceptually simpler and we only need the exit code anyways.
-auto gitDiffOpts = Strings({ "-C", workdir, "diff", "HEAD", "--quiet"});
+auto gitDiffOpts = Strings({ "-C", workdir, "--git-dir", gitDir, "diff", "HEAD", "--quiet"});
 if (!submodules) {
 // Changes in submodules should only make the tree dirty
 // when those submodules will be copied as well.
@@ -203,6 +204,7 @@ WorkdirInfo getWorkdirInfo(const Input & input, const Path & workdir)
 std::pair<StorePath, Input> fetchFromWorkdir(ref<Store> store, Input & input, const Path & workdir, const WorkdirInfo & workdirInfo)
 {
 const bool submodules = maybeGetBoolAttr(input.attrs, "submodules").value_or(false);
+auto gitDir = ".git";
 if (!fetchSettings.allowDirty)
 throw Error("Git tree '%s' is dirty", workdir);
@@ -210,7 +212,7 @@ std::pair<StorePath, Input> fetchFromWorkdir(ref<Store> store, Input & input, co
 if (fetchSettings.warnDirty)
 warn("Git tree '%s' is dirty", workdir);
-auto gitOpts = Strings({ "-C", workdir, "ls-files", "-z" });
+auto gitOpts = Strings({ "-C", workdir, "--git-dir", gitDir, "ls-files", "-z" });
 if (submodules)
 gitOpts.emplace_back("--recurse-submodules");
@@ -240,7 +242,7 @@ std::pair<StorePath, Input> fetchFromWorkdir(ref<Store> store, Input & input, co
 // modified dirty file?
 input.attrs.insert_or_assign(
 "lastModified",
-workdirInfo.hasHead ? std::stoull(runProgram("git", true, { "-C", actualPath, "log", "-1", "--format=%ct", "--no-show-signature", "HEAD" })) : 0);
+workdirInfo.hasHead ? std::stoull(runProgram("git", true, { "-C", actualPath, "--git-dir", gitDir, "log", "-1", "--format=%ct", "--no-show-signature", "HEAD" })) : 0);
 return {std::move(storePath), input};
 }
@@ -572,7 +574,7 @@ struct GitInputScheme : InputScheme
 bool isShallow = chomp(runProgram("git", true, { "-C", repoDir, "--git-dir", gitDir, "rev-parse", "--is-shallow-repository" })) == "true";
 if (isShallow && !shallow)
-throw Error("'%s' is a shallow Git repository, but a non-shallow repository is needed", actualUrl);
+throw Error("'%s' is a shallow Git repository, but shallow repositories are only allowed when `shallow = true;` is specified.", actualUrl);
 // FIXME: check whether rev is an ancestor of ref.


@@ -381,7 +381,7 @@ struct SourceHutInputScheme : GitArchiveInputScheme
 Headers headers = makeHeadersWithAuthTokens(host);
-std::string ref_uri;
+std::string refUri;
 if (ref == "HEAD") {
 auto file = store->toRealPath(
 downloadFile(store, fmt("%s/HEAD", base_url), "source", false, headers).storePath);
@@ -393,10 +393,11 @@ struct SourceHutInputScheme : GitArchiveInputScheme
 if (!remoteLine) {
 throw BadURL("in '%d', couldn't resolve HEAD ref '%d'", input.to_string(), ref);
 }
-ref_uri = remoteLine->target;
+refUri = remoteLine->target;
 } else {
-ref_uri = fmt("refs/(heads|tags)/%s", ref);
+refUri = fmt("refs/(heads|tags)/%s", ref);
 }
+std::regex refRegex(refUri);
 auto file = store->toRealPath(
 downloadFile(store, fmt("%s/info/refs", base_url), "source", false, headers).storePath);
@@ -406,7 +407,7 @@ struct SourceHutInputScheme : GitArchiveInputScheme
 std::optional<std::string> id;
 while(!id && getline(is, line)) {
 auto parsedLine = git::parseLsRemoteLine(line);
-if (parsedLine && parsedLine->reference == ref_uri)
+if (parsedLine && parsedLine->reference && std::regex_match(*parsedLine->reference, refRegex))
 id = parsedLine->target;
 }


@@ -7,6 +7,22 @@ HookInstance::HookInstance()
 {
 debug("starting build hook '%s'", settings.buildHook);
+auto buildHookArgs = tokenizeString<std::list<std::string>>(settings.buildHook.get());
+
+if (buildHookArgs.empty())
+throw Error("'build-hook' setting is empty");
+
+auto buildHook = buildHookArgs.front();
+buildHookArgs.pop_front();
+
+Strings args;
+
+for (auto & arg : buildHookArgs)
+args.push_back(arg);
+
+args.push_back(std::string(baseNameOf(settings.buildHook.get())));
+args.push_back(std::to_string(verbosity));
+
 /* Create a pipe to get the output of the child. */
 fromHook.create();
@@ -36,14 +52,9 @@ HookInstance::HookInstance()
 if (dup2(builderOut.readSide.get(), 5) == -1)
 throw SysError("dupping builder's stdout/stderr");
-Strings args = {
-std::string(baseNameOf(settings.buildHook.get())),
-std::to_string(verbosity),
-};
-
-execv(settings.buildHook.get().c_str(), stringsToCharPtrs(args).data());
-throw SysError("executing '%s'", settings.buildHook);
+execv(buildHook.c_str(), stringsToCharPtrs(args).data());
+throw SysError("executing '%s'", buildHook);
 });
 pid.setSeparatePG(true);


@@ -1717,7 +1717,19 @@ void LocalDerivationGoal::runChild()
 for (auto & i : dirsInChroot) {
 if (i.second.source == "/proc") continue; // backwards compatibility
-doBind(i.second.source, chrootRootDir + i.first, i.second.optional);
+
+#if HAVE_EMBEDDED_SANDBOX_SHELL
+if (i.second.source == "__embedded_sandbox_shell__") {
+static unsigned char sh[] = {
+#include "embedded-sandbox-shell.gen.hh"
+};
+auto dst = chrootRootDir + i.first;
+createDirs(dirOf(dst));
+writeFile(dst, std::string_view((const char *) sh, sizeof(sh)));
+chmod_(dst, 0555);
+} else
+#endif
+doBind(i.second.source, chrootRootDir + i.first, i.second.optional);
 }
 /* Bind a new instance of procfs on /proc. */


@@ -154,7 +154,7 @@ void PathSubstitutionGoal::tryNext()
 only after we've downloaded the path. */
 if (!sub->isTrusted && worker.store.pathInfoIsUntrusted(*info))
 {
-warn("the substitute for '%s' from '%s' is not signed by any of the keys in 'trusted-public-keys'",
+warn("ignoring substitute for '%s' from '%s', as it's not signed by any of the keys in 'trusted-public-keys'",
 worker.store.printStorePath(storePath), sub->getUri());
 tryNext();
 return;


@@ -135,6 +135,7 @@ void LocalStore::addTempRoot(const StorePath & path)
 state->fdRootsSocket.close();
 goto restart;
 }
+throw;
 }
 }
@@ -153,6 +154,7 @@ void LocalStore::addTempRoot(const StorePath & path)
 state->fdRootsSocket.close();
 goto restart;
 }
+throw;
 } catch (EndOfFile & e) {
 debug("GC socket disconnected");
 state->fdRootsSocket.close();


@@ -36,7 +36,6 @@ Settings::Settings()
 , nixStateDir(canonPath(getEnv("NIX_STATE_DIR").value_or(NIX_STATE_DIR)))
 , nixConfDir(canonPath(getEnv("NIX_CONF_DIR").value_or(NIX_CONF_DIR)))
 , nixUserConfFiles(getUserConfigFiles())
-, nixLibexecDir(canonPath(getEnv("NIX_LIBEXEC_DIR").value_or(NIX_LIBEXEC_DIR)))
 , nixBinDir(canonPath(getEnv("NIX_BIN_DIR").value_or(NIX_BIN_DIR)))
 , nixManDir(canonPath(NIX_MAN_DIR))
 , nixDaemonSocketFile(canonPath(getEnv("NIX_DAEMON_SOCKET_PATH").value_or(nixStateDir + DEFAULT_SOCKET_PATH)))
@@ -67,12 +66,13 @@ Settings::Settings()
 sandboxPaths = tokenizeString<StringSet>("/bin/sh=" SANDBOX_SHELL);
 #endif
-/* chroot-like behavior from Apple's sandbox */
+/* chroot-like behavior from Apple's sandbox */
 #if __APPLE__
 sandboxPaths = tokenizeString<StringSet>("/System/Library/Frameworks /System/Library/PrivateFrameworks /bin/sh /bin/bash /private/tmp /private/var/tmp /usr/lib");
 allowedImpureHostPrefixes = tokenizeString<StringSet>("/System/Library /usr/lib /dev /bin/sh");
 #endif
+
+buildHook = getSelfExe().value_or("nix") + " __build-remote";
 }
 void loadConfFile()
void loadConfFile() void loadConfFile()


@@ -79,9 +79,6 @@ public:
 /* A list of user configuration files to load. */
 std::vector<Path> nixUserConfFiles;
-/* The directory where internal helper programs are stored. */
-Path nixLibexecDir;
-
 /* The directory where the main programs are stored. */
 Path nixBinDir;
@@ -195,7 +192,7 @@ public:
 )",
 {"build-timeout"}};
-PathSetting buildHook{this, true, nixLibexecDir + "/nix/build-remote", "build-hook",
+PathSetting buildHook{this, true, "", "build-hook",
 "The path of the helper program that executes builds to remote machines."};
 Setting<std::string> builders{
@@ -802,7 +799,7 @@ public:
 )"};
 Setting<StringSet> ignoredAcls{
-this, {"security.selinux", "system.nfs4_acl"}, "ignored-acls",
+this, {"security.selinux", "system.nfs4_acl", "security.csm"}, "ignored-acls",
 R"(
 A list of ACLs that should be ignored, normally Nix attempts to
 remove all ACLs from files and directories in the Nix store, but

View file

@ -69,6 +69,7 @@ protected:
} catch (SysError & e) { } catch (SysError & e) {
if (e.errNo == ENOENT) if (e.errNo == ENOENT)
throw NoSuchBinaryCacheFile("file '%s' does not exist in binary cache", path); throw NoSuchBinaryCacheFile("file '%s' does not exist in binary cache", path);
throw;
} }
} }

View file

@ -39,14 +39,23 @@ libstore_CXXFLAGS += \
-DNIX_STATE_DIR=\"$(localstatedir)/nix\" \ -DNIX_STATE_DIR=\"$(localstatedir)/nix\" \
-DNIX_LOG_DIR=\"$(localstatedir)/log/nix\" \ -DNIX_LOG_DIR=\"$(localstatedir)/log/nix\" \
-DNIX_CONF_DIR=\"$(sysconfdir)/nix\" \ -DNIX_CONF_DIR=\"$(sysconfdir)/nix\" \
-DNIX_LIBEXEC_DIR=\"$(libexecdir)\" \
-DNIX_BIN_DIR=\"$(bindir)\" \ -DNIX_BIN_DIR=\"$(bindir)\" \
-DNIX_MAN_DIR=\"$(mandir)\" \ -DNIX_MAN_DIR=\"$(mandir)\" \
-DLSOF=\"$(lsof)\" -DLSOF=\"$(lsof)\"
ifeq ($(embedded_sandbox_shell),yes)
libstore_CXXFLAGS += -DSANDBOX_SHELL=\"__embedded_sandbox_shell__\"
$(d)/build/local-derivation-goal.cc: $(d)/embedded-sandbox-shell.gen.hh
$(d)/embedded-sandbox-shell.gen.hh: $(sandbox_shell)
$(trace-gen) hexdump -v -e '1/1 "0x%x," "\n"' < $< > $@.tmp
@mv $@.tmp $@
else
ifneq ($(sandbox_shell),) ifneq ($(sandbox_shell),)
libstore_CXXFLAGS += -DSANDBOX_SHELL="\"$(sandbox_shell)\"" libstore_CXXFLAGS += -DSANDBOX_SHELL="\"$(sandbox_shell)\""
endif endif
endif
$(d)/local-store.cc: $(d)/schema.sql.gen.hh $(d)/ca-specific-schema.sql.gen.hh $(d)/local-store.cc: $(d)/schema.sql.gen.hh $(d)/ca-specific-schema.sql.gen.hh
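The `hexdump` rule above turns the sandbox shell binary into a header of comma-separated byte literals. A hedged sketch of how such a generated header can be consumed (the function name and write-out logic here are hypothetical, not the actual `local-derivation-goal.cc` code):

```cpp
#include <fstream>
#include <string>

// Hypothetical helper: materialise the embedded shell at `dest` when
// SANDBOX_SHELL is the "__embedded_sandbox_shell__" marker defined above.
void installEmbeddedSandboxShell(const std::string & dest)
{
    static const unsigned char embeddedShell[] = {
        #include "embedded-sandbox-shell.gen.hh" // bytes emitted by the hexdump rule
    };
    std::ofstream out(dest, std::ios::binary);
    out.write(reinterpret_cast<const char *>(embeddedShell), sizeof(embeddedShell));
    // A real implementation would also mark the file executable and bind it
    // at /bin/sh inside the build sandbox.
}
```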

View file

@ -67,13 +67,26 @@ bool UserLock::findFreeUser() {
#if __linux__ #if __linux__
/* Get the list of supplementary groups of this build user. This /* Get the list of supplementary groups of this build user. This
is usually either empty or contains a group such as "kvm". */ is usually either empty or contains a group such as "kvm". */
supplementaryGIDs.resize(10); int ngroups = 32; // arbitrary initial guess
int ngroups = supplementaryGIDs.size(); supplementaryGIDs.resize(ngroups);
int err = getgrouplist(pw->pw_name, pw->pw_gid,
supplementaryGIDs.data(), &ngroups);
if (err == -1)
throw Error("failed to get list of supplementary groups for '%1%'", pw->pw_name);
int err = getgrouplist(pw->pw_name, pw->pw_gid, supplementaryGIDs.data(),
&ngroups);
// Our initial size of 32 wasn't sufficient, the correct size has
// been stored in ngroups, so we try again.
if (err == -1) {
supplementaryGIDs.resize(ngroups);
err = getgrouplist(pw->pw_name, pw->pw_gid, supplementaryGIDs.data(),
&ngroups);
}
// If it failed once more, then something must be broken.
if (err == -1)
throw Error("failed to get list of supplementary groups for '%1%'",
pw->pw_name);
// Finally, trim back the GID list to its real size
supplementaryGIDs.resize(ngroups); supplementaryGIDs.resize(ngroups);
#endif #endif
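For reference, a self-contained sketch of the same two-pass `getgrouplist()` idiom on Linux: start with a guessed buffer size, and if the call reports the buffer was too small it stores the required count in `ngroups`, so resize and retry once (the helper name is illustrative):

```cpp
#include <grp.h>
#include <pwd.h>
#include <stdexcept>
#include <vector>

std::vector<gid_t> supplementaryGroups(const struct passwd * pw)
{
    int ngroups = 32;                 // arbitrary initial guess
    std::vector<gid_t> gids(ngroups);
    if (getgrouplist(pw->pw_name, pw->pw_gid, gids.data(), &ngroups) == -1) {
        gids.resize(ngroups);         // ngroups now holds the required count
        if (getgrouplist(pw->pw_name, pw->pw_gid, gids.data(), &ngroups) == -1)
            throw std::runtime_error("getgrouplist failed twice");
    }
    gids.resize(ngroups);             // trim to the actual number of groups
    return gids;
}
```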

View file

@ -62,6 +62,9 @@ public:
/* How often to purge expired entries from the cache. */ /* How often to purge expired entries from the cache. */
const int purgeInterval = 24 * 3600; const int purgeInterval = 24 * 3600;
/* How long to cache binary cache info (i.e. /nix-cache-info) */
const int cacheInfoTtl = 7 * 24 * 3600;
struct Cache struct Cache
{ {
int id; int id;
@ -98,7 +101,7 @@ public:
"insert or replace into BinaryCaches(url, timestamp, storeDir, wantMassQuery, priority) values (?, ?, ?, ?, ?)"); "insert or replace into BinaryCaches(url, timestamp, storeDir, wantMassQuery, priority) values (?, ?, ?, ?, ?)");
state->queryCache.create(state->db, state->queryCache.create(state->db,
"select id, storeDir, wantMassQuery, priority from BinaryCaches where url = ?"); "select id, storeDir, wantMassQuery, priority from BinaryCaches where url = ? and timestamp > ?");
state->insertNAR.create(state->db, state->insertNAR.create(state->db,
"insert or replace into NARs(cache, hashPart, namePart, url, compression, fileHash, fileSize, narHash, " "insert or replace into NARs(cache, hashPart, namePart, url, compression, fileHash, fileSize, narHash, "
@ -183,7 +186,7 @@ public:
auto i = state->caches.find(uri); auto i = state->caches.find(uri);
if (i == state->caches.end()) { if (i == state->caches.end()) {
auto queryCache(state->queryCache.use()(uri)); auto queryCache(state->queryCache.use()(uri)(time(0) - cacheInfoTtl));
if (!queryCache.next()) if (!queryCache.next())
return std::nullopt; return std::nullopt;
state->caches.emplace(uri, state->caches.emplace(uri,

View file

@ -69,8 +69,6 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &
if (value != "unknown-deriver") if (value != "unknown-deriver")
deriver = StorePath(value); deriver = StorePath(value);
} }
else if (name == "System")
system = value;
else if (name == "Sig") else if (name == "Sig")
sigs.insert(value); sigs.insert(value);
else if (name == "CA") { else if (name == "CA") {
@ -106,9 +104,6 @@ std::string NarInfo::to_string(const Store & store) const
if (deriver) if (deriver)
res += "Deriver: " + std::string(deriver->to_string()) + "\n"; res += "Deriver: " + std::string(deriver->to_string()) + "\n";
if (!system.empty())
res += "System: " + system + "\n";
for (auto sig : sigs) for (auto sig : sigs)
res += "Sig: " + sig + "\n"; res += "Sig: " + sig + "\n";

View file

@ -14,7 +14,6 @@ struct NarInfo : ValidPathInfo
std::string compression; std::string compression;
std::optional<Hash> fileHash; std::optional<Hash> fileHash;
uint64_t fileSize = 0; uint64_t fileSize = 0;
std::string system;
NarInfo() = delete; NarInfo() = delete;
NarInfo(StorePath && path, Hash narHash) : ValidPathInfo(std::move(path), narHash) { } NarInfo(StorePath && path, Hash narHash) : ValidPathInfo(std::move(path), narHash) { }

View file

@ -1,7 +1,7 @@
create table if not exists ValidPaths ( create table if not exists ValidPaths (
id integer primary key autoincrement not null, id integer primary key autoincrement not null,
path text unique not null, path text unique not null,
hash text not null, hash text not null, -- base16 representation
registrationTime integer not null, registrationTime integer not null,
deriver text, deriver text,
narSize integer, narSize integer,

View file

@ -1302,7 +1302,8 @@ std::pair<std::string, Store::Params> splitUriAndParams(const std::string & uri_
return {uri, params}; return {uri, params};
} }
static bool isNonUriPath(const std::string & spec) { static bool isNonUriPath(const std::string & spec)
{
return return
// is not a URL // is not a URL
spec.find("://") == std::string::npos spec.find("://") == std::string::npos
@ -1319,6 +1320,26 @@ std::shared_ptr<Store> openFromNonUri(const std::string & uri, const Store::Para
return std::make_shared<LocalStore>(params); return std::make_shared<LocalStore>(params);
else if (pathExists(settings.nixDaemonSocketFile)) else if (pathExists(settings.nixDaemonSocketFile))
return std::make_shared<UDSRemoteStore>(params); return std::make_shared<UDSRemoteStore>(params);
#if __linux__
else if (!pathExists(stateDir) && params.empty() && getuid() != 0) {
/* If /nix doesn't exist, there is no daemon socket, and
we're not root, then automatically set up a chroot
store in ~/.local/share/nix/root. */
auto chrootStore = getDataDir() + "/nix/root";
if (!pathExists(chrootStore)) {
try {
createDirs(chrootStore);
} catch (Error & e) {
return std::make_shared<LocalStore>(params);
}
warn("'/nix' does not exist, so Nix will use '%s' as a chroot store", chrootStore);
} else
debug("'/nix' does not exist, so Nix will use '%s' as a chroot store", chrootStore);
Store::Params params2;
params2["root"] = chrootStore;
return std::make_shared<LocalStore>(params2);
}
#endif
else else
return std::make_shared<LocalStore>(params); return std::make_shared<LocalStore>(params);
} else if (uri == "daemon") { } else if (uri == "daemon") {

View file

@ -25,6 +25,8 @@ public:
/* Return a short one-line description of the command. */ /* Return a short one-line description of the command. */
virtual std::string description() { return ""; } virtual std::string description() { return ""; }
virtual bool forceImpureByDefault() { return false; }
/* Return documentation about this command, in Markdown format. */ /* Return documentation about this command, in Markdown format. */
virtual std::string doc() { return ""; } virtual std::string doc() { return ""; }

View file

@ -13,6 +13,7 @@ std::map<ExperimentalFeature, std::string> stringifiedXpFeatures = {
{ Xp::RecursiveNix, "recursive-nix" }, { Xp::RecursiveNix, "recursive-nix" },
{ Xp::NoUrlLiterals, "no-url-literals" }, { Xp::NoUrlLiterals, "no-url-literals" },
{ Xp::FetchClosure, "fetch-closure" }, { Xp::FetchClosure, "fetch-closure" },
{ Xp::ReplFlake, "repl-flake" },
}; };
const std::optional<ExperimentalFeature> parseExperimentalFeature(const std::string_view & name) const std::optional<ExperimentalFeature> parseExperimentalFeature(const std::string_view & name)

View file

@ -22,6 +22,7 @@ enum struct ExperimentalFeature
RecursiveNix, RecursiveNix,
NoUrlLiterals, NoUrlLiterals,
FetchClosure, FetchClosure,
ReplFlake,
}; };
/** /**

View file

@ -8,9 +8,9 @@ std::string hiliteMatches(
std::string_view prefix, std::string_view prefix,
std::string_view postfix) std::string_view postfix)
{ {
// Avoid copy on zero matches // Avoid extra work on zero matches
if (matches.size() == 0) if (matches.size() == 0)
return (std::string) s; return std::string(s);
std::sort(matches.begin(), matches.end(), [](const auto & a, const auto & b) { std::sort(matches.begin(), matches.end(), [](const auto & a, const auto & b) {
return a.position() < b.position(); return a.position() < b.position();

View file

@ -29,6 +29,7 @@
#ifdef __APPLE__ #ifdef __APPLE__
#include <sys/syscall.h> #include <sys/syscall.h>
#include <mach-o/dyld.h>
#endif #endif
#ifdef __linux__ #ifdef __linux__
@ -574,6 +575,20 @@ Path getHome()
static Path homeDir = []() static Path homeDir = []()
{ {
auto homeDir = getEnv("HOME"); auto homeDir = getEnv("HOME");
if (homeDir) {
// Only use $HOME if doesn't exist or is owned by the current user.
struct stat st;
int result = stat(homeDir->c_str(), &st);
if (result != 0) {
if (errno != ENOENT) {
warn("couldn't stat $HOME ('%s') for reason other than not existing ('%d'), falling back to the one defined in the 'passwd' file", *homeDir, errno);
homeDir.reset();
}
} else if (st.st_uid != geteuid()) {
warn("$HOME ('%s') is not owned by you, falling back to the one defined in the 'passwd' file", *homeDir);
homeDir.reset();
}
}
if (!homeDir) { if (!homeDir) {
std::vector<char> buf(16384); std::vector<char> buf(16384);
struct passwd pwbuf; struct passwd pwbuf;
@ -619,6 +634,27 @@ Path getDataDir()
} }
std::optional<Path> getSelfExe()
{
static auto cached = []() -> std::optional<Path>
{
#if __linux__
return readLink("/proc/self/exe");
#elif __APPLE__
char buf[1024];
uint32_t size = sizeof(buf);
if (_NSGetExecutablePath(buf, &size) == 0)
return buf;
else
return std::nullopt;
#else
return std::nullopt;
#endif
}();
return cached;
}
Paths createDirs(const Path & path) Paths createDirs(const Path & path)
{ {
Paths created; Paths created;

View file

@ -149,10 +149,14 @@ std::vector<Path> getConfigDirs();
/* Return $XDG_DATA_HOME or $HOME/.local/share. */ /* Return $XDG_DATA_HOME or $HOME/.local/share. */
Path getDataDir(); Path getDataDir();
/* Return the path of the current executable. */
std::optional<Path> getSelfExe();
/* Create a directory and all its parents, if necessary. Returns the /* Create a directory and all its parents, if necessary. Returns the
list of created directories, in order of creation. */ list of created directories, in order of creation. */
Paths createDirs(const Path & path); Paths createDirs(const Path & path);
inline Paths createDirs(PathView path) { inline Paths createDirs(PathView path)
{
return createDirs(Path(path)); return createDirs(Path(path));
} }
@ -700,4 +704,19 @@ template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;
std::string showBytes(uint64_t bytes); std::string showBytes(uint64_t bytes);
/* Provide an addition operator between strings and string_views
inexplicably omitted from the standard library. */
inline std::string operator + (const std::string & s1, std::string_view s2)
{
auto s = s1;
s.append(s2);
return s;
}
inline std::string operator + (std::string && s, std::string_view s2)
{
s.append(s2);
return std::move(s);
}
} }
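The two `operator +` overloads added above exist because, prior to C++26, the standard library provides no `operator+` between `std::string` and `std::string_view`. A minimal standalone example of the concatenation they enable (redefining one overload at global scope purely for the sketch):

```cpp
#include <iostream>
#include <string>
#include <string_view>

// Same shape as the helper above, defined globally here for a runnable example.
inline std::string operator + (const std::string & s1, std::string_view s2)
{
    auto s = s1;
    s.append(s2);
    return s;
}

int main()
{
    std::string dir = "/nix/store";
    std::string_view name = "/example-1.0";
    std::cout << dir + name << "\n";   // prints "/nix/store/example-1.0"
    return 0;
}
```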

View file

@ -257,11 +257,12 @@ static void main_nix_build(int argc, char * * argv)
auto autoArgs = myArgs.getAutoArgs(*state); auto autoArgs = myArgs.getAutoArgs(*state);
auto autoArgsWithInNixShell = autoArgs;
if (runEnv) { if (runEnv) {
auto newArgs = state->buildBindings(autoArgs->size() + 1); auto newArgs = state->buildBindings(autoArgsWithInNixShell->size() + 1);
newArgs.alloc("inNixShell").mkBool(true); newArgs.alloc("inNixShell").mkBool(true);
for (auto & i : *autoArgs) newArgs.insert(i); for (auto & i : *autoArgs) newArgs.insert(i);
autoArgs = newArgs.finish(); autoArgsWithInNixShell = newArgs.finish();
} }
if (packages) { if (packages) {
@ -316,10 +317,39 @@ static void main_nix_build(int argc, char * * argv)
Value vRoot; Value vRoot;
state->eval(e, vRoot); state->eval(e, vRoot);
std::function<bool(const Value & v)> takesNixShellAttr;
takesNixShellAttr = [&](const Value & v) {
if (!runEnv) {
return false;
}
bool add = false;
if (v.type() == nFunction && v.lambda.fun->hasFormals()) {
for (auto & i : v.lambda.fun->formals->formals) {
if (state->symbols[i.name] == "inNixShell") {
add = true;
break;
}
}
}
return add;
};
for (auto & i : attrPaths) { for (auto & i : attrPaths) {
Value & v(*findAlongAttrPath(*state, i, *autoArgs, vRoot).first); Value & v(*findAlongAttrPath(
*state,
i,
takesNixShellAttr(vRoot) ? *autoArgsWithInNixShell : *autoArgs,
vRoot
).first);
state->forceValue(v, [&]() { return v.determinePos(noPos); }); state->forceValue(v, [&]() { return v.determinePos(noPos); });
getDerivations(*state, v, "", *autoArgs, drvs, false); getDerivations(
*state,
v,
"",
takesNixShellAttr(v) ? *autoArgsWithInNixShell : *autoArgs,
drvs,
false
);
} }
} }
@ -543,6 +573,8 @@ static void main_nix_build(int argc, char * * argv)
restoreProcessContext(); restoreProcessContext();
logger->stop();
execvp(shell->c_str(), argPtrs.data()); execvp(shell->c_str(), argPtrs.data());
throw SysError("executing shell '%s'", *shell); throw SysError("executing shell '%s'", *shell);
@ -601,6 +633,8 @@ static void main_nix_build(int argc, char * * argv)
outPaths.push_back(outputPath); outPaths.push_back(outputPath);
} }
logger->stop();
for (auto & path : outPaths) for (auto & path : outPaths)
std::cout << store->printStorePath(path) << '\n'; std::cout << store->printStorePath(path) << '\n';
} }

View file

@ -37,6 +37,7 @@ void removeOldGenerations(std::string dir)
link = readLink(path); link = readLink(path);
} catch (SysError & e) { } catch (SysError & e) {
if (e.errNo == ENOENT) continue; if (e.errNo == ENOENT) continue;
throw;
} }
if (link.find("link") != std::string::npos) { if (link.find("link") != std::string::npos) {
printInfo(format("removing old generations of profile %1%") % path); printInfo(format("removing old generations of profile %1%") % path);

View file

@ -1485,7 +1485,7 @@ static int main_nix_env(int argc, char * * argv)
if (globals.profile == "") if (globals.profile == "")
globals.profile = getDefaultProfile(); globals.profile = getDefaultProfile();
op(globals, opFlags, opArgs); op(globals, std::move(opFlags), std::move(opArgs));
globals.state->printStats(); globals.state->printStats();

View file

@ -1093,7 +1093,7 @@ static int main_nix_store(int argc, char * * argv)
if (op != opDump && op != opRestore) /* !!! hack */ if (op != opDump && op != opRestore) /* !!! hack */
store = openStore(); store = openStore();
op(opFlags, opArgs); op(std::move(opFlags), std::move(opArgs));
return 0; return 0;
} }

View file

@ -276,15 +276,25 @@ struct Common : InstallableCommand, MixProfile
const BuildEnvironment & buildEnvironment, const BuildEnvironment & buildEnvironment,
const Path & outputsDir = absPath(".") + "/outputs") const Path & outputsDir = absPath(".") + "/outputs")
{ {
// A list of colon-separated environment variables that should be
// prepended to, rather than overwritten, in order to keep the shell usable.
// Please keep this list minimal in order to avoid impurities.
static const char * const savedVars[] = {
"PATH", // for commands
"XDG_DATA_DIRS", // for loadable completion
};
std::ostringstream out; std::ostringstream out;
out << "unset shellHook\n"; out << "unset shellHook\n";
out << "nix_saved_PATH=\"$PATH\"\n"; for (auto & var : savedVars)
out << fmt("nix_saved_%s=\"$%s\"\n", var, var);
buildEnvironment.toBash(out, ignoreVars); buildEnvironment.toBash(out, ignoreVars);
out << "PATH=\"$PATH:$nix_saved_PATH\"\n"; for (auto & var : savedVars)
out << fmt("%s=\"$%s:$nix_saved_%s\"\n", var, var, var);
out << "export NIX_BUILD_TOP=\"$(mktemp -d -t nix-shell.XXXXXX)\"\n"; out << "export NIX_BUILD_TOP=\"$(mktemp -d -t nix-shell.XXXXXX)\"\n";
for (auto & i : {"TMP", "TMPDIR", "TEMP", "TEMPDIR"}) for (auto & i : {"TMP", "TMPDIR", "TEMP", "TEMPDIR"})

View file

@ -740,7 +740,8 @@ struct CmdFlakeInitCommon : virtual Args, EvalCommand
"If you've set '%s' to a string, try using a path instead.", "If you've set '%s' to a string, try using a path instead.",
templateDir, templateDirAttr->getAttrPathStr()); templateDir, templateDirAttr->getAttrPathStr());
std::vector<Path> files; std::vector<Path> changedFiles;
std::vector<Path> conflictedFiles;
std::function<void(const Path & from, const Path & to)> copyDir; std::function<void(const Path & from, const Path & to)> copyDir;
copyDir = [&](const Path & from, const Path & to) copyDir = [&](const Path & from, const Path & to)
@ -757,31 +758,41 @@ struct CmdFlakeInitCommon : virtual Args, EvalCommand
auto contents = readFile(from2); auto contents = readFile(from2);
if (pathExists(to2)) { if (pathExists(to2)) {
auto contents2 = readFile(to2); auto contents2 = readFile(to2);
if (contents != contents2) if (contents != contents2) {
throw Error("refusing to overwrite existing file '%s'", to2); printError("refusing to overwrite existing file '%s'\n please merge it manually with '%s'", to2, from2);
conflictedFiles.push_back(to2);
} else {
notice("skipping identical file: %s", from2);
}
continue;
} else } else
writeFile(to2, contents); writeFile(to2, contents);
} }
else if (S_ISLNK(st.st_mode)) { else if (S_ISLNK(st.st_mode)) {
auto target = readLink(from2); auto target = readLink(from2);
if (pathExists(to2)) { if (pathExists(to2)) {
if (readLink(to2) != target) if (readLink(to2) != target) {
throw Error("refusing to overwrite existing symlink '%s'", to2); printError("refusing to overwrite existing file '%s'\n please merge it manually with '%s'", to2, from2);
conflictedFiles.push_back(to2);
} else {
notice("skipping identical file: %s", from2);
}
continue;
} else } else
createSymlink(target, to2); createSymlink(target, to2);
} }
else else
throw Error("file '%s' has unsupported type", from2); throw Error("file '%s' has unsupported type", from2);
files.push_back(to2); changedFiles.push_back(to2);
notice("wrote: %s", to2); notice("wrote: %s", to2);
} }
}; };
copyDir(templateDir, flakeDir); copyDir(templateDir, flakeDir);
if (pathExists(flakeDir + "/.git")) { if (!changedFiles.empty() && pathExists(flakeDir + "/.git")) {
Strings args = { "-C", flakeDir, "add", "--intent-to-add", "--force", "--" }; Strings args = { "-C", flakeDir, "add", "--intent-to-add", "--force", "--" };
for (auto & s : files) args.push_back(s); for (auto & s : changedFiles) args.push_back(s);
runProgram("git", true, args); runProgram("git", true, args);
} }
auto welcomeText = cursor->maybeGetAttr("welcomeText"); auto welcomeText = cursor->maybeGetAttr("welcomeText");
@ -789,6 +800,9 @@ struct CmdFlakeInitCommon : virtual Args, EvalCommand
notice("\n"); notice("\n");
notice(renderMarkdownToTerminal(welcomeText->getString())); notice(renderMarkdownToTerminal(welcomeText->getString()));
} }
if (!conflictedFiles.empty())
throw Error("Encountered %d conflicts - see above", conflictedFiles.size());
} }
}; };

View file

@ -30,7 +30,7 @@ convert-secret-to-public` to get the corresponding public key for
verifying signed store paths. verifying signed store paths.
The mandatory argument `--key-name` specifies a key name (such as The mandatory argument `--key-name` specifies a key name (such as
`cache.example.org-1). It is used to look up keys on the client when `cache.example.org-1`). It is used to look up keys on the client when
it verifies signatures. It can be anything, but it's suggested to use it verifies signatures. It can be anything, but it's suggested to use
the host name of your cache (e.g. `cache.example.org`) with a suffix the host name of your cache (e.g. `cache.example.org`) with a suffix
denoting the number of the key (to be incremented every time you need denoting the number of the key (to be incremented every time you need

View file

@ -266,6 +266,11 @@ void mainWrapped(int argc, char * * argv)
programPath = argv[0]; programPath = argv[0];
auto programName = std::string(baseNameOf(programPath)); auto programName = std::string(baseNameOf(programPath));
if (argc > 0 && std::string_view(argv[0]) == "__build-remote") {
programName = "build-remote";
argv++; argc--;
}
{ {
auto legacy = (*RegisterLegacyCommand::commands)[programName]; auto legacy = (*RegisterLegacyCommand::commands)[programName];
if (legacy) return legacy(argc, argv); if (legacy) return legacy(argc, argv);
@ -380,6 +385,9 @@ void mainWrapped(int argc, char * * argv)
settings.ttlPositiveNarInfoCache = 0; settings.ttlPositiveNarInfoCache = 0;
} }
if (args.command->second->forceImpureByDefault() && !evalSettings.pureEval.overridden) {
evalSettings.pureEval = false;
}
args.command->second->prepare(); args.command->second->prepare();
args.command->second->run(); args.command->second->run();
} }

View file

@ -29,7 +29,7 @@ highest precedence:
can be specified using the NixOS option `nix.registry`. can be specified using the NixOS option `nix.registry`.
* The user registry `~/.config/nix/registry.json`. This registry can * The user registry `~/.config/nix/registry.json`. This registry can
be modified by commands such as `nix flake pin`. be modified by commands such as `nix registry pin`.
* Overrides specified on the command line using the option * Overrides specified on the command line using the option
`--override-flake`. `--override-flake`.

View file

@ -24,10 +24,34 @@ R""(
* Interact with Nixpkgs in the REPL: * Interact with Nixpkgs in the REPL:
```console ```console
# nix repl '<nixpkgs>' # nix repl --file example.nix
Loading Installable ''...
Added 3 variables.
Loading '<nixpkgs>'... # nix repl --expr '{a={b=3;c=4;};}'
Added 12428 variables. Loading Installable ''...
Added 1 variables.
# nix repl --expr '{a={b=3;c=4;};}' a
Loading Installable ''...
Added 1 variables.
# nix repl --extra-experimental-features 'flakes repl-flake' nixpkgs
Loading Installable 'flake:nixpkgs#'...
Added 5 variables.
nix-repl> legacyPackages.x86_64-linux.emacs.name
"emacs-27.1"
nix-repl> legacyPackages.x86_64-linux.emacs.name
"emacs-27.1"
nix-repl> :q
# nix repl --expr 'import <nixpkgs>{}'
Loading Installable ''...
Added 12439 variables.
nix-repl> emacs.name nix-repl> emacs.name
"emacs-27.1" "emacs-27.1"

View file

@ -47,7 +47,7 @@ void runProgramInStore(ref<Store> store,
Strings helperArgs = { chrootHelperName, store->storeDir, store2->getRealStoreDir(), program }; Strings helperArgs = { chrootHelperName, store->storeDir, store2->getRealStoreDir(), program };
for (auto & arg : args) helperArgs.push_back(arg); for (auto & arg : args) helperArgs.push_back(arg);
execv(readLink("/proc/self/exe").c_str(), stringsToCharPtrs(helperArgs).data()); execv(getSelfExe().value_or("nix").c_str(), stringsToCharPtrs(helperArgs).data());
throw SysError("could not execute chroot helper"); throw SysError("could not execute chroot helper");
} }

View file

@ -18,16 +18,26 @@ using namespace nix;
std::string wrap(std::string prefix, std::string s) std::string wrap(std::string prefix, std::string s)
{ {
return prefix + s + ANSI_NORMAL; return concatStrings(prefix, s, ANSI_NORMAL);
} }
struct CmdSearch : InstallableCommand, MixJSON struct CmdSearch : InstallableCommand, MixJSON
{ {
std::vector<std::string> res; std::vector<std::string> res;
std::vector<std::string> excludeRes;
CmdSearch() CmdSearch()
{ {
expectArgs("regex", &res); expectArgs("regex", &res);
addFlag(Flag {
.longName = "exclude",
.shortName = 'e',
.description = "Hide packages whose attribute path, name or description contain *regex*.",
.labels = {"regex"},
.handler = {[this](std::string s) {
excludeRes.push_back(s);
}},
});
} }
std::string description() override std::string description() override
@ -62,11 +72,16 @@ struct CmdSearch : InstallableCommand, MixJSON
res.push_back("^"); res.push_back("^");
std::vector<std::regex> regexes; std::vector<std::regex> regexes;
std::vector<std::regex> excludeRegexes;
regexes.reserve(res.size()); regexes.reserve(res.size());
excludeRegexes.reserve(excludeRes.size());
for (auto & re : res) for (auto & re : res)
regexes.push_back(std::regex(re, std::regex::extended | std::regex::icase)); regexes.push_back(std::regex(re, std::regex::extended | std::regex::icase));
for (auto & re : excludeRes)
excludeRegexes.emplace_back(re, std::regex::extended | std::regex::icase);
auto state = getEvalState(); auto state = getEvalState();
auto jsonOut = json ? std::make_unique<JSONObject>(std::cout) : nullptr; auto jsonOut = json ? std::make_unique<JSONObject>(std::cout) : nullptr;
@ -106,6 +121,14 @@ struct CmdSearch : InstallableCommand, MixJSON
std::vector<std::smatch> nameMatches; std::vector<std::smatch> nameMatches;
bool found = false; bool found = false;
for (auto & regex : excludeRegexes) {
if (
std::regex_search(attrPath2, regex)
|| std::regex_search(name.name, regex)
|| std::regex_search(description, regex))
return;
}
for (auto & regex : regexes) { for (auto & regex : regexes) {
found = false; found = false;
auto addAll = [&found](std::sregex_iterator it, std::vector<std::smatch> & vec) { auto addAll = [&found](std::sregex_iterator it, std::vector<std::smatch> & vec) {
@ -133,15 +156,15 @@ struct CmdSearch : InstallableCommand, MixJSON
jsonElem.attr("version", name.version); jsonElem.attr("version", name.version);
jsonElem.attr("description", description); jsonElem.attr("description", description);
} else { } else {
auto name2 = hiliteMatches(name.name, std::move(nameMatches), ANSI_GREEN, "\e[0;2m"); auto name2 = hiliteMatches(name.name, nameMatches, ANSI_GREEN, "\e[0;2m");
if (results > 1) logger->cout(""); if (results > 1) logger->cout("");
logger->cout( logger->cout(
"* %s%s", "* %s%s",
wrap("\e[0;1m", hiliteMatches(attrPath2, std::move(attrPathMatches), ANSI_GREEN, "\e[0;1m")), wrap("\e[0;1m", hiliteMatches(attrPath2, attrPathMatches, ANSI_GREEN, "\e[0;1m")),
name.version != "" ? " (" + name.version + ")" : ""); name.version != "" ? " (" + name.version + ")" : "");
if (description != "") if (description != "")
logger->cout( logger->cout(
" %s", hiliteMatches(description, std::move(descriptionMatches), ANSI_GREEN, ANSI_NORMAL)); " %s", hiliteMatches(description, descriptionMatches, ANSI_GREEN, ANSI_NORMAL));
} }
} }
} }
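As a standalone illustration of the `--exclude` logic added above: a result is hidden if any exclude regex matches, and shown only when every positive regex matches (the names here are illustrative, not the `nix search` implementation, which checks attribute path, name and description separately):

```cpp
#include <iostream>
#include <regex>
#include <string>
#include <vector>

bool shown(const std::string & text,
           const std::vector<std::regex> & include,
           const std::vector<std::regex> & exclude)
{
    for (auto & re : exclude)
        if (std::regex_search(text, re)) return false;  // any exclude hit hides it
    for (auto & re : include)
        if (!std::regex_search(text, re)) return false; // every include must match
    return true;
}

int main()
{
    std::vector<std::regex> inc { std::regex("neovim", std::regex::extended | std::regex::icase) };
    std::vector<std::regex> exc { std::regex("python|gui", std::regex::extended | std::regex::icase) };
    std::cout << shown("neovim-qt gui frontend", inc, exc) << "\n"; // 0: excluded
    std::cout << shown("neovim text editor", inc, exc) << "\n";     // 1: shown
    return 0;
}
```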

View file

@ -43,12 +43,23 @@ R""(
# nix search nixpkgs 'firefox|chromium' # nix search nixpkgs 'firefox|chromium'
``` ```
* Search for packages containing `git'`and either `frontend` or `gui`: * Search for packages containing `git` and either `frontend` or `gui`:
```console ```console
# nix search nixpkgs git 'frontend|gui' # nix search nixpkgs git 'frontend|gui'
``` ```
* Search for packages containing `neovim` but hide ones containing either `gui` or `python`:
```console
# nix search nixpkgs neovim -e 'python|gui'
```
or
```console
# nix search nixpkgs neovim -e 'python' -e 'gui'
```
# Description # Description
`nix search` searches *installable* (which must be evaluatable, e.g. a `nix search` searches *installable* (which must be evaluatable, e.g. a

View file

@ -34,7 +34,7 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand
std::string description() override std::string description() override
{ {
return "upgrade Nix to the latest stable version"; return "upgrade Nix to the stable version declared in Nixpkgs";
} }
std::string doc() override std::string doc() override

View file

@ -2,7 +2,7 @@ R""(
# Examples # Examples
* Upgrade Nix to the latest stable version: * Upgrade Nix to the stable version declared in Nixpkgs:
```console ```console
# nix upgrade-nix # nix upgrade-nix
@ -16,8 +16,11 @@ R""(
# Description # Description
This command upgrades Nix to the latest version. By default, it This command upgrades Nix to the stable version declared in Nixpkgs.
locates the directory containing the `nix` binary in the `$PATH` This stable version is defined in [nix-fallback-paths.nix](https://github.com/NixOS/nixpkgs/raw/master/nixos/modules/installer/tools/nix-fallback-paths.nix)
and updated manually. It may not always be the latest tagged release.
By default, it locates the directory containing the `nix` binary in the `$PATH`
environment variable. If that directory is a Nix profile, it will environment variable. If that directory is a Nix profile, it will
upgrade the `nix` package in that profile to the latest stable binary upgrade the `nix` package in that profile to the latest stable binary
release. release.

View file

@ -1 +1 @@
{ ... }@args: import ./shell.nix (args // { contentAddressed = true; }) { inNixShell ? false, ... }@args: import ./shell.nix (args // { contentAddressed = true; })

View file

@ -75,7 +75,7 @@ rec {
buildCommand = '' buildCommand = ''
mkdir -p $out/bin mkdir -p $out/bin
echo ${rootCA} # Just to make it depend on it echo ${rootCA} # Just to make it depend on it
echo "" > $out/bin/${name} echo "#! ${shell}" > $out/bin/${name}
chmod +x $out/bin/${name} chmod +x $out/bin/${name}
''; '';
}; };

View file

@ -50,6 +50,8 @@ export busybox="@sandbox_shell@"
export version=@PACKAGE_VERSION@ export version=@PACKAGE_VERSION@
export system=@system@ export system=@system@
export BUILD_SHARED_LIBS=@BUILD_SHARED_LIBS@
export IMPURE_VAR1=foo export IMPURE_VAR1=foo
export IMPURE_VAR2=bar export IMPURE_VAR2=bar

View file

@ -58,7 +58,7 @@ EOF
nix eval --file - <<EOF nix eval --file - <<EOF
with (builtins.fromJSON (builtins.readFile ./flake.lock)); with (builtins.fromJSON (builtins.readFile ./flake.lock));
# Url inputs whose extension doesnt match a know archive format should # Url inputs whose extension doesn't match a known archive format should
# not be unpacked by default # not be unpacked by default
assert (nodes.no_ext_default_no_unpack.locked.type == "file"); assert (nodes.no_ext_default_no_unpack.locked.type == "file");
assert (nodes.no_ext_default_no_unpack.locked.unpack or false == false); assert (nodes.no_ext_default_no_unpack.locked.unpack or false == false);

View file

@ -32,7 +32,7 @@ for repo in $flake1Dir $flake2Dir $flake3Dir $flake7Dir $templatesDir $nonFlakeD
rm -rf $repo $repo.tmp rm -rf $repo $repo.tmp
mkdir -p $repo mkdir -p $repo
# Give one repo a non-master initial branch. # Give one repo a non-main initial branch.
extraArgs= extraArgs=
if [[ $repo == $flake2Dir ]]; then if [[ $repo == $flake2Dir ]]; then
extraArgs="--initial-branch=main" extraArgs="--initial-branch=main"
@ -173,11 +173,11 @@ nix build -o $TEST_ROOT/result $flake2Dir#bar --no-write-lock-file
nix build -o $TEST_ROOT/result $flake2Dir#bar --no-update-lock-file 2>&1 | grep 'requires lock file changes' nix build -o $TEST_ROOT/result $flake2Dir#bar --no-update-lock-file 2>&1 | grep 'requires lock file changes'
nix build -o $TEST_ROOT/result $flake2Dir#bar --commit-lock-file nix build -o $TEST_ROOT/result $flake2Dir#bar --commit-lock-file
[[ -e $flake2Dir/flake.lock ]] [[ -e $flake2Dir/flake.lock ]]
[[ -z $(git -C $flake2Dir diff master) ]] [[ -z $(git -C $flake2Dir diff main || echo failed) ]]
# Rerunning the build should not change the lockfile. # Rerunning the build should not change the lockfile.
nix build -o $TEST_ROOT/result $flake2Dir#bar nix build -o $TEST_ROOT/result $flake2Dir#bar
[[ -z $(git -C $flake2Dir diff master) ]] [[ -z $(git -C $flake2Dir diff main || echo failed) ]]
# Building with a lockfile should not require a fetch of the registry. # Building with a lockfile should not require a fetch of the registry.
nix build -o $TEST_ROOT/result --flake-registry file:///no-registry.json $flake2Dir#bar --refresh nix build -o $TEST_ROOT/result --flake-registry file:///no-registry.json $flake2Dir#bar --refresh
@ -186,7 +186,7 @@ nix build -o $TEST_ROOT/result --no-use-registries $flake2Dir#bar --refresh
# Updating the flake should not change the lockfile. # Updating the flake should not change the lockfile.
nix flake lock $flake2Dir nix flake lock $flake2Dir
[[ -z $(git -C $flake2Dir diff master) ]] [[ -z $(git -C $flake2Dir diff main || echo failed) ]]
# Now we should be able to build the flake in pure mode. # Now we should be able to build the flake in pure mode.
nix build -o $TEST_ROOT/result flake2#bar nix build -o $TEST_ROOT/result flake2#bar
@ -221,7 +221,7 @@ nix build -o $TEST_ROOT/result $flake3Dir#"sth sth"
nix build -o $TEST_ROOT/result $flake3Dir#"sth%20sth" nix build -o $TEST_ROOT/result $flake3Dir#"sth%20sth"
# Check whether it saved the lockfile # Check whether it saved the lockfile
(! [[ -z $(git -C $flake3Dir diff master) ]]) [[ -n $(git -C $flake3Dir diff master) ]]
git -C $flake3Dir add flake.lock git -C $flake3Dir add flake.lock
@ -321,10 +321,10 @@ nix build -o $TEST_ROOT/result flake4#xyzzy
# Test 'nix flake update' and --override-flake. # Test 'nix flake update' and --override-flake.
nix flake lock $flake3Dir nix flake lock $flake3Dir
[[ -z $(git -C $flake3Dir diff master) ]] [[ -z $(git -C $flake3Dir diff master || echo failed) ]]
nix flake update $flake3Dir --override-flake flake2 nixpkgs nix flake update $flake3Dir --override-flake flake2 nixpkgs
[[ ! -z $(git -C $flake3Dir diff master) ]] [[ ! -z $(git -C $flake3Dir diff master || echo failed) ]]
# Make branch "removeXyzzy" where flake3 doesn't have xyzzy anymore # Make branch "removeXyzzy" where flake3 doesn't have xyzzy anymore
git -C $flake3Dir checkout -b removeXyzzy git -C $flake3Dir checkout -b removeXyzzy
@ -408,8 +408,10 @@ cat > $templatesDir/trivial/flake.nix <<EOF
}; };
} }
EOF EOF
echo a > $templatesDir/trivial/a
echo b > $templatesDir/trivial/b
git -C $templatesDir add flake.nix trivial/flake.nix git -C $templatesDir add flake.nix trivial/
git -C $templatesDir commit -m 'Initial' git -C $templatesDir commit -m 'Initial'
nix flake check templates nix flake check templates
@ -424,6 +426,18 @@ nix flake show $flake7Dir
nix flake show $flake7Dir --json | jq nix flake show $flake7Dir --json | jq
git -C $flake7Dir commit -a -m 'Initial' git -C $flake7Dir commit -a -m 'Initial'
# Test 'nix flake init' with benign conflicts
rm -rf $flake7Dir && mkdir $flake7Dir && git -C $flake7Dir init
echo a > $flake7Dir/a
(cd $flake7Dir && nix flake init) # check idempotence
# Test 'nix flake init' with conflicts
rm -rf $flake7Dir && mkdir $flake7Dir && git -C $flake7Dir init
echo b > $flake7Dir/a
pushd $flake7Dir
(! nix flake init) |& grep "refusing to overwrite existing file '$flake7Dir/a'"
popd
# Test 'nix flake new'. # Test 'nix flake new'.
rm -rf $flake6Dir rm -rf $flake6Dir
nix flake new -t templates#trivial $flake6Dir nix flake new -t templates#trivial $flake6Dir

View file

@ -18,7 +18,12 @@ cat << EOF > flake.nix
with import ./config.nix; with import ./config.nix;
mkDerivation { mkDerivation {
name = "formatter"; name = "formatter";
buildCommand = "mkdir -p \$out/bin; cp \${./fmt.simple.sh} \$out/bin/formatter"; buildCommand = ''
mkdir -p \$out/bin
echo "#! ${shell}" > \$out/bin/formatter
cat \${./fmt.simple.sh} >> \$out/bin/formatter
chmod +x \$out/bin/formatter
'';
}; };
}; };
} }

View file

@ -7,7 +7,7 @@ with import (nixpkgs + "/nixos/lib/testing-python.nix") {
let let
# Generate a fake root CA and a fake github.com certificate. # Generate a fake root CA and a fake api.github.com / channels.nixos.org certificate.
cert = pkgs.runCommand "cert" { buildInputs = [ pkgs.openssl ]; } cert = pkgs.runCommand "cert" { buildInputs = [ pkgs.openssl ]; }
'' ''
mkdir -p $out mkdir -p $out
@ -18,7 +18,7 @@ let
openssl req -newkey rsa:2048 -nodes -keyout $out/server.key \ openssl req -newkey rsa:2048 -nodes -keyout $out/server.key \
-subj "/C=CN/ST=Denial/L=Springfield/O=Dis/CN=github.com" -out server.csr -subj "/C=CN/ST=Denial/L=Springfield/O=Dis/CN=github.com" -out server.csr
openssl x509 -req -extfile <(printf "subjectAltName=DNS:api.github.com,DNS:github.com,DNS:raw.githubusercontent.com") \ openssl x509 -req -extfile <(printf "subjectAltName=DNS:api.github.com,DNS:channels.nixos.org") \
-days 36500 -in server.csr -CA $out/ca.crt -CAkey ca.key -CAcreateserial -out $out/server.crt -days 36500 -in server.csr -CA $out/ca.crt -CAkey ca.key -CAcreateserial -out $out/server.crt
''; '';
@ -67,7 +67,7 @@ makeTest (
name = "github-flakes"; name = "github-flakes";
nodes = nodes =
{ # Impersonate github.com and api.github.com. {
github = github =
{ config, pkgs, ... }: { config, pkgs, ... }:
{ networking.firewall.allowedTCPPorts = [ 80 443 ]; { networking.firewall.allowedTCPPorts = [ 80 443 ];
@ -77,12 +77,12 @@ makeTest (
services.httpd.extraConfig = '' services.httpd.extraConfig = ''
ErrorLog syslog:local6 ErrorLog syslog:local6
''; '';
services.httpd.virtualHosts."github.com" = services.httpd.virtualHosts."channels.nixos.org" =
{ forceSSL = true; { forceSSL = true;
sslServerKey = "${cert}/server.key"; sslServerKey = "${cert}/server.key";
sslServerCert = "${cert}/server.crt"; sslServerCert = "${cert}/server.crt";
servedDirs = servedDirs =
[ { urlPath = "/NixOS/flake-registry/raw/master"; [ { urlPath = "/";
dir = registry; dir = registry;
} }
]; ];
@ -103,13 +103,13 @@ makeTest (
{ config, lib, pkgs, nodes, ... }: { config, lib, pkgs, nodes, ... }:
{ virtualisation.writableStore = true; { virtualisation.writableStore = true;
virtualisation.diskSize = 2048; virtualisation.diskSize = 2048;
virtualisation.pathsInNixDB = [ pkgs.hello pkgs.fuse ]; virtualisation.additionalPaths = [ pkgs.hello pkgs.fuse ];
virtualisation.memorySize = 4096; virtualisation.memorySize = 4096;
nix.binaryCaches = lib.mkForce [ ]; nix.binaryCaches = lib.mkForce [ ];
nix.extraOptions = "experimental-features = nix-command flakes"; nix.extraOptions = "experimental-features = nix-command flakes";
environment.systemPackages = [ pkgs.jq ]; environment.systemPackages = [ pkgs.jq ];
networking.hosts.${(builtins.head nodes.github.config.networking.interfaces.eth1.ipv4.addresses).address} = networking.hosts.${(builtins.head nodes.github.config.networking.interfaces.eth1.ipv4.addresses).address} =
[ "github.com" "api.github.com" "raw.githubusercontent.com" ]; [ "channels.nixos.org" "api.github.com" ];
security.pki.certificateFiles = [ "${cert}/ca.crt" ]; security.pki.certificateFiles = [ "${cert}/ca.crt" ];
}; };
}; };
@ -123,7 +123,7 @@ makeTest (
github.wait_for_unit("httpd.service") github.wait_for_unit("httpd.service")
client.succeed("curl -v https://github.com/ >&2") client.succeed("curl -v https://api.github.com/ >&2")
client.succeed("nix registry list | grep nixpkgs") client.succeed("nix registry list | grep nixpkgs")
rev = client.succeed("nix flake info nixpkgs --json | jq -r .revision") rev = client.succeed("nix flake info nixpkgs --json | jq -r .revision")

View file

@ -5,6 +5,8 @@ export NIX_REMOTE=dummy://
nix-instantiate --eval -E 'builtins.trace "Hello" 123' 2>&1 | grep -q Hello nix-instantiate --eval -E 'builtins.trace "Hello" 123' 2>&1 | grep -q Hello
nix-instantiate --eval -E 'builtins.addErrorContext "Hello" 123' 2>&1 nix-instantiate --eval -E 'builtins.addErrorContext "Hello" 123' 2>&1
nix-instantiate --trace-verbose --eval -E 'builtins.traceVerbose "Hello" 123' 2>&1 | grep -q Hello
(! nix-instantiate --eval -E 'builtins.traceVerbose "Hello" 123' 2>&1 | grep -q Hello)
(! nix-instantiate --show-trace --eval -E 'builtins.addErrorContext "Hello" 123' 2>&1 | grep -q Hello) (! nix-instantiate --show-trace --eval -E 'builtins.addErrorContext "Hello" 123' 2>&1 | grep -q Hello)
nix-instantiate --show-trace --eval -E 'builtins.addErrorContext "Hello" (throw "Foo")' 2>&1 | grep -q Hello nix-instantiate --show-trace --eval -E 'builtins.addErrorContext "Hello" (throw "Foo")' 2>&1 | grep -q Hello

View file

@ -114,4 +114,8 @@ tests-environment = NIX_REMOTE= $(bash) -e
clean-files += $(d)/common.sh $(d)/config.nix $(d)/ca/config.nix clean-files += $(d)/common.sh $(d)/config.nix $(d)/ca/config.nix
test-deps += tests/common.sh tests/config.nix tests/ca/config.nix tests/plugins/libplugintest.$(SO_EXT) test-deps += tests/common.sh tests/config.nix tests/ca/config.nix
ifeq ($(BUILD_SHARED_LIBS), 1)
test-deps += tests/plugins/libplugintest.$(SO_EXT)
endif

View file

@ -14,7 +14,7 @@ makeTest (let pkgA = pkgs.cowsay; pkgB = pkgs.wget; pkgC = pkgs.hello; pkgD = pk
{ client = { client =
{ config, lib, pkgs, ... }: { config, lib, pkgs, ... }:
{ virtualisation.writableStore = true; { virtualisation.writableStore = true;
virtualisation.pathsInNixDB = [ pkgA pkgD.drvPath ]; virtualisation.additionalPaths = [ pkgA pkgD.drvPath ];
nix.binaryCaches = lib.mkForce [ ]; nix.binaryCaches = lib.mkForce [ ];
}; };
@ -22,7 +22,7 @@ makeTest (let pkgA = pkgs.cowsay; pkgB = pkgs.wget; pkgC = pkgs.hello; pkgD = pk
{ config, pkgs, ... }: { config, pkgs, ... }:
{ services.openssh.enable = true; { services.openssh.enable = true;
virtualisation.writableStore = true; virtualisation.writableStore = true;
virtualisation.pathsInNixDB = [ pkgB pkgC ]; virtualisation.additionalPaths = [ pkgB pkgC ];
}; };
}; };

View file

@ -102,3 +102,11 @@ source <(nix print-dev-env -f "$shellDotNix" shellDrv)
[[ ${arr2[1]} = $'\n' ]] [[ ${arr2[1]} = $'\n' ]]
[[ ${arr2[2]} = $'x\ny' ]] [[ ${arr2[2]} = $'x\ny' ]]
[[ $(fun) = blabla ]] [[ $(fun) = blabla ]]
# Test nix-shell with ellipsis and no `inNixShell` argument (for backwards compat with old nixpkgs)
cat >$TEST_ROOT/shell-ellipsis.nix <<EOF
{ system ? "x86_64-linux", ... }@args:
assert (!(args ? inNixShell));
(import $shellDotNix { }).shellDrv
EOF
nix-shell $TEST_ROOT/shell-ellipsis.nix --run "true"

View file

@ -5,6 +5,42 @@ with import (nixpkgs + "/nixos/lib/testing-python.nix") {
extraConfigurations = [ { nixpkgs.overlays = [ overlay ]; } ]; extraConfigurations = [ { nixpkgs.overlays = [ overlay ]; } ];
}; };
let
nix-fetch = pkgs.writeText "fetch.nix" ''
derivation {
# This derivation is a copy of what is available over at
# nix.git:corepkgs/fetchurl.nix
builder = "builtin:fetchurl";
# We're going to fetch data from the http_dns instance created before
# we expect the content to be the same as the content available there.
# ```
# $ nix-hash --type sha256 --to-base32 $(echo "hello world" | sha256sum | cut -d " " -f 1)
# 0ix4jahrkll5zg01wandq78jw3ab30q4nscph67rniqg5x7r0j59
# ```
outputHash = "0ix4jahrkll5zg01wandq78jw3ab30q4nscph67rniqg5x7r0j59";
outputHashAlgo = "sha256";
outputHashMode = "flat";
name = "example.com";
url = "http://example.com";
unpack = false;
executable = false;
system = "builtin";
preferLocalBuild = true;
impureEnvVars = [
"http_proxy" "https_proxy" "ftp_proxy" "all_proxy" "no_proxy"
];
urls = [ "http://example.com" ];
}
'';
in
makeTest ( makeTest (
rec { rec {
@ -68,40 +104,6 @@ rec {
}; };
}; };
nix-fetch = pkgs.writeText "fetch.nix" ''
derivation {
# This derivation is an copy from what is available over at
# nix.git:corepkgs/fetchurl.nix
builder = "builtin:fetchurl";
# We're going to fetch data from the http_dns instance created before
# we expect the content to be the same as the content available there.
# ```
# $ nix-hash --type sha256 --to-base32 $(echo "hello world" | sha256sum | cut -d " " -f 1)
# 0ix4jahrkll5zg01wandq78jw3ab30q4nscph67rniqg5x7r0j59
# ```
outputHash = "0ix4jahrkll5zg01wandq78jw3ab30q4nscph67rniqg5x7r0j59";
outputHashAlgo = "sha256";
outputHashMode = "flat";
name = "example.com";
url = "http://example.com";
unpack = false;
executable = false;
system = "builtin";
preferLocalBuild = true;
impureEnvVars = [
"http_proxy" "https_proxy" "ftp_proxy" "all_proxy" "no_proxy"
];
urls = [ "http://example.com" ];
}
'';
testScript = { nodes, ... }: '' testScript = { nodes, ... }: ''
http_dns.wait_for_unit("nginx") http_dns.wait_for_unit("nginx")
http_dns.wait_for_open_port(80) http_dns.wait_for_open_port(80)

View file

@ -2,6 +2,11 @@ source common.sh
set -o pipefail set -o pipefail
if [[ $BUILD_SHARED_LIBS != 1 ]]; then
echo "plugins are not supported"
exit 99
fi
res=$(nix --option setting-set true --option plugin-files $PWD/plugins/libplugintest* eval --expr builtins.anotherNull) res=$(nix --option setting-set true --option plugin-files $PWD/plugins/libplugintest* eval --expr builtins.anotherNull)
[ "$res"x = "nullx" ] [ "$res"x = "nullx" ]

View file

@ -30,3 +30,5 @@ nix eval --store dummy:// --write-to $TEST_ROOT/eval-out --expr '{ x = "foo" + "
rm -rf $TEST_ROOT/eval-out rm -rf $TEST_ROOT/eval-out
(! nix eval --store dummy:// --write-to $TEST_ROOT/eval-out --expr '{ "." = "bla"; }') (! nix eval --store dummy:// --write-to $TEST_ROOT/eval-out --expr '{ "." = "bla"; }')
(! nix eval --expr '~/foo')

View file

@ -61,7 +61,7 @@ in
} }
]; ];
virtualisation.writableStore = true; virtualisation.writableStore = true;
virtualisation.pathsInNixDB = [ config.system.build.extraUtils ]; virtualisation.additionalPaths = [ config.system.build.extraUtils ];
nix.binaryCaches = lib.mkForce [ ]; nix.binaryCaches = lib.mkForce [ ];
programs.ssh.extraConfig = "ConnectTimeout 30"; programs.ssh.extraConfig = "ConnectTimeout 30";
}; };

View file

@ -42,6 +42,11 @@ testRepl () {
echo "$replOutput" echo "$replOutput"
echo "$replOutput" | grep -qs "while evaluating the file" \ echo "$replOutput" | grep -qs "while evaluating the file" \
|| fail "nix repl --show-trace doesn't show the trace" || fail "nix repl --show-trace doesn't show the trace"
nix repl "${nixArgs[@]}" --option pure-eval true 2>&1 <<< "builtins.currentSystem" \
| grep "attribute 'currentSystem' missing"
nix repl "${nixArgs[@]}" 2>&1 <<< "builtins.currentSystem" \
| grep "$(nix-instantiate --eval -E 'builtins.currentSystem')"
} }
# Simple test, try building a drv # Simple test, try building a drv
@ -50,15 +55,17 @@ testRepl
testRepl --store "$TEST_ROOT/store?real=$NIX_STORE_DIR" testRepl --store "$TEST_ROOT/store?real=$NIX_STORE_DIR"
testReplResponse () { testReplResponse () {
local response="$(nix repl <<< "$1")" local commands="$1"; shift
echo "$response" | grep -qs "$2" \ local expectedResponse="$1"; shift
local response="$(nix repl "$@" <<< "$commands")"
echo "$response" | grep -qs "$expectedResponse" \
|| fail "repl command set: || fail "repl command set:
$1 $commands
does not respond with: does not respond with:
$2 $expectedResponse
but with: but with:
@ -71,3 +78,48 @@ testReplResponse '
:a { a = "2"; } :a { a = "2"; }
"result: ${a}" "result: ${a}"
' "result: 2" ' "result: 2"
testReplResponse '
drvPath
' '".*-simple.drv"' \
$testDir/simple.nix
testReplResponse '
drvPath
' '".*-simple.drv"' \
--file $testDir/simple.nix --experimental-features 'ca-derivations'
testReplResponse '
drvPath
' '".*-simple.drv"' \
--file $testDir/simple.nix --extra-experimental-features 'repl-flake ca-derivations'
mkdir -p flake && cat <<EOF > flake/flake.nix
{
outputs = { self }: {
foo = 1;
bar.baz = 2;
changingThing = "beforeChange";
};
}
EOF
testReplResponse '
foo + baz
' "3" \
./flake ./flake\#bar --experimental-features 'flakes repl-flake'
# Test the `:reload` mechanism with flakes:
# - Eval `./flake#changingThing`
# - Modify the flake
# - Re-eval it
# - Check that the result has changed
replResult=$( (
echo "changingThing"
sleep 1 # Leave the repl the time to eval 'foo'
sed -i 's/beforeChange/afterChange/' flake/flake.nix
echo ":reload"
echo "changingThing"
) | nix repl ./flake --experimental-features 'flakes repl-flake')
echo "$replResult" | grep -qs beforeChange
echo "$replResult" | grep -qs afterChange

View file

@ -28,11 +28,19 @@ nix search -f search.nix '' |grep -q hello
e=$'\x1b' # grep doesn't support \e, \033 or even \x1b e=$'\x1b' # grep doesn't support \e, \033 or even \x1b
# Multiple overlapping regexes # Multiple overlapping regexes
(( $(nix search -f search.nix '' 'oo' 'foo' 'oo' | grep "$e\[32;1mfoo$e\\[0;1m" | wc -l) == 1 )) (( $(nix search -f search.nix '' 'oo' 'foo' 'oo' | grep -c "$e\[32;1mfoo$e\\[0;1m") == 1 ))
(( $(nix search -f search.nix '' 'broken b' 'en bar' | grep "$e\[32;1mbroken bar$e\\[0m" | wc -l) == 1 )) (( $(nix search -f search.nix '' 'broken b' 'en bar' | grep -c "$e\[32;1mbroken bar$e\\[0m") == 1 ))
# Multiple matches # Multiple matches
# Searching for 'o' should yield the 'o' in 'broken bar', the 'oo' in foo and 'o' in hello # Searching for 'o' should yield the 'o' in 'broken bar', the 'oo' in foo and 'o' in hello
(( $(nix search -f search.nix '' 'o' | grep -Eo "$e\[32;1mo{1,2}$e\[(0|0;1)m" | wc -l) == 3 )) (( $(nix search -f search.nix '' 'o' | grep -Eoc "$e\[32;1mo{1,2}$e\[(0|0;1)m") == 3 ))
# Searching for 'b' should yield the 'b' in bar and the two 'b's in 'broken bar' # Searching for 'b' should yield the 'b' in bar and the two 'b's in 'broken bar'
# NOTE: This does not work with `grep -c` because it counts the two 'b's in 'broken bar' as one matched line
(( $(nix search -f search.nix '' 'b' | grep -Eo "$e\[32;1mb$e\[(0|0;1)m" | wc -l) == 3 )) (( $(nix search -f search.nix '' 'b' | grep -Eo "$e\[32;1mb$e\[(0|0;1)m" | wc -l) == 3 ))
## Tests for --exclude
(( $(nix search -f search.nix -e hello | grep -c hello) == 0 ))
(( $(nix search -f search.nix foo --exclude 'foo|bar' | grep -Ec 'foo|bar') == 0 ))
(( $(nix search -f search.nix foo -e foo --exclude bar | grep -Ec 'foo|bar') == 0 ))
[[ $(nix search -f search.nix -e bar --json | jq -c 'keys') == '["foo","hello"]' ]]

View file

@ -10,12 +10,12 @@ with import (nixpkgs + "/nixos/lib/testing-python.nix") {
makeTest { makeTest {
name = "setuid"; name = "setuid";
machine = nodes.machine =
{ config, lib, pkgs, ... }: { config, lib, pkgs, ... }:
{ virtualisation.writableStore = true; { virtualisation.writableStore = true;
nix.binaryCaches = lib.mkForce [ ]; nix.binaryCaches = lib.mkForce [ ];
nix.nixPath = [ "nixpkgs=${lib.cleanSource pkgs.path}" ]; nix.nixPath = [ "nixpkgs=${lib.cleanSource pkgs.path}" ];
virtualisation.pathsInNixDB = [ pkgs.stdenv pkgs.pkgsi686Linux.stdenv ]; virtualisation.additionalPaths = [ pkgs.stdenv pkgs.pkgsi686Linux.stdenv ];
}; };
testScript = { nodes }: '' testScript = { nodes }: ''

View file

@ -59,7 +59,7 @@ let
echo 'ref: refs/heads/master' > $out/HEAD echo 'ref: refs/heads/master' > $out/HEAD
mkdir -p $out/info mkdir -p $out/info
echo -e '${nixpkgs.rev}\trefs/heads/master' > $out/info/refs echo -e '${nixpkgs.rev}\trefs/heads/master\n${nixpkgs.rev}\trefs/tags/foo-bar' > $out/info/refs
''; '';
in in
@ -106,7 +106,7 @@ makeTest (
{ {
virtualisation.writableStore = true; virtualisation.writableStore = true;
virtualisation.diskSize = 2048; virtualisation.diskSize = 2048;
virtualisation.pathsInNixDB = [ pkgs.hello pkgs.fuse ]; virtualisation.additionalPaths = [ pkgs.hello pkgs.fuse ];
virtualisation.memorySize = 4096; virtualisation.memorySize = 4096;
nix.binaryCaches = lib.mkForce [ ]; nix.binaryCaches = lib.mkForce [ ];
nix.extraOptions = '' nix.extraOptions = ''
@ -132,6 +132,17 @@ makeTest (
client.succeed("curl -v https://git.sr.ht/ >&2") client.succeed("curl -v https://git.sr.ht/ >&2")
client.succeed("nix registry list | grep nixpkgs") client.succeed("nix registry list | grep nixpkgs")
# Test that it resolves HEAD
rev = client.succeed("nix flake info sourcehut:~NixOS/nixpkgs --json | jq -r .revision")
assert rev.strip() == "${nixpkgs.rev}", "revision mismatch"
# Test that it resolves branches
rev = client.succeed("nix flake info sourcehut:~NixOS/nixpkgs/master --json | jq -r .revision")
assert rev.strip() == "${nixpkgs.rev}", "revision mismatch"
# Test that it resolves tags
rev = client.succeed("nix flake info sourcehut:~NixOS/nixpkgs/foo-bar --json | jq -r .revision")
assert rev.strip() == "${nixpkgs.rev}", "revision mismatch"
# Registry and pinning test
rev = client.succeed("nix flake info nixpkgs --json | jq -r .revision") rev = client.succeed("nix flake info nixpkgs --json | jq -r .revision")
assert rev.strip() == "${nixpkgs.rev}", "revision mismatch" assert rev.strip() == "${nixpkgs.rev}", "revision mismatch"