Merge branch 'master' into parallel-nix-copy

Théophane Hufschmitt 2022-07-20 10:05:34 +02:00 committed by GitHub
commit 7ed91d6c6a
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
98 changed files with 1538 additions and 741 deletions

.github/stale.yml

@ -1,10 +1,9 @@
# Configuration for probot-stale - https://github.com/probot/stale
daysUntilStale: 180
daysUntilClose: 365
daysUntilClose: false
exemptLabels:
- "critical"
- "never-stale"
staleLabel: "stale"
markComment: |
I marked this as stale due to inactivity. → [More info](https://github.com/NixOS/nix/blob/master/.github/STALE-BOT.md)
closeComment: |
I closed this issue due to inactivity. → [More info](https://github.com/NixOS/nix/blob/master/.github/STALE-BOT.md)
markComment: false
closeComment: false


@ -4,6 +4,8 @@ on:
pull_request:
push:
permissions: read-all
jobs:
tests:
@ -28,6 +30,8 @@ jobs:
- run: nix --experimental-features 'nix-command flakes' flake check -L
check_cachix:
permissions:
contents: none
name: Cachix secret present for installer tests
runs-on: ubuntu-latest
outputs:
@ -88,7 +92,7 @@ jobs:
fetch-depth: 0
- uses: cachix/install-nix-action@v17
- run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
- run: echo NIX_VERSION="$(nix-instantiate --eval -E '(import ./default.nix).defaultPackage.${builtins.currentSystem}.version' | tr -d \")" >> $GITHUB_ENV
- run: echo NIX_VERSION="$(nix --experimental-features 'nix-command flakes' eval .\#default.version | tr -d \")" >> $GITHUB_ENV
- uses: cachix/cachix-action@v10
if: needs.check_cachix.outputs.secret == 'true'
with:


@ -1,8 +1,12 @@
name: Hydra status
permissions: read-all
on:
schedule:
- cron: "12,42 * * * *"
workflow_dispatch:
jobs:
check_hydra_status:
name: Check Hydra status


@ -1,4 +1,3 @@
HOST_OS = @host_os@
AR = @AR@
BDW_GC_LIBS = @BDW_GC_LIBS@
BOOST_LDFLAGS = @BOOST_LDFLAGS@
@ -13,13 +12,14 @@ ENABLE_S3 = @ENABLE_S3@
GTEST_LIBS = @GTEST_LIBS@
HAVE_LIBCPUID = @HAVE_LIBCPUID@
HAVE_SECCOMP = @HAVE_SECCOMP@
HOST_OS = @host_os@
LDFLAGS = @LDFLAGS@
LIBARCHIVE_LIBS = @LIBARCHIVE_LIBS@
LIBBROTLI_LIBS = @LIBBROTLI_LIBS@
LIBCURL_LIBS = @LIBCURL_LIBS@
LIBSECCOMP_LIBS = @LIBSECCOMP_LIBS@
LOWDOWN_LIBS = @LOWDOWN_LIBS@
OPENSSL_LIBS = @OPENSSL_LIBS@
LIBSECCOMP_LIBS = @LIBSECCOMP_LIBS@
PACKAGE_NAME = @PACKAGE_NAME@
PACKAGE_VERSION = @PACKAGE_VERSION@
SHELL = @bash@
@ -31,6 +31,7 @@ datadir = @datadir@
datarootdir = @datarootdir@
doc_generate = @doc_generate@
docdir = @docdir@
embedded_sandbox_shell = @embedded_sandbox_shell@
exec_prefix = @exec_prefix@
includedir = @includedir@
libdir = @libdir@


@ -320,6 +320,14 @@ if test ${cross_compiling:-no} = no && ! test -z ${sandbox_shell+x}; then
fi
fi
AC_ARG_ENABLE(embedded-sandbox-shell, AS_HELP_STRING([--enable-embedded-sandbox-shell],[include the sandbox shell in the Nix binary [default=no]]),
embedded_sandbox_shell=$enableval, embedded_sandbox_shell=no)
AC_SUBST(embedded_sandbox_shell)
if test "$embedded_sandbox_shell" = yes; then
AC_DEFINE(HAVE_EMBEDDED_SANDBOX_SHELL, 1, [Include the sandbox shell in the Nix binary.])
fi
# Expand all variables in config.status.
test "$prefix" = NONE && prefix=$ac_default_prefix
test "$exec_prefix" = NONE && exec_prefix='${prefix}'


@ -72,6 +72,7 @@
- [CLI guideline](contributing/cli-guideline.md)
- [Release Notes](release-notes/release-notes.md)
- [Release X.Y (202?-??-??)](release-notes/rl-next.md)
- [Release 2.10 (2022-07-11)](release-notes/rl-2.10.md)
- [Release 2.9 (2022-05-30)](release-notes/rl-2.9.md)
- [Release 2.8 (2022-04-19)](release-notes/rl-2.8.md)
- [Release 2.7 (2022-03-07)](release-notes/rl-2.7.md)


@ -12,6 +12,12 @@
[`--dry-run`]
[{`--out-link` | `-o`} *outlink*]
# Disambiguation
This man page describes the command `nix-build`, which is distinct from `nix
build`. For documentation on the latter, run `nix build --help` or see `man
nix3-build`.
# Description
The `nix-build` command builds the derivations described by the Nix


@ -31,7 +31,7 @@ subcommand to be performed. These are documented below.
Several commands, such as `nix-env -q` and `nix-env -i`, take a list of
arguments that specify the packages on which to operate. These are
extended regular expressions that must match the entire name of the
package. (For details on regular expressions, see regex7.) The match is
package. (For details on regular expressions, see **regex**(7).) The match is
case-sensitive. The regular expression can optionally be followed by a
dash and a version number; if omitted, any version of the package will
match. Here are some examples:
@ -412,7 +412,7 @@ The upgrade operation determines whether a derivation `y` is an upgrade
of a derivation `x` by looking at their respective `name` attributes.
The names (e.g., `gcc-3.3.1`) are split into two parts: the package name
(`gcc`), and the version (`3.3.1`). The version part starts after the
first dash not followed by a letter. `x` is considered an upgrade of `y`
first dash not followed by a letter. `y` is considered an upgrade of `x`
if their package names match, and the version of `y` is higher than that
of `x`.
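As an illustration of the splitting rule above (an editorial sketch, not part of the diff), the same split is exposed in the Nix language by the long-standing builtin `builtins.parseDrvName`:

```
# The version starts at the first dash not followed by a letter.
builtins.parseDrvName "gcc-3.3.1"        # => { name = "gcc"; version = "3.3.1"; }
builtins.parseDrvName "gnome-shell-43.1" # => { name = "gnome-shell"; version = "43.1"; }
```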


@ -15,6 +15,12 @@
[`--keep` *name*]
{{`--packages` | `-p`} {*packages* | *expressions*} … | [*path*]}
# Disambiguation
This man page describes the command `nix-shell`, which is distinct from `nix
shell`. For documentation on the latter, run `nix shell --help` or see `man
nix3-shell`.
# Description
The command `nix-shell` will build the dependencies of the specified


@ -0,0 +1,31 @@
# Release 2.10 (2022-07-11)
* `nix repl` now takes installables on the command line, unifying the usage
with other commands that use `--file` and `--expr`. The primary breaking change
is for the common usage of `nix repl '<nixpkgs>'`, which can be recovered with
`nix repl --file '<nixpkgs>'` or `nix repl --expr 'import <nixpkgs>{}'`.
This is currently guarded by the `repl-flake` experimental feature.
* A new function `builtins.traceVerbose` is available. It behaves like
`builtins.trace` when the `trace-verbose` setting is set to `true`,
and is a no-op otherwise (see the example after these notes).
* `nix search` has a new flag `--exclude` to filter out packages.
* On Linux, if `/nix` doesn't exist and cannot be created and you're
not running as root, Nix will automatically use
`~/.local/share/nix/root` as a chroot store. This enables non-root
users to download the statically linked Nix binary and have it work
out of the box, e.g.
```
# ~/nix run nixpkgs#hello
warning: '/nix' does not exist, so Nix will use '/home/ubuntu/.local/share/nix/root' as a chroot store
Hello, world!
```
* `flake-registry.json` is now fetched from `channels.nixos.org`.
* Nix can now be built with LTO by passing `--enable-lto` to `configure`.
LTO is currently only supported when building with GCC.
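As an illustration of the new `builtins.traceVerbose` builtin (an editorial sketch, not part of the release notes), assuming evaluation with `--trace-verbose` enabled:

```
# Prints `trace: evaluating foo` to stderr only when `trace-verbose` is enabled;
# either way the expression evaluates to 42.
builtins.traceVerbose "evaluating foo" 42
```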


@ -1,8 +1,5 @@
# Release X.Y (202?-??-??)
* Nix can now be built with LTO by passing `--enable-lto` to `configure`.
LTO is currently only supported when building with GCC.
* `nix copy` now copies the store paths in parallel as much as possible (again).
This doesn't apply to the `daemon` and `ssh-ng` stores, which copy everything
in one batch to avoid latency issues.


@ -4,6 +4,8 @@
, tag ? "latest"
, channelName ? "nixpkgs"
, channelURL ? "https://nixos.org/channels/nixpkgs-unstable"
, extraPkgs ? []
, maxLayers ? 100
}:
let
defaultPkgs = with pkgs; [
@ -23,7 +25,7 @@ let
iana-etc
git
openssh
];
] ++ extraPkgs;
users = {
@ -229,7 +231,7 @@ let
in
pkgs.dockerTools.buildLayeredImageWithNixDb {
inherit name tag;
inherit name tag maxLayers;
contents = [ baseSystem ];


@ -18,16 +18,16 @@
},
"nixpkgs": {
"locked": {
"lastModified": 1645296114,
"narHash": "sha256-y53N7TyIkXsjMpOG7RhvqJFGDacLs9HlyHeSTBioqYU=",
"lastModified": 1657693803,
"narHash": "sha256-G++2CJ9u0E7NNTAi9n5G8TdDmGJXcIjkJ3NF8cetQB8=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "530a53dcbc9437363471167a5e4762c5fcfa34a1",
"rev": "365e1b3a859281cf11b94f87231adeabbdd878a2",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixos-21.05-small",
"ref": "nixos-22.05-small",
"repo": "nixpkgs",
"type": "github"
}

flake.nix

@ -1,7 +1,7 @@
{
description = "The purely functional package manager";
inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-21.05-small";
inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-22.05-small";
inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2";
inputs.lowdown-src = { url = "github:kristapsdz/lowdown"; flake = false; };
@ -36,7 +36,7 @@
)
);
forAllStdenvs = stdenvs: f: nixpkgs.lib.genAttrs stdenvs (stdenv: f stdenv);
forAllStdenvs = f: nixpkgs.lib.genAttrs stdenvs (stdenv: f stdenv);
# Memoize nixpkgs for different platforms for efficiency.
nixpkgsFor =
@ -54,7 +54,7 @@
# we want most of the time and for backwards compatibility
forAllSystems (system: stdenvsPackages.${system} // stdenvsPackages.${system}.stdenvPackages);
commonDeps = pkgs: with pkgs; rec {
commonDeps = { pkgs, isStatic ? false }: with pkgs; rec {
# Use "busybox-sandbox-shell" if present,
# if not (legacy) fallback and hope it's sufficient.
sh = pkgs.busybox-sandbox-shell or (busybox.override {
@ -85,10 +85,11 @@
lib.optionals stdenv.isLinux [
"--with-boost=${boost}/lib"
"--with-sandbox-shell=${sh}/bin/busybox"
]
++ lib.optionals (stdenv.isLinux && !(isStatic && stdenv.system == "aarch64-linux")) [
"LDFLAGS=-fuse-ld=gold"
];
nativeBuildDeps =
[
buildPackages.bison
@ -107,7 +108,7 @@
++ lib.optionals stdenv.hostPlatform.isLinux [(buildPackages.util-linuxMinimal or buildPackages.utillinuxMinimal)];
buildDeps =
[ curl
[ (curl.override { patchNetrcRegression = true; })
bzip2 xz brotli editline
openssl sqlite
libarchive
@ -171,7 +172,7 @@
echo "file installer $out/install" >> $out/nix-support/hydra-build-products
'';
testNixVersions = pkgs: client: daemon: with commonDeps pkgs; with pkgs.lib; pkgs.stdenv.mkDerivation {
testNixVersions = pkgs: client: daemon: with commonDeps { inherit pkgs; }; with pkgs.lib; pkgs.stdenv.mkDerivation {
NIX_DAEMON_PACKAGE = daemon;
NIX_CLIENT_PACKAGE = client;
name =
@ -282,7 +283,7 @@
# Forward from the previous stage as we dont want it to pick the lowdown override
nixUnstable = prev.nixUnstable;
nix = with final; with commonDeps pkgs; currentStdenv.mkDerivation {
nix = with final; with commonDeps { inherit pkgs; }; currentStdenv.mkDerivation {
name = "nix-${version}";
inherit version;
@ -314,6 +315,7 @@
for LIB in $out/lib/*.dylib; do
chmod u+w $LIB
install_name_tool -id $LIB $LIB
install_name_tool -delete_rpath ${boost}/lib/ $LIB || true
done
install_name_tool -change ${boost}/lib/libboost_system.dylib $out/lib/libboost_system.dylib $out/lib/libboost_thread.dylib
''}
@ -361,7 +363,7 @@
buildInputs =
[ nix
curl
(curl.override { patchNetrcRegression = true; })
bzip2
xz
pkgs.perl
@ -370,10 +372,10 @@
++ lib.optional (currentStdenv.isLinux || currentStdenv.isDarwin) libsodium
++ lib.optional currentStdenv.isDarwin darwin.apple_sdk.frameworks.Security;
configureFlags = ''
--with-dbi=${perlPackages.DBI}/${pkgs.perl.libPrefix}
--with-dbd-sqlite=${perlPackages.DBDSQLite}/${pkgs.perl.libPrefix}
'';
configureFlags = [
"--with-dbi=${perlPackages.DBI}/${pkgs.perl.libPrefix}"
"--with-dbd-sqlite=${perlPackages.DBDSQLite}/${pkgs.perl.libPrefix}"
];
enableParallelBuilding = true;
@ -405,7 +407,7 @@
# A Nixpkgs overlay that overrides the 'nix' and
# 'nix.perl-bindings' packages.
overlay = overlayFor (p: p.stdenv);
overlays.default = overlayFor (p: p.stdenv);
hydraJobs = {
@ -430,7 +432,7 @@
value = let
nixpkgsCross = import nixpkgs {
inherit system crossSystem;
overlays = [ self.overlay ];
overlays = [ self.overlays.default ];
};
in binaryTarball nixpkgsFor.${system} self.packages.${system}."nix-${crossSystem}" nixpkgsCross;
}) crossSystems));
@ -448,7 +450,7 @@
# Line coverage analysis.
coverage =
with nixpkgsFor.x86_64-linux;
with commonDeps pkgs;
with commonDeps { inherit pkgs; };
releaseTools.coverageAnalysis {
name = "nix-coverage-${version}";
@ -476,31 +478,31 @@
tests.remoteBuilds = import ./tests/remote-builds.nix {
system = "x86_64-linux";
inherit nixpkgs;
inherit (self) overlay;
overlay = self.overlays.default;
};
tests.nix-copy-closure = import ./tests/nix-copy-closure.nix {
system = "x86_64-linux";
inherit nixpkgs;
inherit (self) overlay;
overlay = self.overlays.default;
};
tests.nssPreload = (import ./tests/nss-preload.nix rec {
system = "x86_64-linux";
inherit nixpkgs;
inherit (self) overlay;
overlay = self.overlays.default;
});
tests.githubFlakes = (import ./tests/github-flakes.nix rec {
system = "x86_64-linux";
inherit nixpkgs;
inherit (self) overlay;
overlay = self.overlays.default;
});
tests.sourcehutFlakes = (import ./tests/sourcehut-flakes.nix rec {
system = "x86_64-linux";
inherit nixpkgs;
inherit (self) overlay;
overlay = self.overlays.default;
});
tests.setuid = nixpkgs.lib.genAttrs
@ -508,7 +510,7 @@
(system:
import ./tests/setuid.nix rec {
inherit nixpkgs system;
inherit (self) overlay;
overlay = self.overlays.default;
});
# Make sure that nix-env still produces the exact same result
@ -553,12 +555,13 @@
dockerImage = self.hydraJobs.dockerImage.${system};
});
packages = forAllSystems (system: {
packages = forAllSystems (system: rec {
inherit (nixpkgsFor.${system}) nix;
default = nix;
} // (nixpkgs.lib.optionalAttrs (builtins.elem system linux64BitSystems) {
nix-static = let
nixpkgs = nixpkgsFor.${system}.pkgsStatic;
in with commonDeps nixpkgs; nixpkgs.stdenv.mkDerivation {
in with commonDeps { pkgs = nixpkgs; isStatic = true; }; nixpkgs.stdenv.mkDerivation {
name = "nix-${version}";
src = self;
@ -570,14 +573,24 @@
nativeBuildInputs = nativeBuildDeps;
buildInputs = buildDeps ++ propagatedDeps;
configureFlags = [ "--sysconfdir=/etc" ];
# Work around pkgsStatic disabling all tests.
# Remove in NixOS 22.11, see https://github.com/NixOS/nixpkgs/pull/140271.
preHook =
''
doCheck=1
doInstallCheck=1
'';
configureFlags =
configureFlags ++
[ "--sysconfdir=/etc"
"--enable-embedded-sandbox-shell"
];
enableParallelBuilding = true;
makeFlags = "profiledir=$(out)/etc/profile.d";
doCheck = true;
installFlags = "sysconfdir=$(out)/etc";
postInstall = ''
@ -587,7 +600,6 @@
echo "file binary-dist $out/bin/nix" >> $out/nix-support/hydra-build-products
'';
doInstallCheck = true;
installCheckFlags = "sysconfdir=$(out)/etc";
stripAllList = ["bin"];
@ -596,6 +608,7 @@
hardeningDisable = [ "pie" ];
};
dockerImage =
let
pkgs = nixpkgsFor.${system};
@ -610,14 +623,16 @@
ln -s ${image} $image
echo "file binary-dist $image" >> $out/nix-support/hydra-build-products
'';
} // builtins.listToAttrs (map (crossSystem: {
}
// builtins.listToAttrs (map (crossSystem: {
name = "nix-${crossSystem}";
value = let
nixpkgsCross = import nixpkgs {
inherit system crossSystem;
overlays = [ self.overlay ];
overlays = [ self.overlays.default ];
};
in with commonDeps nixpkgsCross; nixpkgsCross.stdenv.mkDerivation {
in with commonDeps { pkgs = nixpkgsCross; }; nixpkgsCross.stdenv.mkDerivation {
name = "nix-${version}";
src = self;
@ -649,44 +664,45 @@
doInstallCheck = true;
installCheckFlags = "sysconfdir=$(out)/etc";
};
}) crossSystems)) // (builtins.listToAttrs (map (stdenvName:
}) (if system == "x86_64-linux" then crossSystems else [])))
// (builtins.listToAttrs (map (stdenvName:
nixpkgsFor.${system}.lib.nameValuePair
"nix-${stdenvName}"
nixpkgsFor.${system}."${stdenvName}Packages".nix
) stdenvs)));
defaultPackage = forAllSystems (system: self.packages.${system}.nix);
devShells = forAllSystems (system:
forAllStdenvs (stdenv:
with nixpkgsFor.${system};
with commonDeps { inherit pkgs; };
nixpkgsFor.${system}.${stdenv}.mkDerivation {
name = "nix";
devShell = forAllSystems (system: self.devShells.${system}.stdenvPackages);
outputs = [ "out" "dev" "doc" ];
devShells = forAllSystemsAndStdenvs (system: stdenv:
with nixpkgsFor.${system};
with commonDeps pkgs;
nativeBuildInputs = nativeBuildDeps;
buildInputs = buildDeps ++ propagatedDeps ++ awsDeps;
nixpkgsFor.${system}.${stdenv}.mkDerivation {
name = "nix";
inherit configureFlags;
outputs = [ "out" "dev" "doc" ];
enableParallelBuilding = true;
nativeBuildInputs = nativeBuildDeps;
buildInputs = buildDeps ++ propagatedDeps ++ awsDeps;
installFlags = "sysconfdir=$(out)/etc";
inherit configureFlags;
shellHook =
''
PATH=$prefix/bin:$PATH
unset PYTHONPATH
export MANPATH=$out/share/man:$MANPATH
enableParallelBuilding = true;
installFlags = "sysconfdir=$(out)/etc";
shellHook =
''
PATH=$prefix/bin:$PATH
unset PYTHONPATH
export MANPATH=$out/share/man:$MANPATH
# Make bash completion work.
XDG_DATA_DIRS+=:$out/share
'';
});
# Make bash completion work.
XDG_DATA_DIRS+=:$out/share
'';
}
)
// { default = self.devShells.${system}.stdenv; }
);
};
}


@ -125,7 +125,7 @@ define build-library
$(1)_PATH := $$(_d)/$$($(1)_NAME).a
$$($(1)_PATH): $$($(1)_OBJS) | $$(_d)/
+$$(trace-ld) $(LD) -Ur -o $$(_d)/$$($(1)_NAME).o $$?
+$$(trace-ld) $(LD) -Ur -o $$(_d)/$$($(1)_NAME).o $$^
$$(trace-ar) $(AR) crs $$@ $$(_d)/$$($(1)_NAME).o
$(1)_LDFLAGS_USE += $$($(1)_PATH) $$($(1)_LDFLAGS)


@ -638,6 +638,17 @@ place_channel_configuration() {
fi
}
check_selinux() {
if command -v getenforce > /dev/null 2>&1; then
if ! [ "$(getenforce)" = "Disabled" ]; then
failure <<EOF
Nix does not work with SELinux enabled yet!
See https://github.com/NixOS/nix/issues/2374
EOF
fi
fi
}
welcome_to_nix() {
ok "Welcome to the Multi-User Nix Installation"
@ -866,6 +877,8 @@ when I need to.
EOF
fi
check_selinux
if [ "$(uname -s)" = "Darwin" ]; then
# shellcheck source=./install-darwin-multi-user.sh
. "$EXTRACTED_NIX_PATH/install-darwin-multi-user.sh"


@ -148,7 +148,9 @@ if ! [ -w "$dest" ]; then
exit 1
fi
mkdir -p "$dest/store"
# The auto-chroot code in openFromNonUri() checks for the
# non-existence of /nix/var/nix, so we need to create it here.
mkdir -p "$dest/store" "$dest/var/nix"
printf "copying Nix to %s..." "${dest}/store" >&2
# Insert a newline if no progress is shown.


@ -120,7 +120,7 @@ ref<EvalState> EvalCommand::getEvalState()
;
if (startReplOnEvalErrors) {
evalState->debugRepl = &runRepl;
};
}
return ref<EvalState>(evalState);


@ -58,6 +58,7 @@ struct CopyCommand : virtual StoreCommand
struct EvalCommand : virtual StoreCommand, MixEvalArgs
{
bool startReplOnEvalErrors = false;
bool ignoreExceptionsDuringTry = false;
EvalCommand();
@ -77,10 +78,16 @@ struct MixFlakeOptions : virtual Args, EvalCommand
{
flake::LockFlags lockFlags;
std::optional<std::string> needsFlakeInputCompletion = {};
MixFlakeOptions();
virtual std::optional<FlakeRef> getFlakeRefForCompletion()
virtual std::vector<std::string> getFlakesForCompletion()
{ return {}; }
void completeFlakeInput(std::string_view prefix);
void completionHook() override;
};
struct SourceExprCommand : virtual Args, MixFlakeOptions
@ -116,12 +123,13 @@ struct InstallablesCommand : virtual Args, SourceExprCommand
InstallablesCommand();
void prepare() override;
Installables load();
virtual bool useDefaultInstallables() { return true; }
std::optional<FlakeRef> getFlakeRefForCompletion() override;
std::vector<std::string> getFlakesForCompletion() override;
private:
protected:
std::vector<std::string> _installables;
};
@ -135,9 +143,9 @@ struct InstallableCommand : virtual Args, SourceExprCommand
void prepare() override;
std::optional<FlakeRef> getFlakeRefForCompletion() override
std::vector<std::string> getFlakesForCompletion() override
{
return parseFlakeRefWithFragment(_installable, absPath(".")).first;
return {_installable};
}
private:


@ -23,17 +23,6 @@
namespace nix {
void completeFlakeInputPath(
ref<EvalState> evalState,
const FlakeRef & flakeRef,
std::string_view prefix)
{
auto flake = flake::getFlake(*evalState, flakeRef, true);
for (auto & input : flake.inputs)
if (hasPrefix(input.first, prefix))
completions->add(input.first);
}
MixFlakeOptions::MixFlakeOptions()
{
auto category = "Common flake-related options";
@ -86,8 +75,7 @@ MixFlakeOptions::MixFlakeOptions()
lockFlags.inputUpdates.insert(flake::parseInputPath(s));
}},
.completer = {[&](size_t, std::string_view prefix) {
if (auto flakeRef = getFlakeRefForCompletion())
completeFlakeInputPath(getEvalState(), *flakeRef, prefix);
needsFlakeInputCompletion = {std::string(prefix)};
}}
});
@ -103,12 +91,10 @@ MixFlakeOptions::MixFlakeOptions()
parseFlakeRef(flakeRef, absPath("."), true));
}},
.completer = {[&](size_t n, std::string_view prefix) {
if (n == 0) {
if (auto flakeRef = getFlakeRefForCompletion())
completeFlakeInputPath(getEvalState(), *flakeRef, prefix);
} else if (n == 1) {
if (n == 0)
needsFlakeInputCompletion = {std::string(prefix)};
else if (n == 1)
completeFlakeRef(getEvalState()->store, prefix);
}
}}
});
@ -139,6 +125,24 @@ MixFlakeOptions::MixFlakeOptions()
});
}
void MixFlakeOptions::completeFlakeInput(std::string_view prefix)
{
auto evalState = getEvalState();
for (auto & flakeRefS : getFlakesForCompletion()) {
auto flakeRef = parseFlakeRefWithFragment(expandTilde(flakeRefS), absPath(".")).first;
auto flake = flake::getFlake(*evalState, flakeRef, true);
for (auto & input : flake.inputs)
if (hasPrefix(input.first, prefix))
completions->add(input.first);
}
}
void MixFlakeOptions::completionHook()
{
if (auto & prefix = needsFlakeInputCompletion)
completeFlakeInput(*prefix);
}
SourceExprCommand::SourceExprCommand(bool supportReadOnlyMode)
{
addFlag({
@ -146,7 +150,8 @@ SourceExprCommand::SourceExprCommand(bool supportReadOnlyMode)
.shortName = 'f',
.description =
"Interpret installables as attribute paths relative to the Nix expression stored in *file*. "
"If *file* is the character -, then a Nix expression will be read from standard input.",
"If *file* is the character -, then a Nix expression will be read from standard input. "
"Implies `--impure`.",
.category = installablesCategory,
.labels = {"file"},
.handler = {&file},
@ -919,6 +924,9 @@ std::vector<std::pair<std::shared_ptr<Installable>, BuiltPath>> Installable::bui
break;
case Realise::Outputs: {
if (settings.printMissing)
printMissing(store, pathsToBuild, lvlInfo);
for (auto & buildResult : store->buildPathsWithResults(pathsToBuild, bMode, evalStore)) {
if (!buildResult.success())
buildResult.rethrow();
@ -1032,21 +1040,26 @@ InstallablesCommand::InstallablesCommand()
void InstallablesCommand::prepare()
{
installables = load();
}
Installables InstallablesCommand::load() {
Installables installables;
if (_installables.empty() && useDefaultInstallables())
// FIXME: commands like "nix profile install" should not have a
// default, probably.
_installables.push_back(".");
installables = parseInstallables(getStore(), _installables);
return parseInstallables(getStore(), _installables);
}
std::optional<FlakeRef> InstallablesCommand::getFlakeRefForCompletion()
std::vector<std::string> InstallablesCommand::getFlakesForCompletion()
{
if (_installables.empty()) {
if (useDefaultInstallables())
return parseFlakeRefWithFragment(".", absPath(".")).first;
return {"."};
return {};
}
return parseFlakeRefWithFragment(_installables.front(), absPath(".")).first;
return _installables;
}
InstallableCommand::InstallableCommand(bool supportReadOnlyMode)


@ -132,6 +132,8 @@ struct Installable
const std::vector<std::shared_ptr<Installable>> & installables);
};
typedef std::vector<std::shared_ptr<Installable>> Installables;
struct InstallableValue : Installable
{
ref<EvalState> state;


@ -22,6 +22,7 @@ extern "C" {
#include "ansicolor.hh"
#include "shared.hh"
#include "eval.hh"
#include "eval-cache.hh"
#include "eval-inline.hh"
#include "attr-path.hh"
#include "store-api.hh"
@ -54,6 +55,8 @@ struct NixRepl
size_t debugTraceIndex;
Strings loadedFiles;
typedef std::vector<std::pair<Value*,std::string>> AnnotatedValues;
std::function<AnnotatedValues()> getValues;
const static int envSize = 32768;
std::shared_ptr<StaticEnv> staticEnv;
@ -63,13 +66,15 @@ struct NixRepl
const Path historyFile;
NixRepl(ref<EvalState> state);
NixRepl(const Strings & searchPath, nix::ref<Store> store,ref<EvalState> state,
std::function<AnnotatedValues()> getValues);
~NixRepl();
void mainLoop(const std::vector<std::string> & files);
void mainLoop();
StringSet completePrefix(const std::string & prefix);
bool getLine(std::string & input, const std::string & prompt);
StorePath getDerivationPath(Value & v);
bool processLine(std::string line);
void loadFile(const Path & path);
void loadFlake(const std::string & flakeRef);
void initEnv();
@ -96,9 +101,11 @@ std::string removeWhitespace(std::string s)
}
NixRepl::NixRepl(ref<EvalState> state)
NixRepl::NixRepl(const Strings & searchPath, nix::ref<Store> store, ref<EvalState> state,
std::function<NixRepl::AnnotatedValues()> getValues)
: state(state)
, debugTraceIndex(0)
, getValues(getValues)
, staticEnv(new StaticEnv(false, state->staticBaseEnv.get()))
, historyFile(getDataDir() + "/nix/repl-history")
{
@ -111,23 +118,20 @@ NixRepl::~NixRepl()
write_history(historyFile.c_str());
}
std::string runNix(Path program, const Strings & args,
void runNix(Path program, const Strings & args,
const std::optional<std::string> & input = {})
{
auto subprocessEnv = getEnv();
subprocessEnv["NIX_CONFIG"] = globalConfig.toKeyValue();
auto res = runProgram(RunOptions {
runProgram2(RunOptions {
.program = settings.nixBinDir+ "/" + program,
.args = args,
.environment = subprocessEnv,
.input = input,
});
if (!statusOk(res.first))
throw ExecError(res.first, "program '%1%' %2%", program, statusToString(res.first));
return res.second;
return;
}
static NixRepl * curRepl; // ugly
@ -228,18 +232,12 @@ static std::ostream & showDebugTrace(std::ostream & out, const PosTable & positi
return out;
}
void NixRepl::mainLoop(const std::vector<std::string> & files)
void NixRepl::mainLoop()
{
std::string error = ANSI_RED "error:" ANSI_NORMAL " ";
notice("Welcome to Nix " + nixVersion + ". Type :? for help.\n");
if (!files.empty()) {
for (auto & i : files)
loadedFiles.push_back(i);
}
loadFiles();
if (!loadedFiles.empty()) notice("");
// Allow nix-repl specific settings in .inputrc
rl_readline_name = "nix-repl";
@ -749,7 +747,6 @@ bool NixRepl::processLine(std::string line)
return true;
}
void NixRepl::loadFile(const Path & path)
{
loadedFiles.remove(path);
@ -809,13 +806,15 @@ void NixRepl::loadFiles()
Strings old = loadedFiles;
loadedFiles.clear();
bool first = true;
for (auto & i : old) {
if (!first) notice("");
first = false;
notice("Loading '%1%'...", i);
loadFile(i);
}
for (auto & [i, what] : getValues()) {
notice("Loading installable '%1%'...", what);
addAttrsToScope(*i);
}
}
@ -1015,7 +1014,17 @@ void runRepl(
ref<EvalState>evalState,
const ValMap & extraEnv)
{
auto repl = std::make_unique<NixRepl>(evalState);
auto getValues = [&]()->NixRepl::AnnotatedValues{
NixRepl::AnnotatedValues values;
return values;
};
const Strings & searchPath = {};
auto repl = std::make_unique<NixRepl>(
searchPath,
openStore(),
evalState,
getValues
);
repl->initEnv();
@ -1023,20 +1032,40 @@ void runRepl(
for (auto & [name, value] : extraEnv)
repl->addVarToScope(repl->state->symbols.create(name), *value);
repl->mainLoop({});
repl->mainLoop();
}
struct CmdRepl : StoreCommand, MixEvalArgs
struct CmdRepl : InstallablesCommand
{
std::vector<std::string> files;
CmdRepl()
CmdRepl(){
evalSettings.pureEval = false;
}
void prepare()
{
expectArgs({
.label = "files",
.handler = {&files},
.completer = completePath
});
if (!settings.isExperimentalFeatureEnabled(Xp::ReplFlake) && !(file) && this->_installables.size() >= 1) {
warn("future versions of Nix will require using `--file` to load a file");
if (this->_installables.size() > 1)
warn("more than one input file is not currently supported");
auto filePath = this->_installables[0].data();
file = std::optional(filePath);
_installables.front() = _installables.back();
_installables.pop_back();
}
installables = InstallablesCommand::load();
}
std::vector<std::string> files;
Strings getDefaultFlakeAttrPaths() override
{
return {""};
}
virtual bool useDefaultInstallables() override
{
return file.has_value() or expr.has_value();
}
bool forceImpureByDefault() override
{
return true;
}
std::string description() override
@ -1053,14 +1082,37 @@ struct CmdRepl : StoreCommand, MixEvalArgs
void run(ref<Store> store) override
{
evalSettings.pureEval = false;
auto evalState = make_ref<EvalState>(searchPath, store);
auto repl = std::make_unique<NixRepl>(evalState);
auto state = getEvalState();
auto getValues = [&]()->NixRepl::AnnotatedValues{
auto installables = load();
NixRepl::AnnotatedValues values;
for (auto & installable: installables){
auto what = installable->what();
if (file){
auto [val, pos] = installable->toValue(*state);
auto what = installable->what();
state->forceValue(*val, pos);
auto autoArgs = getAutoArgs(*state);
auto valPost = state->allocValue();
state->autoCallFunction(*autoArgs, *val, *valPost);
state->forceValue(*valPost, pos);
values.push_back( {valPost, what });
} else {
auto [val, pos] = installable->toValue(*state);
values.push_back( {val, what} );
}
}
return values;
};
auto repl = std::make_unique<NixRepl>(
searchPath,
openStore(),
state,
getValues
);
repl->autoArgs = getAutoArgs(*repl->state);
repl->initEnv();
repl->mainLoop(files);
repl->mainLoop();
}
};


@ -282,7 +282,7 @@ struct AttrDb
auto queryAttribute(state->queryAttribute.use()(key.first)(symbols[key.second]));
if (!queryAttribute.next()) return {};
auto rowId = (AttrType) queryAttribute.getInt(0);
auto rowId = (AttrId) queryAttribute.getInt(0);
auto type = (AttrType) queryAttribute.getInt(1);
switch (type) {
@ -486,7 +486,7 @@ std::shared_ptr<AttrCursor> AttrCursor::maybeGetAttr(Symbol name, bool forceErro
return nullptr;
else if (std::get_if<failed_t>(&attr->second)) {
if (forceErrors)
debug("reevaluating failed cached attribute '%s'");
debug("reevaluating failed cached attribute '%s'", getAttrPathStr(name));
else
throw CachedEvalError("cached failure of attribute '%s'", getAttrPathStr(name));
} else


@ -464,9 +464,10 @@ EvalState::EvalState(
, emptyBindings(0)
, store(store)
, buildStore(buildStore ? buildStore : store)
, debugRepl(0)
, debugRepl(nullptr)
, debugStop(false)
, debugQuit(false)
, trylevel(0)
, regexCache(makeRegexCache())
#if HAVE_BOEHMGC
, valueAllocCache(std::allocate_shared<void *>(traceable_allocator<void *>(), nullptr))
@ -832,7 +833,14 @@ void EvalState::runDebugRepl(const Error * error, const Env & env, const Expr &
: nullptr;
if (error)
printError("%s\n\n" ANSI_BOLD "Starting REPL to allow you to inspect the current state of the evaluator.\n" ANSI_NORMAL, error->what());
{
printError("%s\n\n", error->what());
if (trylevel > 0 && error->info().level != lvlInfo)
printError("This exception occurred in a 'tryEval' call. Use " ANSI_GREEN "--ignore-try" ANSI_NORMAL " to skip these.\n");
printError(ANSI_BOLD "Starting REPL to allow you to inspect the current state of the evaluator.\n" ANSI_NORMAL);
}
auto se = getStaticEnv(expr);
if (se) {


@ -130,6 +130,7 @@ public:
void (* debugRepl)(ref<EvalState> es, const ValMap & extraEnv);
bool debugStop;
bool debugQuit;
int trylevel;
std::list<DebugTrace> debugTraces;
std::map<const Expr*, const std::shared_ptr<const StaticEnv>> exprEnvs;
const std::shared_ptr<const StaticEnv> getStaticEnv(const Expr & expr) const
@ -646,6 +647,15 @@ struct EvalSettings : Config
Setting<bool> useEvalCache{this, true, "eval-cache",
"Whether to use the flake evaluation cache."};
Setting<bool> ignoreExceptionsDuringTry{this, false, "ignore-try",
R"(
If set to true, ignore exceptions inside 'tryEval' calls when evaluating Nix expressions in
debug mode (using the --debugger flag). By default, the debugger will pause on all exceptions.
)"};
Setting<bool> traceVerbose{this, false, "trace-verbose",
"Whether `builtins.traceVerbose` should trace its first argument when evaluated."};
};
extern EvalSettings evalSettings;
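As an illustration of the `ignore-try` setting described above (an editorial sketch, not part of the diff): with `--debugger`, the `throw` below would normally start the REPL, whereas adding `--ignore-try` skips it and `tryEval` returns as usual:

```
builtins.tryEval (throw "handled by tryEval")
# => { success = false; value = false; }
```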


@ -384,6 +384,18 @@ LockedFlake lockFlake(
}
}
/* Check whether this input has overrides for a
non-existent input. */
for (auto [inputPath, inputOverride] : overrides) {
auto inputPath2(inputPath);
auto follow = inputPath2.back();
inputPath2.pop_back();
if (inputPath2 == inputPathPrefix && !flakeInputs.count(follow))
warn(
"input '%s' has an override for a non-existent input '%s'",
printInputPath(inputPathPrefix), follow);
}
/* Go over the flake inputs, resolve/fetch them if
necessary (i.e. if they're new or the flakeref changed
from what's in the lock file). */
@ -513,6 +525,15 @@ LockedFlake lockFlake(
if (!lockFlags.allowMutable && !input.ref->input.isLocked())
throw Error("cannot update flake input '%s' in pure mode", inputPathS);
/* Note: in case of an --override-input, we use
the *original* ref (input2.ref) for the
"original" field, rather than the
override. This ensures that the override isn't
nuked the next time we update the lock
file. That is, overrides are sticky unless you
use --no-write-lock-file. */
auto ref = input2.ref ? *input2.ref : *input.ref;
if (input.isFlake) {
Path localPath = parentPath;
FlakeRef localRef = *input.ref;
@ -524,15 +545,7 @@ LockedFlake lockFlake(
auto inputFlake = getFlake(state, localRef, useRegistries, flakeCache, inputPath);
/* Note: in case of an --override-input, we use
the *original* ref (input2.ref) for the
"original" field, rather than the
override. This ensures that the override isn't
nuked the next time we update the lock
file. That is, overrides are sticky unless you
use --no-write-lock-file. */
auto childNode = std::make_shared<LockedNode>(
inputFlake.lockedRef, input2.ref ? *input2.ref : *input.ref);
auto childNode = std::make_shared<LockedNode>(inputFlake.lockedRef, ref);
node->inputs.insert_or_assign(id, childNode);
@ -560,7 +573,7 @@ LockedFlake lockFlake(
auto [sourceInfo, resolvedRef, lockedRef] = fetchOrSubstituteTree(
state, *input.ref, useRegistries, flakeCache);
node->inputs.insert_or_assign(id,
std::make_shared<LockedNode>(lockedRef, *input.ref, false));
std::make_shared<LockedNode>(lockedRef, ref, false));
}
}

View file

@ -520,6 +520,12 @@ path_start
$$ = new ExprPath(path);
}
| HPATH {
if (evalSettings.pureEval) {
throw Error(
"the path '%s' can not be resolved in pure mode",
std::string_view($1.p, $1.l)
);
}
Path path(getHome() + std::string($1.p + 1, $1.l - 1));
$$ = new ExprPath(path);
}


@ -851,6 +851,18 @@ static RegisterPrimOp primop_floor({
static void prim_tryEval(EvalState & state, const PosIdx pos, Value * * args, Value & v)
{
auto attrs = state.buildBindings(2);
/* increment state.trylevel, and decrement it when this function returns. */
MaintainCount trylevel(state.trylevel);
void (* savedDebugRepl)(ref<EvalState> es, const ValMap & extraEnv) = nullptr;
if (state.debugRepl && evalSettings.ignoreExceptionsDuringTry)
{
/* To prevent starting the repl from exceptions within a tryEval, null it. */
savedDebugRepl = state.debugRepl;
state.debugRepl = nullptr;
}
try {
state.forceValue(*args[0], pos);
attrs.insert(state.sValue, args[0]);
@ -859,6 +871,11 @@ static void prim_tryEval(EvalState & state, const PosIdx pos, Value * * args, Va
attrs.alloc(state.sValue).mkBool(false);
attrs.alloc("success").mkBool(false);
}
// restore the debugRepl pointer if we saved it earlier.
if (savedDebugRepl)
state.debugRepl = savedDebugRepl;
v.mkAttrs(attrs);
}
@ -970,6 +987,15 @@ static RegisterPrimOp primop_trace({
});
/* Takes two arguments and evaluates to the second one. Used as the
* builtins.traceVerbose implementation when --trace-verbose is not enabled
*/
static void prim_second(EvalState & state, const PosIdx pos, Value * * args, Value & v)
{
state.forceValue(*args[1], pos);
v = *args[1];
}
/*************************************************************
* Derivations
*************************************************************/
@ -3926,6 +3952,18 @@ void EvalState::createBaseEnv()
addPrimOp("__exec", 1, prim_exec);
}
addPrimOp({
.fun = evalSettings.traceVerbose ? prim_trace : prim_second,
.arity = 2,
.name = "__traceVerbose",
.args = { "e1", "e2" },
.doc = R"(
Evaluate *e1* and print its abstract syntax representation on standard
error if `--trace-verbose` is enabled. Then return *e2*. This function
is useful for debugging.
)",
});
/* Add a value containing the current Nix expression search path. */
mkList(v, searchPath.size());
int n = 0;


@ -364,6 +364,10 @@ static RegisterPrimOp primop_fetchGit({
A Boolean parameter that specifies whether submodules should be
checked out. Defaults to `false`.
- shallow\
A Boolean parameter that specifies whether fetching a shallow clone
is allowed. Defaults to `false` (see the example below).
- allRefs\
Whether to fetch all refs of the repository. With this argument being
true, it's possible to load a `rev` from *any* `ref` (by default only

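As an illustration of the `shallow` parameter documented above (an editorial sketch with a hypothetical path, not part of the diff):

```
builtins.fetchGit {
  url = "file:///path/to/shallow-clone";  # a repository that is itself a shallow clone
  ref = "main";
  shallow = true;  # without this, a shallow source repository is rejected
}
```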

@ -540,22 +540,22 @@ namespace nix {
ASSERT_THAT(v, IsStringEq(output));
}
#define CASE(input, output) (std::make_tuple(std::string_view("builtins.toString " #input), std::string_view(output)))
#define CASE(input, output) (std::make_tuple(std::string_view("builtins.toString " input), std::string_view(output)))
INSTANTIATE_TEST_SUITE_P(
toString,
ToStringPrimOpTest,
testing::Values(
CASE("foo", "foo"),
CASE(1, "1"),
CASE([1 2 3], "1 2 3"),
CASE(.123, "0.123000"),
CASE(true, "1"),
CASE(false, ""),
CASE(null, ""),
CASE({ v = "bar"; __toString = self: self.v; }, "bar"),
CASE({ v = "bar"; __toString = self: self.v; outPath = "foo"; }, "bar"),
CASE({ outPath = "foo"; }, "foo"),
CASE(./test, "/test")
CASE(R"("foo")", "foo"),
CASE(R"(1)", "1"),
CASE(R"([1 2 3])", "1 2 3"),
CASE(R"(.123)", "0.123000"),
CASE(R"(true)", "1"),
CASE(R"(false)", ""),
CASE(R"(null)", ""),
CASE(R"({ v = "bar"; __toString = self: self.v; })", "bar"),
CASE(R"({ v = "bar"; __toString = self: self.v; outPath = "foo"; })", "bar"),
CASE(R"({ outPath = "foo"; })", "foo"),
CASE(R"(./test)", "/test")
)
);
#undef CASE


@ -70,7 +70,7 @@ struct FetchSettings : public Config
Setting<bool> warnDirty{this, true, "warn-dirty",
"Whether to warn about dirty Git/Mercurial trees."};
Setting<std::string> flakeRegistry{this, "https://github.com/NixOS/flake-registry/raw/master/flake-registry.json", "flake-registry",
Setting<std::string> flakeRegistry{this, "https://channels.nixos.org/flake-registry.json", "flake-registry",
"Path or URI of the global flake registry."};
Setting<bool> useRegistries{this, true, "use-registries",


@ -85,8 +85,9 @@ std::optional<std::string> readHead(const Path & path)
bool storeCachedHead(const std::string& actualUrl, const std::string& headRef)
{
Path cacheDir = getCachePath(actualUrl);
auto gitDir = ".";
try {
runProgram("git", true, { "-C", cacheDir, "symbolic-ref", "--", "HEAD", headRef });
runProgram("git", true, { "-C", cacheDir, "--git-dir", gitDir, "symbolic-ref", "--", "HEAD", headRef });
} catch (ExecError &e) {
if (!WIFEXITED(e.status)) throw;
return false;
@ -182,7 +183,7 @@ WorkdirInfo getWorkdirInfo(const Input & input, const Path & workdir)
if (hasHead) {
// Using git diff is preferable over lower-level operations here,
// because it's conceptually simpler and we only need the exit code anyway.
auto gitDiffOpts = Strings({ "-C", workdir, "diff", "HEAD", "--quiet"});
auto gitDiffOpts = Strings({ "-C", workdir, "--git-dir", gitDir, "diff", "HEAD", "--quiet"});
if (!submodules) {
// Changes in submodules should only make the tree dirty
// when those submodules will be copied as well.
@ -203,6 +204,7 @@ WorkdirInfo getWorkdirInfo(const Input & input, const Path & workdir)
std::pair<StorePath, Input> fetchFromWorkdir(ref<Store> store, Input & input, const Path & workdir, const WorkdirInfo & workdirInfo)
{
const bool submodules = maybeGetBoolAttr(input.attrs, "submodules").value_or(false);
auto gitDir = ".git";
if (!fetchSettings.allowDirty)
throw Error("Git tree '%s' is dirty", workdir);
@ -210,7 +212,7 @@ std::pair<StorePath, Input> fetchFromWorkdir(ref<Store> store, Input & input, co
if (fetchSettings.warnDirty)
warn("Git tree '%s' is dirty", workdir);
auto gitOpts = Strings({ "-C", workdir, "ls-files", "-z" });
auto gitOpts = Strings({ "-C", workdir, "--git-dir", gitDir, "ls-files", "-z" });
if (submodules)
gitOpts.emplace_back("--recurse-submodules");
@ -240,7 +242,7 @@ std::pair<StorePath, Input> fetchFromWorkdir(ref<Store> store, Input & input, co
// modified dirty file?
input.attrs.insert_or_assign(
"lastModified",
workdirInfo.hasHead ? std::stoull(runProgram("git", true, { "-C", actualPath, "log", "-1", "--format=%ct", "--no-show-signature", "HEAD" })) : 0);
workdirInfo.hasHead ? std::stoull(runProgram("git", true, { "-C", actualPath, "--git-dir", gitDir, "log", "-1", "--format=%ct", "--no-show-signature", "HEAD" })) : 0);
return {std::move(storePath), input};
}
@ -572,7 +574,7 @@ struct GitInputScheme : InputScheme
bool isShallow = chomp(runProgram("git", true, { "-C", repoDir, "--git-dir", gitDir, "rev-parse", "--is-shallow-repository" })) == "true";
if (isShallow && !shallow)
throw Error("'%s' is a shallow Git repository, but a non-shallow repository is needed", actualUrl);
throw Error("'%s' is a shallow Git repository, but shallow repositories are only allowed when `shallow = true;` is specified.", actualUrl);
// FIXME: check whether rev is an ancestor of ref.


@ -381,7 +381,7 @@ struct SourceHutInputScheme : GitArchiveInputScheme
Headers headers = makeHeadersWithAuthTokens(host);
std::string ref_uri;
std::string refUri;
if (ref == "HEAD") {
auto file = store->toRealPath(
downloadFile(store, fmt("%s/HEAD", base_url), "source", false, headers).storePath);
@ -393,10 +393,11 @@ struct SourceHutInputScheme : GitArchiveInputScheme
if (!remoteLine) {
throw BadURL("in '%d', couldn't resolve HEAD ref '%d'", input.to_string(), ref);
}
ref_uri = remoteLine->target;
refUri = remoteLine->target;
} else {
ref_uri = fmt("refs/(heads|tags)/%s", ref);
refUri = fmt("refs/(heads|tags)/%s", ref);
}
std::regex refRegex(refUri);
auto file = store->toRealPath(
downloadFile(store, fmt("%s/info/refs", base_url), "source", false, headers).storePath);
@ -406,7 +407,7 @@ struct SourceHutInputScheme : GitArchiveInputScheme
std::optional<std::string> id;
while(!id && getline(is, line)) {
auto parsedLine = git::parseLsRemoteLine(line);
if (parsedLine && parsedLine->reference == ref_uri)
if (parsedLine && parsedLine->reference && std::regex_match(*parsedLine->reference, refRegex))
id = parsedLine->target;
}


@ -7,6 +7,22 @@ HookInstance::HookInstance()
{
debug("starting build hook '%s'", settings.buildHook);
auto buildHookArgs = tokenizeString<std::list<std::string>>(settings.buildHook.get());
if (buildHookArgs.empty())
throw Error("'build-hook' setting is empty");
auto buildHook = buildHookArgs.front();
buildHookArgs.pop_front();
Strings args;
for (auto & arg : buildHookArgs)
args.push_back(arg);
args.push_back(std::string(baseNameOf(settings.buildHook.get())));
args.push_back(std::to_string(verbosity));
/* Create a pipe to get the output of the child. */
fromHook.create();
@ -36,14 +52,9 @@ HookInstance::HookInstance()
if (dup2(builderOut.readSide.get(), 5) == -1)
throw SysError("dupping builder's stdout/stderr");
Strings args = {
std::string(baseNameOf(settings.buildHook.get())),
std::to_string(verbosity),
};
execv(buildHook.c_str(), stringsToCharPtrs(args).data());
execv(settings.buildHook.get().c_str(), stringsToCharPtrs(args).data());
throw SysError("executing '%s'", settings.buildHook);
throw SysError("executing '%s'", buildHook);
});
pid.setSeparatePG(true);


@ -1717,7 +1717,19 @@ void LocalDerivationGoal::runChild()
for (auto & i : dirsInChroot) {
if (i.second.source == "/proc") continue; // backwards compatibility
doBind(i.second.source, chrootRootDir + i.first, i.second.optional);
#if HAVE_EMBEDDED_SANDBOX_SHELL
if (i.second.source == "__embedded_sandbox_shell__") {
static unsigned char sh[] = {
#include "embedded-sandbox-shell.gen.hh"
};
auto dst = chrootRootDir + i.first;
createDirs(dirOf(dst));
writeFile(dst, std::string_view((const char *) sh, sizeof(sh)));
chmod_(dst, 0555);
} else
#endif
doBind(i.second.source, chrootRootDir + i.first, i.second.optional);
}
/* Bind a new instance of procfs on /proc. */


@ -154,7 +154,7 @@ void PathSubstitutionGoal::tryNext()
only after we've downloaded the path. */
if (!sub->isTrusted && worker.store.pathInfoIsUntrusted(*info))
{
warn("the substitute for '%s' from '%s' is not signed by any of the keys in 'trusted-public-keys'",
warn("ignoring substitute for '%s' from '%s', as it's not signed by any of the keys in 'trusted-public-keys'",
worker.store.printStorePath(storePath), sub->getUri());
tryNext();
return;


@ -135,6 +135,7 @@ void LocalStore::addTempRoot(const StorePath & path)
state->fdRootsSocket.close();
goto restart;
}
throw;
}
}
@ -153,6 +154,7 @@ void LocalStore::addTempRoot(const StorePath & path)
state->fdRootsSocket.close();
goto restart;
}
throw;
} catch (EndOfFile & e) {
debug("GC socket disconnected");
state->fdRootsSocket.close();


@ -36,7 +36,6 @@ Settings::Settings()
, nixStateDir(canonPath(getEnv("NIX_STATE_DIR").value_or(NIX_STATE_DIR)))
, nixConfDir(canonPath(getEnv("NIX_CONF_DIR").value_or(NIX_CONF_DIR)))
, nixUserConfFiles(getUserConfigFiles())
, nixLibexecDir(canonPath(getEnv("NIX_LIBEXEC_DIR").value_or(NIX_LIBEXEC_DIR)))
, nixBinDir(canonPath(getEnv("NIX_BIN_DIR").value_or(NIX_BIN_DIR)))
, nixManDir(canonPath(NIX_MAN_DIR))
, nixDaemonSocketFile(canonPath(getEnv("NIX_DAEMON_SOCKET_PATH").value_or(nixStateDir + DEFAULT_SOCKET_PATH)))
@ -67,12 +66,13 @@ Settings::Settings()
sandboxPaths = tokenizeString<StringSet>("/bin/sh=" SANDBOX_SHELL);
#endif
/* chroot-like behavior from Apple's sandbox */
#if __APPLE__
sandboxPaths = tokenizeString<StringSet>("/System/Library/Frameworks /System/Library/PrivateFrameworks /bin/sh /bin/bash /private/tmp /private/var/tmp /usr/lib");
allowedImpureHostPrefixes = tokenizeString<StringSet>("/System/Library /usr/lib /dev /bin/sh");
#endif
buildHook = getSelfExe().value_or("nix") + " __build-remote";
}
void loadConfFile()


@ -79,9 +79,6 @@ public:
/* A list of user configuration files to load. */
std::vector<Path> nixUserConfFiles;
/* The directory where internal helper programs are stored. */
Path nixLibexecDir;
/* The directory where the main programs are stored. */
Path nixBinDir;
@ -195,7 +192,7 @@ public:
)",
{"build-timeout"}};
PathSetting buildHook{this, true, nixLibexecDir + "/nix/build-remote", "build-hook",
PathSetting buildHook{this, true, "", "build-hook",
"The path of the helper program that executes builds to remote machines."};
Setting<std::string> builders{
@ -802,7 +799,7 @@ public:
)"};
Setting<StringSet> ignoredAcls{
this, {"security.selinux", "system.nfs4_acl"}, "ignored-acls",
this, {"security.selinux", "system.nfs4_acl", "security.csm"}, "ignored-acls",
R"(
A list of ACLs that should be ignored, normally Nix attempts to
remove all ACLs from files and directories in the Nix store, but


@ -69,6 +69,7 @@ protected:
} catch (SysError & e) {
if (e.errNo == ENOENT)
throw NoSuchBinaryCacheFile("file '%s' does not exist in binary cache", path);
throw;
}
}


@ -39,14 +39,23 @@ libstore_CXXFLAGS += \
-DNIX_STATE_DIR=\"$(localstatedir)/nix\" \
-DNIX_LOG_DIR=\"$(localstatedir)/log/nix\" \
-DNIX_CONF_DIR=\"$(sysconfdir)/nix\" \
-DNIX_LIBEXEC_DIR=\"$(libexecdir)\" \
-DNIX_BIN_DIR=\"$(bindir)\" \
-DNIX_MAN_DIR=\"$(mandir)\" \
-DLSOF=\"$(lsof)\"
ifeq ($(embedded_sandbox_shell),yes)
libstore_CXXFLAGS += -DSANDBOX_SHELL=\"__embedded_sandbox_shell__\"
$(d)/build/local-derivation-goal.cc: $(d)/embedded-sandbox-shell.gen.hh
$(d)/embedded-sandbox-shell.gen.hh: $(sandbox_shell)
$(trace-gen) hexdump -v -e '1/1 "0x%x," "\n"' < $< > $@.tmp
@mv $@.tmp $@
else
ifneq ($(sandbox_shell),)
libstore_CXXFLAGS += -DSANDBOX_SHELL="\"$(sandbox_shell)\""
endif
endif
$(d)/local-store.cc: $(d)/schema.sql.gen.hh $(d)/ca-specific-schema.sql.gen.hh


@ -67,13 +67,26 @@ bool UserLock::findFreeUser() {
#if __linux__
/* Get the list of supplementary groups of this build user. This
is usually either empty or contains a group such as "kvm". */
supplementaryGIDs.resize(10);
int ngroups = supplementaryGIDs.size();
int err = getgrouplist(pw->pw_name, pw->pw_gid,
supplementaryGIDs.data(), &ngroups);
if (err == -1)
throw Error("failed to get list of supplementary groups for '%1%'", pw->pw_name);
int ngroups = 32; // arbitrary initial guess
supplementaryGIDs.resize(ngroups);
int err = getgrouplist(pw->pw_name, pw->pw_gid, supplementaryGIDs.data(),
&ngroups);
// Our initial size of 32 wasn't sufficient; the correct size has
// been stored in ngroups, so we try again.
if (err == -1) {
supplementaryGIDs.resize(ngroups);
err = getgrouplist(pw->pw_name, pw->pw_gid, supplementaryGIDs.data(),
&ngroups);
}
// If it failed once more, then something must be broken.
if (err == -1)
throw Error("failed to get list of supplementary groups for '%1%'",
pw->pw_name);
// Finally, trim back the GID list to its real size
supplementaryGIDs.resize(ngroups);
#endif


@ -62,6 +62,9 @@ public:
/* How often to purge expired entries from the cache. */
const int purgeInterval = 24 * 3600;
/* How long to cache binary cache info (i.e. /nix-cache-info) */
const int cacheInfoTtl = 7 * 24 * 3600;
struct Cache
{
int id;
@ -98,7 +101,7 @@ public:
"insert or replace into BinaryCaches(url, timestamp, storeDir, wantMassQuery, priority) values (?, ?, ?, ?, ?)");
state->queryCache.create(state->db,
"select id, storeDir, wantMassQuery, priority from BinaryCaches where url = ?");
"select id, storeDir, wantMassQuery, priority from BinaryCaches where url = ? and timestamp > ?");
state->insertNAR.create(state->db,
"insert or replace into NARs(cache, hashPart, namePart, url, compression, fileHash, fileSize, narHash, "
@ -183,7 +186,7 @@ public:
auto i = state->caches.find(uri);
if (i == state->caches.end()) {
auto queryCache(state->queryCache.use()(uri));
auto queryCache(state->queryCache.use()(uri)(time(0) - cacheInfoTtl));
if (!queryCache.next())
return std::nullopt;
state->caches.emplace(uri,


@ -69,8 +69,6 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &
if (value != "unknown-deriver")
deriver = StorePath(value);
}
else if (name == "System")
system = value;
else if (name == "Sig")
sigs.insert(value);
else if (name == "CA") {
@ -106,9 +104,6 @@ std::string NarInfo::to_string(const Store & store) const
if (deriver)
res += "Deriver: " + std::string(deriver->to_string()) + "\n";
if (!system.empty())
res += "System: " + system + "\n";
for (auto sig : sigs)
res += "Sig: " + sig + "\n";


@ -14,7 +14,6 @@ struct NarInfo : ValidPathInfo
std::string compression;
std::optional<Hash> fileHash;
uint64_t fileSize = 0;
std::string system;
NarInfo() = delete;
NarInfo(StorePath && path, Hash narHash) : ValidPathInfo(std::move(path), narHash) { }


@ -98,7 +98,9 @@
(allow file*
(literal "/private/var/select/sh"))
; Allow Rosetta 2 to run x86_64 binaries on aarch64-darwin.
; Allow Rosetta 2 to run x86_64 binaries on aarch64-darwin (and vice versa).
(allow file-read*
(subpath "/Library/Apple/usr/libexec/oah")
(subpath "/System/Library/Apple/usr/libexec/oah"))
(subpath "/System/Library/Apple/usr/libexec/oah")
(subpath "/System/Library/LaunchDaemons/com.apple.oahd.plist")
(subpath "/Library/Apple/System/Library/LaunchDaemons/com.apple.oahd.plist"))


@ -1320,7 +1320,8 @@ std::pair<std::string, Store::Params> splitUriAndParams(const std::string & uri_
return {uri, params};
}
static bool isNonUriPath(const std::string & spec) {
static bool isNonUriPath(const std::string & spec)
{
return
// is not a URL
spec.find("://") == std::string::npos
@ -1337,6 +1338,31 @@ std::shared_ptr<Store> openFromNonUri(const std::string & uri, const Store::Para
return std::make_shared<LocalStore>(params);
else if (pathExists(settings.nixDaemonSocketFile))
return std::make_shared<UDSRemoteStore>(params);
#if __linux__
else if (!pathExists(stateDir)
&& params.empty()
&& getuid() != 0
&& !getEnv("NIX_STORE_DIR").has_value()
&& !getEnv("NIX_STATE_DIR").has_value())
{
/* If /nix doesn't exist, there is no daemon socket, and
we're not root, then automatically set up a chroot
store in ~/.local/share/nix/root. */
auto chrootStore = getDataDir() + "/nix/root";
if (!pathExists(chrootStore)) {
try {
createDirs(chrootStore);
} catch (Error & e) {
return std::make_shared<LocalStore>(params);
}
warn("'/nix' does not exist, so Nix will use '%s' as a chroot store", chrootStore);
} else
debug("'/nix' does not exist, so Nix will use '%s' as a chroot store", chrootStore);
Store::Params params2;
params2["root"] = chrootStore;
return std::make_shared<LocalStore>(params2);
}
#endif
else
return std::make_shared<LocalStore>(params);
} else if (uri == "daemon") {


@ -124,7 +124,7 @@ bool Args::processFlag(Strings::iterator & pos, Strings::iterator end)
bool anyCompleted = false;
for (size_t n = 0 ; n < flag.handler.arity; ++n) {
if (pos == end) {
if (flag.handler.arity == ArityAny) break;
if (flag.handler.arity == ArityAny || anyCompleted) break;
throw UsageError("flag '%s' requires %d argument(s)", name, flag.handler.arity);
}
if (auto prefix = needsCompletion(*pos)) {
@ -362,6 +362,14 @@ bool MultiCommand::processArgs(const Strings & args, bool finish)
return Args::processArgs(args, finish);
}
void MultiCommand::completionHook()
{
if (command)
return command->second->completionHook();
else
return Args::completionHook();
}
nlohmann::json MultiCommand::toJSON()
{
auto cmds = nlohmann::json::object();


@ -25,6 +25,8 @@ public:
/* Return a short one-line description of the command. */
virtual std::string description() { return ""; }
virtual bool forceImpureByDefault() { return false; }
/* Return documentation about this command, in Markdown format. */
virtual std::string doc() { return ""; }
@ -146,6 +148,11 @@ protected:
argument (if any) have been processed. */
virtual void initialFlagsProcessed() {}
/* Called after the command line has been processed if we need to generate
completions. Useful for commands that need to know the whole command line
in order to know what completions to generate. */
virtual void completionHook() { }
public:
void addFlag(Flag && flag);
@ -221,6 +228,8 @@ public:
bool processArgs(const Strings & args, bool finish) override;
void completionHook() override;
nlohmann::json toJSON() override;
};


@ -13,6 +13,7 @@ std::map<ExperimentalFeature, std::string> stringifiedXpFeatures = {
{ Xp::RecursiveNix, "recursive-nix" },
{ Xp::NoUrlLiterals, "no-url-literals" },
{ Xp::FetchClosure, "fetch-closure" },
{ Xp::ReplFlake, "repl-flake" },
};
const std::optional<ExperimentalFeature> parseExperimentalFeature(const std::string_view & name)


@ -22,6 +22,7 @@ enum struct ExperimentalFeature
RecursiveNix,
NoUrlLiterals,
FetchClosure,
ReplFlake,
};
/**


@ -8,9 +8,9 @@ std::string hiliteMatches(
std::string_view prefix,
std::string_view postfix)
{
// Avoid copy on zero matches
// Avoid extra work on zero matches
if (matches.size() == 0)
return (std::string) s;
return std::string(s);
std::sort(matches.begin(), matches.end(), [](const auto & a, const auto & b) {
return a.position() < b.position();


@ -29,6 +29,7 @@
#ifdef __APPLE__
#include <sys/syscall.h>
#include <mach-o/dyld.h>
#endif
#ifdef __linux__
@ -574,6 +575,20 @@ Path getHome()
static Path homeDir = []()
{
auto homeDir = getEnv("HOME");
if (homeDir) {
// Only use $HOME if it doesn't exist or is owned by the current user.
struct stat st;
int result = stat(homeDir->c_str(), &st);
if (result != 0) {
if (errno != ENOENT) {
warn("couldn't stat $HOME ('%s') for reason other than not existing ('%d'), falling back to the one defined in the 'passwd' file", *homeDir, errno);
homeDir.reset();
}
} else if (st.st_uid != geteuid()) {
warn("$HOME ('%s') is not owned by you, falling back to the one defined in the 'passwd' file", *homeDir);
homeDir.reset();
}
}
if (!homeDir) {
std::vector<char> buf(16384);
struct passwd pwbuf;
@ -619,6 +634,27 @@ Path getDataDir()
}
std::optional<Path> getSelfExe()
{
static auto cached = []() -> std::optional<Path>
{
#if __linux__
return readLink("/proc/self/exe");
#elif __APPLE__
char buf[1024];
uint32_t size = sizeof(buf);
if (_NSGetExecutablePath(buf, &size) == 0)
return buf;
else
return std::nullopt;
#else
return std::nullopt;
#endif
}();
return cached;
}
Paths createDirs(const Path & path)
{
Paths created;


@ -149,10 +149,14 @@ std::vector<Path> getConfigDirs();
/* Return $XDG_DATA_HOME or $HOME/.local/share. */
Path getDataDir();
/* Return the path of the current executable. */
std::optional<Path> getSelfExe();
/* Create a directory and all its parents, if necessary. Returns the
list of created directories, in order of creation. */
Paths createDirs(const Path & path);
inline Paths createDirs(PathView path) {
inline Paths createDirs(PathView path)
{
return createDirs(Path(path));
}
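
`getSelfExe()` is what the `nix run` chroot helper below now uses instead of reading `/proc/self/exe` directly. A minimal re-exec sketch in the same spirit (the `reexecSelf` wrapper is hypothetical and assumes it lives in the nix tree with `util.hh` included and the `nix` namespace in scope):

```c++
#include <unistd.h>

#include <string>
#include <vector>

// Re-execute the current binary with new arguments, falling back to whatever
// "nix" resolves to on $PATH when the executable path cannot be determined
// (e.g. on platforms other than Linux and macOS).
[[noreturn]] void reexecSelf(const std::vector<std::string> & args)
{
    std::vector<char *> argv;
    for (auto & a : args) argv.push_back(const_cast<char *>(a.c_str()));
    argv.push_back(nullptr);

    auto exe = getSelfExe().value_or("nix");   // std::optional<Path> from util.hh
    execv(exe.c_str(), argv.data());
    throw SysError("could not re-execute '%s'", exe);
}
```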

View file

@ -257,11 +257,12 @@ static void main_nix_build(int argc, char * * argv)
auto autoArgs = myArgs.getAutoArgs(*state);
auto autoArgsWithInNixShell = autoArgs;
if (runEnv) {
auto newArgs = state->buildBindings(autoArgs->size() + 1);
auto newArgs = state->buildBindings(autoArgsWithInNixShell->size() + 1);
newArgs.alloc("inNixShell").mkBool(true);
for (auto & i : *autoArgs) newArgs.insert(i);
autoArgs = newArgs.finish();
autoArgsWithInNixShell = newArgs.finish();
}
if (packages) {
@ -316,10 +317,39 @@ static void main_nix_build(int argc, char * * argv)
Value vRoot;
state->eval(e, vRoot);
std::function<bool(const Value & v)> takesNixShellAttr;
takesNixShellAttr = [&](const Value & v) {
if (!runEnv) {
return false;
}
bool add = false;
if (v.type() == nFunction && v.lambda.fun->hasFormals()) {
for (auto & i : v.lambda.fun->formals->formals) {
if (state->symbols[i.name] == "inNixShell") {
add = true;
break;
}
}
}
return add;
};
for (auto & i : attrPaths) {
Value & v(*findAlongAttrPath(*state, i, *autoArgs, vRoot).first);
Value & v(*findAlongAttrPath(
*state,
i,
takesNixShellAttr(vRoot) ? *autoArgsWithInNixShell : *autoArgs,
vRoot
).first);
state->forceValue(v, [&]() { return v.determinePos(noPos); });
getDerivations(*state, v, "", *autoArgs, drvs, false);
getDerivations(
*state,
v,
"",
takesNixShellAttr(v) ? *autoArgsWithInNixShell : *autoArgs,
drvs,
false
);
}
}
@ -543,6 +573,8 @@ static void main_nix_build(int argc, char * * argv)
restoreProcessContext();
logger->stop();
execvp(shell->c_str(), argPtrs.data());
throw SysError("executing shell '%s'", *shell);
@ -601,6 +633,8 @@ static void main_nix_build(int argc, char * * argv)
outPaths.push_back(outputPath);
}
logger->stop();
for (auto & path : outPaths)
std::cout << store->printStorePath(path) << '\n';
}

View file

@ -37,6 +37,7 @@ void removeOldGenerations(std::string dir)
link = readLink(path);
} catch (SysError & e) {
if (e.errNo == ENOENT) continue;
throw;
}
if (link.find("link") != std::string::npos) {
printInfo(format("removing old generations of profile %1%") % path);

View file

@ -1485,7 +1485,7 @@ static int main_nix_env(int argc, char * * argv)
if (globals.profile == "")
globals.profile = getDefaultProfile();
op(globals, opFlags, opArgs);
op(globals, std::move(opFlags), std::move(opArgs));
globals.state->printStats();

View file

@ -1093,7 +1093,7 @@ static int main_nix_store(int argc, char * * argv)
if (op != opDump && op != opRestore) /* !!! hack */
store = openStore();
op(opFlags, opArgs);
op(std::move(opFlags), std::move(opArgs));
return 0;
}

View file

@ -276,15 +276,27 @@ struct Common : InstallableCommand, MixProfile
const BuildEnvironment & buildEnvironment,
const Path & outputsDir = absPath(".") + "/outputs")
{
// A list of colon-separated environment variables that should be
// prepended to, rather than overwritten, in order to keep the shell usable.
// Please keep this list minimal in order to avoid impurities.
static const char * const savedVars[] = {
"PATH", // for commands
"XDG_DATA_DIRS", // for loadable completion
};
std::ostringstream out;
out << "unset shellHook\n";
out << "nix_saved_PATH=\"$PATH\"\n";
for (auto & var : savedVars) {
out << fmt("%s=${%s:-}\n", var, var);
out << fmt("nix_saved_%s=\"$%s\"\n", var, var);
}
buildEnvironment.toBash(out, ignoreVars);
out << "PATH=\"$PATH:$nix_saved_PATH\"\n";
for (auto & var : savedVars)
out << fmt("%s=\"$%s:$nix_saved_%s\"\n", var, var, var);
out << "export NIX_BUILD_TOP=\"$(mktemp -d -t nix-shell.XXXXXX)\"\n";
for (auto & i : {"TMP", "TMPDIR", "TEMP", "TEMPDIR"})

View file

@ -50,9 +50,9 @@ public:
return flake::lockFlake(*getEvalState(), getFlakeRef(), lockFlags);
}
std::optional<FlakeRef> getFlakeRefForCompletion() override
std::vector<std::string> getFlakesForCompletion() override
{
return getFlakeRef();
return {flakeUrl};
}
};
@ -740,7 +740,8 @@ struct CmdFlakeInitCommon : virtual Args, EvalCommand
"If you've set '%s' to a string, try using a path instead.",
templateDir, templateDirAttr->getAttrPathStr());
std::vector<Path> files;
std::vector<Path> changedFiles;
std::vector<Path> conflictedFiles;
std::function<void(const Path & from, const Path & to)> copyDir;
copyDir = [&](const Path & from, const Path & to)
@ -757,31 +758,41 @@ struct CmdFlakeInitCommon : virtual Args, EvalCommand
auto contents = readFile(from2);
if (pathExists(to2)) {
auto contents2 = readFile(to2);
if (contents != contents2)
throw Error("refusing to overwrite existing file '%s'", to2);
if (contents != contents2) {
printError("refusing to overwrite existing file '%s'\n please merge it manually with '%s'", to2, from2);
conflictedFiles.push_back(to2);
} else {
notice("skipping identical file: %s", from2);
}
continue;
} else
writeFile(to2, contents);
}
else if (S_ISLNK(st.st_mode)) {
auto target = readLink(from2);
if (pathExists(to2)) {
if (readLink(to2) != target)
throw Error("refusing to overwrite existing symlink '%s'", to2);
if (readLink(to2) != target) {
printError("refusing to overwrite existing file '%s'\n please merge it manually with '%s'", to2, from2);
conflictedFiles.push_back(to2);
} else {
notice("skipping identical file: %s", from2);
}
continue;
} else
createSymlink(target, to2);
}
else
throw Error("file '%s' has unsupported type", from2);
files.push_back(to2);
changedFiles.push_back(to2);
notice("wrote: %s", to2);
}
};
copyDir(templateDir, flakeDir);
if (pathExists(flakeDir + "/.git")) {
if (!changedFiles.empty() && pathExists(flakeDir + "/.git")) {
Strings args = { "-C", flakeDir, "add", "--intent-to-add", "--force", "--" };
for (auto & s : files) args.push_back(s);
for (auto & s : changedFiles) args.push_back(s);
runProgram("git", true, args);
}
auto welcomeText = cursor->maybeGetAttr("welcomeText");
@ -789,6 +800,9 @@ struct CmdFlakeInitCommon : virtual Args, EvalCommand
notice("\n");
notice(renderMarkdownToTerminal(welcomeText->getString()));
}
if (!conflictedFiles.empty())
throw Error("Encountered %d conflicts - see above", conflictedFiles.size());
}
};

View file

@ -30,7 +30,7 @@ convert-secret-to-public` to get the corresponding public key for
verifying signed store paths.
The mandatory argument `--key-name` specifies a key name (such as
`cache.example.org-1). It is used to look up keys on the client when
`cache.example.org-1`). It is used to look up keys on the client when
it verifies signatures. It can be anything, but it's suggested to use
the host name of your cache (e.g. `cache.example.org`) with a suffix
denoting the number of the key (to be incremented every time you need

View file

@ -266,6 +266,11 @@ void mainWrapped(int argc, char * * argv)
programPath = argv[0];
auto programName = std::string(baseNameOf(programPath));
if (argc > 0 && std::string_view(argv[0]) == "__build-remote") {
programName = "build-remote";
argv++; argc--;
}
{
auto legacy = (*RegisterLegacyCommand::commands)[programName];
if (legacy) return legacy(argc, argv);
@ -342,7 +347,10 @@ void mainWrapped(int argc, char * * argv)
if (!completions) throw;
}
if (completions) return;
if (completions) {
args.completionHook();
return;
}
if (args.showVersion) {
printVersion(programName);
@ -380,6 +388,9 @@ void mainWrapped(int argc, char * * argv)
settings.ttlPositiveNarInfoCache = 0;
}
if (args.command->second->forceImpureByDefault() && !evalSettings.pureEval.overridden) {
evalSettings.pureEval = false;
}
args.command->second->prepare();
args.command->second->run();
}

View file

@ -29,7 +29,7 @@ highest precedence:
can be specified using the NixOS option `nix.registry`.
* The user registry `~/.config/nix/registry.json`. This registry can
be modified by commands such as `nix flake pin`.
be modified by commands such as `nix registry pin`.
* Overrides specified on the command line using the option
`--override-flake`.

View file

@ -24,10 +24,34 @@ R""(
* Interact with Nixpkgs in the REPL:
```console
# nix repl '<nixpkgs>'
# nix repl --file example.nix
Loading Installable ''...
Added 3 variables.
Loading '<nixpkgs>'...
Added 12428 variables.
# nix repl --expr '{a={b=3;c=4;};}'
Loading Installable ''...
Added 1 variables.
# nix repl --expr '{a={b=3;c=4;};}' a
Loading Installable ''...
Added 1 variables.
# nix repl --extra-experimental-features 'flakes repl-flake' nixpkgs
Loading Installable 'flake:nixpkgs#'...
Added 5 variables.
nix-repl> legacyPackages.x86_64-linux.emacs.name
"emacs-27.1"
nix-repl> legacyPackages.x86_64-linux.emacs.name
"emacs-27.1"
nix-repl> :q
# nix repl --expr 'import <nixpkgs>{}'
Loading Installable ''...
Added 12439 variables.
nix-repl> emacs.name
"emacs-27.1"

View file

@ -47,7 +47,7 @@ void runProgramInStore(ref<Store> store,
Strings helperArgs = { chrootHelperName, store->storeDir, store2->getRealStoreDir(), program };
for (auto & arg : args) helperArgs.push_back(arg);
execv(readLink("/proc/self/exe").c_str(), stringsToCharPtrs(helperArgs).data());
execv(getSelfExe().value_or("nix").c_str(), stringsToCharPtrs(helperArgs).data());
throw SysError("could not execute chroot helper");
}

View file

@ -18,16 +18,26 @@ using namespace nix;
std::string wrap(std::string prefix, std::string s)
{
return prefix + s + ANSI_NORMAL;
return concatStrings(prefix, s, ANSI_NORMAL);
}
struct CmdSearch : InstallableCommand, MixJSON
{
std::vector<std::string> res;
std::vector<std::string> excludeRes;
CmdSearch()
{
expectArgs("regex", &res);
addFlag(Flag {
.longName = "exclude",
.shortName = 'e',
.description = "Hide packages whose attribute path, name or description contain *regex*.",
.labels = {"regex"},
.handler = {[this](std::string s) {
excludeRes.push_back(s);
}},
});
}
std::string description() override
@ -62,11 +72,16 @@ struct CmdSearch : InstallableCommand, MixJSON
res.push_back("^");
std::vector<std::regex> regexes;
std::vector<std::regex> excludeRegexes;
regexes.reserve(res.size());
excludeRegexes.reserve(excludeRes.size());
for (auto & re : res)
regexes.push_back(std::regex(re, std::regex::extended | std::regex::icase));
for (auto & re : excludeRes)
excludeRegexes.emplace_back(re, std::regex::extended | std::regex::icase);
auto state = getEvalState();
auto jsonOut = json ? std::make_unique<JSONObject>(std::cout) : nullptr;
@ -106,6 +121,14 @@ struct CmdSearch : InstallableCommand, MixJSON
std::vector<std::smatch> nameMatches;
bool found = false;
for (auto & regex : excludeRegexes) {
if (
std::regex_search(attrPath2, regex)
|| std::regex_search(name.name, regex)
|| std::regex_search(description, regex))
return;
}
for (auto & regex : regexes) {
found = false;
auto addAll = [&found](std::sregex_iterator it, std::vector<std::smatch> & vec) {
@ -133,15 +156,15 @@ struct CmdSearch : InstallableCommand, MixJSON
jsonElem.attr("version", name.version);
jsonElem.attr("description", description);
} else {
auto name2 = hiliteMatches(name.name, std::move(nameMatches), ANSI_GREEN, "\e[0;2m");
auto name2 = hiliteMatches(name.name, nameMatches, ANSI_GREEN, "\e[0;2m");
if (results > 1) logger->cout("");
logger->cout(
"* %s%s",
wrap("\e[0;1m", hiliteMatches(attrPath2, std::move(attrPathMatches), ANSI_GREEN, "\e[0;1m")),
wrap("\e[0;1m", hiliteMatches(attrPath2, attrPathMatches, ANSI_GREEN, "\e[0;1m")),
name.version != "" ? " (" + name.version + ")" : "");
if (description != "")
logger->cout(
" %s", hiliteMatches(description, std::move(descriptionMatches), ANSI_GREEN, ANSI_NORMAL));
" %s", hiliteMatches(description, descriptionMatches, ANSI_GREEN, ANSI_NORMAL));
}
}
}
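
Restated as a standalone predicate (a hedged sketch, not code from the tree): a package is listed only if no `--exclude` regex matches any of the three searched fields and every positive regex matches at least one of them.

```c++
#include <regex>
#include <string>
#include <vector>

// Hypothetical restatement of the filtering policy added above.
bool shouldShow(
    const std::string & attrPath,
    const std::string & name,
    const std::string & description,
    const std::vector<std::regex> & regexes,          // positive search patterns
    const std::vector<std::regex> & excludeRegexes)   // --exclude patterns
{
    auto matchesAny = [&](const std::regex & re) {
        return std::regex_search(attrPath, re)
            || std::regex_search(name, re)
            || std::regex_search(description, re);
    };
    for (auto & re : excludeRegexes)
        if (matchesAny(re)) return false;   // any exclude hit hides the package
    for (auto & re : regexes)
        if (!matchesAny(re)) return false;  // every positive pattern must hit
    return true;
}
```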

View file

@ -43,12 +43,23 @@ R""(
# nix search nixpkgs 'firefox|chromium'
```
* Search for packages containing `git'`and either `frontend` or `gui`:
* Search for packages containing `git` and either `frontend` or `gui`:
```console
# nix search nixpkgs git 'frontend|gui'
```
* Search for packages containing `neovim` but hide ones containing either `gui` or `python`:
```console
# nix search nixpkgs neovim -e 'python|gui'
```
or
```console
# nix search nixpkgs neovim -e 'python' -e 'gui'
```
# Description
`nix search` searches *installable* (which must be evaluatable, e.g. a

View file

@ -34,7 +34,7 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand
std::string description() override
{
return "upgrade Nix to the latest stable version";
return "upgrade Nix to the stable version declared in Nixpkgs";
}
std::string doc() override

View file

@ -2,7 +2,7 @@ R""(
# Examples
* Upgrade Nix to the latest stable version:
* Upgrade Nix to the stable version declared in Nixpkgs:
```console
# nix upgrade-nix
@ -16,8 +16,11 @@ R""(
# Description
This command upgrades Nix to the latest version. By default, it
locates the directory containing the `nix` binary in the `$PATH`
This command upgrades Nix to the stable version declared in Nixpkgs.
This stable version is defined in [nix-fallback-paths.nix](https://github.com/NixOS/nixpkgs/raw/master/nixos/modules/installer/tools/nix-fallback-paths.nix)
and updated manually. It may not always be the latest tagged release.
By default, it locates the directory containing the `nix` binary in the `$PATH`
environment variable. If that directory is a Nix profile, it will
upgrade the `nix` package in that profile to the latest stable binary
release.

View file

@ -1 +1 @@
{ ... }@args: import ./shell.nix (args // { contentAddressed = true; })
{ inNixShell ? false, ... }@args: import ./shell.nix (args // { contentAddressed = true; })

View file

@ -75,7 +75,7 @@ rec {
buildCommand = ''
mkdir -p $out/bin
echo ${rootCA} # Just to make it depend on it
echo "" > $out/bin/${name}
echo "#! ${shell}" > $out/bin/${name}
chmod +x $out/bin/${name}
'';
};

View file

@ -50,6 +50,8 @@ export busybox="@sandbox_shell@"
export version=@PACKAGE_VERSION@
export system=@system@
export BUILD_SHARED_LIBS=@BUILD_SHARED_LIBS@
export IMPURE_VAR1=foo
export IMPURE_VAR2=bar
@ -117,11 +119,11 @@ killDaemon() {
}
restartDaemon() {
[[ -z "${pidDaemon:-}" ]] && return 0
[[ -z "${pidDaemon:-}" ]] && return 0
killDaemon
unset NIX_REMOTE
startDaemon
killDaemon
unset NIX_REMOTE
startDaemon
}
if [[ $(uname) == Linux ]] && [[ -L /proc/self/ns/user ]] && unshare --user true; then
@ -188,4 +190,15 @@ if [[ -n "${NIX_DAEMON_PACKAGE:-}" ]]; then
startDaemon
fi
onError() {
set +x
echo "$0: test failed at:" >&2
for ((i = 1; i < 16; i++)); do
if [[ -z ${BASH_SOURCE[i]} ]]; then break; fi
echo " ${FUNCNAME[i]} in ${BASH_SOURCE[i]}:${BASH_LINENO[i-1]}" >&2
done
}
trap onError ERR
fi # COMMON_SH_SOURCED

62
tests/completions.sh Normal file
View file

@ -0,0 +1,62 @@
source common.sh
cd "$TEST_ROOT"
mkdir -p dep
cat <<EOF > dep/flake.nix
{
outputs = i: { };
}
EOF
mkdir -p foo
cat <<EOF > foo/flake.nix
{
inputs.a.url = "path:$(realpath dep)";
outputs = i: {
sampleOutput = 1;
};
}
EOF
mkdir -p bar
cat <<EOF > bar/flake.nix
{
inputs.b.url = "path:$(realpath dep)";
outputs = i: {
sampleOutput = 1;
};
}
EOF
# Test the completion of a subcommand
[[ "$(NIX_GET_COMPLETIONS=1 nix buil)" == $'normal\nbuild\t' ]]
[[ "$(NIX_GET_COMPLETIONS=2 nix flake metad)" == $'normal\nmetadata\t' ]]
# Filename completion
[[ "$(NIX_GET_COMPLETIONS=2 nix build ./f)" == $'filenames\n./foo\t' ]]
[[ "$(NIX_GET_COMPLETIONS=2 nix build ./nonexistent)" == $'filenames' ]]
# Input override completion
[[ "$(NIX_GET_COMPLETIONS=4 nix build ./foo --override-input '')" == $'normal\na\t' ]]
[[ "$(NIX_GET_COMPLETIONS=5 nix flake show ./foo --override-input '')" == $'normal\na\t' ]]
## With multiple input flakes
[[ "$(NIX_GET_COMPLETIONS=5 nix build ./foo ./bar --override-input '')" == $'normal\na\t\nb\t' ]]
## With tilde expansion
[[ "$(HOME=$PWD NIX_GET_COMPLETIONS=4 nix build '~/foo' --override-input '')" == $'normal\na\t' ]]
## Out of order
[[ "$(NIX_GET_COMPLETIONS=3 nix build --update-input '' ./foo)" == $'normal\na\t' ]]
[[ "$(NIX_GET_COMPLETIONS=4 nix build ./foo --update-input '' ./bar)" == $'normal\na\t\nb\t' ]]
# Cli flag completion
NIX_GET_COMPLETIONS=2 nix build --log-form | grep -- "--log-format"
# Config option completion
## With `--option`
NIX_GET_COMPLETIONS=3 nix build --option allow-import-from | grep -- "allow-import-from-derivation"
## As a CLI flag: not working at the moment
# NIX_GET_COMPLETIONS=2 nix build --allow-import-from | grep -- "allow-import-from-derivation"
# Attr path completions
[[ "$(NIX_GET_COMPLETIONS=2 nix eval ./foo\#sam)" == $'attrs\n./foo#sampleOutput\t' ]]
[[ "$(NIX_GET_COMPLETIONS=4 nix eval --file ./foo/flake.nix outp)" == $'attrs\noutputs\t' ]]

View file

@ -1,9 +1,6 @@
source common.sh
clearStore
rm -rf $TEST_HOME/.cache $TEST_HOME/.config $TEST_HOME/.local
cp ./simple.nix ./simple.builder.sh ./config.nix $TEST_HOME
cp ../simple.nix ../simple.builder.sh ../config.nix $TEST_HOME
cd $TEST_HOME
@ -25,6 +22,7 @@ cat <<EOF > flake.nix
};
}
EOF
nix build .#
nix bundle --bundler .# .#
nix bundle --bundler .#bundlers.$system.default .#packages.$system.default
@ -32,6 +30,3 @@ nix bundle --bundler .#bundlers.$system.simple .#packages.$system.default
nix bundle --bundler .#bundlers.$system.default .#apps.$system.default
nix bundle --bundler .#bundlers.$system.simple .#apps.$system.default
clearStore

89
tests/flakes/check.sh Normal file
View file

@ -0,0 +1,89 @@
source common.sh
flakeDir=$TEST_ROOT/flake3
mkdir -p $flakeDir
cat > $flakeDir/flake.nix <<EOF
{
outputs = { self }: {
overlay = final: prev: {
};
};
}
EOF
nix flake check $flakeDir
cat > $flakeDir/flake.nix <<EOF
{
outputs = { self }: {
overlay = finalll: prev: {
};
};
}
EOF
(! nix flake check $flakeDir)
cat > $flakeDir/flake.nix <<EOF
{
outputs = { self }: {
nixosModules.foo = {
a.b.c = 123;
foo = true;
};
};
}
EOF
nix flake check $flakeDir
cat > $flakeDir/flake.nix <<EOF
{
outputs = { self }: {
nixosModules.foo = {
a.b.c = 123;
foo = assert false; true;
};
};
}
EOF
(! nix flake check $flakeDir)
cat > $flakeDir/flake.nix <<EOF
{
outputs = { self }: {
nixosModule = { config, pkgs, ... }: {
a.b.c = 123;
};
};
}
EOF
nix flake check $flakeDir
cat > $flakeDir/flake.nix <<EOF
{
outputs = { self }: {
nixosModule = { config, pkgs }: {
a.b.c = 123;
};
};
}
EOF
(! nix flake check $flakeDir)
cat > $flakeDir/flake.nix <<EOF
{
outputs = { self }: {
packages.system-1.default = "foo";
packages.system-2.default = "bar";
};
}
EOF
checkRes=$(nix flake check --keep-going $flakeDir 2>&1 && fail "nix flake check should have failed" || true)
echo "$checkRes" | grep -q "packages.system-1.default"
echo "$checkRes" | grep -q "packages.system-2.default"

49
tests/flakes/circular.sh Normal file
View file

@ -0,0 +1,49 @@
# Test circular flake dependencies.
source ./common.sh
requireGit
flakeA=$TEST_ROOT/flakeA
flakeB=$TEST_ROOT/flakeB
createGitRepo $flakeA
createGitRepo $flakeB
cat > $flakeA/flake.nix <<EOF
{
inputs.b.url = git+file://$flakeB;
inputs.b.inputs.a.follows = "/";
outputs = { self, b }: {
foo = 123 + b.bar;
xyzzy = 1000;
};
}
EOF
git -C $flakeA add flake.nix
cat > $flakeB/flake.nix <<EOF
{
inputs.a.url = git+file://$flakeA;
outputs = { self, a }: {
bar = 456 + a.xyzzy;
};
}
EOF
git -C $flakeB add flake.nix
git -C $flakeB commit -a -m 'Foo'
[[ $(nix eval $flakeA#foo) = 1579 ]]
[[ $(nix eval $flakeA#foo) = 1579 ]]
sed -i $flakeB/flake.nix -e 's/456/789/'
git -C $flakeB commit -a -m 'Foo'
[[ $(nix eval --update-input b $flakeA#foo) = 1912 ]]
# Test list-inputs with circular dependencies
nix flake metadata $flakeA
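
As an aside, the expected values in the assertions above follow directly from the two flake definitions; an illustrative compile-time restatement:

```c++
// flakeB.bar = 456 + flakeA.xyzzy (1000); flakeA.foo = 123 + flakeB.bar.
static_assert(123 + (456 + 1000) == 1579, "value with the initial lock");
// After sed replaces 456 with 789 and --update-input b relocks the input:
static_assert(123 + (789 + 1000) == 1912, "value after the edit");
```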

73
tests/flakes/common.sh Normal file
View file

@ -0,0 +1,73 @@
source ../common.sh
registry=$TEST_ROOT/registry.json
requireGit() {
if [[ -z $(type -p git) ]]; then
echo "Git not installed; skipping flake tests"
exit 99
fi
}
writeSimpleFlake() {
local flakeDir="$1"
cat > $flakeDir/flake.nix <<EOF
{
description = "Bla bla";
outputs = inputs: rec {
packages.$system = rec {
foo = import ./simple.nix;
default = foo;
};
# To test "nix flake init".
legacyPackages.x86_64-linux.hello = import ./simple.nix;
};
}
EOF
cp ../simple.nix ../simple.builder.sh ../config.nix $flakeDir/
}
createSimpleGitFlake() {
local flakeDir="$1"
writeSimpleFlake $flakeDir
git -C $flakeDir add flake.nix simple.nix simple.builder.sh config.nix
git -C $flakeDir commit -m 'Initial'
}
writeDependentFlake() {
local flakeDir="$1"
cat > $flakeDir/flake.nix <<EOF
{
outputs = { self, flake1 }: {
packages.$system.default = flake1.packages.$system.default;
expr = assert builtins.pathExists ./flake.lock; 123;
};
}
EOF
}
writeTrivialFlake() {
local flakeDir="$1"
cat > $flakeDir/flake.nix <<EOF
{
outputs = { self }: {
expr = 123;
};
}
EOF
}
createGitRepo() {
local repo="$1"
local extraArgs="$2"
rm -rf $repo $repo.tmp
mkdir -p $repo
git -C $repo init $extraArgs
git -C $repo config user.email "foobar@example.com"
git -C $repo config user.name "Foobar"
}

View file

@ -1,9 +1,6 @@
source common.sh
clearStore
rm -rf $TEST_HOME/.cache $TEST_HOME/.config $TEST_HOME/.local
cp ./simple.nix ./simple.builder.sh ./config.nix $TEST_HOME
cp ../simple.nix ../simple.builder.sh ../config.nix $TEST_HOME
cd $TEST_HOME

View file

@ -1,67 +1,30 @@
source common.sh
source ./common.sh
if [[ -z $(type -p git) ]]; then
echo "Git not installed; skipping flake tests"
exit 99
fi
requireGit
clearStore
rm -rf $TEST_HOME/.cache $TEST_HOME/.config
registry=$TEST_ROOT/registry.json
flake1Dir=$TEST_ROOT/flake1
flake2Dir=$TEST_ROOT/flake2
flake3Dir=$TEST_ROOT/flake3
flake5Dir=$TEST_ROOT/flake5
flake6Dir=$TEST_ROOT/flake6
flake7Dir=$TEST_ROOT/flake7
templatesDir=$TEST_ROOT/templates
nonFlakeDir=$TEST_ROOT/nonFlake
badFlakeDir=$TEST_ROOT/badFlake
flakeA=$TEST_ROOT/flakeA
flakeB=$TEST_ROOT/flakeB
flakeGitBare=$TEST_ROOT/flakeGitBare
flakeFollowsA=$TEST_ROOT/follows/flakeA
flakeFollowsB=$TEST_ROOT/follows/flakeA/flakeB
flakeFollowsC=$TEST_ROOT/follows/flakeA/flakeB/flakeC
flakeFollowsD=$TEST_ROOT/follows/flakeA/flakeD
flakeFollowsE=$TEST_ROOT/follows/flakeA/flakeE
for repo in $flake1Dir $flake2Dir $flake3Dir $flake7Dir $templatesDir $nonFlakeDir $flakeA $flakeB $flakeFollowsA; do
rm -rf $repo $repo.tmp
mkdir -p $repo
for repo in $flake1Dir $flake2Dir $flake3Dir $flake7Dir $nonFlakeDir; do
# Give one repo a non-main initial branch.
extraArgs=
if [[ $repo == $flake2Dir ]]; then
extraArgs="--initial-branch=main"
fi
git -C $repo init $extraArgs
git -C $repo config user.email "foobar@example.com"
git -C $repo config user.name "Foobar"
createGitRepo "$repo" "$extraArgs"
done
cat > $flake1Dir/flake.nix <<EOF
{
description = "Bla bla";
outputs = inputs: rec {
packages.$system = rec {
foo = import ./simple.nix;
default = foo;
};
# To test "nix flake init".
legacyPackages.x86_64-linux.hello = import ./simple.nix;
};
}
EOF
cp ./simple.nix ./simple.builder.sh ./config.nix $flake1Dir/
git -C $flake1Dir add flake.nix simple.nix simple.builder.sh config.nix
git -C $flake1Dir commit -m 'Initial'
createSimpleGitFlake $flake1Dir
cat > $flake2Dir/flake.nix <<EOF
{
@ -105,12 +68,10 @@ nix registry add --registry $registry flake1 git+file://$flake1Dir
nix registry add --registry $registry flake2 git+file://$flake2Dir
nix registry add --registry $registry flake3 git+file://$flake3Dir
nix registry add --registry $registry flake4 flake3
nix registry add --registry $registry flake5 hg+file://$flake5Dir
nix registry add --registry $registry nixpkgs flake1
nix registry add --registry $registry templates git+file://$templatesDir
# Test 'nix flake list'.
[[ $(nix registry list | wc -l) == 7 ]]
[[ $(nix registry list | wc -l) == 5 ]]
# Test 'nix flake metadata'.
nix flake metadata flake1
@ -291,7 +252,7 @@ cat > $flake3Dir/flake.nix <<EOF
}
EOF
cp ./config.nix $flake3Dir
cp ../config.nix $flake3Dir
git -C $flake3Dir add flake.nix config.nix
git -C $flake3Dir commit -m 'Add nonFlakeInputs'
@ -366,161 +327,19 @@ nix build -o $TEST_ROOT/result flake4/removeXyzzy#sth
# Testing the nix CLI
nix registry add flake1 flake3
[[ $(nix registry list | wc -l) == 8 ]]
[[ $(nix registry list | wc -l) == 6 ]]
nix registry pin flake1
[[ $(nix registry list | wc -l) == 8 ]]
[[ $(nix registry list | wc -l) == 6 ]]
nix registry pin flake1 flake3
[[ $(nix registry list | wc -l) == 8 ]]
[[ $(nix registry list | wc -l) == 6 ]]
nix registry remove flake1
[[ $(nix registry list | wc -l) == 7 ]]
# Test 'nix flake init'.
cat > $templatesDir/flake.nix <<EOF
{
description = "Some templates";
outputs = { self }: {
templates = rec {
trivial = {
path = ./trivial;
description = "A trivial flake";
welcomeText = ''
Welcome to my trivial flake
'';
};
default = trivial;
};
};
}
EOF
mkdir $templatesDir/trivial
cat > $templatesDir/trivial/flake.nix <<EOF
{
description = "A flake for building Hello World";
outputs = { self, nixpkgs }: {
packages.x86_64-linux = rec {
hello = nixpkgs.legacyPackages.x86_64-linux.hello;
default = hello;
};
};
}
EOF
git -C $templatesDir add flake.nix trivial/flake.nix
git -C $templatesDir commit -m 'Initial'
nix flake check templates
nix flake show templates
nix flake show templates --json | jq
(cd $flake7Dir && nix flake init)
(cd $flake7Dir && nix flake init) # check idempotence
git -C $flake7Dir add flake.nix
nix flake check $flake7Dir
nix flake show $flake7Dir
nix flake show $flake7Dir --json | jq
git -C $flake7Dir commit -a -m 'Initial'
# Test 'nix flake new'.
rm -rf $flake6Dir
nix flake new -t templates#trivial $flake6Dir
nix flake new -t templates#trivial $flake6Dir # check idempotence
nix flake check $flake6Dir
[[ $(nix registry list | wc -l) == 5 ]]
# Test 'nix flake clone'.
rm -rf $TEST_ROOT/flake1-v2
nix flake clone flake1 --dest $TEST_ROOT/flake1-v2
[ -e $TEST_ROOT/flake1-v2/flake.nix ]
# More 'nix flake check' tests.
cat > $flake3Dir/flake.nix <<EOF
{
outputs = { flake1, self }: {
overlay = final: prev: {
};
};
}
EOF
nix flake check $flake3Dir
cat > $flake3Dir/flake.nix <<EOF
{
outputs = { flake1, self }: {
overlay = finalll: prev: {
};
};
}
EOF
(! nix flake check $flake3Dir)
cat > $flake3Dir/flake.nix <<EOF
{
outputs = { flake1, self }: {
nixosModules.foo = {
a.b.c = 123;
foo = true;
};
};
}
EOF
nix flake check $flake3Dir
cat > $flake3Dir/flake.nix <<EOF
{
outputs = { flake1, self }: {
nixosModules.foo = {
a.b.c = 123;
foo = assert false; true;
};
};
}
EOF
(! nix flake check $flake3Dir)
cat > $flake3Dir/flake.nix <<EOF
{
outputs = { flake1, self }: {
nixosModule = { config, pkgs, ... }: {
a.b.c = 123;
};
};
}
EOF
nix flake check $flake3Dir
cat > $flake3Dir/flake.nix <<EOF
{
outputs = { flake1, self }: {
nixosModule = { config, pkgs }: {
a.b.c = 123;
};
};
}
EOF
(! nix flake check $flake3Dir)
cat > $flake3Dir/flake.nix <<EOF
{
outputs = { flake1, self }: {
packages.system-1.default = "foo";
packages.system-2.default = "bar";
};
}
EOF
checkRes=$(nix flake check --keep-going $flake3Dir 2>&1 && fail "nix flake check should have failed" || true)
echo "$checkRes" | grep -q "packages.system-1.default"
echo "$checkRes" | grep -q "packages.system-2.default"
# Test 'follows' inputs.
cat > $flake3Dir/flake.nix <<EOF
{
@ -563,6 +382,10 @@ nix flake lock $flake3Dir
[[ $(jq -c .nodes.root.inputs.bar $flake3Dir/flake.lock) = '["flake2"]' ]]
# Test overriding inputs of inputs.
writeTrivialFlake $flake7Dir
git -C $flake7Dir add flake.nix
git -C $flake7Dir commit -m 'Initial'
cat > $flake3Dir/flake.nix <<EOF
{
inputs.flake2.inputs.flake1 = {
@ -597,50 +420,9 @@ rm -rf $flakeGitBare
git clone --bare $flake1Dir $flakeGitBare
nix build -o $TEST_ROOT/result git+file://$flakeGitBare
# Test Mercurial flakes.
rm -rf $flake5Dir
mkdir $flake5Dir
cat > $flake5Dir/flake.nix <<EOF
{
outputs = { self, flake1 }: {
packages.$system.default = flake1.packages.$system.default;
expr = assert builtins.pathExists ./flake.lock; 123;
};
}
EOF
if [[ -n $(type -p hg) ]]; then
hg init $flake5Dir
hg add $flake5Dir/flake.nix
hg commit --config ui.username=foobar@example.org $flake5Dir -m 'Initial commit'
nix build -o $TEST_ROOT/result hg+file://$flake5Dir
[[ -e $TEST_ROOT/result/hello ]]
(! nix flake metadata --json hg+file://$flake5Dir | jq -e -r .revision)
nix eval hg+file://$flake5Dir#expr
nix eval hg+file://$flake5Dir#expr
(! nix eval hg+file://$flake5Dir#expr --no-allow-dirty)
(! nix flake metadata --json hg+file://$flake5Dir | jq -e -r .revision)
hg commit --config ui.username=foobar@example.org $flake5Dir -m 'Add lock file'
nix flake metadata --json hg+file://$flake5Dir --refresh | jq -e -r .revision
nix flake metadata --json hg+file://$flake5Dir
[[ $(nix flake metadata --json hg+file://$flake5Dir | jq -e -r .revCount) = 1 ]]
nix build -o $TEST_ROOT/result hg+file://$flake5Dir --no-registries --no-allow-dirty
nix build -o $TEST_ROOT/result hg+file://$flake5Dir --no-use-registries --no-allow-dirty
fi
# Test path flakes.
rm -rf $flake5Dir/.hg $flake5Dir/flake.lock
mkdir -p $flake5Dir
writeDependentFlake $flake5Dir
nix flake lock path://$flake5Dir
# Test tarball flakes.
@ -678,165 +460,6 @@ nix flake lock $flake3Dir --update-input flake2/flake1
# Test 'nix flake metadata --json'.
nix flake metadata $flake3Dir --json | jq .
# Test circular flake dependencies.
cat > $flakeA/flake.nix <<EOF
{
inputs.b.url = git+file://$flakeB;
inputs.b.inputs.a.follows = "/";
outputs = { self, nixpkgs, b }: {
foo = 123 + b.bar;
xyzzy = 1000;
};
}
EOF
git -C $flakeA add flake.nix
cat > $flakeB/flake.nix <<EOF
{
inputs.a.url = git+file://$flakeA;
outputs = { self, nixpkgs, a }: {
bar = 456 + a.xyzzy;
};
}
EOF
git -C $flakeB add flake.nix
git -C $flakeB commit -a -m 'Foo'
[[ $(nix eval $flakeA#foo) = 1579 ]]
[[ $(nix eval $flakeA#foo) = 1579 ]]
sed -i $flakeB/flake.nix -e 's/456/789/'
git -C $flakeB commit -a -m 'Foo'
[[ $(nix eval --update-input b $flakeA#foo) = 1912 ]]
# Test list-inputs with circular dependencies
nix flake metadata $flakeA
# Test flake follow paths
mkdir -p $flakeFollowsB
mkdir -p $flakeFollowsC
mkdir -p $flakeFollowsD
mkdir -p $flakeFollowsE
cat > $flakeFollowsA/flake.nix <<EOF
{
description = "Flake A";
inputs = {
B = {
url = "path:./flakeB";
inputs.foobar.follows = "foobar";
};
foobar.url = "path:$flakeFollowsA/flakeE";
};
outputs = { ... }: {};
}
EOF
cat > $flakeFollowsB/flake.nix <<EOF
{
description = "Flake B";
inputs = {
foobar.url = "path:$flakeFollowsA/flakeE";
goodoo.follows = "C/goodoo";
C = {
url = "path:./flakeC";
inputs.foobar.follows = "foobar";
};
};
outputs = { ... }: {};
}
EOF
cat > $flakeFollowsC/flake.nix <<EOF
{
description = "Flake C";
inputs = {
foobar.url = "path:$flakeFollowsA/flakeE";
goodoo.follows = "foobar";
};
outputs = { ... }: {};
}
EOF
cat > $flakeFollowsD/flake.nix <<EOF
{
description = "Flake D";
inputs = {};
outputs = { ... }: {};
}
EOF
cat > $flakeFollowsE/flake.nix <<EOF
{
description = "Flake E";
inputs = {};
outputs = { ... }: {};
}
EOF
git -C $flakeFollowsA add flake.nix flakeB/flake.nix \
flakeB/flakeC/flake.nix flakeD/flake.nix flakeE/flake.nix
nix flake metadata $flakeFollowsA
nix flake update $flakeFollowsA
oldLock="$(cat "$flakeFollowsA/flake.lock")"
# Ensure that locking twice doesn't change anything
nix flake lock $flakeFollowsA
newLock="$(cat "$flakeFollowsA/flake.lock")"
diff <(echo "$newLock") <(echo "$oldLock")
[[ $(jq -c .nodes.B.inputs.C $flakeFollowsA/flake.lock) = '"C"' ]]
[[ $(jq -c .nodes.B.inputs.foobar $flakeFollowsA/flake.lock) = '["foobar"]' ]]
[[ $(jq -c .nodes.C.inputs.foobar $flakeFollowsA/flake.lock) = '["B","foobar"]' ]]
# Ensure removing follows from flake.nix removes them from the lockfile
cat > $flakeFollowsA/flake.nix <<EOF
{
description = "Flake A";
inputs = {
B = {
url = "path:./flakeB";
inputs.nonFlake.follows = "D";
};
D.url = "path:./flakeD";
};
outputs = { ... }: {};
}
EOF
nix flake lock $flakeFollowsA
[[ $(jq -c .nodes.B.inputs.foobar $flakeFollowsA/flake.lock) = '"foobar"' ]]
jq -r -c '.nodes | keys | .[]' $flakeFollowsA/flake.lock | grep "^foobar$"
# Ensure a relative path is not allowed to go outside the store path
cat > $flakeFollowsA/flake.nix <<EOF
{
description = "Flake A";
inputs = {
B.url = "path:../flakeB";
};
outputs = { ... }: {};
}
EOF
git -C $flakeFollowsA add flake.nix
nix flake lock $flakeFollowsA 2>&1 | grep 'points outside'
# Test that a flake in the store does not evaluate
rm -rf $badFlakeDir
mkdir $badFlakeDir

View file

@ -0,0 +1,150 @@
source ./common.sh
requireGit
flakeFollowsA=$TEST_ROOT/follows/flakeA
flakeFollowsB=$TEST_ROOT/follows/flakeA/flakeB
flakeFollowsC=$TEST_ROOT/follows/flakeA/flakeB/flakeC
flakeFollowsD=$TEST_ROOT/follows/flakeA/flakeD
flakeFollowsE=$TEST_ROOT/follows/flakeA/flakeE
# Test following path flakerefs.
createGitRepo $flakeFollowsA
mkdir -p $flakeFollowsB
mkdir -p $flakeFollowsC
mkdir -p $flakeFollowsD
mkdir -p $flakeFollowsE
cat > $flakeFollowsA/flake.nix <<EOF
{
description = "Flake A";
inputs = {
B = {
url = "path:./flakeB";
inputs.foobar.follows = "foobar";
};
foobar.url = "path:$flakeFollowsA/flakeE";
};
outputs = { ... }: {};
}
EOF
cat > $flakeFollowsB/flake.nix <<EOF
{
description = "Flake B";
inputs = {
foobar.url = "path:$flakeFollowsA/flakeE";
goodoo.follows = "C/goodoo";
C = {
url = "path:./flakeC";
inputs.foobar.follows = "foobar";
};
};
outputs = { ... }: {};
}
EOF
cat > $flakeFollowsC/flake.nix <<EOF
{
description = "Flake C";
inputs = {
foobar.url = "path:$flakeFollowsA/flakeE";
goodoo.follows = "foobar";
};
outputs = { ... }: {};
}
EOF
cat > $flakeFollowsD/flake.nix <<EOF
{
description = "Flake D";
inputs = {};
outputs = { ... }: {};
}
EOF
cat > $flakeFollowsE/flake.nix <<EOF
{
description = "Flake E";
inputs = {};
outputs = { ... }: {};
}
EOF
git -C $flakeFollowsA add flake.nix flakeB/flake.nix \
flakeB/flakeC/flake.nix flakeD/flake.nix flakeE/flake.nix
nix flake metadata $flakeFollowsA
nix flake update $flakeFollowsA
nix flake lock $flakeFollowsA
oldLock="$(cat "$flakeFollowsA/flake.lock")"
# Ensure that locking twice doesn't change anything
nix flake lock $flakeFollowsA
newLock="$(cat "$flakeFollowsA/flake.lock")"
diff <(echo "$newLock") <(echo "$oldLock")
[[ $(jq -c .nodes.B.inputs.C $flakeFollowsA/flake.lock) = '"C"' ]]
[[ $(jq -c .nodes.B.inputs.foobar $flakeFollowsA/flake.lock) = '["foobar"]' ]]
[[ $(jq -c .nodes.C.inputs.foobar $flakeFollowsA/flake.lock) = '["B","foobar"]' ]]
# Ensure removing follows from flake.nix removes them from the lockfile
cat > $flakeFollowsA/flake.nix <<EOF
{
description = "Flake A";
inputs = {
B = {
url = "path:./flakeB";
};
D.url = "path:./flakeD";
};
outputs = { ... }: {};
}
EOF
nix flake lock $flakeFollowsA
[[ $(jq -c .nodes.B.inputs.foobar $flakeFollowsA/flake.lock) = '"foobar"' ]]
jq -r -c '.nodes | keys | .[]' $flakeFollowsA/flake.lock | grep "^foobar$"
# Ensure a relative path is not allowed to go outside the store path
cat > $flakeFollowsA/flake.nix <<EOF
{
description = "Flake A";
inputs = {
B.url = "path:../flakeB";
};
outputs = { ... }: {};
}
EOF
git -C $flakeFollowsA add flake.nix
nix flake lock $flakeFollowsA 2>&1 | grep 'points outside'
# Non-existent follows should print a warning.
cat >$flakeFollowsA/flake.nix <<EOF
{
description = "Flake A";
inputs.B = {
url = "path:./flakeB";
inputs.invalid.follows = "D";
inputs.invalid2.url = "path:./flakeD";
};
inputs.D.url = "path:./flakeD";
outputs = { ... }: {};
}
EOF
git -C $flakeFollowsA add flake.nix
nix flake lock $flakeFollowsA 2>&1 | grep "warning: input 'B' has an override for a non-existent input 'invalid'"
nix flake lock $flakeFollowsA 2>&1 | grep "warning: input 'B' has an override for a non-existent input 'invalid2'"

87
tests/flakes/init.sh Normal file
View file

@ -0,0 +1,87 @@
source ./common.sh
requireGit
templatesDir=$TEST_ROOT/templates
flakeDir=$TEST_ROOT/flake
nixpkgsDir=$TEST_ROOT/nixpkgs
nix registry add --registry $registry templates git+file://$templatesDir
nix registry add --registry $registry nixpkgs git+file://$nixpkgsDir
createGitRepo $nixpkgsDir
createSimpleGitFlake $nixpkgsDir
# Test 'nix flake init'.
createGitRepo $templatesDir
cat > $templatesDir/flake.nix <<EOF
{
description = "Some templates";
outputs = { self }: {
templates = rec {
trivial = {
path = ./trivial;
description = "A trivial flake";
welcomeText = ''
Welcome to my trivial flake
'';
};
default = trivial;
};
};
}
EOF
mkdir $templatesDir/trivial
cat > $templatesDir/trivial/flake.nix <<EOF
{
description = "A flake for building Hello World";
outputs = { self, nixpkgs }: {
packages.x86_64-linux = rec {
hello = nixpkgs.legacyPackages.x86_64-linux.hello;
default = hello;
};
};
}
EOF
echo a > $templatesDir/trivial/a
echo b > $templatesDir/trivial/b
git -C $templatesDir add flake.nix trivial/
git -C $templatesDir commit -m 'Initial'
nix flake check templates
nix flake show templates
nix flake show templates --json | jq
createGitRepo $flakeDir
(cd $flakeDir && nix flake init)
(cd $flakeDir && nix flake init) # check idempotence
git -C $flakeDir add flake.nix
nix flake check $flakeDir
nix flake show $flakeDir
nix flake show $flakeDir --json | jq
git -C $flakeDir commit -a -m 'Initial'
# Test 'nix flake init' with benign conflicts
createGitRepo "$flakeDir"
echo a > $flakeDir/a
(cd $flakeDir && nix flake init) # check idempotence
# Test 'nix flake init' with conflicts
createGitRepo "$flakeDir"
echo b > $flakeDir/a
pushd $flakeDir
(! nix flake init) |& grep "refusing to overwrite existing file '$flakeDir/a'"
popd
git -C $flakeDir commit -a -m 'Changed'
# Test 'nix flake new'.
rm -rf $flakeDir
nix flake new -t templates#trivial $flakeDir
nix flake new -t templates#trivial $flakeDir # check idempotence
nix flake check $flakeDir

46
tests/flakes/mercurial.sh Normal file
View file

@ -0,0 +1,46 @@
source ./common.sh
if [[ -z $(type -p hg) ]]; then
echo "Mercurial not installed; skipping"
exit 99
fi
flake1Dir=$TEST_ROOT/flake-hg1
mkdir -p $flake1Dir
writeSimpleFlake $flake1Dir
hg init $flake1Dir
nix registry add --registry $registry flake1 hg+file://$flake1Dir
flake2Dir=$TEST_ROOT/flake-hg2
mkdir -p $flake2Dir
writeDependentFlake $flake2Dir
hg init $flake2Dir
hg add $flake1Dir/*
hg commit --config ui.username=foobar@example.org $flake1Dir -m 'Initial commit'
hg add $flake2Dir/flake.nix
hg commit --config ui.username=foobar@example.org $flake2Dir -m 'Initial commit'
nix build -o $TEST_ROOT/result hg+file://$flake2Dir
[[ -e $TEST_ROOT/result/hello ]]
(! nix flake metadata --json hg+file://$flake2Dir | jq -e -r .revision)
nix eval hg+file://$flake2Dir#expr
nix eval hg+file://$flake2Dir#expr
(! nix eval hg+file://$flake2Dir#expr --no-allow-dirty)
(! nix flake metadata --json hg+file://$flake2Dir | jq -e -r .revision)
hg commit --config ui.username=foobar@example.org $flake2Dir -m 'Add lock file'
nix flake metadata --json hg+file://$flake2Dir --refresh | jq -e -r .revision
nix flake metadata --json hg+file://$flake2Dir
[[ $(nix flake metadata --json hg+file://$flake2Dir | jq -e -r .revCount) = 1 ]]
nix build -o $TEST_ROOT/result hg+file://$flake2Dir --no-registries --no-allow-dirty
nix build -o $TEST_ROOT/result hg+file://$flake2Dir --no-use-registries --no-allow-dirty

View file

@ -1,8 +1,8 @@
source common.sh
source ../common.sh
clearStore
rm -rf $TEST_HOME/.cache $TEST_HOME/.config $TEST_HOME/.local
cp ./shell-hello.nix ./config.nix $TEST_HOME
cp ../shell-hello.nix ../config.nix $TEST_HOME
cd $TEST_HOME
cat <<EOF > flake.nix

View file

@ -1,15 +1,11 @@
source common.sh
if [[ -z $(type -p git) ]]; then
echo "Git not installed; skipping flake search tests"
exit 99
fi
clearStore
cp ./simple.nix ./simple.builder.sh ./config.nix $TEST_HOME
writeSimpleFlake $TEST_HOME
cd $TEST_HOME
mkdir -p foo/subdir
echo '{ outputs = _: {}; }' > foo/flake.nix
cat <<EOF > flake.nix
{
@ -43,10 +39,12 @@ nix build --override-input foo . || fail "flake should search up directories whe
sed "s,$PWD/foo,$PWD/foo/subdir,g" -i flake.nix
! nix build || fail "flake should not search upwards when part of inputs"
pushd subdir
git init
for i in "${success[@]}" "${failure[@]}"; do
! nix build $i || fail "flake should not search past a git repository"
done
rm -rf .git
popd
if [[ -n $(type -p git) ]]; then
pushd subdir
git init
for i in "${success[@]}" "${failure[@]}"; do
! nix build $i || fail "flake should not search past a git repository"
done
rm -rf .git
popd
fi

View file

@ -18,7 +18,12 @@ cat << EOF > flake.nix
with import ./config.nix;
mkDerivation {
name = "formatter";
buildCommand = "mkdir -p \$out/bin; cp \${./fmt.simple.sh} \$out/bin/formatter";
buildCommand = ''
mkdir -p \$out/bin
echo "#! ${shell}" > \$out/bin/formatter
cat \${./fmt.simple.sh} >> \$out/bin/formatter
chmod +x \$out/bin/formatter
'';
};
};
}

View file

@ -7,7 +7,7 @@ with import (nixpkgs + "/nixos/lib/testing-python.nix") {
let
# Generate a fake root CA and a fake github.com certificate.
# Generate a fake root CA and a fake api.github.com / channels.nixos.org certificate.
cert = pkgs.runCommand "cert" { buildInputs = [ pkgs.openssl ]; }
''
mkdir -p $out
@ -18,7 +18,7 @@ let
openssl req -newkey rsa:2048 -nodes -keyout $out/server.key \
-subj "/C=CN/ST=Denial/L=Springfield/O=Dis/CN=github.com" -out server.csr
openssl x509 -req -extfile <(printf "subjectAltName=DNS:api.github.com,DNS:github.com,DNS:raw.githubusercontent.com") \
openssl x509 -req -extfile <(printf "subjectAltName=DNS:api.github.com,DNS:channels.nixos.org") \
-days 36500 -in server.csr -CA $out/ca.crt -CAkey ca.key -CAcreateserial -out $out/server.crt
'';
@ -67,7 +67,7 @@ makeTest (
name = "github-flakes";
nodes =
{ # Impersonate github.com and api.github.com.
{
github =
{ config, pkgs, ... }:
{ networking.firewall.allowedTCPPorts = [ 80 443 ];
@ -77,12 +77,12 @@ makeTest (
services.httpd.extraConfig = ''
ErrorLog syslog:local6
'';
services.httpd.virtualHosts."github.com" =
services.httpd.virtualHosts."channels.nixos.org" =
{ forceSSL = true;
sslServerKey = "${cert}/server.key";
sslServerCert = "${cert}/server.crt";
servedDirs =
[ { urlPath = "/NixOS/flake-registry/raw/master";
[ { urlPath = "/";
dir = registry;
}
];
@ -103,13 +103,13 @@ makeTest (
{ config, lib, pkgs, nodes, ... }:
{ virtualisation.writableStore = true;
virtualisation.diskSize = 2048;
virtualisation.pathsInNixDB = [ pkgs.hello pkgs.fuse ];
virtualisation.additionalPaths = [ pkgs.hello pkgs.fuse ];
virtualisation.memorySize = 4096;
nix.binaryCaches = lib.mkForce [ ];
nix.extraOptions = "experimental-features = nix-command flakes";
environment.systemPackages = [ pkgs.jq ];
networking.hosts.${(builtins.head nodes.github.config.networking.interfaces.eth1.ipv4.addresses).address} =
[ "github.com" "api.github.com" "raw.githubusercontent.com" ];
[ "channels.nixos.org" "api.github.com" ];
security.pki.certificateFiles = [ "${cert}/ca.crt" ];
};
};
@ -123,7 +123,7 @@ makeTest (
github.wait_for_unit("httpd.service")
client.succeed("curl -v https://github.com/ >&2")
client.succeed("curl -v https://api.github.com/ >&2")
client.succeed("nix registry list | grep nixpkgs")
rev = client.succeed("nix flake info nixpkgs --json | jq -r .revision")

View file

@ -5,6 +5,8 @@ export NIX_REMOTE=dummy://
nix-instantiate --eval -E 'builtins.trace "Hello" 123' 2>&1 | grep -q Hello
nix-instantiate --eval -E 'builtins.addErrorContext "Hello" 123' 2>&1
nix-instantiate --trace-verbose --eval -E 'builtins.traceVerbose "Hello" 123' 2>&1 | grep -q Hello
(! nix-instantiate --eval -E 'builtins.traceVerbose "Hello" 123' 2>&1 | grep -q Hello)
(! nix-instantiate --show-trace --eval -E 'builtins.addErrorContext "Hello" 123' 2>&1 | grep -q Hello)
nix-instantiate --show-trace --eval -E 'builtins.addErrorContext "Hello" (throw "Foo")' 2>&1 | grep -q Hello

View file

@ -1,6 +1,12 @@
nix_tests = \
flakes.sh \
flakes-run.sh \
flakes/flakes.sh \
flakes/run.sh \
flakes/mercurial.sh \
flakes/circular.sh \
flakes/init.sh \
flakes/follow-paths.sh \
flakes/bundle.sh \
flakes/check.sh \
ca/gc.sh \
gc.sh \
remote-store.sh \
@ -44,7 +50,7 @@ nix_tests = \
secure-drv-outputs.sh \
restricted.sh \
fetchGitSubmodules.sh \
flake-searching.sh \
flakes/search-root.sh \
ca/duplicate-realisation-in-closure.sh \
readfile-context.sh \
nix-channel.sh \
@ -80,7 +86,7 @@ nix_tests = \
nix-copy-ssh.sh \
post-hook.sh \
function-trace.sh \
flake-local-settings.sh \
flakes/config.sh \
fmt.sh \
eval-store.sh \
why-depends.sh \
@ -102,6 +108,7 @@ nix_tests = \
suggestions.sh \
store-ping.sh \
fetchClosure.sh \
completions.sh \
impure-derivations.sh
ifeq ($(HAVE_LIBCPUID), 1)
@ -114,4 +121,8 @@ tests-environment = NIX_REMOTE= $(bash) -e
clean-files += $(d)/common.sh $(d)/config.nix $(d)/ca/config.nix
test-deps += tests/common.sh tests/config.nix tests/ca/config.nix tests/plugins/libplugintest.$(SO_EXT)
test-deps += tests/common.sh tests/config.nix tests/ca/config.nix
ifeq ($(BUILD_SHARED_LIBS), 1)
test-deps += tests/plugins/libplugintest.$(SO_EXT)
endif

View file

@ -14,7 +14,7 @@ makeTest (let pkgA = pkgs.cowsay; pkgB = pkgs.wget; pkgC = pkgs.hello; pkgD = pk
{ client =
{ config, lib, pkgs, ... }:
{ virtualisation.writableStore = true;
virtualisation.pathsInNixDB = [ pkgA pkgD.drvPath ];
virtualisation.additionalPaths = [ pkgA pkgD.drvPath ];
nix.binaryCaches = lib.mkForce [ ];
};
@ -22,7 +22,7 @@ makeTest (let pkgA = pkgs.cowsay; pkgB = pkgs.wget; pkgC = pkgs.hello; pkgD = pk
{ config, pkgs, ... }:
{ services.openssh.enable = true;
virtualisation.writableStore = true;
virtualisation.pathsInNixDB = [ pkgB pkgC ];
virtualisation.additionalPaths = [ pkgB pkgC ];
};
};

View file

@ -102,3 +102,11 @@ source <(nix print-dev-env -f "$shellDotNix" shellDrv)
[[ ${arr2[1]} = $'\n' ]]
[[ ${arr2[2]} = $'x\ny' ]]
[[ $(fun) = blabla ]]
# Test nix-shell with ellipsis and no `inNixShell` argument (for backwards compat with old nixpkgs)
cat >$TEST_ROOT/shell-ellipsis.nix <<EOF
{ system ? "x86_64-linux", ... }@args:
assert (!(args ? inNixShell));
(import $shellDotNix { }).shellDrv
EOF
nix-shell $TEST_ROOT/shell-ellipsis.nix --run "true"

View file

@ -5,6 +5,42 @@ with import (nixpkgs + "/nixos/lib/testing-python.nix") {
extraConfigurations = [ { nixpkgs.overlays = [ overlay ]; } ];
};
let
nix-fetch = pkgs.writeText "fetch.nix" ''
derivation {
# This derivation is a copy of what is available over at
# nix.git:corepkgs/fetchurl.nix
builder = "builtin:fetchurl";
# We're going to fetch data from the http_dns instance created before;
# we expect the content to be the same as the content available there.
# ```
# $ nix-hash --type sha256 --to-base32 $(echo "hello world" | sha256sum | cut -d " " -f 1)
# 0ix4jahrkll5zg01wandq78jw3ab30q4nscph67rniqg5x7r0j59
# ```
outputHash = "0ix4jahrkll5zg01wandq78jw3ab30q4nscph67rniqg5x7r0j59";
outputHashAlgo = "sha256";
outputHashMode = "flat";
name = "example.com";
url = "http://example.com";
unpack = false;
executable = false;
system = "builtin";
preferLocalBuild = true;
impureEnvVars = [
"http_proxy" "https_proxy" "ftp_proxy" "all_proxy" "no_proxy"
];
urls = [ "http://example.com" ];
}
'';
in
makeTest (
rec {
@ -68,40 +104,6 @@ rec {
};
};
nix-fetch = pkgs.writeText "fetch.nix" ''
derivation {
# This derivation is an copy from what is available over at
# nix.git:corepkgs/fetchurl.nix
builder = "builtin:fetchurl";
# We're going to fetch data from the http_dns instance created before
# we expect the content to be the same as the content available there.
# ```
# $ nix-hash --type sha256 --to-base32 $(echo "hello world" | sha256sum | cut -d " " -f 1)
# 0ix4jahrkll5zg01wandq78jw3ab30q4nscph67rniqg5x7r0j59
# ```
outputHash = "0ix4jahrkll5zg01wandq78jw3ab30q4nscph67rniqg5x7r0j59";
outputHashAlgo = "sha256";
outputHashMode = "flat";
name = "example.com";
url = "http://example.com";
unpack = false;
executable = false;
system = "builtin";
preferLocalBuild = true;
impureEnvVars = [
"http_proxy" "https_proxy" "ftp_proxy" "all_proxy" "no_proxy"
];
urls = [ "http://example.com" ];
}
'';
testScript = { nodes, ... }: ''
http_dns.wait_for_unit("nginx")
http_dns.wait_for_open_port(80)

View file

@ -2,6 +2,11 @@ source common.sh
set -o pipefail
if [[ $BUILD_SHARED_LIBS != 1 ]]; then
echo "plugins are not supported"
exit 99
fi
res=$(nix --option setting-set true --option plugin-files $PWD/plugins/libplugintest* eval --expr builtins.anotherNull)
[ "$res"x = "nullx" ]

View file

@ -30,3 +30,5 @@ nix eval --store dummy:// --write-to $TEST_ROOT/eval-out --expr '{ x = "foo" + "
rm -rf $TEST_ROOT/eval-out
(! nix eval --store dummy:// --write-to $TEST_ROOT/eval-out --expr '{ "." = "bla"; }')
(! nix eval --expr '~/foo')

View file

@ -61,7 +61,7 @@ in
}
];
virtualisation.writableStore = true;
virtualisation.pathsInNixDB = [ config.system.build.extraUtils ];
virtualisation.additionalPaths = [ config.system.build.extraUtils ];
nix.binaryCaches = lib.mkForce [ ];
programs.ssh.extraConfig = "ConnectTimeout 30";
};

View file

@ -42,6 +42,11 @@ testRepl () {
echo "$replOutput"
echo "$replOutput" | grep -qs "while evaluating the file" \
|| fail "nix repl --show-trace doesn't show the trace"
nix repl "${nixArgs[@]}" --option pure-eval true 2>&1 <<< "builtins.currentSystem" \
| grep "attribute 'currentSystem' missing"
nix repl "${nixArgs[@]}" 2>&1 <<< "builtins.currentSystem" \
| grep "$(nix-instantiate --eval -E 'builtins.currentSystem')"
}
# Simple test, try building a drv
@ -50,15 +55,17 @@ testRepl
testRepl --store "$TEST_ROOT/store?real=$NIX_STORE_DIR"
testReplResponse () {
local response="$(nix repl <<< "$1")"
echo "$response" | grep -qs "$2" \
local commands="$1"; shift
local expectedResponse="$1"; shift
local response="$(nix repl "$@" <<< "$commands")"
echo "$response" | grep -qs "$expectedResponse" \
|| fail "repl command set:
$1
$commands
does not respond with:
$2
$expectedResponse
but with:
@ -71,3 +78,48 @@ testReplResponse '
:a { a = "2"; }
"result: ${a}"
' "result: 2"
testReplResponse '
drvPath
' '".*-simple.drv"' \
$testDir/simple.nix
testReplResponse '
drvPath
' '".*-simple.drv"' \
--file $testDir/simple.nix --experimental-features 'ca-derivations'
testReplResponse '
drvPath
' '".*-simple.drv"' \
--file $testDir/simple.nix --extra-experimental-features 'repl-flake ca-derivations'
mkdir -p flake && cat <<EOF > flake/flake.nix
{
outputs = { self }: {
foo = 1;
bar.baz = 2;
changingThing = "beforeChange";
};
}
EOF
testReplResponse '
foo + baz
' "3" \
./flake ./flake\#bar --experimental-features 'flakes repl-flake'
# Test the `:reload` mechanism with flakes:
# - Eval `./flake#changingThing`
# - Modify the flake
# - Re-eval it
# - Check that the result has changed
replResult=$( (
echo "changingThing"
sleep 1 # Leave the repl the time to eval 'foo'
sed -i 's/beforeChange/afterChange/' flake/flake.nix
echo ":reload"
echo "changingThing"
) | nix repl ./flake --experimental-features 'flakes repl-flake')
echo "$replResult" | grep -qs beforeChange
echo "$replResult" | grep -qs afterChange

View file

@ -28,11 +28,19 @@ nix search -f search.nix '' |grep -q hello
e=$'\x1b' # grep doesn't support \e, \033 or even \x1b
# Multiple overlapping regexes
(( $(nix search -f search.nix '' 'oo' 'foo' 'oo' | grep "$e\[32;1mfoo$e\\[0;1m" | wc -l) == 1 ))
(( $(nix search -f search.nix '' 'broken b' 'en bar' | grep "$e\[32;1mbroken bar$e\\[0m" | wc -l) == 1 ))
(( $(nix search -f search.nix '' 'oo' 'foo' 'oo' | grep -c "$e\[32;1mfoo$e\\[0;1m") == 1 ))
(( $(nix search -f search.nix '' 'broken b' 'en bar' | grep -c "$e\[32;1mbroken bar$e\\[0m") == 1 ))
# Multiple matches
# Searching for 'o' should yield the 'o' in 'broken bar', the 'oo' in foo and 'o' in hello
(( $(nix search -f search.nix '' 'o' | grep -Eo "$e\[32;1mo{1,2}$e\[(0|0;1)m" | wc -l) == 3 ))
(( $(nix search -f search.nix '' 'o' | grep -Eoc "$e\[32;1mo{1,2}$e\[(0|0;1)m") == 3 ))
# Searching for 'b' should yield the 'b' in bar and the two 'b's in 'broken bar'
# NOTE: This does not work with `grep -c` because it counts the two 'b's in 'broken bar' as one matched line
(( $(nix search -f search.nix '' 'b' | grep -Eo "$e\[32;1mb$e\[(0|0;1)m" | wc -l) == 3 ))
## Tests for --exclude
(( $(nix search -f search.nix -e hello | grep -c hello) == 0 ))
(( $(nix search -f search.nix foo --exclude 'foo|bar' | grep -Ec 'foo|bar') == 0 ))
(( $(nix search -f search.nix foo -e foo --exclude bar | grep -Ec 'foo|bar') == 0 ))
[[ $(nix search -f search.nix -e bar --json | jq -c 'keys') == '["foo","hello"]' ]]

View file

@ -10,12 +10,12 @@ with import (nixpkgs + "/nixos/lib/testing-python.nix") {
makeTest {
name = "setuid";
machine =
nodes.machine =
{ config, lib, pkgs, ... }:
{ virtualisation.writableStore = true;
nix.binaryCaches = lib.mkForce [ ];
nix.nixPath = [ "nixpkgs=${lib.cleanSource pkgs.path}" ];
virtualisation.pathsInNixDB = [ pkgs.stdenv pkgs.pkgsi686Linux.stdenv ];
virtualisation.additionalPaths = [ pkgs.stdenv pkgs.pkgsi686Linux.stdenv ];
};
testScript = { nodes }: ''

View file

@ -59,7 +59,7 @@ let
echo 'ref: refs/heads/master' > $out/HEAD
mkdir -p $out/info
echo -e '${nixpkgs.rev}\trefs/heads/master' > $out/info/refs
echo -e '${nixpkgs.rev}\trefs/heads/master\n${nixpkgs.rev}\trefs/tags/foo-bar' > $out/info/refs
'';
in
@ -106,7 +106,7 @@ makeTest (
{
virtualisation.writableStore = true;
virtualisation.diskSize = 2048;
virtualisation.pathsInNixDB = [ pkgs.hello pkgs.fuse ];
virtualisation.additionalPaths = [ pkgs.hello pkgs.fuse ];
virtualisation.memorySize = 4096;
nix.binaryCaches = lib.mkForce [ ];
nix.extraOptions = ''
@ -132,6 +132,17 @@ makeTest (
client.succeed("curl -v https://git.sr.ht/ >&2")
client.succeed("nix registry list | grep nixpkgs")
# Test that it resolves HEAD
rev = client.succeed("nix flake info sourcehut:~NixOS/nixpkgs --json | jq -r .revision")
assert rev.strip() == "${nixpkgs.rev}", "revision mismatch"
# Test that it resolves branches
rev = client.succeed("nix flake info sourcehut:~NixOS/nixpkgs/master --json | jq -r .revision")
assert rev.strip() == "${nixpkgs.rev}", "revision mismatch"
# Test that it resolves tags
rev = client.succeed("nix flake info sourcehut:~NixOS/nixpkgs/foo-bar --json | jq -r .revision")
assert rev.strip() == "${nixpkgs.rev}", "revision mismatch"
# Registry and pinning test
rev = client.succeed("nix flake info nixpkgs --json | jq -r .revision")
assert rev.strip() == "${nixpkgs.rev}", "revision mismatch"