Merge branch 'master' into lto

Commit: b092afe77d

.github/workflows/backport.yml (vendored): 2 changed lines

@@ -15,7 +15,7 @@ jobs:
     fetch-depth: 0
 - name: Create backport PRs
   # should be kept in sync with `version`
-  uses: zeebe-io/backport-action@v0.0.7
+  uses: zeebe-io/backport-action@v0.0.8
   with:
     # Config README: https://github.com/zeebe-io/backport-action#backport-action
     github_token: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/ci.yml (vendored): 2 changed lines

@@ -100,7 +100,7 @@ jobs:
 - run: docker tag nix:$NIX_VERSION nixos/nix:$NIX_VERSION
 - run: docker tag nix:$NIX_VERSION nixos/nix:master
 - name: Login to Docker Hub
-  uses: docker/login-action@v1
+  uses: docker/login-action@v2
   with:
     username: ${{ secrets.DOCKERHUB_USERNAME }}
     password: ${{ secrets.DOCKERHUB_TOKEN }}
.gitignore (vendored): 1 changed line

@@ -35,6 +35,7 @@ perl/Makefile.config
 /src/libexpr/parser-tab.hh
 /src/libexpr/parser-tab.output
 /src/libexpr/nix.tbl
+/src/libexpr/tests/libexpr-tests

 # /src/libstore/
 *.gen.*
Makefile: 1 changed line

@@ -8,6 +8,7 @@ makefiles = \
 src/libfetchers/local.mk \
 src/libmain/local.mk \
 src/libexpr/local.mk \
+src/libexpr/tests/local.mk \
 src/libcmd/local.mk \
 src/nix/local.mk \
 src/resolve-system-dependencies/local.mk \
configure.ac: 11 changed lines

@@ -308,6 +308,17 @@ esac
 AC_ARG_WITH(sandbox-shell, AS_HELP_STRING([--with-sandbox-shell=PATH],[path of a statically-linked shell to use as /bin/sh in sandboxes]),
   sandbox_shell=$withval)
 AC_SUBST(sandbox_shell)
+if test ${cross_compiling:-no} = no && ! test -z ${sandbox_shell+x}; then
+  AC_MSG_CHECKING([whether sandbox-shell has the standalone feature])
+  # busybox shell sometimes allows executing other busybox applets,
+  # even if they are not in the path, breaking our sandbox
+  if PATH= $sandbox_shell -c "busybox" 2>&1 | grep -qv "not found"; then
+    AC_MSG_RESULT(enabled)
+    AC_MSG_ERROR([Please disable busybox FEATURE_SH_STANDALONE])
+  else
+    AC_MSG_RESULT(disabled)
+  fi
+fi

 # Expand all variables in config.status.
 test "$prefix" = NONE && prefix=$ac_default_prefix
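The check added to configure.ac above can also be reproduced by hand when choosing a sandbox shell; a minimal sketch, with an illustrative busybox path:

```console
# If this prints busybox usage output rather than "busybox: not found",
# FEATURE_SH_STANDALONE is enabled and configure will reject this shell.
$ PATH= /path/to/static-busybox/bin/sh -c "busybox"
```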
@@ -71,18 +71,6 @@ To install it in `$(pwd)/outputs` and test it:
 nix (Nix) 3.0
 ```

-To run a functional test:
-
-```console
-make tests/test-name-should-auto-complete.sh.test
-```
-
-To run the unit-tests for C++ code:
-
-```
-make check
-```
-
 If you have a flakes-enabled Nix you can replace:

 ```console

@@ -94,3 +82,29 @@ by:
 ```console
 $ nix develop
 ```
+
+## Testing
+
+Nix comes with three different flavors of tests: unit, functional and integration.
+
+### Unit-tests
+
+The unit-tests for each Nix library (`libexpr`, `libstore`, etc..) are defined
+under `src/{library_name}/tests` using the
+[googletest](https://google.github.io/googletest/) framework.
+
+You can run the whole testsuite with `make check`, or the tests for a specific component with `make libfoo-tests_RUN`. Finer-grained filtering is also possible using the [--gtest_filter](https://google.github.io/googletest/advanced.html#running-a-subset-of-the-tests) command-line option.
+
+### Functional tests
+
+The functional tests reside under the `tests` directory and are listed in `tests/local.mk`.
+The whole testsuite can be run with `make install && make installcheck`.
+Individual tests can be run with `make tests/{testName}.sh.test`.
+
+### Integration tests
+
+The integration tests are defined in the Nix flake under the `hydraJobs.tests` attribute.
+These tests include everything that needs to interact with external services or run Nix in a non-trivial distributed setup.
+Because these tests are expensive and require more than what the standard github-actions setup provides, they only run on the master branch (on <https://hydra.nixos.org/jobset/nix/master>).
+
+You can run them manually with `nix build .#hydraJobs.tests.{testName}` or `nix-build -A hydraJobs.tests.{testName}`
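For reference, a rough sketch of the invocations described in the testing section above; the test script name and gtest filter are illustrative, not part of this commit:

```console
# run a single functional test
$ make tests/flakes.sh.test

# run the unit tests for one component, then re-run its binary with a filter
$ make libexpr-tests_RUN
$ src/libexpr/tests/libexpr-tests --gtest_filter='*Attr*'
```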
@@ -1,5 +1,10 @@
 # Release X.Y (202?-??-??)

+* Nix now provides better integration with zsh's run-help feature. It is now
+  included in the Nix installation in the form of an autoloadable shell
+  function, run-help-nix. It picks up Nix subcommands from the currently typed
+  in command and directs the user to the associated man pages.
+
 * `nix repl` has a new build-'n-link (`:bl`) command that builds a derivation
   while creating GC root symlinks.

@@ -12,3 +17,13 @@

 * Nix can now be built with LTO by passing `--enable-lto` to `configure`.
   LTO is currently only supported when building with GCC.
+
+* You can now specify which outputs of a derivation `nix` should
+  operate on using the syntax `installable^outputs`,
+  e.g. `nixpkgs#glibc^dev,static` or `nixpkgs#glibc^*`. By default,
+  `nix` will use the outputs specified by the derivation's
+  `meta.outputsToInstall` attribute if it exists, or all outputs
+  otherwise.
+
+  Selecting derivation outputs using the attribute selection syntax
+  (e.g. `nixpkgs#glibc.dev`) no longer works.
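A minimal usage sketch of the output-selection syntax described in the release note above, using the flake reference and output names from the note itself:

```console
# operate on the dev and static outputs only
$ nix build 'nixpkgs#glibc^dev,static'

# operate on all outputs
$ nix build 'nixpkgs#glibc^*'
```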
@@ -23,7 +23,7 @@

 crossSystems = [ "armv6l-linux" "armv7l-linux" ];

-stdenvs = [ "gccStdenv" "clangStdenv" "clang11Stdenv" "stdenv" ];
+stdenvs = [ "gccStdenv" "clangStdenv" "clang11Stdenv" "stdenv" "libcxxStdenv" ];

 forAllSystems = f: nixpkgs.lib.genAttrs systems (system: f system);
 forAllSystemsAndStdenvs = f: forAllSystems (system:
@@ -9,6 +9,7 @@ ConditionPathIsReadWrite=@localstatedir@/nix/daemon-socket
 [Service]
 ExecStart=@@bindir@/nix-daemon nix-daemon --daemon
 KillMode=process
+LimitNOFILE=4096

 [Install]
 WantedBy=multi-user.target
@@ -1 +1,2 @@
 $(eval $(call install-file-as, $(d)/completion.zsh, $(datarootdir)/zsh/site-functions/_nix, 0644))
+$(eval $(call install-file-as, $(d)/run-help-nix, $(datarootdir)/zsh/site-functions/run-help-nix, 0644))

misc/zsh/run-help-nix (new file): 42 lines
@@ -0,0 +1,42 @@
+emulate -L zsh
+
+# run-help is a zsh widget that can be bound to a key. It mainly looks up the
+# man page for the currently typed in command.
+#
+# Although run-help works for any command without requiring special support,
+# it can only deduce the right man page based solely on the name of the
+# command. Programs like Nix provide better integration with run-help by
+# helping zsh identify Nix subcommands and their corresponding man pages. This
+# is what this function does.
+#
+# To actually use run-help on zsh, place the following lines in your .zshrc:
+#
+#   (( $+aliases[run-help] )) && unalias run-help
+#   autoload -Uz run-help run-help-nix
+#
+# Then also assign run-help to any key of choice:
+#
+#   bindkey '^[h' run-help
+
+while [[ "$#" != 0 && "$1" == -* ]]; do
+  shift
+done
+
+local -a subcommands; subcommands=( nix3 )
+
+local arg
+for arg in "$@"; do
+  if man -w "${(j:-:)subcommands}-$arg" >/dev/null 2>&1; then
+    subcommands+="$arg"
+  else
+    break
+  fi
+done
+
+if (( $#subcommands > 1 )); then
+  man "${(j:-:)subcommands}"
+else
+  man nix
+fi
+
+return $?
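Collected from the comments in the script above, the .zshrc fragment needed to enable the integration (the keybinding is the example given in the script):

```zsh
# ~/.zshrc
(( $+aliases[run-help] )) && unalias run-help
autoload -Uz run-help run-help-nix
bindkey '^[h' run-help   # typically Alt-h: show the man page for the command being typed
```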
@@ -442,9 +442,13 @@ add_nix_vol_fstab_line() {
 local escaped_mountpoint="${NIX_ROOT/ /'\\\'040}"
 shift

-# wrap `ex` to work around a problem with vim plugins breaking exit codes;
-# (see https://github.com/NixOS/nix/issues/5468)
-# we'd prefer EDITOR="/usr/bin/ex --noplugin" but vifs doesn't word-split
+# wrap `ex` to work around a problem with vim plugins breaking exit codes
+# (see github.com/NixOS/nix/issues/5468)
+#
+# the first draft used `--noplugin`, but github.com/NixOS/nix/issues/6462
+# suggests we need the less-semantic `-u NONE`
+#
+# we'd prefer EDITOR="/usr/bin/ex -u NONE" but vifs doesn't word-split
 # the EDITOR env.
 #
 # TODO: at some point we should switch to `--clean`, but it wasn't added

@@ -452,7 +456,7 @@ add_nix_vol_fstab_line() {
 # minver 10.12.6 seems to have released with vim 7.4
 cat > "$SCRATCH/ex_cleanroom_wrapper" <<EOF
 #!/bin/sh
-/usr/bin/ex --noplugin "\$@"
+/usr/bin/ex -u NONE "\$@"
 EOF
 chmod 755 "$SCRATCH/ex_cleanroom_wrapper"

@@ -646,8 +650,9 @@ EOF
 task "Configuring /etc/synthetic.conf to make a mount-point at $NIX_ROOT" >&2
 # technically /etc/synthetic.d/nix is supported in Big Sur+
 # but handling both takes even more code...
+# Note: `-u NONE` disables vim plugins/rc; see note on --clean earlier
 _sudo "to add Nix to /etc/synthetic.conf" \
-/usr/bin/ex --noplugin /etc/synthetic.conf <<EOF
+/usr/bin/ex -u NONE /etc/synthetic.conf <<EOF
 :a
 ${NIX_ROOT:1}
 .

@@ -815,7 +820,8 @@ setup_volume_daemon() {
 local volume_uuid="$2"
 if ! test_voldaemon; then
 task "Configuring LaunchDaemon to mount '$NIX_VOLUME_LABEL'" >&2
-_sudo "to install the Nix volume mounter" /usr/bin/ex --noplugin "$NIX_VOLUME_MOUNTD_DEST" <<EOF
+# Note: `-u NONE` disables vim plugins/rc; see note on --clean earlier
+_sudo "to install the Nix volume mounter" /usr/bin/ex -u NONE "$NIX_VOLUME_MOUNTD_DEST" <<EOF
 :a
 $(generate_mount_daemon "$cmd_type" "$volume_uuid")
 .
@@ -291,7 +291,7 @@ void completeFlakeRefWithFragment(

 std::string lastAttr;
 if (!attrPath.empty() && !hasSuffix(attrPathS, ".")) {
-lastAttr = attrPath.back();
+lastAttr = evalState->symbols[attrPath.back()];
 attrPath.pop_back();
 }

@@ -299,11 +299,11 @@ void completeFlakeRefWithFragment(
 if (!attr) continue;

 for (auto & attr2 : (*attr)->getAttrs()) {
-if (hasPrefix(attr2, lastAttr)) {
+if (hasPrefix(evalState->symbols[attr2], lastAttr)) {
 auto attrPath2 = (*attr)->getAttrPath(attr2);
 /* Strip the attrpath prefix. */
 attrPath2.erase(attrPath2.begin(), attrPath2.begin() + attrPathPrefix.size());
-completions->add(flakeRefS + "#" + concatStringsSep(".", attrPath2));
+completions->add(flakeRefS + "#" + concatStringsSep(".", evalState->symbols.resolve(attrPath2)));
 }
 }
 }

@@ -440,9 +440,7 @@ DerivedPaths InstallableValue::toDerivedPaths()

 // Group by derivation, helps with .all in particular
 for (auto & drv : toDerivations()) {
-auto outputName = drv.outputName;
-if (outputName == "")
-throw Error("derivation '%s' lacks an 'outputName' attribute", state->store->printStorePath(drv.drvPath));
+for (auto & outputName : drv.outputsToInstall)
 drvsToOutputs[drv.drvPath].insert(outputName);
 drvsToCopy.insert(drv.drvPath);
 }

@@ -466,9 +464,19 @@ struct InstallableAttrPath : InstallableValue
 SourceExprCommand & cmd;
 RootValue v;
 std::string attrPath;
+OutputsSpec outputsSpec;

-InstallableAttrPath(ref<EvalState> state, SourceExprCommand & cmd, Value * v, const std::string & attrPath)
-: InstallableValue(state), cmd(cmd), v(allocRootValue(v)), attrPath(attrPath)
+InstallableAttrPath(
+ref<EvalState> state,
+SourceExprCommand & cmd,
+Value * v,
+const std::string & attrPath,
+OutputsSpec outputsSpec)
+: InstallableValue(state)
+, cmd(cmd)
+, v(allocRootValue(v))
+, attrPath(attrPath)
+, outputsSpec(std::move(outputsSpec))
 { }

 std::string what() const override { return attrPath; }

@@ -497,7 +505,19 @@ std::vector<InstallableValue::DerivationInfo> InstallableAttrPath::toDerivations
 auto drvPath = drvInfo.queryDrvPath();
 if (!drvPath)
 throw Error("'%s' is not a derivation", what());
-res.push_back({ *drvPath, drvInfo.queryOutputName() });
+
+std::set<std::string> outputsToInstall;
+
+if (auto outputNames = std::get_if<OutputNames>(&outputsSpec))
+outputsToInstall = *outputNames;
+else
+for (auto & output : drvInfo.queryOutputs(false, std::get_if<DefaultOutputs>(&outputsSpec)))
+outputsToInstall.insert(output.first);
+
+res.push_back(DerivationInfo {
+.drvPath = *drvPath,
+.outputsToInstall = std::move(outputsToInstall)
+});
 }

 return res;

@@ -574,6 +594,7 @@ InstallableFlake::InstallableFlake(
 ref<EvalState> state,
 FlakeRef && flakeRef,
 std::string_view fragment,
+OutputsSpec outputsSpec,
 Strings attrPaths,
 Strings prefixes,
 const flake::LockFlags & lockFlags)

@@ -581,6 +602,7 @@ InstallableFlake::InstallableFlake(
 flakeRef(flakeRef),
 attrPaths(fragment == "" ? attrPaths : Strings{(std::string) fragment}),
 prefixes(fragment == "" ? Strings{} : prefixes),
+outputsSpec(std::move(outputsSpec)),
 lockFlags(lockFlags)
 {
 if (cmd && cmd->getAutoArgs(*state)->size())

@@ -598,9 +620,34 @@ std::tuple<std::string, FlakeRef, InstallableValue::DerivationInfo> InstallableF

 auto drvPath = attr->forceDerivation();

+std::set<std::string> outputsToInstall;
+std::optional<NixInt> priority;
+
+if (auto aMeta = attr->maybeGetAttr(state->sMeta)) {
+if (auto aOutputsToInstall = aMeta->maybeGetAttr("outputsToInstall"))
+for (auto & s : aOutputsToInstall->getListOfStrings())
+outputsToInstall.insert(s);
+if (auto aPriority = aMeta->maybeGetAttr("priority"))
+priority = aPriority->getInt();
+}
+
+if (outputsToInstall.empty() || std::get_if<AllOutputs>(&outputsSpec)) {
+outputsToInstall.clear();
+if (auto aOutputs = attr->maybeGetAttr(state->sOutputs))
+for (auto & s : aOutputs->getListOfStrings())
+outputsToInstall.insert(s);
+}
+
+if (outputsToInstall.empty())
+outputsToInstall.insert("out");
+
+if (auto outputNames = std::get_if<OutputNames>(&outputsSpec))
+outputsToInstall = *outputNames;
+
 auto drvInfo = DerivationInfo {
-std::move(drvPath),
-attr->getAttr("outputName")->getString()
+.drvPath = std::move(drvPath),
+.outputsToInstall = std::move(outputsToInstall),
+.priority = priority,
 };

 return {attrPath, getLockedFlake()->flake.lockedRef, std::move(drvInfo)};

@@ -723,8 +770,14 @@ std::vector<std::shared_ptr<Installable>> SourceExprCommand::parseInstallables(
 state->eval(e, *vFile);
 }

-for (auto & s : ss)
-result.push_back(std::make_shared<InstallableAttrPath>(state, *this, vFile, s == "." ? "" : s));
+for (auto & s : ss) {
+auto [prefix, outputsSpec] = parseOutputsSpec(s);
+result.push_back(
+std::make_shared<InstallableAttrPath>(
+state, *this, vFile,
+prefix == "." ? "" : prefix,
+outputsSpec));
+}

 } else {

@@ -743,12 +796,13 @@ std::vector<std::shared_ptr<Installable>> SourceExprCommand::parseInstallables(
 }

 try {
-auto [flakeRef, fragment] = parseFlakeRefWithFragment(s, absPath("."));
+auto [flakeRef, fragment, outputsSpec] = parseFlakeRefWithFragmentAndOutputsSpec(s, absPath("."));
 result.push_back(std::make_shared<InstallableFlake>(
 this,
 getEvalState(),
 std::move(flakeRef),
 fragment,
+outputsSpec,
 getDefaultFlakeAttrPaths(),
 getDefaultFlakeAttrPathPrefixes(),
 lockFlags));

@@ -822,12 +876,13 @@ std::vector<std::pair<std::shared_ptr<Installable>, BuiltPath>> Installable::bui
 auto outputHashes = staticOutputHashes(*evalStore, drv); // FIXME: expensive
 auto drvOutputs = drv.outputsAndOptPaths(*store);
 for (auto & output : bfd.outputs) {
-if (!outputHashes.count(output))
+auto outputHash = get(outputHashes, output);
+if (!outputHash)
 throw Error(
 "the derivation '%s' doesn't have an output named '%s'",
 store->printStorePath(bfd.drvPath), output);
 if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations)) {
-DrvOutput outputId { outputHashes.at(output), output };
+DrvOutput outputId { *outputHash, output };
 auto realisation = store->queryRealisation(outputId);
 if (!realisation)
 throw Error(

@@ -838,10 +893,11 @@ std::vector<std::pair<std::shared_ptr<Installable>, BuiltPath>> Installable::bui
 } else {
 // If ca-derivations isn't enabled, assume that
 // the output path is statically known.
-assert(drvOutputs.count(output));
-assert(drvOutputs.at(output).second);
+auto drvOutput = get(drvOutputs, output);
+assert(drvOutput);
+assert(drvOutput->second);
 outputs.insert_or_assign(
-output, *drvOutputs.at(output).second);
+output, *drvOutput->second);
 }
 }
 res.push_back({installable, BuiltPath::Built { bfd.drvPath, outputs }});
@@ -141,7 +141,8 @@ struct InstallableValue : Installable
 struct DerivationInfo
 {
 StorePath drvPath;
-std::string outputName;
+std::set<std::string> outputsToInstall;
+std::optional<NixInt> priority;
 };

 virtual std::vector<DerivationInfo> toDerivations() = 0;

@@ -156,6 +157,7 @@ struct InstallableFlake : InstallableValue
 FlakeRef flakeRef;
 Strings attrPaths;
 Strings prefixes;
+OutputsSpec outputsSpec;
 const flake::LockFlags & lockFlags;
 mutable std::shared_ptr<flake::LockedFlake> _lockedFlake;

@@ -164,6 +166,7 @@ struct InstallableFlake : InstallableValue
 ref<EvalState> state,
 FlakeRef && flakeRef,
 std::string_view fragment,
+OutputsSpec outputsSpec,
 Strings attrPaths,
 Strings prefixes,
 const flake::LockFlags & lockFlags);
@@ -9,10 +9,12 @@ namespace nix {

 std::string renderMarkdownToTerminal(std::string_view markdown)
 {
+int windowWidth = getWindowSize().second;
+
 struct lowdown_opts opts {
 .type = LOWDOWN_TERM,
 .maxdepth = 20,
-.cols = std::max(getWindowSize().second, (unsigned short) 80),
+.cols = (size_t) std::max(windowWidth - 5, 60),
 .hmargin = 0,
 .vmargin = 0,
 .feat = LOWDOWN_COMMONMARK | LOWDOWN_FENCED | LOWDOWN_DEFLIST | LOWDOWN_TABLES,
@@ -36,7 +36,7 @@ std::vector<Symbol> parseAttrPath(EvalState & state, std::string_view s)
 {
 std::vector<Symbol> res;
 for (auto & a : parseAttrPath(s))
-res.emplace_back(a);
+res.push_back(state.symbols.create(a));
 return res;
 }

@@ -26,7 +26,7 @@ Bindings * EvalState::allocBindings(size_t capacity)
 /* Create a new attribute named 'name' on an existing attribute set stored
 in 'vAttrs' and return the newly allocated Value which is associated with
 this attribute. */
-Value * EvalState::allocAttr(Value & vAttrs, const SymbolIdx & name)
+Value * EvalState::allocAttr(Value & vAttrs, Symbol name)
 {
 Value * v = allocValue();
 vAttrs.attrs->push_back(Attr(name, v));

@@ -40,7 +40,7 @@ Value * EvalState::allocAttr(Value & vAttrs, std::string_view name)
 }


-Value & BindingsBuilder::alloc(const SymbolIdx & name, PosIdx pos)
+Value & BindingsBuilder::alloc(Symbol name, PosIdx pos)
 {
 auto value = state.allocValue();
 bindings->push_back(Attr(name, value, pos));
@@ -19,10 +19,10 @@ struct Attr
 both of them are uint32 wrappers, they are next to each other
 to make sure that Attr has no padding on 64 bit machines. that
 way we keep Attr size at two words with no wasted space. */
-SymbolIdx name;
+Symbol name;
 PosIdx pos;
 Value * value;
-Attr(SymbolIdx name, Value * value, PosIdx pos = noPos)
+Attr(Symbol name, Value * value, PosIdx pos = noPos)
 : name(name), pos(pos), value(value) { };
 Attr() { };
 bool operator < (const Attr & a) const

@@ -66,7 +66,7 @@ public:
 attrs[size_++] = attr;
 }

-iterator find(const SymbolIdx & name)
+iterator find(Symbol name)
 {
 Attr key(name, 0);
 iterator i = std::lower_bound(begin(), end(), key);

@@ -74,7 +74,7 @@ public:
 return end();
 }

-Attr * get(const SymbolIdx & name)
+Attr * get(Symbol name)
 {
 Attr key(name, 0);
 iterator i = std::lower_bound(begin(), end(), key);

@@ -128,7 +128,7 @@ public:
 : bindings(bindings), state(state)
 { }

-void insert(SymbolIdx name, Value * value, PosIdx pos = noPos)
+void insert(Symbol name, Value * value, PosIdx pos = noPos)
 {
 insert(Attr(name, value, pos));
 }

@@ -143,7 +143,7 @@ public:
 bindings->push_back(attr);
 }

-Value & alloc(const SymbolIdx & name, PosIdx pos = noPos);
+Value & alloc(Symbol name, PosIdx pos = noPos);

 Value & alloc(std::string_view name, PosIdx pos = noPos);

@@ -35,13 +35,19 @@ struct AttrDb

 std::unique_ptr<Sync<State>> _state;

-AttrDb(const Store & cfg, const Hash & fingerprint)
+SymbolTable & symbols;
+
+AttrDb(
+const Store & cfg,
+const Hash & fingerprint,
+SymbolTable & symbols)
 : cfg(cfg)
 , _state(std::make_unique<Sync<State>>())
+, symbols(symbols)
 {
 auto state(_state->lock());

-Path cacheDir = getCacheDir() + "/nix/eval-cache-v2";
+Path cacheDir = getCacheDir() + "/nix/eval-cache-v4";
 createDirs(cacheDir);

 Path dbPath = cacheDir + "/" + fingerprint.to_string(Base16, false) + ".sqlite";

@@ -100,7 +106,7 @@ struct AttrDb

 state->insertAttribute.use()
 (key.first)
-(key.second)
+(symbols[key.second])
 (AttrType::FullAttrs)
 (0, false).exec();

@@ -110,7 +116,7 @@ struct AttrDb
 for (auto & attr : attrs)
 state->insertAttribute.use()
 (rowId)
-(attr)
+(symbols[attr])
 (AttrType::Placeholder)
 (0, false).exec();

@@ -135,14 +141,14 @@ struct AttrDb
 }
 state->insertAttributeWithContext.use()
 (key.first)
-(key.second)
+(symbols[key.second])
 (AttrType::String)
 (s)
 (ctx).exec();
 } else {
 state->insertAttribute.use()
 (key.first)
-(key.second)
+(symbols[key.second])
 (AttrType::String)
 (s).exec();
 }

@@ -161,7 +167,7 @@ struct AttrDb

 state->insertAttribute.use()
 (key.first)
-(key.second)
+(symbols[key.second])
 (AttrType::Bool)
 (b ? 1 : 0).exec();

@@ -169,6 +175,42 @@ struct AttrDb
 });
 }

+AttrId setInt(
+AttrKey key,
+int n)
+{
+return doSQLite([&]()
+{
+auto state(_state->lock());
+
+state->insertAttribute.use()
+(key.first)
+(symbols[key.second])
+(AttrType::Int)
+(n).exec();
+
+return state->db.getLastInsertedRowId();
+});
+}
+
+AttrId setListOfStrings(
+AttrKey key,
+const std::vector<std::string> & l)
+{
+return doSQLite([&]()
+{
+auto state(_state->lock());
+
+state->insertAttribute.use()
+(key.first)
+(symbols[key.second])
+(AttrType::ListOfStrings)
+(concatStringsSep("\t", l)).exec();
+
+return state->db.getLastInsertedRowId();
+});
+}
+
 AttrId setPlaceholder(AttrKey key)
 {
 return doSQLite([&]()

@@ -177,7 +219,7 @@ struct AttrDb

 state->insertAttribute.use()
 (key.first)
-(key.second)
+(symbols[key.second])
 (AttrType::Placeholder)
 (0, false).exec();

@@ -193,7 +235,7 @@ struct AttrDb

 state->insertAttribute.use()
 (key.first)
-(key.second)
+(symbols[key.second])
 (AttrType::Missing)
 (0, false).exec();

@@ -209,7 +251,7 @@ struct AttrDb

 state->insertAttribute.use()
 (key.first)
-(key.second)
+(symbols[key.second])
 (AttrType::Misc)
 (0, false).exec();

@@ -225,7 +267,7 @@ struct AttrDb

 state->insertAttribute.use()
 (key.first)
-(key.second)
+(symbols[key.second])
 (AttrType::Failed)
 (0, false).exec();

@@ -233,13 +275,11 @@ struct AttrDb
 });
 }

-std::optional<std::pair<AttrId, AttrValue>> getAttr(
-AttrKey key,
-SymbolTable & symbols)
+std::optional<std::pair<AttrId, AttrValue>> getAttr(AttrKey key)
 {
 auto state(_state->lock());

-auto queryAttribute(state->queryAttribute.use()(key.first)(key.second));
+auto queryAttribute(state->queryAttribute.use()(key.first)(symbols[key.second]));
 if (!queryAttribute.next()) return {};

 auto rowId = (AttrType) queryAttribute.getInt(0);

@@ -253,7 +293,7 @@ struct AttrDb
 std::vector<Symbol> attrs;
 auto queryAttributes(state->queryAttributes.use()(rowId));
 while (queryAttributes.next())
-attrs.emplace_back(queryAttributes.getStr(0));
+attrs.emplace_back(symbols.create(queryAttributes.getStr(0)));
 return {{rowId, attrs}};
 }
 case AttrType::String: {

@@ -265,6 +305,10 @@ struct AttrDb
 }
 case AttrType::Bool:
 return {{rowId, queryAttribute.getInt(2) != 0}};
+case AttrType::Int:
+return {{rowId, int_t{queryAttribute.getInt(2)}}};
+case AttrType::ListOfStrings:
+return {{rowId, tokenizeString<std::vector<std::string>>(queryAttribute.getStr(2), "\t")}};
 case AttrType::Missing:
 return {{rowId, missing_t()}};
 case AttrType::Misc:

@@ -277,10 +321,13 @@ struct AttrDb
 }
 };

-static std::shared_ptr<AttrDb> makeAttrDb(const Store & cfg, const Hash & fingerprint)
+static std::shared_ptr<AttrDb> makeAttrDb(
+const Store & cfg,
+const Hash & fingerprint,
+SymbolTable & symbols)
 {
 try {
-return std::make_shared<AttrDb>(cfg, fingerprint);
+return std::make_shared<AttrDb>(cfg, fingerprint, symbols);
 } catch (SQLiteError &) {
 ignoreException();
 return nullptr;

@@ -291,7 +338,7 @@ EvalCache::EvalCache(
 std::optional<std::reference_wrapper<const Hash>> useCache,
 EvalState & state,
 RootLoader rootLoader)
-: db(useCache ? makeAttrDb(*state.store, *useCache) : nullptr)
+: db(useCache ? makeAttrDb(*state.store, *useCache, state.symbols) : nullptr)
 , state(state)
 , rootLoader(rootLoader)
 {

@@ -325,10 +372,9 @@ AttrCursor::AttrCursor(
 AttrKey AttrCursor::getKey()
 {
 if (!parent)
-return {0, {""}};
+return {0, root->state.sEpsilon};
 if (!parent->first->cachedValue) {
-parent->first->cachedValue = root->db->getAttr(
-parent->first->getKey(), root->state.symbols);
+parent->first->cachedValue = root->db->getAttr(parent->first->getKey());
 assert(parent->first->cachedValue);
 }
 return {parent->first->cachedValue->first, parent->second};

@@ -340,7 +386,7 @@ Value & AttrCursor::getValue()
 if (parent) {
 auto & vParent = parent->first->getValue();
 root->state.forceAttrs(vParent, noPos);
-auto attr = vParent.attrs->get(root->state.symbols.create(parent->second));
+auto attr = vParent.attrs->get(parent->second);
 if (!attr)
 throw Error("attribute '%s' is unexpectedly missing", getAttrPathStr());
 _value = allocRootValue(attr->value);

@@ -369,17 +415,17 @@ std::vector<Symbol> AttrCursor::getAttrPath(Symbol name) const

 std::string AttrCursor::getAttrPathStr() const
 {
-return concatStringsSep(".", getAttrPath());
+return concatStringsSep(".", root->state.symbols.resolve(getAttrPath()));
 }

 std::string AttrCursor::getAttrPathStr(Symbol name) const
 {
-return concatStringsSep(".", getAttrPath(name));
+return concatStringsSep(".", root->state.symbols.resolve(getAttrPath(name)));
 }

 Value & AttrCursor::forceValue()
 {
-debug("evaluating uncached attribute %s", getAttrPathStr());
+debug("evaluating uncached attribute '%s'", getAttrPathStr());

 auto & v = getValue();

@@ -400,6 +446,8 @@ Value & AttrCursor::forceValue()
 cachedValue = {root->db->setString(getKey(), v.path), string_t{v.path, {}}};
 else if (v.type() == nBool)
 cachedValue = {root->db->setBool(getKey(), v.boolean), v.boolean};
+else if (v.type() == nInt)
+cachedValue = {root->db->setInt(getKey(), v.integer), int_t{v.integer}};
 else if (v.type() == nAttrs)
 ; // FIXME: do something?
 else

@@ -414,25 +462,25 @@ Suggestions AttrCursor::getSuggestionsForAttr(Symbol name)
 auto attrNames = getAttrs();
 std::set<std::string> strAttrNames;
 for (auto & name : attrNames)
-strAttrNames.insert(std::string(name));
+strAttrNames.insert(root->state.symbols[name]);

-return Suggestions::bestMatches(strAttrNames, name);
+return Suggestions::bestMatches(strAttrNames, root->state.symbols[name]);
 }

-std::shared_ptr<AttrCursor> AttrCursor::maybeGetAttr(std::string_view name, bool forceErrors)
+std::shared_ptr<AttrCursor> AttrCursor::maybeGetAttr(Symbol name, bool forceErrors)
 {
 if (root->db) {
 if (!cachedValue)
-cachedValue = root->db->getAttr(getKey(), root->state.symbols);
+cachedValue = root->db->getAttr(getKey());

 if (cachedValue) {
 if (auto attrs = std::get_if<std::vector<Symbol>>(&cachedValue->second)) {
 for (auto & attr : *attrs)
 if (attr == name)
-return std::make_shared<AttrCursor>(root, std::make_pair(shared_from_this(), name));
+return std::make_shared<AttrCursor>(root, std::make_pair(shared_from_this(), attr));
 return nullptr;
 } else if (std::get_if<placeholder_t>(&cachedValue->second)) {
-auto attr = root->db->getAttr({cachedValue->first, name}, root->state.symbols);
+auto attr = root->db->getAttr({cachedValue->first, name});
 if (attr) {
 if (std::get_if<missing_t>(&attr->second))
 return nullptr;

@@ -461,10 +509,10 @@ std::shared_ptr<AttrCursor> AttrCursor::maybeGetAttr(std::string_view name, bool

 for (auto & attr : *v.attrs) {
 if (root->db)
-root->db->setPlaceholder({cachedValue->first, root->state.symbols[attr.name]});
+root->db->setPlaceholder({cachedValue->first, attr.name});
 }

-auto attr = v.attrs->get(root->state.symbols.create(name));
+auto attr = v.attrs->get(name);

 if (!attr) {
 if (root->db) {

@@ -486,7 +534,12 @@ std::shared_ptr<AttrCursor> AttrCursor::maybeGetAttr(std::string_view name, bool
 root, std::make_pair(shared_from_this(), name), attr->value, std::move(cachedValue2));
 }

-ref<AttrCursor> AttrCursor::getAttr(std::string_view name, bool forceErrors)
+std::shared_ptr<AttrCursor> AttrCursor::maybeGetAttr(std::string_view name)
+{
+return maybeGetAttr(root->state.symbols.create(name));
+}
+
+ref<AttrCursor> AttrCursor::getAttr(Symbol name, bool forceErrors)
 {
 auto p = maybeGetAttr(name, forceErrors);
 if (!p)

@@ -494,6 +547,11 @@ ref<AttrCursor> AttrCursor::getAttr(std::string_view name, bool forceErrors)
 return ref(p);
 }

+ref<AttrCursor> AttrCursor::getAttr(std::string_view name)
+{
+return getAttr(root->state.symbols.create(name));
+}
+
 OrSuggestions<ref<AttrCursor>> AttrCursor::findAlongAttrPath(const std::vector<Symbol> & attrPath, bool force)
 {
 auto res = shared_from_this();

@@ -512,7 +570,7 @@ std::string AttrCursor::getString()
 {
 if (root->db) {
 if (!cachedValue)
-cachedValue = root->db->getAttr(getKey(), root->state.symbols);
+cachedValue = root->db->getAttr(getKey());
 if (cachedValue && !std::get_if<placeholder_t>(&cachedValue->second)) {
 if (auto s = std::get_if<string_t>(&cachedValue->second)) {
 debug("using cached string attribute '%s'", getAttrPathStr());

@@ -534,7 +592,7 @@ string_t AttrCursor::getStringWithContext()
 {
 if (root->db) {
 if (!cachedValue)
-cachedValue = root->db->getAttr(getKey(), root->state.symbols);
+cachedValue = root->db->getAttr(getKey());
 if (cachedValue && !std::get_if<placeholder_t>(&cachedValue->second)) {
 if (auto s = std::get_if<string_t>(&cachedValue->second)) {
 bool valid = true;

@@ -567,7 +625,7 @@ bool AttrCursor::getBool()
 {
 if (root->db) {
 if (!cachedValue)
-cachedValue = root->db->getAttr(getKey(), root->state.symbols);
+cachedValue = root->db->getAttr(getKey());
 if (cachedValue && !std::get_if<placeholder_t>(&cachedValue->second)) {
 if (auto b = std::get_if<bool>(&cachedValue->second)) {
 debug("using cached Boolean attribute '%s'", getAttrPathStr());

@@ -585,11 +643,66 @@ bool AttrCursor::getBool()
 return v.boolean;
 }

+NixInt AttrCursor::getInt()
+{
+if (root->db) {
+if (!cachedValue)
+cachedValue = root->db->getAttr(getKey());
+if (cachedValue && !std::get_if<placeholder_t>(&cachedValue->second)) {
+if (auto i = std::get_if<int_t>(&cachedValue->second)) {
+debug("using cached Integer attribute '%s'", getAttrPathStr());
+return i->x;
+} else
+throw TypeError("'%s' is not an Integer", getAttrPathStr());
+}
+}
+
+auto & v = forceValue();
+
+if (v.type() != nInt)
+throw TypeError("'%s' is not an Integer", getAttrPathStr());
+
+return v.integer;
+}
+
+std::vector<std::string> AttrCursor::getListOfStrings()
+{
+if (root->db) {
+if (!cachedValue)
+cachedValue = root->db->getAttr(getKey());
+if (cachedValue && !std::get_if<placeholder_t>(&cachedValue->second)) {
+if (auto l = std::get_if<std::vector<std::string>>(&cachedValue->second)) {
+debug("using cached list of strings attribute '%s'", getAttrPathStr());
+return *l;
+} else
+throw TypeError("'%s' is not a list of strings", getAttrPathStr());
+}
+}
+
+debug("evaluating uncached attribute '%s'", getAttrPathStr());
+
+auto & v = getValue();
+root->state.forceValue(v, noPos);
+
+if (v.type() != nList)
+throw TypeError("'%s' is not a list", getAttrPathStr());
+
+std::vector<std::string> res;
+
+for (auto & elem : v.listItems())
+res.push_back(std::string(root->state.forceStringNoCtx(*elem)));
+
+if (root->db)
+cachedValue = {root->db->setListOfStrings(getKey(), res), res};
+
+return res;
+}
+
 std::vector<Symbol> AttrCursor::getAttrs()
 {
 if (root->db) {
 if (!cachedValue)
-cachedValue = root->db->getAttr(getKey(), root->state.symbols);
+cachedValue = root->db->getAttr(getKey());
 if (cachedValue && !std::get_if<placeholder_t>(&cachedValue->second)) {
 if (auto attrs = std::get_if<std::vector<Symbol>>(&cachedValue->second)) {
 debug("using cached attrset attribute '%s'", getAttrPathStr());

@@ -606,9 +719,10 @@ std::vector<Symbol> AttrCursor::getAttrs()

 std::vector<Symbol> attrs;
 for (auto & attr : *getValue().attrs)
-attrs.push_back(root->state.symbols[attr.name]);
-std::sort(attrs.begin(), attrs.end(), [](const Symbol & a, const Symbol & b) {
-return (const std::string &) a < (const std::string &) b;
+attrs.push_back(attr.name);
+std::sort(attrs.begin(), attrs.end(), [&](Symbol a, Symbol b) {
+std::string_view sa = root->state.symbols[a], sb = root->state.symbols[b];
+return sa < sb;
 });

 if (root->db)

@@ -625,7 +739,7 @@ bool AttrCursor::isDerivation()

 StorePath AttrCursor::forceDerivation()
 {
-auto aDrvPath = getAttr("drvPath", true);
+auto aDrvPath = getAttr(root->state.sDrvPath, true);
 auto drvPath = root->state.store->parseStorePath(aDrvPath->getString());
 if (!root->state.store->isValidPath(drvPath) && !settings.readOnlyMode) {
 /* The eval cache contains 'drvPath', but the actual path has
@@ -44,12 +44,15 @@ enum AttrType {
 Misc = 4,
 Failed = 5,
 Bool = 6,
+ListOfStrings = 7,
+Int = 8,
 };

 struct placeholder_t {};
 struct missing_t {};
 struct misc_t {};
 struct failed_t {};
+struct int_t { NixInt x; };
 typedef uint64_t AttrId;
 typedef std::pair<AttrId, Symbol> AttrKey;
 typedef std::pair<std::string, NixStringContext> string_t;

@@ -61,7 +64,9 @@ typedef std::variant<
 missing_t,
 misc_t,
 failed_t,
-bool
+bool,
+int_t,
+std::vector<std::string>
 > AttrValue;

 class AttrCursor : public std::enable_shared_from_this<AttrCursor>

@@ -96,9 +101,13 @@ public:

 Suggestions getSuggestionsForAttr(Symbol name);

-std::shared_ptr<AttrCursor> maybeGetAttr(std::string_view name, bool forceErrors = false);
+std::shared_ptr<AttrCursor> maybeGetAttr(Symbol name, bool forceErrors = false);

-ref<AttrCursor> getAttr(std::string_view name, bool forceErrors = false);
+std::shared_ptr<AttrCursor> maybeGetAttr(std::string_view name);
+
+ref<AttrCursor> getAttr(Symbol name, bool forceErrors = false);
+
+ref<AttrCursor> getAttr(std::string_view name);

 /* Get an attribute along a chain of attrsets. Note that this does
 not auto-call functors or functions. */

@@ -110,6 +119,10 @@ public:

 bool getBool();

+NixInt getInt();
+
+std::vector<std::string> getListOfStrings();
+
 std::vector<Symbol> getAttrs();

 bool isDerivation();
@@ -147,7 +147,10 @@ void Value::print(const SymbolTable & symbols, std::ostream & str,
 else {
 str << "[ ";
 for (auto v2 : listItems()) {
+if (v2)
 v2->print(symbols, str, seen);
+else
+str << "(nullptr)";
 str << " ";
 }
 str << "]";

@@ -184,6 +187,11 @@ void Value::print(const SymbolTable & symbols, std::ostream & str, bool showRepe
 print(symbols, str, showRepeated ? nullptr : &seen);
 }

+// Pretty print types for assertion errors
+std::ostream & operator << (std::ostream & os, const ValueType t) {
+os << showType(t);
+return os;
+}

 std::string printValue(const EvalState & state, const Value & v)
 {

@@ -308,7 +316,7 @@ static BoehmGCStackAllocator boehmGCStackAllocator;
 #endif


-static SymbolIdx getName(const AttrName & name, EvalState & state, Env & env)
+static Symbol getName(const AttrName & name, EvalState & state, Env & env)
 {
 if (name.symbol) {
 return name.symbol;

@@ -769,7 +777,7 @@ void EvalState::throwEvalError(const PosIdx pos, const char * s, const std::stri
 });
 }

-void EvalState::throwEvalError(const PosIdx p1, const char * s, const SymbolIdx sym, const PosIdx p2) const
+void EvalState::throwEvalError(const PosIdx p1, const char * s, const Symbol sym, const PosIdx p2) const
 {
 // p1 is where the error occurred; p2 is a position mentioned in the message.
 throw EvalError({

@@ -787,7 +795,7 @@ void EvalState::throwTypeError(const PosIdx pos, const char * s) const
 }

 void EvalState::throwTypeError(const PosIdx pos, const char * s, const ExprLambda & fun,
-const SymbolIdx s2) const
+const Symbol s2) const
 {
 throw TypeError({
 .msg = hintfmt(s, fun.showNamePos(*this), symbols[s2]),

@@ -796,7 +804,7 @@ void EvalState::throwTypeError(const PosIdx pos, const char * s, const ExprLambd
 }

 void EvalState::throwTypeError(const PosIdx pos, const Suggestions & suggestions, const char * s,
-const ExprLambda & fun, const SymbolIdx s2) const
+const ExprLambda & fun, const Symbol s2) const
 {
 throw TypeError(ErrorInfo {
 .msg = hintfmt(s, fun.showNamePos(*this), symbols[s2]),

@@ -1582,7 +1590,7 @@ void EvalState::autoCallFunction(Bindings & args, Value & fun, Value & res)
 Nix attempted to evaluate a function as a top level expression; in
 this case it must have its arguments supplied either by default
 values, or passed explicitly with '--arg' or '--argstr'. See
-https://nixos.org/manual/nix/stable/#ss-functions.)", symbols[i.name]);
+https://nixos.org/manual/nix/stable/expressions/language-constructs.html#functions.)", symbols[i.name]);

 }
 }
@ -55,6 +55,7 @@ typedef std::map<Path, StorePath> SrcToStore;

std::ostream & printValue(const EvalState & state, std::ostream & str, const Value & v);
std::string printValue(const EvalState & state, const Value & v);
std::ostream & operator << (std::ostream & os, const ValueType t);

typedef std::pair<std::string, std::string> SearchPathElem;

@ -78,7 +79,7 @@ public:

static inline std::string derivationNixPath = "//builtin/derivation.nix";

const SymbolIdx sWith, sOutPath, sDrvPath, sType, sMeta, sName, sValue,
const Symbol sWith, sOutPath, sDrvPath, sType, sMeta, sName, sValue,
sSystem, sOverrides, sOutputs, sOutputName, sIgnoreNulls,
sFile, sLine, sColumn, sFunctor, sToString,
sRight, sWrong, sStructuredAttrs, sBuilder, sArgs,

@ -87,7 +88,7 @@ public:
sRecurseForDerivations,
sDescription, sSelf, sEpsilon, sStartSet, sOperator, sKey, sPath,
sPrefix;
SymbolIdx sDerivationNix;
Symbol sDerivationNix;

/* If set, force copying files to the Nix store even if they
already exist there. */

@ -269,14 +270,14 @@ public:
[[gnu::noinline, gnu::noreturn]]
void throwEvalError(const PosIdx pos, const char * s, const std::string & s2, const std::string & s3) const;
[[gnu::noinline, gnu::noreturn]]
void throwEvalError(const PosIdx p1, const char * s, const SymbolIdx sym, const PosIdx p2) const;
void throwEvalError(const PosIdx p1, const char * s, const Symbol sym, const PosIdx p2) const;
[[gnu::noinline, gnu::noreturn]]
void throwTypeError(const PosIdx pos, const char * s) const;
[[gnu::noinline, gnu::noreturn]]
void throwTypeError(const PosIdx pos, const char * s, const ExprLambda & fun, const SymbolIdx s2) const;
void throwTypeError(const PosIdx pos, const char * s, const ExprLambda & fun, const Symbol s2) const;
[[gnu::noinline, gnu::noreturn]]
void throwTypeError(const PosIdx pos, const Suggestions & suggestions, const char * s,
const ExprLambda & fun, const SymbolIdx s2) const;
const ExprLambda & fun, const Symbol s2) const;
[[gnu::noinline, gnu::noreturn]]
void throwTypeError(const char * s, const Value & v) const;
[[gnu::noinline, gnu::noreturn]]

@ -392,7 +393,7 @@ public:
inline Value * allocValue();
inline Env & allocEnv(size_t size);

Value * allocAttr(Value & vAttrs, const SymbolIdx & name);
Value * allocAttr(Value & vAttrs, Symbol name);
Value * allocAttr(Value & vAttrs, std::string_view name);

Bindings * allocBindings(size_t capacity);
@ -31,7 +31,7 @@ static void writeTrustedList(const TrustedList & trustedList)

void ConfigFile::apply()
{
std::set<std::string> whitelist{"bash-prompt", "bash-prompt-suffix", "flake-registry"};
std::set<std::string> whitelist{"bash-prompt", "bash-prompt-prefix", "bash-prompt-suffix", "flake-registry"};

for (auto & [name, value] : settings) {

@ -50,13 +50,11 @@ void ConfigFile::apply()
else
assert(false);

if (!whitelist.count(baseName)) {
if (!whitelist.count(baseName) && !nix::fetchSettings.acceptFlakeConfig) {
auto trustedList = readTrustedList();

bool trusted = false;
if (nix::fetchSettings.acceptFlakeConfig){
auto trustedList = readTrustedList();
trusted = true;
auto tlname = get(trustedList, name);
} else if (auto saved = get(get(trustedList, name).value_or(std::map<std::string, bool>()), valueS)) {
if (auto saved = tlname ? get(*tlname, valueS) : nullptr) {
trusted = *saved;
warn("Using saved setting for '%s = %s' from ~/.local/share/nix/trusted-settings.json.", name,valueS);
} else {

@ -69,7 +67,6 @@ void ConfigFile::apply()
writeTrustedList(trustedList);
}
}

if (!trusted) {
warn("ignoring untrusted flake configuration setting '%s'", name);
continue;
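Note: as a reading aid for the rewritten `ConfigFile::apply()` hunk above, here is a rough, hedged sketch of the per-setting decision it appears to make. The helper name and map layout are illustrative only; `acceptAll` stands in for `fetchSettings.acceptFlakeConfig` and `saved` for the trusted-settings file.

```
#include <map>
#include <set>
#include <string>

// Illustrative only: mirrors the control flow suggested by the diff, not the real API.
bool isSettingTrusted(const std::string & name, const std::string & value,
    const std::set<std::string> & whitelist, bool acceptAll,
    const std::map<std::string, std::map<std::string, bool>> & saved)
{
    if (whitelist.count(name) || acceptAll)
        return true;                       // whitelisted or globally accepted
    if (auto byName = saved.find(name); byName != saved.end())
        if (auto byValue = byName->second.find(value); byValue != byName->second.end())
            return byValue->second;        // reuse the previously saved answer
    return false;                          // otherwise the setting is prompted for / ignored
}
```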
@ -723,6 +723,7 @@ static void prim_getFlake(EvalState & state, const PosIdx pos, Value * * args, V
lockFlake(state, flakeRef,
LockFlags {
.updateLockFile = false,
.writeLockFile = false,
.useRegistries = !evalSettings.pureEval && fetchSettings.useRegistries,
.allowMutable = !evalSettings.pureEval,
}),
@ -176,7 +176,7 @@ std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
parsedURL.query.insert_or_assign("shallow", "1");

return std::make_pair(
FlakeRef(Input::fromURL(parsedURL), get(parsedURL.query, "dir").value_or("")),
FlakeRef(Input::fromURL(parsedURL), getOr(parsedURL.query, "dir", "")),
fragment);
}

@ -189,7 +189,7 @@ std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
if (!hasPrefix(path, "/"))
throw BadURL("flake reference '%s' is not an absolute path", url);
auto query = decodeQuery(match[2]);
path = canonPath(path + "/" + get(query, "dir").value_or(""));
path = canonPath(path + "/" + getOr(query, "dir", ""));
}

fetchers::Attrs attrs;

@ -208,7 +208,7 @@ std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
input.parent = baseDir;

return std::make_pair(
FlakeRef(std::move(input), get(parsedURL.query, "dir").value_or("")),
FlakeRef(std::move(input), getOr(parsedURL.query, "dir", "")),
fragment);
}
}

@ -238,4 +238,15 @@ std::pair<fetchers::Tree, FlakeRef> FlakeRef::fetchTree(ref<Store> store) const
return {std::move(tree), FlakeRef(std::move(lockedInput), subdir)};
}

std::tuple<FlakeRef, std::string, OutputsSpec> parseFlakeRefWithFragmentAndOutputsSpec(
const std::string & url,
const std::optional<Path> & baseDir,
bool allowMissing,
bool isFlake)
{
auto [prefix, outputsSpec] = parseOutputsSpec(url);
auto [flakeRef, fragment] = parseFlakeRefWithFragment(prefix, baseDir, allowMissing, isFlake);
return {std::move(flakeRef), fragment, outputsSpec};
}

}
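Note: the `get(map, key).value_or(default)` call sites above become `getOr(map, key, default)`. As a hedged illustration of what such a helper can look like (a sketch, not necessarily the exact signature or overload set in libutil):

```
#include <map>
#include <string>

// Sketch of a getOr-style helper: look up `key` in `map` and fall back to
// `dflt` when it is absent, without going through an intermediate optional.
template<typename K, typename V>
V getOr(const std::map<K, V> & map, const K & key, const V & dflt)
{
    auto i = map.find(key);
    return i == map.end() ? dflt : i->second;
}

// Hypothetical usage mirroring the flakeref.cc call sites:
//   auto dir = getOr(query, std::string("dir"), std::string(""));
```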
@ -3,6 +3,7 @@
#include "types.hh"
#include "hash.hh"
#include "fetchers.hh"
#include "path-with-outputs.hh"

#include <variant>

@ -79,4 +80,11 @@ std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
std::optional<std::pair<FlakeRef, std::string>> maybeParseFlakeRefWithFragment(
const std::string & url, const std::optional<Path> & baseDir = {});

std::tuple<FlakeRef, std::string, OutputsSpec> parseFlakeRefWithFragmentAndOutputsSpec(
const std::string & url,
const std::optional<Path> & baseDir = {},
bool allowMissing = false,
bool isFlake = true);

}
@ -34,7 +34,7 @@ DrvInfo::DrvInfo(EvalState & state, ref<Store> store, const std::string & drvPat

outputName =
selectedOutputs.empty()
? get(drv.env, "outputName").value_or("out")
? getOr(drv.env, "outputName", "out")
: *selectedOutputs.begin();

auto i = drv.outputs.find(outputName);
@ -24,8 +24,10 @@ static void showString(std::ostream & str, std::string_view s)
str << '"';
}

static void showId(std::ostream & str, std::string_view s)
std::ostream & operator <<(std::ostream & str, const SymbolStr & symbol)
{
std::string_view s = symbol;

if (s.empty())
str << "\"\"";
else if (s == "if") // FIXME: handle other keywords

@ -34,7 +36,7 @@ static void showId(std::ostream & str, std::string_view s)
char c = s[0];
if (!((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || c == '_')) {
showString(str, s);
return;
return str;
}
for (auto c : s)
if (!((c >= 'a' && c <= 'z') ||

@ -42,15 +44,10 @@ static void showId(std::ostream & str, std::string_view s)
(c >= '0' && c <= '9') ||
c == '_' || c == '\'' || c == '-')) {
showString(str, s);
return;
return str;
}
str << s;
}
}

std::ostream & operator << (std::ostream & str, const Symbol & sym)
{
showId(str, sym.s);
return str;
}

@ -499,12 +496,12 @@ void ExprPos::bindVars(const EvalState & es, const StaticEnv & env)

/* Storing function names. */

void Expr::setName(SymbolIdx name)
void Expr::setName(Symbol name)
{
}

void ExprLambda::setName(SymbolIdx name)
void ExprLambda::setName(Symbol name)
{
this->name = name;
body->setName(name);

@ -526,7 +523,7 @@ std::string ExprLambda::showNamePos(const EvalState & state) const
size_t SymbolTable::totalSize() const
{
size_t n = 0;
dump([&] (const Symbol & s) { n += std::string_view(s).size(); });
dump([&] (const std::string & s) { n += s.size(); });
return n;
}
@ -126,9 +126,9 @@ struct StaticEnv;
/* An attribute path is a sequence of attribute names. */
struct AttrName
{
SymbolIdx symbol;
Symbol symbol;
Expr * expr;
AttrName(const SymbolIdx & s) : symbol(s) {};
AttrName(Symbol s) : symbol(s) {};
AttrName(Expr * e) : expr(e) {};
};

@ -146,7 +146,7 @@ struct Expr
virtual void bindVars(const EvalState & es, const StaticEnv & env);
virtual void eval(EvalState & state, Env & env, Value & v);
virtual Value * maybeThunk(EvalState & state, Env & env);
virtual void setName(SymbolIdx name);
virtual void setName(Symbol name);
};

#define COMMON_METHODS \

@ -196,7 +196,7 @@ typedef uint32_t Displacement;
struct ExprVar : Expr
{
PosIdx pos;
SymbolIdx name;
Symbol name;

/* Whether the variable comes from an environment (e.g. a rec, let
or function argument) or from a "with". */

@ -211,8 +211,8 @@ struct ExprVar : Expr
Level level;
Displacement displ;

ExprVar(const SymbolIdx & name) : name(name) { };
ExprVar(Symbol name) : name(name) { };
ExprVar(const PosIdx & pos, const SymbolIdx & name) : pos(pos), name(name) { };
ExprVar(const PosIdx & pos, Symbol name) : pos(pos), name(name) { };
COMMON_METHODS
Value * maybeThunk(EvalState & state, Env & env);
};

@ -223,7 +223,7 @@ struct ExprSelect : Expr
Expr * e, * def;
AttrPath attrPath;
ExprSelect(const PosIdx & pos, Expr * e, const AttrPath & attrPath, Expr * def) : pos(pos), e(e), def(def), attrPath(attrPath) { };
ExprSelect(const PosIdx & pos, Expr * e, const SymbolIdx & name) : pos(pos), e(e), def(0) { attrPath.push_back(AttrName(name)); };
ExprSelect(const PosIdx & pos, Expr * e, Symbol name) : pos(pos), e(e), def(0) { attrPath.push_back(AttrName(name)); };
COMMON_METHODS
};

@ -248,7 +248,7 @@ struct ExprAttrs : Expr
: inherited(inherited), e(e), pos(pos) { };
AttrDef() { };
};
typedef std::map<SymbolIdx, AttrDef> AttrDefs;
typedef std::map<Symbol, AttrDef> AttrDefs;
AttrDefs attrs;
struct DynamicAttrDef {
Expr * nameExpr, * valueExpr;

@ -273,7 +273,7 @@ struct ExprList : Expr
struct Formal
{
PosIdx pos;
SymbolIdx name;
Symbol name;
Expr * def;
};

@ -283,9 +283,10 @@ struct Formals
Formals_ formals;
bool ellipsis;

bool has(SymbolIdx arg) const {
bool has(Symbol arg) const
{
auto it = std::lower_bound(formals.begin(), formals.end(), arg,
[] (const Formal & f, const SymbolIdx & sym) { return f.name < sym; });
[] (const Formal & f, const Symbol & sym) { return f.name < sym; });
return it != formals.end() && it->name == arg;
}

@ -304,11 +305,11 @@ struct Formals
struct ExprLambda : Expr
{
PosIdx pos;
SymbolIdx name;
Symbol name;
SymbolIdx arg;
Symbol arg;
Formals * formals;
Expr * body;
ExprLambda(PosIdx pos, SymbolIdx arg, Formals * formals, Expr * body)
ExprLambda(PosIdx pos, Symbol arg, Formals * formals, Expr * body)
: pos(pos), arg(arg), formals(formals), body(body)
{
};

@ -316,7 +317,7 @@ struct ExprLambda : Expr
: pos(pos), formals(formals), body(body)
{
}
void setName(SymbolIdx name);
void setName(Symbol name);
std::string showNamePos(const EvalState & state) const;
inline bool hasFormals() const { return formals != nullptr; }
COMMON_METHODS

@ -426,7 +427,7 @@ struct StaticEnv
const StaticEnv * up;

// Note: these must be in sorted order.
typedef std::vector<std::pair<SymbolIdx, Displacement>> Vars;
typedef std::vector<std::pair<Symbol, Displacement>> Vars;
Vars vars;

StaticEnv(bool isWith, const StaticEnv * up, size_t expectedSize = 0) : isWith(isWith), up(up) {

@ -450,7 +451,7 @@ struct StaticEnv
vars.erase(it, end);
}

Vars::const_iterator find(const SymbolIdx & name) const
Vars::const_iterator find(Symbol name) const
{
Vars::value_type key(name, 0);
auto i = std::lower_bound(vars.begin(), vars.end(), key);
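Note: both `Formals::has` and `StaticEnv::find` above keep their vectors sorted by symbol and look members up with `std::lower_bound`. A small, self-contained illustration of that idiom, with plain integers standing in for interned symbols (the names here are invented for the example):

```
#include <algorithm>
#include <cassert>
#include <cstdint>
#include <utility>
#include <vector>

// Keep (symbol, displacement) pairs sorted by symbol id; binary-search on lookup.
using Var = std::pair<uint32_t, uint32_t>;

bool has(const std::vector<Var> & vars, uint32_t sym)
{
    auto it = std::lower_bound(vars.begin(), vars.end(), Var(sym, 0));
    return it != vars.end() && it->first == sym;
}

int main()
{
    std::vector<Var> vars = {{1, 0}, {4, 1}, {9, 2}};   // already sorted by symbol
    assert(has(vars, 4));
    assert(!has(vars, 5));
}
```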
@ -86,7 +86,7 @@ static void dupAttr(const EvalState & state, const AttrPath & attrPath, const Po
});
}

static void dupAttr(const EvalState & state, SymbolIdx attr, const PosIdx pos, const PosIdx prevPos)
static void dupAttr(const EvalState & state, Symbol attr, const PosIdx pos, const PosIdx prevPos)
{
throw ParseError({
.msg = hintfmt("attribute '%1%' already defined at %2%", state.symbols[attr], state.positions[prevPos]),

@ -157,14 +157,14 @@ static void addAttr(ExprAttrs * attrs, AttrPath & attrPath,

static Formals * toFormals(ParseData & data, ParserFormals * formals,
PosIdx pos = noPos, SymbolIdx arg = {})
PosIdx pos = noPos, Symbol arg = {})
{
std::sort(formals->formals.begin(), formals->formals.end(),
[] (const auto & a, const auto & b) {
return std::tie(a.name, a.pos) < std::tie(b.name, b.pos);
});

std::optional<std::pair<SymbolIdx, PosIdx>> duplicate;
std::optional<std::pair<Symbol, PosIdx>> duplicate;
for (size_t i = 0; i + 1 < formals->formals.size(); i++) {
if (formals->formals[i].name != formals->formals[i + 1].name)
continue;
@ -68,14 +68,15 @@ StringMap EvalState::realiseContext(const PathSet & context)

/* Get all the output paths corresponding to the placeholders we had */
for (auto & [drvPath, outputs] : drvs) {
auto outputPaths = store->queryDerivationOutputMap(drvPath);
const auto outputPaths = store->queryDerivationOutputMap(drvPath);
for (auto & outputName : outputs) {
if (outputPaths.count(outputName) == 0)
auto outputPath = get(outputPaths, outputName);
if (!outputPath)
throw Error("derivation '%s' does not have an output named '%s'",
store->printStorePath(drvPath), outputName);
res.insert_or_assign(
downstreamPlaceholder(*store, drvPath, outputName),
store->printStorePath(outputPaths.at(outputName))
store->printStorePath(*outputPath)
);
}
}

@ -584,7 +585,7 @@ typedef std::list<Value *> ValueList;
static Bindings::iterator getAttr(
EvalState & state,
std::string_view funcName,
SymbolIdx attrSym,
Symbol attrSym,
Bindings * attrSet,
const PosIdx pos)
{

@ -1249,8 +1250,13 @@ static void prim_derivationStrict(EvalState & state, const PosIdx pos, Value * *
switch (hashModulo.kind) {
case DrvHash::Kind::Regular:
for (auto & i : outputs) {
auto h = hashModulo.hashes.at(i);
auto h = get(hashModulo.hashes, i);
auto outPath = state.store->makeOutputPath(i, h, drvName);
if (!h)
throw AssertionError({
.msg = hintfmt("derivation produced no hash for output '%s'", i),
.errPos = state.positions[posDrvName],
});
auto outPath = state.store->makeOutputPath(i, *h, drvName);
drv.env[i] = state.store->printStorePath(outPath);
drv.outputs.insert_or_assign(
i,

@ -2047,7 +2053,7 @@ static void prim_path(EvalState & state, const PosIdx pos, Value * * args, Value
PathSet context;

for (auto & attr : *args[0]->attrs) {
auto & n(state.symbols[attr.name]);
auto n = state.symbols[attr.name];
if (n == "path")
path = state.coerceToPath(attr.pos, *attr.value, context);
else if (attr.name == state.sName)

@ -2314,7 +2320,7 @@ static void prim_listToAttrs(EvalState & state, const PosIdx pos, Value * * args

auto attrs = state.buildBindings(args[0]->listSize());

std::set<SymbolIdx> seen;
std::set<Symbol> seen;

for (auto v2 : args[0]->listItems()) {
state.forceAttrs(*v2, pos);

@ -2517,7 +2523,7 @@ static void prim_zipAttrsWith(EvalState & state, const PosIdx pos, Value * * arg
// attribute with the merge function application. this way we need not
// use (slightly slower) temporary storage the GC does not know about.

std::map<SymbolIdx, std::pair<size_t, Value * *>> attrsSeen;
std::map<Symbol, std::pair<size_t, Value * *>> attrsSeen;

state.forceFunction(*args[0], pos);
state.forceList(*args[1], pos);

@ -3523,7 +3529,7 @@ static RegisterPrimOp primop_match({
builtins.match "[[:space:]]+([[:upper:]]+)[[:space:]]+" " FOO "
```

Evaluates to `[ "foo" ]`.
Evaluates to `[ "FOO" ]`.
)s",
.fun = prim_match,
});
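Note: several hunks above switch from `map.at(key)` / `map.count(key)` to a `get()` helper whose result can be null-checked, so a missing key becomes an explicit, well-worded error instead of an out-of-range exception. A hedged sketch of that pattern (the helper and the error text are illustrative, not the exact libutil definition):

```
#include <map>
#include <stdexcept>
#include <string>

// Sketch: return a pointer to the mapped value, or nullptr if the key is
// absent, so the caller must spell out what a missing key means.
template<typename K, typename V>
const V * get(const std::map<K, V> & map, const K & key)
{
    auto i = map.find(key);
    return i == map.end() ? nullptr : &i->second;
}

void example(const std::map<std::string, std::string> & outputPaths)
{
    auto p = get(outputPaths, std::string("out"));
    if (!p)
        throw std::runtime_error("derivation does not have an output named 'out'");
    // ... use *p ...
}
```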
@ -12,82 +12,96 @@ namespace nix {
/* Symbol table used by the parser and evaluator to represent and look
up identifiers and attributes efficiently. SymbolTable::create()
converts a string into a symbol. Symbols have the property that
they can be compared efficiently (using a pointer equality test),
they can be compared efficiently (using an equality test),
because the symbol table stores only one copy of each string. */

class Symbol
/* This class mainly exists to give us an operator<< for ostreams. We could also
return plain strings from SymbolTable, but then we'd have to wrap every
instance of a symbol that is fmt()ed, which is inconvenient and error-prone. */
class SymbolStr
{
friend class SymbolTable;

private:
std::string s;
const std::string * s;

explicit SymbolStr(const std::string & symbol): s(&symbol) {}

public:
Symbol(std::string_view s) : s(s) { }

// FIXME: remove
bool operator == (std::string_view s2) const
{
return s == s2;
return *s == s2;
}

operator const std::string & () const
{
return s;
return *s;
}

operator const std::string_view () const
{
return s;
return *s;
}

friend std::ostream & operator << (std::ostream & str, const Symbol & sym);
friend std::ostream & operator <<(std::ostream & os, const SymbolStr & symbol);
};

class SymbolIdx
class Symbol
{
friend class SymbolTable;

private:
uint32_t id;

explicit SymbolIdx(uint32_t id): id(id) {}
explicit Symbol(uint32_t id): id(id) {}

public:
SymbolIdx() : id(0) {}
Symbol() : id(0) {}

explicit operator bool() const { return id > 0; }

bool operator<(const SymbolIdx other) const { return id < other.id; }
bool operator<(const Symbol other) const { return id < other.id; }
bool operator==(const SymbolIdx other) const { return id == other.id; }
bool operator==(const Symbol other) const { return id == other.id; }
bool operator!=(const SymbolIdx other) const { return id != other.id; }
bool operator!=(const Symbol other) const { return id != other.id; }
};

class SymbolTable
{
private:
std::unordered_map<std::string_view, std::pair<const Symbol *, uint32_t>> symbols;
std::unordered_map<std::string_view, std::pair<const std::string *, uint32_t>> symbols;
ChunkedVector<Symbol, 8192> store{16};
ChunkedVector<std::string, 8192> store{16};

public:
SymbolIdx create(std::string_view s)
Symbol create(std::string_view s)
{
// Most symbols are looked up more than once, so we trade off insertion performance
// for lookup performance.
// TODO: could probably be done more efficiently with transparent Hash and Equals
// on the original implementation using unordered_set
// FIXME: make this thread-safe.
auto it = symbols.find(s);
if (it != symbols.end()) return SymbolIdx(it->second.second + 1);
if (it != symbols.end()) return Symbol(it->second.second + 1);

const auto & [rawSym, idx] = store.add(s);
const auto & [rawSym, idx] = store.add(std::string(s));
symbols.emplace(rawSym, std::make_pair(&rawSym, idx));
return SymbolIdx(idx + 1);
return Symbol(idx + 1);
}

const Symbol & operator[](SymbolIdx s) const
std::vector<SymbolStr> resolve(const std::vector<Symbol> & symbols) const
{
std::vector<SymbolStr> result;
result.reserve(symbols.size());
for (auto sym : symbols)
result.push_back((*this)[sym]);
return result;
}

SymbolStr operator[](Symbol s) const
{
if (s.id == 0 || s.id > store.size())
abort();
return store[s.id - 1];
return SymbolStr(store[s.id - 1]);
}

size_t size() const
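Note: the hunk above turns symbols into 32-bit indices into a string store, with `SymbolStr` wrapping the stored string only for printing, so equal strings intern to the same id and comparisons are plain integer comparisons. A minimal, standalone sketch of that interning idea (simplified; it is not the Nix implementation):

```
#include <cassert>
#include <cstdint>
#include <string>
#include <unordered_map>
#include <vector>

// Simplified interner: each distinct string is stored once and identified
// by a small integer, so equality checks compare two uint32_t values.
class MiniSymbolTable {
    std::unordered_map<std::string, uint32_t> ids;
    std::vector<std::string> strings;
public:
    uint32_t create(const std::string & s) {
        auto [it, inserted] = ids.try_emplace(s, strings.size());
        if (inserted) strings.push_back(s);
        return it->second;
    }
    const std::string & operator[](uint32_t id) const { return strings.at(id); }
};

int main() {
    MiniSymbolTable t;
    auto a = t.create("outPath");
    auto b = t.create("outPath");
    assert(a == b);                  // same string -> same id
    assert(t[a] == "outPath");       // id resolves back to the stored string
}
```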
68
src/libexpr/tests/json.cc
Normal file
@ -0,0 +1,68 @@
#include "libexprtests.hh"
#include "value-to-json.hh"

namespace nix {
// Testing the conversion to JSON

class JSONValueTest : public LibExprTest {
protected:
std::string getJSONValue(Value& value) {
std::stringstream ss;
PathSet ps;
printValueAsJSON(state, true, value, noPos, ss, ps);
return ss.str();
}
};

TEST_F(JSONValueTest, null) {
Value v;
v.mkNull();
ASSERT_EQ(getJSONValue(v), "null");
}

TEST_F(JSONValueTest, BoolFalse) {
Value v;
v.mkBool(false);
ASSERT_EQ(getJSONValue(v),"false");
}

TEST_F(JSONValueTest, BoolTrue) {
Value v;
v.mkBool(true);
ASSERT_EQ(getJSONValue(v), "true");
}

TEST_F(JSONValueTest, IntPositive) {
Value v;
v.mkInt(100);
ASSERT_EQ(getJSONValue(v), "100");
}

TEST_F(JSONValueTest, IntNegative) {
Value v;
v.mkInt(-100);
ASSERT_EQ(getJSONValue(v), "-100");
}

TEST_F(JSONValueTest, String) {
Value v;
v.mkString("test");
ASSERT_EQ(getJSONValue(v), "\"test\"");
}

TEST_F(JSONValueTest, StringQuotes) {
Value v;

v.mkString("test\"");
ASSERT_EQ(getJSONValue(v), "\"test\\\"\"");
}

// The dummy store doesn't support writing files. Fails with this exception message:
// C++ exception with description "error: operation 'addToStoreFromDump' is
// not supported by store 'dummy'" thrown in the test body.
TEST_F(JSONValueTest, DISABLED_Path) {
Value v;
v.mkPath("test");
ASSERT_EQ(getJSONValue(v), "\"/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x\"");
}
} /* namespace nix */
136
src/libexpr/tests/libexprtests.hh
Normal file
@ -0,0 +1,136 @@
#include <gtest/gtest.h>
#include <gmock/gmock.h>

#include "value.hh"
#include "nixexpr.hh"
#include "eval.hh"
#include "eval-inline.hh"
#include "store-api.hh"


namespace nix {
class LibExprTest : public ::testing::Test {
public:
static void SetUpTestSuite() {
initGC();
}

protected:
LibExprTest()
: store(openStore("dummy://"))
, state({}, store)
{
}
Value eval(std::string input, bool forceValue = true) {
Value v;
Expr * e = state.parseExprFromString(input, "");
assert(e);
state.eval(e, v);
if (forceValue)
state.forceValue(v, noPos);
return v;
}

Symbol createSymbol(const char * value) {
return state.symbols.create(value);
}

ref<Store> store;
EvalState state;
};

MATCHER(IsListType, "") {
return arg != nList;
}

MATCHER(IsList, "") {
return arg.type() == nList;
}

MATCHER(IsString, "") {
return arg.type() == nString;
}

MATCHER(IsNull, "") {
return arg.type() == nNull;
}

MATCHER(IsThunk, "") {
return arg.type() == nThunk;
}

MATCHER(IsAttrs, "") {
return arg.type() == nAttrs;
}

MATCHER_P(IsStringEq, s, fmt("The string is equal to \"%1%\"", s)) {
if (arg.type() != nString) {
return false;
}
return std::string_view(arg.string.s) == s;
}

MATCHER_P(IsIntEq, v, fmt("The string is equal to \"%1%\"", v)) {
if (arg.type() != nInt) {
return false;
}
return arg.integer == v;
}

MATCHER_P(IsFloatEq, v, fmt("The float is equal to \"%1%\"", v)) {
if (arg.type() != nFloat) {
return false;
}
return arg.fpoint == v;
}

MATCHER(IsTrue, "") {
if (arg.type() != nBool) {
return false;
}
return arg.boolean == true;
}

MATCHER(IsFalse, "") {
if (arg.type() != nBool) {
return false;
}
return arg.boolean == false;
}

MATCHER_P(IsPathEq, p, fmt("Is a path equal to \"%1%\"", p)) {
if (arg.type() != nPath) {
*result_listener << "Expected a path got " << arg.type();
return false;
} else if (std::string_view(arg.string.s) != p) {
*result_listener << "Expected a path that equals \"" << p << "\" but got: " << arg.string.s;
return false;
}
return true;
}


MATCHER_P(IsListOfSize, n, fmt("Is a list of size [%1%]", n)) {
if (arg.type() != nList) {
*result_listener << "Expected list got " << arg.type();
return false;
} else if (arg.listSize() != (size_t)n) {
*result_listener << "Expected as list of size " << n << " got " << arg.listSize();
return false;
}
return true;
}

MATCHER_P(IsAttrsOfSize, n, fmt("Is a set of size [%1%]", n)) {
if (arg.type() != nAttrs) {
*result_listener << "Expexted set got " << arg.type();
return false;
} else if (arg.attrs->size() != (size_t)n) {
*result_listener << "Expected a set with " << n << " attributes but got " << arg.attrs->size();
return false;
}
return true;
}


} /* namespace nix */
15
src/libexpr/tests/local.mk
Normal file
@ -0,0 +1,15 @@
check: libexpr-tests_RUN

programs += libexpr-tests

libexpr-tests_DIR := $(d)

libexpr-tests_INSTALL_DIR :=

libexpr-tests_SOURCES := $(wildcard $(d)/*.cc)

libexpr-tests_CXXFLAGS += -I src/libexpr -I src/libutil -I src/libstore -I src/libexpr/tests

libexpr-tests_LIBS = libexpr libutil libstore libfetchers

libexpr-tests_LDFLAGS := $(GTEST_LIBS) -lgmock
839
src/libexpr/tests/primops.cc
Normal file
@ -0,0 +1,839 @@
|
#include <gmock/gmock.h>
|
||||||
|
#include <gtest/gtest.h>
|
||||||
|
|
||||||
|
#include "libexprtests.hh"
|
||||||
|
|
||||||
|
namespace nix {
|
||||||
|
class CaptureLogger : public Logger
|
||||||
|
{
|
||||||
|
std::ostringstream oss;
|
||||||
|
|
||||||
|
public:
|
||||||
|
CaptureLogger() {}
|
||||||
|
|
||||||
|
std::string get() const {
|
||||||
|
return oss.str();
|
||||||
|
}
|
||||||
|
|
||||||
|
void log(Verbosity lvl, const FormatOrString & fs) override {
|
||||||
|
oss << fs.s << std::endl;
|
||||||
|
}
|
||||||
|
|
||||||
|
void logEI(const ErrorInfo & ei) override {
|
||||||
|
showErrorInfo(oss, ei, loggerSettings.showTrace.get());
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
class CaptureLogging {
|
||||||
|
Logger * oldLogger;
|
||||||
|
std::unique_ptr<CaptureLogger> tempLogger;
|
||||||
|
public:
|
||||||
|
CaptureLogging() : tempLogger(std::make_unique<CaptureLogger>()) {
|
||||||
|
oldLogger = logger;
|
||||||
|
logger = tempLogger.get();
|
||||||
|
}
|
||||||
|
|
||||||
|
~CaptureLogging() {
|
||||||
|
logger = oldLogger;
|
||||||
|
}
|
||||||
|
|
||||||
|
std::string get() const {
|
||||||
|
return tempLogger->get();
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
// Testing eval of PrimOp's
|
||||||
|
class PrimOpTest : public LibExprTest {};
|
||||||
|
|
||||||
|
|
||||||
|
TEST_F(PrimOpTest, throw) {
|
||||||
|
ASSERT_THROW(eval("throw \"foo\""), ThrownError);
|
||||||
|
}
|
||||||
|
|
||||||
|
TEST_F(PrimOpTest, abort) {
|
||||||
|
ASSERT_THROW(eval("abort \"abort\""), Abort);
|
||||||
|
}
|
||||||
|
|
||||||
|
TEST_F(PrimOpTest, ceil) {
|
||||||
|
auto v = eval("builtins.ceil 1.9");
|
||||||
|
ASSERT_THAT(v, IsIntEq(2));
|
||||||
|
}
|
||||||
|
|
||||||
|
TEST_F(PrimOpTest, floor) {
|
||||||
|
auto v = eval("builtins.floor 1.9");
|
||||||
|
ASSERT_THAT(v, IsIntEq(1));
|
||||||
|
}
|
||||||
|
|
||||||
|
TEST_F(PrimOpTest, tryEvalFailure) {
|
||||||
|
auto v = eval("builtins.tryEval (throw \"\")");
|
||||||
|
ASSERT_THAT(v, IsAttrsOfSize(2));
|
||||||
|
auto s = createSymbol("success");
|
||||||
|
auto p = v.attrs->get(s);
|
||||||
|
ASSERT_NE(p, nullptr);
|
||||||
|
ASSERT_THAT(*p->value, IsFalse());
|
||||||
|
}
|
||||||
|
|
||||||
|
TEST_F(PrimOpTest, tryEvalSuccess) {
|
||||||
|
auto v = eval("builtins.tryEval 123");
|
||||||
|
ASSERT_THAT(v, IsAttrs());
|
||||||
|
auto s = createSymbol("success");
|
||||||
|
auto p = v.attrs->get(s);
|
||||||
|
ASSERT_NE(p, nullptr);
|
||||||
|
ASSERT_THAT(*p->value, IsTrue());
|
||||||
|
s = createSymbol("value");
|
||||||
|
p = v.attrs->get(s);
|
||||||
|
ASSERT_NE(p, nullptr);
|
||||||
|
ASSERT_THAT(*p->value, IsIntEq(123));
|
||||||
|
}
|
||||||
|
|
||||||
|
TEST_F(PrimOpTest, getEnv) {
|
||||||
|
setenv("_NIX_UNIT_TEST_ENV_VALUE", "test value", 1);
|
||||||
|
auto v = eval("builtins.getEnv \"_NIX_UNIT_TEST_ENV_VALUE\"");
|
||||||
|
ASSERT_THAT(v, IsStringEq("test value"));
|
||||||
|
}
|
||||||
|
|
||||||
|
TEST_F(PrimOpTest, seq) {
|
||||||
|
ASSERT_THROW(eval("let x = throw \"test\"; in builtins.seq x { }"), ThrownError);
|
||||||
|
}
|
||||||
|
|
||||||
|
TEST_F(PrimOpTest, seqNotDeep) {
|
||||||
|
auto v = eval("let x = { z = throw \"test\"; }; in builtins.seq x { }");
|
||||||
|
ASSERT_THAT(v, IsAttrs());
|
||||||
|
}
|
||||||
|
|
||||||
|
TEST_F(PrimOpTest, deepSeq) {
|
||||||
|
ASSERT_THROW(eval("let x = { z = throw \"test\"; }; in builtins.deepSeq x { }"), ThrownError);
|
||||||
|
}
|
||||||
|
|
||||||
|
TEST_F(PrimOpTest, trace) {
|
||||||
|
CaptureLogging l;
|
||||||
|
auto v = eval("builtins.trace \"test string 123\" 123");
|
||||||
|
ASSERT_THAT(v, IsIntEq(123));
|
||||||
|
auto text = l.get();
|
||||||
|
ASSERT_NE(text.find("test string 123"), std::string::npos);
|
||||||
|
}
|
||||||
|
|
||||||
|
TEST_F(PrimOpTest, placeholder) {
|
||||||
|
auto v = eval("builtins.placeholder \"out\"");
|
||||||
|
ASSERT_THAT(v, IsStringEq("/1rz4g4znpzjwh1xymhjpm42vipw92pr73vdgl6xs1hycac8kf2n9"));
|
||||||
|
}
|
||||||
|
|
||||||
|
TEST_F(PrimOpTest, baseNameOf) {
|
||||||
|
auto v = eval("builtins.baseNameOf /some/path");
|
||||||
|
ASSERT_THAT(v, IsStringEq("path"));
|
||||||
|
}
|
||||||
|
|
||||||
|
TEST_F(PrimOpTest, dirOf) {
|
||||||
|
auto v = eval("builtins.dirOf /some/path");
|
||||||
|
ASSERT_THAT(v, IsPathEq("/some"));
|
||||||
|
}
|
||||||
|
|
||||||
|
TEST_F(PrimOpTest, attrValues) {
|
||||||
|
auto v = eval("builtins.attrValues { x = \"foo\"; a = 1; }");
|
||||||
|
ASSERT_THAT(v, IsListOfSize(2));
|
||||||
|
ASSERT_THAT(*v.listElems()[0], IsIntEq(1));
|
||||||
|
ASSERT_THAT(*v.listElems()[1], IsStringEq("foo"));
|
||||||
|
}
|
||||||
|
|
||||||
|
TEST_F(PrimOpTest, getAttr) {
|
||||||
|
auto v = eval("builtins.getAttr \"x\" { x = \"foo\"; }");
|
||||||
|
ASSERT_THAT(v, IsStringEq("foo"));
|
||||||
|
}
|
||||||
|
|
||||||
|
TEST_F(PrimOpTest, getAttrNotFound) {
|
||||||
|
// FIXME: TypeError is really bad here, also the error wording is worse
|
||||||
|
// than on Nix <=2.3
|
||||||
|
ASSERT_THROW(eval("builtins.getAttr \"y\" { }"), TypeError);
|
||||||
|
}
|
||||||
|
|
||||||
|
TEST_F(PrimOpTest, unsafeGetAttrPos) {
|
||||||
|
// The `y` attribute is at position
|
||||||
|
const char* expr = "builtins.unsafeGetAttrPos \"y\" { y = \"x\"; }";
|
||||||
|
auto v = eval(expr);
|
||||||
|
ASSERT_THAT(v, IsAttrsOfSize(3));
|
||||||
|
|
||||||
|
auto file = v.attrs->find(createSymbol("file"));
|
||||||
|
ASSERT_NE(file, nullptr);
|
||||||
|
// FIXME: The file when running these tests is the input string?!?
|
||||||
|
ASSERT_THAT(*file->value, IsStringEq(expr));
|
||||||
|
|
||||||
|
auto line = v.attrs->find(createSymbol("line"));
|
||||||
|
ASSERT_NE(line, nullptr);
|
||||||
|
ASSERT_THAT(*line->value, IsIntEq(1));
|
||||||
|
|
||||||
|
auto column = v.attrs->find(createSymbol("column"));
|
||||||
|
ASSERT_NE(column, nullptr);
|
||||||
|
ASSERT_THAT(*column->value, IsIntEq(33));
|
||||||
|
}
|
||||||
|
|
||||||
|
TEST_F(PrimOpTest, hasAttr) {
|
||||||
|
auto v = eval("builtins.hasAttr \"x\" { x = 1; }");
|
||||||
|
ASSERT_THAT(v, IsTrue());
|
||||||
|
}
|
||||||
|
|
||||||
|
TEST_F(PrimOpTest, hasAttrNotFound) {
|
||||||
|
auto v = eval("builtins.hasAttr \"x\" { }");
|
||||||
|
ASSERT_THAT(v, IsFalse());
|
||||||
|
}
|
||||||
|
|
||||||
|
TEST_F(PrimOpTest, isAttrs) {
|
||||||
|
auto v = eval("builtins.isAttrs {}");
|
||||||
|
ASSERT_THAT(v, IsTrue());
|
||||||
|
}
|
||||||
|
|
||||||
|
TEST_F(PrimOpTest, isAttrsFalse) {
|
||||||
|
auto v = eval("builtins.isAttrs null");
|
||||||
|
ASSERT_THAT(v, IsFalse());
|
||||||
|
}
|
||||||
|
|
||||||
|
TEST_F(PrimOpTest, removeAttrs) {
|
||||||
|
auto v = eval("builtins.removeAttrs { x = 1; } [\"x\"]");
|
||||||
|
ASSERT_THAT(v, IsAttrsOfSize(0));
|
||||||
|
}
|
||||||
|
|
||||||
|
TEST_F(PrimOpTest, removeAttrsRetains) {
|
||||||
|
auto v = eval("builtins.removeAttrs { x = 1; y = 2; } [\"x\"]");
|
||||||
|
ASSERT_THAT(v, IsAttrsOfSize(1));
|
||||||
|
ASSERT_NE(v.attrs->find(createSymbol("y")), nullptr);
|
||||||
|
}
|
||||||
|
|
||||||
|
TEST_F(PrimOpTest, listToAttrsEmptyList) {
|
||||||
|
auto v = eval("builtins.listToAttrs []");
|
||||||
|
ASSERT_THAT(v, IsAttrsOfSize(0));
|
||||||
|
ASSERT_EQ(v.type(), nAttrs);
|
||||||
|
ASSERT_EQ(v.attrs->size(), 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
TEST_F(PrimOpTest, listToAttrsNotFieldName) {
|
||||||
|
ASSERT_THROW(eval("builtins.listToAttrs [{}]"), Error);
|
||||||
|
}
|
||||||
|
|
||||||
|
TEST_F(PrimOpTest, listToAttrs) {
|
||||||
|
auto v = eval("builtins.listToAttrs [ { name = \"key\"; value = 123; } ]");
|
||||||
|
ASSERT_THAT(v, IsAttrsOfSize(1));
|
||||||
|
auto key = v.attrs->find(createSymbol("key"));
|
||||||
|
ASSERT_NE(key, nullptr);
|
||||||
|
ASSERT_THAT(*key->value, IsIntEq(123));
|
||||||
|
}
|
||||||
|
|
||||||
|
TEST_F(PrimOpTest, intersectAttrs) {
|
||||||
|
auto v = eval("builtins.intersectAttrs { a = 1; b = 2; } { b = 3; c = 4; }");
|
||||||
|
ASSERT_THAT(v, IsAttrsOfSize(1));
|
||||||
|
auto b = v.attrs->find(createSymbol("b"));
|
||||||
|
ASSERT_NE(b, nullptr);
|
||||||
|
ASSERT_THAT(*b->value, IsIntEq(3));
|
||||||
|
}
|
||||||
|
|
||||||
|
TEST_F(PrimOpTest, catAttrs) {
|
||||||
|
auto v = eval("builtins.catAttrs \"a\" [{a = 1;} {b = 0;} {a = 2;}]");
|
||||||
|
ASSERT_THAT(v, IsListOfSize(2));
|
||||||
|
ASSERT_THAT(*v.listElems()[0], IsIntEq(1));
|
||||||
|
ASSERT_THAT(*v.listElems()[1], IsIntEq(2));
|
||||||
|
}
|
||||||
|
|
||||||
|
TEST_F(PrimOpTest, functionArgs) {
|
||||||
|
auto v = eval("builtins.functionArgs ({ x, y ? 123}: 1)");
|
||||||
|
ASSERT_THAT(v, IsAttrsOfSize(2));
|
||||||
|
|
||||||
|
auto x = v.attrs->find(createSymbol("x"));
|
||||||
|
ASSERT_NE(x, nullptr);
|
||||||
|
ASSERT_THAT(*x->value, IsFalse());
|
||||||
|
|
||||||
|
auto y = v.attrs->find(createSymbol("y"));
|
||||||
|
ASSERT_NE(y, nullptr);
|
||||||
|
ASSERT_THAT(*y->value, IsTrue());
|
||||||
|
}
|
||||||
|
|
||||||
|
TEST_F(PrimOpTest, mapAttrs) {
|
||||||
|
auto v = eval("builtins.mapAttrs (name: value: value * 10) { a = 1; b = 2; }");
|
||||||
|
ASSERT_THAT(v, IsAttrsOfSize(2));
|
||||||
|
|
||||||
|
auto a = v.attrs->find(createSymbol("a"));
|
||||||
|
ASSERT_NE(a, nullptr);
|
||||||
|
ASSERT_THAT(*a->value, IsThunk());
|
||||||
|
state.forceValue(*a->value, noPos);
|
||||||
|
ASSERT_THAT(*a->value, IsIntEq(10));
|
||||||
|
|
||||||
|
auto b = v.attrs->find(createSymbol("b"));
|
||||||
|
ASSERT_NE(b, nullptr);
|
||||||
|
ASSERT_THAT(*b->value, IsThunk());
|
||||||
|
state.forceValue(*b->value, noPos);
|
||||||
|
ASSERT_THAT(*b->value, IsIntEq(20));
|
||||||
|
}
|
||||||
|
|
||||||
|
TEST_F(PrimOpTest, isList) {
|
||||||
|
auto v = eval("builtins.isList []");
|
||||||
|
ASSERT_THAT(v, IsTrue());
|
||||||
|
}
|
||||||
|
|
||||||
|
TEST_F(PrimOpTest, isListFalse) {
|
||||||
|
auto v = eval("builtins.isList null");
|
||||||
|
ASSERT_THAT(v, IsFalse());
|
||||||
|
}
|
||||||
|
|
||||||
|
TEST_F(PrimOpTest, elemtAt) {
|
||||||
|
auto v = eval("builtins.elemAt [0 1 2 3] 3");
|
||||||
|
ASSERT_THAT(v, IsIntEq(3));
|
||||||
|
}
|
||||||
|
|
||||||
|
TEST_F(PrimOpTest, elemtAtOutOfBounds) {
|
||||||
|
ASSERT_THROW(eval("builtins.elemAt [0 1 2 3] 5"), Error);
|
||||||
|
}
|
||||||
|
|
||||||
|
TEST_F(PrimOpTest, head) {
|
||||||
|
auto v = eval("builtins.head [ 3 2 1 0 ]");
|
||||||
|
ASSERT_THAT(v, IsIntEq(3));
|
||||||
|
}
|
||||||
|
|
||||||
|
TEST_F(PrimOpTest, headEmpty) {
|
||||||
|
ASSERT_THROW(eval("builtins.head [ ]"), Error);
|
||||||
|
}
|
||||||
|
|
||||||
|
TEST_F(PrimOpTest, headWrongType) {
|
||||||
|
ASSERT_THROW(eval("builtins.head { }"), Error);
|
||||||
|
}
|
||||||
|
|
||||||
|
TEST_F(PrimOpTest, tail) {
|
||||||
|
auto v = eval("builtins.tail [ 3 2 1 0 ]");
|
||||||
|
ASSERT_THAT(v, IsListOfSize(3));
|
||||||
|
for (const auto [n, elem] : enumerate(v.listItems()))
|
||||||
|
ASSERT_THAT(*elem, IsIntEq(2 - static_cast<int>(n)));
|
||||||
|
}
|
||||||
|
|
||||||
|
TEST_F(PrimOpTest, tailEmpty) {
|
||||||
|
ASSERT_THROW(eval("builtins.tail []"), Error);
|
||||||
|
}
|
||||||
|
|
||||||
|
TEST_F(PrimOpTest, map) {
|
||||||
|
auto v = eval("map (x: \"foo\" + x) [ \"bar\" \"bla\" \"abc\" ]");
|
||||||
|
ASSERT_THAT(v, IsListOfSize(3));
|
||||||
|
auto elem = v.listElems()[0];
|
||||||
|
ASSERT_THAT(*elem, IsThunk());
|
||||||
|
state.forceValue(*elem, noPos);
|
||||||
|
ASSERT_THAT(*elem, IsStringEq("foobar"));
|
||||||
|
|
||||||
|
elem = v.listElems()[1];
|
||||||
|
ASSERT_THAT(*elem, IsThunk());
|
||||||
|
state.forceValue(*elem, noPos);
|
||||||
|
ASSERT_THAT(*elem, IsStringEq("foobla"));
|
||||||
|
|
||||||
|
elem = v.listElems()[2];
|
||||||
|
ASSERT_THAT(*elem, IsThunk());
|
||||||
|
state.forceValue(*elem, noPos);
|
||||||
|
ASSERT_THAT(*elem, IsStringEq("fooabc"));
|
||||||
|
}
|
||||||
|
|
||||||
|
TEST_F(PrimOpTest, filter) {
|
||||||
|
auto v = eval("builtins.filter (x: x == 2) [ 3 2 3 2 3 2 ]");
|
||||||
|
ASSERT_THAT(v, IsListOfSize(3));
|
||||||
|
for (const auto elem : v.listItems())
|
||||||
|
ASSERT_THAT(*elem, IsIntEq(2));
|
||||||
|
}
|
||||||
|
|
||||||
|
TEST_F(PrimOpTest, elemTrue) {
|
||||||
|
auto v = eval("builtins.elem 3 [ 1 2 3 4 5 ]");
|
||||||
|
ASSERT_THAT(v, IsTrue());
|
||||||
|
}
|
||||||
|
|
||||||
|
TEST_F(PrimOpTest, elemFalse) {
|
||||||
|
auto v = eval("builtins.elem 6 [ 1 2 3 4 5 ]");
|
||||||
|
ASSERT_THAT(v, IsFalse());
|
||||||
|
}
|
||||||
|
|
||||||
|
TEST_F(PrimOpTest, concatLists) {
|
||||||
|
auto v = eval("builtins.concatLists [[1 2] [3 4]]");
|
||||||
|
ASSERT_THAT(v, IsListOfSize(4));
|
||||||
|
for (const auto [i, elem] : enumerate(v.listItems()))
|
||||||
|
ASSERT_THAT(*elem, IsIntEq(static_cast<int>(i)+1));
|
||||||
|
}
|
||||||
|
|
||||||
|
TEST_F(PrimOpTest, length) {
|
||||||
|
auto v = eval("builtins.length [ 1 2 3 ]");
|
||||||
|
ASSERT_THAT(v, IsIntEq(3));
|
||||||
|
}
|
TEST_F(PrimOpTest, foldStrict) {
    auto v = eval("builtins.foldl' (a: b: a + b) 0 [1 2 3]");
    ASSERT_THAT(v, IsIntEq(6));
}

TEST_F(PrimOpTest, anyTrue) {
    auto v = eval("builtins.any (x: x == 2) [ 1 2 3 ]");
    ASSERT_THAT(v, IsTrue());
}

TEST_F(PrimOpTest, anyFalse) {
    auto v = eval("builtins.any (x: x == 5) [ 1 2 3 ]");
    ASSERT_THAT(v, IsFalse());
}

TEST_F(PrimOpTest, allTrue) {
    auto v = eval("builtins.all (x: x > 0) [ 1 2 3 ]");
    ASSERT_THAT(v, IsTrue());
}

TEST_F(PrimOpTest, allFalse) {
    auto v = eval("builtins.all (x: x <= 0) [ 1 2 3 ]");
    ASSERT_THAT(v, IsFalse());
}

TEST_F(PrimOpTest, genList) {
    auto v = eval("builtins.genList (x: x + 1) 3");
    ASSERT_EQ(v.type(), nList);
    ASSERT_EQ(v.listSize(), 3);
    for (const auto [i, elem] : enumerate(v.listItems())) {
        ASSERT_THAT(*elem, IsThunk());
        state.forceValue(*elem, noPos);
        ASSERT_THAT(*elem, IsIntEq(static_cast<int>(i)+1));
    }
}

TEST_F(PrimOpTest, sortLessThan) {
    auto v = eval("builtins.sort builtins.lessThan [ 483 249 526 147 42 77 ]");
    ASSERT_EQ(v.type(), nList);
    ASSERT_EQ(v.listSize(), 6);

    const std::vector<int> numbers = { 42, 77, 147, 249, 483, 526 };
    for (const auto [n, elem] : enumerate(v.listItems()))
        ASSERT_THAT(*elem, IsIntEq(numbers[n]));
}

TEST_F(PrimOpTest, partition) {
    auto v = eval("builtins.partition (x: x > 10) [1 23 9 3 42]");
    ASSERT_THAT(v, IsAttrsOfSize(2));

    auto right = v.attrs->get(createSymbol("right"));
    ASSERT_NE(right, nullptr);
    ASSERT_THAT(*right->value, IsListOfSize(2));
    ASSERT_THAT(*right->value->listElems()[0], IsIntEq(23));
    ASSERT_THAT(*right->value->listElems()[1], IsIntEq(42));

    auto wrong = v.attrs->get(createSymbol("wrong"));
    ASSERT_NE(wrong, nullptr);
    ASSERT_EQ(wrong->value->type(), nList);
    ASSERT_EQ(wrong->value->listSize(), 3);
    ASSERT_THAT(*wrong->value, IsListOfSize(3));
    ASSERT_THAT(*wrong->value->listElems()[0], IsIntEq(1));
    ASSERT_THAT(*wrong->value->listElems()[1], IsIntEq(9));
    ASSERT_THAT(*wrong->value->listElems()[2], IsIntEq(3));
}

TEST_F(PrimOpTest, concatMap) {
    auto v = eval("builtins.concatMap (x: x ++ [0]) [ [1 2] [3 4] ]");
    ASSERT_EQ(v.type(), nList);
    ASSERT_EQ(v.listSize(), 6);

    const std::vector<int> numbers = { 1, 2, 0, 3, 4, 0 };
    for (const auto [n, elem] : enumerate(v.listItems()))
        ASSERT_THAT(*elem, IsIntEq(numbers[n]));
}

TEST_F(PrimOpTest, addInt) {
    auto v = eval("builtins.add 3 5");
    ASSERT_THAT(v, IsIntEq(8));
}

TEST_F(PrimOpTest, addFloat) {
    auto v = eval("builtins.add 3.0 5.0");
    ASSERT_THAT(v, IsFloatEq(8.0));
}

TEST_F(PrimOpTest, addFloatToInt) {
    auto v = eval("builtins.add 3.0 5");
    ASSERT_THAT(v, IsFloatEq(8.0));

    v = eval("builtins.add 3 5.0");
    ASSERT_THAT(v, IsFloatEq(8.0));
}

TEST_F(PrimOpTest, subInt) {
    auto v = eval("builtins.sub 5 2");
    ASSERT_THAT(v, IsIntEq(3));
}

TEST_F(PrimOpTest, subFloat) {
    auto v = eval("builtins.sub 5.0 2.0");
    ASSERT_THAT(v, IsFloatEq(3.0));
}

TEST_F(PrimOpTest, subFloatFromInt) {
    auto v = eval("builtins.sub 5.0 2");
    ASSERT_THAT(v, IsFloatEq(3.0));

    v = eval("builtins.sub 4 2.0");
    ASSERT_THAT(v, IsFloatEq(2.0));
}

TEST_F(PrimOpTest, mulInt) {
    auto v = eval("builtins.mul 3 5");
    ASSERT_THAT(v, IsIntEq(15));
}

TEST_F(PrimOpTest, mulFloat) {
    auto v = eval("builtins.mul 3.0 5.0");
    ASSERT_THAT(v, IsFloatEq(15.0));
}

TEST_F(PrimOpTest, mulFloatMixed) {
    auto v = eval("builtins.mul 3 5.0");
    ASSERT_THAT(v, IsFloatEq(15.0));

    v = eval("builtins.mul 2.0 5");
    ASSERT_THAT(v, IsFloatEq(10.0));
}

TEST_F(PrimOpTest, divInt) {
    auto v = eval("builtins.div 5 (-1)");
    ASSERT_THAT(v, IsIntEq(-5));
}

TEST_F(PrimOpTest, divIntZero) {
    ASSERT_THROW(eval("builtins.div 5 0"), EvalError);
}

TEST_F(PrimOpTest, divFloat) {
    auto v = eval("builtins.div 5.0 (-1)");
    ASSERT_THAT(v, IsFloatEq(-5.0));
}

TEST_F(PrimOpTest, divFloatZero) {
    ASSERT_THROW(eval("builtins.div 5.0 0.0"), EvalError);
}

TEST_F(PrimOpTest, bitOr) {
    auto v = eval("builtins.bitOr 1 2");
    ASSERT_THAT(v, IsIntEq(3));
}

TEST_F(PrimOpTest, bitXor) {
    auto v = eval("builtins.bitXor 3 2");
    ASSERT_THAT(v, IsIntEq(1));
}

TEST_F(PrimOpTest, lessThanFalse) {
    auto v = eval("builtins.lessThan 3 1");
    ASSERT_THAT(v, IsFalse());
}

TEST_F(PrimOpTest, lessThanTrue) {
    auto v = eval("builtins.lessThan 1 3");
    ASSERT_THAT(v, IsTrue());
}

TEST_F(PrimOpTest, toStringAttrsThrows) {
    ASSERT_THROW(eval("builtins.toString {}"), EvalError);
}

TEST_F(PrimOpTest, toStringLambdaThrows) {
    ASSERT_THROW(eval("builtins.toString (x: x)"), EvalError);
}

class ToStringPrimOpTest :
    public PrimOpTest,
    public testing::WithParamInterface<std::tuple<std::string, std::string_view>>
{};

TEST_P(ToStringPrimOpTest, toString) {
    const auto [input, output] = GetParam();
    auto v = eval(input);
    ASSERT_THAT(v, IsStringEq(output));
}

#define CASE(input, output) (std::make_tuple(std::string_view("builtins.toString " #input), std::string_view(output)))
INSTANTIATE_TEST_SUITE_P(
    toString,
    ToStringPrimOpTest,
    testing::Values(
        CASE("foo", "foo"),
        CASE(1, "1"),
        CASE([1 2 3], "1 2 3"),
        CASE(.123, "0.123000"),
        CASE(true, "1"),
        CASE(false, ""),
        CASE(null, ""),
        CASE({ v = "bar"; __toString = self: self.v; }, "bar"),
        CASE({ v = "bar"; __toString = self: self.v; outPath = "foo"; }, "bar"),
        CASE({ outPath = "foo"; }, "foo"),
        CASE(./test, "/test")
    )
);
#undef CASE

TEST_F(PrimOpTest, substring){
    auto v = eval("builtins.substring 0 3 \"nixos\"");
    ASSERT_THAT(v, IsStringEq("nix"));
}

TEST_F(PrimOpTest, substringSmallerString){
    auto v = eval("builtins.substring 0 3 \"n\"");
    ASSERT_THAT(v, IsStringEq("n"));
}

TEST_F(PrimOpTest, substringEmptyString){
    auto v = eval("builtins.substring 1 3 \"\"");
    ASSERT_THAT(v, IsStringEq(""));
}

TEST_F(PrimOpTest, stringLength) {
    auto v = eval("builtins.stringLength \"123\"");
    ASSERT_THAT(v, IsIntEq(3));
}
TEST_F(PrimOpTest, hashStringMd5) {
    auto v = eval("builtins.hashString \"md5\" \"asdf\"");
    ASSERT_THAT(v, IsStringEq("912ec803b2ce49e4a541068d495ab570"));
}

TEST_F(PrimOpTest, hashStringSha1) {
    auto v = eval("builtins.hashString \"sha1\" \"asdf\"");
    ASSERT_THAT(v, IsStringEq("3da541559918a808c2402bba5012f6c60b27661c"));
}

TEST_F(PrimOpTest, hashStringSha256) {
    auto v = eval("builtins.hashString \"sha256\" \"asdf\"");
    ASSERT_THAT(v, IsStringEq("f0e4c2f76c58916ec258f246851bea091d14d4247a2fc3e18694461b1816e13b"));
}

TEST_F(PrimOpTest, hashStringSha512) {
    auto v = eval("builtins.hashString \"sha512\" \"asdf\"");
    ASSERT_THAT(v, IsStringEq("401b09eab3c013d4ca54922bb802bec8fd5318192b0a75f201d8b3727429080fb337591abd3e44453b954555b7a0812e1081c39b740293f765eae731f5a65ed1"));
}

TEST_F(PrimOpTest, hashStringInvalidHashType) {
    ASSERT_THROW(eval("builtins.hashString \"foobar\" \"asdf\""), Error);
}

TEST_F(PrimOpTest, nixPath) {
    auto v = eval("builtins.nixPath");
    ASSERT_EQ(v.type(), nList);
    // We can't test much more as currently the EvalSettings are a global
    // that we can't easily swap / replace
}

TEST_F(PrimOpTest, langVersion) {
    auto v = eval("builtins.langVersion");
    ASSERT_EQ(v.type(), nInt);
}

TEST_F(PrimOpTest, storeDir) {
    auto v = eval("builtins.storeDir");
    ASSERT_THAT(v, IsStringEq("/nix/store"));
}

TEST_F(PrimOpTest, nixVersion) {
    auto v = eval("builtins.nixVersion");
    ASSERT_THAT(v, IsStringEq(nixVersion));
}

TEST_F(PrimOpTest, currentSystem) {
    auto v = eval("builtins.currentSystem");
    ASSERT_THAT(v, IsStringEq(settings.thisSystem.get()));
}

TEST_F(PrimOpTest, derivation) {
    auto v = eval("derivation");
    ASSERT_EQ(v.type(), nFunction);
    ASSERT_TRUE(v.isLambda());
    ASSERT_NE(v.lambda.fun, nullptr);
    ASSERT_TRUE(v.lambda.fun->hasFormals());
}

TEST_F(PrimOpTest, currentTime) {
    auto v = eval("builtins.currentTime");
    ASSERT_EQ(v.type(), nInt);
    ASSERT_TRUE(v.integer > 0);
}

TEST_F(PrimOpTest, splitVersion) {
    auto v = eval("builtins.splitVersion \"1.2.3git\"");
    ASSERT_THAT(v, IsListOfSize(4));

    const std::vector<std::string_view> strings = { "1", "2", "3", "git" };
    for (const auto [n, p] : enumerate(v.listItems()))
        ASSERT_THAT(*p, IsStringEq(strings[n]));
}

class CompareVersionsPrimOpTest :
    public PrimOpTest,
    public testing::WithParamInterface<std::tuple<std::string, const int>>
{};

TEST_P(CompareVersionsPrimOpTest, compareVersions) {
    auto [expression, expectation] = GetParam();
    auto v = eval(expression);
    ASSERT_THAT(v, IsIntEq(expectation));
}

#define CASE(a, b, expected) (std::make_tuple("builtins.compareVersions \"" #a "\" \"" #b "\"", expected))
INSTANTIATE_TEST_SUITE_P(
    compareVersions,
    CompareVersionsPrimOpTest,
    testing::Values(
        // The first two are weird cases. Intuitively they should
        // be the same, but they aren't.
        CASE(1.0, 1.0.0, -1),
        CASE(1.0.0, 1.0, 1),
        // the following are from the nix-env manual:
        CASE(1.0, 2.3, -1),
        CASE(2.1, 2.3, -1),
        CASE(2.3, 2.3, 0),
        CASE(2.5, 2.3, 1),
        CASE(3.1, 2.3, 1),
        CASE(2.3.1, 2.3, 1),
        CASE(2.3.1, 2.3a, 1),
        CASE(2.3pre1, 2.3, -1),
        CASE(2.3pre3, 2.3pre12, -1),
        CASE(2.3a, 2.3c, -1),
        CASE(2.3pre1, 2.3c, -1),
        CASE(2.3pre1, 2.3q, -1)
    )
);
#undef CASE


class ParseDrvNamePrimOpTest :
    public PrimOpTest,
    public testing::WithParamInterface<std::tuple<std::string, std::string_view, std::string_view>>
{};

TEST_P(ParseDrvNamePrimOpTest, parseDrvName) {
    auto [input, expectedName, expectedVersion] = GetParam();
    const auto expr = fmt("builtins.parseDrvName \"%1%\"", input);
    auto v = eval(expr);
    ASSERT_THAT(v, IsAttrsOfSize(2));

    auto name = v.attrs->find(createSymbol("name"));
    ASSERT_TRUE(name);
    ASSERT_THAT(*name->value, IsStringEq(expectedName));

    auto version = v.attrs->find(createSymbol("version"));
    ASSERT_TRUE(version);
    ASSERT_THAT(*version->value, IsStringEq(expectedVersion));
}

INSTANTIATE_TEST_SUITE_P(
    parseDrvName,
    ParseDrvNamePrimOpTest,
    testing::Values(
        std::make_tuple("nix-0.12pre12876", "nix", "0.12pre12876"),
        std::make_tuple("a-b-c-1234pre5+git", "a-b-c", "1234pre5+git")
    )
);

TEST_F(PrimOpTest, replaceStrings) {
    // FIXME: add a test that verifies the string context is as expected
    auto v = eval("builtins.replaceStrings [\"oo\" \"a\"] [\"a\" \"i\"] \"foobar\"");
    ASSERT_EQ(v.type(), nString);
    ASSERT_EQ(v.string.s, std::string_view("fabir"));
}

TEST_F(PrimOpTest, concatStringsSep) {
    // FIXME: add a test that verifies the string context is as expected
    auto v = eval("builtins.concatStringsSep \"%\" [\"foo\" \"bar\" \"baz\"]");
    ASSERT_EQ(v.type(), nString);
    ASSERT_EQ(std::string_view(v.string.s), "foo%bar%baz");
}

TEST_F(PrimOpTest, split1) {
    // v = [ "" [ "a" ] "c" ]
    auto v = eval("builtins.split \"(a)b\" \"abc\"");
    ASSERT_THAT(v, IsListOfSize(3));

    ASSERT_THAT(*v.listElems()[0], IsStringEq(""));

    ASSERT_THAT(*v.listElems()[1], IsListOfSize(1));
    ASSERT_THAT(*v.listElems()[1]->listElems()[0], IsStringEq("a"));

    ASSERT_THAT(*v.listElems()[2], IsStringEq("c"));
}

TEST_F(PrimOpTest, split2) {
    // v is expected to be a list [ "" [ "a" ] "b" [ "c" ] "" ]
    auto v = eval("builtins.split \"([ac])\" \"abc\"");
    ASSERT_THAT(v, IsListOfSize(5));

    ASSERT_THAT(*v.listElems()[0], IsStringEq(""));

    ASSERT_THAT(*v.listElems()[1], IsListOfSize(1));
    ASSERT_THAT(*v.listElems()[1]->listElems()[0], IsStringEq("a"));

    ASSERT_THAT(*v.listElems()[2], IsStringEq("b"));

    ASSERT_THAT(*v.listElems()[3], IsListOfSize(1));
    ASSERT_THAT(*v.listElems()[3]->listElems()[0], IsStringEq("c"));

    ASSERT_THAT(*v.listElems()[4], IsStringEq(""));
}

TEST_F(PrimOpTest, split3) {
    auto v = eval("builtins.split \"(a)|(c)\" \"abc\"");
    ASSERT_THAT(v, IsListOfSize(5));

    // First list element
    ASSERT_THAT(*v.listElems()[0], IsStringEq(""));

    // 2nd list element is a list [ "a" null ]
    ASSERT_THAT(*v.listElems()[1], IsListOfSize(2));
    ASSERT_THAT(*v.listElems()[1]->listElems()[0], IsStringEq("a"));
    ASSERT_THAT(*v.listElems()[1]->listElems()[1], IsNull());

    // 3rd element
    ASSERT_THAT(*v.listElems()[2], IsStringEq("b"));

    // 4th element is a list: [ null "c" ]
    ASSERT_THAT(*v.listElems()[3], IsListOfSize(2));
    ASSERT_THAT(*v.listElems()[3]->listElems()[0], IsNull());
    ASSERT_THAT(*v.listElems()[3]->listElems()[1], IsStringEq("c"));

    // 5th element is the empty string
    ASSERT_THAT(*v.listElems()[4], IsStringEq(""));
}

TEST_F(PrimOpTest, split4) {
    auto v = eval("builtins.split \"([[:upper:]]+)\" \" FOO \"");
    ASSERT_THAT(v, IsListOfSize(3));
    auto first = v.listElems()[0];
    auto second = v.listElems()[1];
    auto third = v.listElems()[2];

    ASSERT_THAT(*first, IsStringEq(" "));

    ASSERT_THAT(*second, IsListOfSize(1));
    ASSERT_THAT(*second->listElems()[0], IsStringEq("FOO"));

    ASSERT_THAT(*third, IsStringEq(" "));
}

TEST_F(PrimOpTest, match1) {
    auto v = eval("builtins.match \"ab\" \"abc\"");
    ASSERT_THAT(v, IsNull());
}

TEST_F(PrimOpTest, match2) {
    auto v = eval("builtins.match \"abc\" \"abc\"");
    ASSERT_THAT(v, IsListOfSize(0));
}

TEST_F(PrimOpTest, match3) {
    auto v = eval("builtins.match \"a(b)(c)\" \"abc\"");
    ASSERT_THAT(v, IsListOfSize(2));
    ASSERT_THAT(*v.listElems()[0], IsStringEq("b"));
    ASSERT_THAT(*v.listElems()[1], IsStringEq("c"));
}

TEST_F(PrimOpTest, match4) {
    auto v = eval("builtins.match \"[[:space:]]+([[:upper:]]+)[[:space:]]+\" \" FOO \"");
    ASSERT_THAT(v, IsListOfSize(1));
    ASSERT_THAT(*v.listElems()[0], IsStringEq("FOO"));
}

TEST_F(PrimOpTest, attrNames) {
    auto v = eval("builtins.attrNames { x = 1; y = 2; z = 3; a = 2; }");
    ASSERT_THAT(v, IsListOfSize(4));

    // ensure that the list is sorted
    const std::vector<std::string_view> expected { "a", "x", "y", "z" };
    for (const auto [n, elem] : enumerate(v.listItems()))
        ASSERT_THAT(*elem, IsStringEq(expected[n]));
}
} /* namespace nix */
196
src/libexpr/tests/trivial.cc
Normal file
196
src/libexpr/tests/trivial.cc
Normal file
@@ -0,0 +1,196 @@
#include "libexprtests.hh"

namespace nix {
// Testing of trivial expressions
class TrivialExpressionTest : public LibExprTest {};

TEST_F(TrivialExpressionTest, true) {
    auto v = eval("true");
    ASSERT_THAT(v, IsTrue());
}

TEST_F(TrivialExpressionTest, false) {
    auto v = eval("false");
    ASSERT_THAT(v, IsFalse());
}

TEST_F(TrivialExpressionTest, null) {
    auto v = eval("null");
    ASSERT_THAT(v, IsNull());
}

TEST_F(TrivialExpressionTest, 1) {
    auto v = eval("1");
    ASSERT_THAT(v, IsIntEq(1));
}

TEST_F(TrivialExpressionTest, 1plus1) {
    auto v = eval("1+1");
    ASSERT_THAT(v, IsIntEq(2));
}

TEST_F(TrivialExpressionTest, minus1) {
    auto v = eval("-1");
    ASSERT_THAT(v, IsIntEq(-1));
}

TEST_F(TrivialExpressionTest, 1minus1) {
    auto v = eval("1-1");
    ASSERT_THAT(v, IsIntEq(0));
}

TEST_F(TrivialExpressionTest, lambdaAdd) {
    auto v = eval("let add = a: b: a + b; in add 1 2");
    ASSERT_THAT(v, IsIntEq(3));
}

TEST_F(TrivialExpressionTest, list) {
    auto v = eval("[]");
    ASSERT_THAT(v, IsListOfSize(0));
}

TEST_F(TrivialExpressionTest, attrs) {
    auto v = eval("{}");
    ASSERT_THAT(v, IsAttrsOfSize(0));
}

TEST_F(TrivialExpressionTest, float) {
    auto v = eval("1.234");
    ASSERT_THAT(v, IsFloatEq(1.234));
}

TEST_F(TrivialExpressionTest, updateAttrs) {
    auto v = eval("{ a = 1; } // { b = 2; a = 3; }");
    ASSERT_THAT(v, IsAttrsOfSize(2));
    auto a = v.attrs->find(createSymbol("a"));
    ASSERT_NE(a, nullptr);
    ASSERT_THAT(*a->value, IsIntEq(3));

    auto b = v.attrs->find(createSymbol("b"));
    ASSERT_NE(b, nullptr);
    ASSERT_THAT(*b->value, IsIntEq(2));
}

TEST_F(TrivialExpressionTest, hasAttrOpFalse) {
    auto v = eval("{} ? a");
    ASSERT_THAT(v, IsFalse());
}

TEST_F(TrivialExpressionTest, hasAttrOpTrue) {
    auto v = eval("{ a = 123; } ? a");
    ASSERT_THAT(v, IsTrue());
}

TEST_F(TrivialExpressionTest, withFound) {
    auto v = eval("with { a = 23; }; a");
    ASSERT_THAT(v, IsIntEq(23));
}

TEST_F(TrivialExpressionTest, withNotFound) {
    ASSERT_THROW(eval("with {}; a"), Error);
}

TEST_F(TrivialExpressionTest, withOverride) {
    auto v = eval("with { a = 23; }; with { a = 42; }; a");
    ASSERT_THAT(v, IsIntEq(42));
}

TEST_F(TrivialExpressionTest, letOverWith) {
    auto v = eval("let a = 23; in with { a = 1; }; a");
    ASSERT_THAT(v, IsIntEq(23));
}

TEST_F(TrivialExpressionTest, multipleLet) {
    auto v = eval("let a = 23; in let a = 42; in a");
    ASSERT_THAT(v, IsIntEq(42));
}

TEST_F(TrivialExpressionTest, defaultFunctionArgs) {
    auto v = eval("({ a ? 123 }: a) {}");
    ASSERT_THAT(v, IsIntEq(123));
}

TEST_F(TrivialExpressionTest, defaultFunctionArgsOverride) {
    auto v = eval("({ a ? 123 }: a) { a = 5; }");
    ASSERT_THAT(v, IsIntEq(5));
}

TEST_F(TrivialExpressionTest, defaultFunctionArgsCaptureBack) {
    auto v = eval("({ a ? 123 }@args: args) {}");
    ASSERT_THAT(v, IsAttrsOfSize(0));
}

TEST_F(TrivialExpressionTest, defaultFunctionArgsCaptureFront) {
    auto v = eval("(args@{ a ? 123 }: args) {}");
    ASSERT_THAT(v, IsAttrsOfSize(0));
}

TEST_F(TrivialExpressionTest, assertThrows) {
    ASSERT_THROW(eval("let x = arg: assert arg == 1; 123; in x 2"), Error);
}

TEST_F(TrivialExpressionTest, assertPassed) {
    auto v = eval("let x = arg: assert arg == 1; 123; in x 1");
    ASSERT_THAT(v, IsIntEq(123));
}

class AttrSetMergeTrvialExpressionTest :
    public TrivialExpressionTest,
    public testing::WithParamInterface<const char*>
{};

TEST_P(AttrSetMergeTrvialExpressionTest, attrsetMergeLazy) {
    // Usually Nix rejects duplicate keys in an attrset, but it does allow
    // them when the duplicate values are attribute sets with disjoint sets of keys.
    // The below is equivalent to `{ a.b = 1; a.c = 2; }`.
    // The attribute set `a` will be a Thunk at first as the attributes
    // have to be merged (or otherwise computed) and that is done in a lazy
    // manner.
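    // For illustration: a plain duplicate such as `{ a = 1; a = 2; }` is still
    // rejected; only nested attribute sets are merged in this way.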

    auto expr = GetParam();
    auto v = eval(expr);
    ASSERT_THAT(v, IsAttrsOfSize(1));

    auto a = v.attrs->find(createSymbol("a"));
    ASSERT_NE(a, nullptr);

    ASSERT_THAT(*a->value, IsThunk());
    state.forceValue(*a->value, noPos);

    ASSERT_THAT(*a->value, IsAttrsOfSize(2));

    auto b = a->value->attrs->find(createSymbol("b"));
    ASSERT_NE(b, nullptr);
    ASSERT_THAT(*b->value, IsIntEq(1));

    auto c = a->value->attrs->find(createSymbol("c"));
    ASSERT_NE(c, nullptr);
    ASSERT_THAT(*c->value, IsIntEq(2));
}

INSTANTIATE_TEST_SUITE_P(
    attrsetMergeLazy,
    AttrSetMergeTrvialExpressionTest,
    testing::Values(
        "{ a.b = 1; a.c = 2; }",
        "{ a = { b = 1; }; a = { c = 2; }; }"
    )
);

TEST_F(TrivialExpressionTest, functor) {
    auto v = eval("{ __functor = self: arg: self.v + arg; v = 10; } 5");
    ASSERT_THAT(v, IsIntEq(15));
}

TEST_F(TrivialExpressionTest, bindOr) {
    auto v = eval("{ or = 1; }");
    ASSERT_THAT(v, IsAttrsOfSize(1));
    auto b = v.attrs->find(createSymbol("or"));
    ASSERT_NE(b, nullptr);
    ASSERT_THAT(*b->value, IsIntEq(1));
}

TEST_F(TrivialExpressionTest, orCantBeUsed) {
    ASSERT_THROW(eval("let or = 1; in or"), Error);
}
} /* namespace nix */
@ -55,7 +55,7 @@ struct Env;
|
||||||
struct Expr;
|
struct Expr;
|
||||||
struct ExprLambda;
|
struct ExprLambda;
|
||||||
struct PrimOp;
|
struct PrimOp;
|
||||||
class SymbolIdx;
|
class Symbol;
|
||||||
class PosIdx;
|
class PosIdx;
|
||||||
struct Pos;
|
struct Pos;
|
||||||
class StorePath;
|
class StorePath;
|
||||||
|
@ -251,11 +251,6 @@ public:
|
||||||
|
|
||||||
void mkStringMove(const char * s, const PathSet & context);
|
void mkStringMove(const char * s, const PathSet & context);
|
||||||
|
|
||||||
inline void mkString(const Symbol & s)
|
|
||||||
{
|
|
||||||
mkString(std::string_view(s).data());
|
|
||||||
}
|
|
||||||
|
|
||||||
inline void mkPath(const char * s)
|
inline void mkPath(const char * s)
|
||||||
{
|
{
|
||||||
clearValue();
|
clearValue();
|
||||||
|
@ -410,12 +405,12 @@ public:
|
||||||
|
|
||||||
#if HAVE_BOEHMGC
|
#if HAVE_BOEHMGC
|
||||||
typedef std::vector<Value *, traceable_allocator<Value *> > ValueVector;
|
typedef std::vector<Value *, traceable_allocator<Value *> > ValueVector;
|
||||||
typedef std::map<SymbolIdx, Value *, std::less<SymbolIdx>, traceable_allocator<std::pair<const SymbolIdx, Value *> > > ValueMap;
|
typedef std::map<Symbol, Value *, std::less<Symbol>, traceable_allocator<std::pair<const Symbol, Value *> > > ValueMap;
|
||||||
typedef std::map<SymbolIdx, ValueVector, std::less<SymbolIdx>, traceable_allocator<std::pair<const SymbolIdx, ValueVector> > > ValueVectorMap;
|
typedef std::map<Symbol, ValueVector, std::less<Symbol>, traceable_allocator<std::pair<const Symbol, ValueVector> > > ValueVectorMap;
|
||||||
#else
|
#else
|
||||||
typedef std::vector<Value *> ValueVector;
|
typedef std::vector<Value *> ValueVector;
|
||||||
typedef std::map<SymbolIdx, Value *> ValueMap;
|
typedef std::map<Symbol, Value *> ValueMap;
|
||||||
typedef std::map<SymbolIdx, ValueVector> ValueVectorMap;
|
typedef std::map<Symbol, ValueVector> ValueVectorMap;
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -5,15 +5,20 @@
|
||||||
#include "store-api.hh"
|
#include "store-api.hh"
|
||||||
#include "url-parts.hh"
|
#include "url-parts.hh"
|
||||||
#include "pathlocks.hh"
|
#include "pathlocks.hh"
|
||||||
|
#include "util.hh"
|
||||||
|
#include "git.hh"
|
||||||
|
|
||||||
#include "fetch-settings.hh"
|
#include "fetch-settings.hh"
|
||||||
|
|
||||||
|
#include <regex>
|
||||||
|
#include <string.h>
|
||||||
#include <sys/time.h>
|
#include <sys/time.h>
|
||||||
#include <sys/wait.h>
|
#include <sys/wait.h>
|
||||||
|
|
||||||
using namespace std::string_literals;
|
using namespace std::string_literals;
|
||||||
|
|
||||||
namespace nix::fetchers {
|
namespace nix::fetchers {
|
||||||
|
namespace {
|
||||||
|
|
||||||
// Explicit initial branch of our bare repo to suppress warnings from new version of git.
|
// Explicit initial branch of our bare repo to suppress warnings from new version of git.
|
||||||
// The value itself does not matter, since we always fetch a specific revision or branch.
|
// The value itself does not matter, since we always fetch a specific revision or branch.
|
||||||
|
@ -21,16 +26,226 @@ namespace nix::fetchers {
|
||||||
// old version of git, which will ignore unrecognized `-c` options.
|
// old version of git, which will ignore unrecognized `-c` options.
|
||||||
const std::string gitInitialBranch = "__nix_dummy_branch";
|
const std::string gitInitialBranch = "__nix_dummy_branch";
|
||||||
|
|
||||||
static std::string readHead(const Path & path)
|
bool isCacheFileWithinTtl(const time_t now, const struct stat & st)
|
||||||
{
|
{
|
||||||
return chomp(runProgram("git", true, { "-C", path, "rev-parse", "--abbrev-ref", "HEAD" }));
|
return st.st_mtime + settings.tarballTtl > now;
|
||||||
}
|
}
|
||||||
|
|
||||||
static bool isNotDotGitDirectory(const Path & path)
|
bool touchCacheFile(const Path& path, const time_t& touch_time)
|
||||||
|
{
|
||||||
|
struct timeval times[2];
|
||||||
|
times[0].tv_sec = touch_time;
|
||||||
|
times[0].tv_usec = 0;
|
||||||
|
times[1].tv_sec = touch_time;
|
||||||
|
times[1].tv_usec = 0;
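// times[0] is the access time and times[1] the modification time;
// lutimes() updates both without following symlinks.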
|
||||||
|
|
||||||
|
return lutimes(path.c_str(), times) == 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
Path getCachePath(std::string key)
|
||||||
|
{
|
||||||
|
return getCacheDir() + "/nix/gitv3/" +
|
||||||
|
hashString(htSHA256, key).to_string(Base32, false);
|
||||||
|
}
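// For example, getCachePath("https://example.org/repo.git") (hypothetical URL)
// resolves to something like ~/.cache/nix/gitv3/<base-32 SHA-256 of the URL>.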
|
||||||
|
|
||||||
|
// Returns the name of the HEAD branch.
|
||||||
|
//
|
||||||
|
// Returns the head branch name as reported by git ls-remote --symref, e.g., if
|
||||||
|
// ls-remote returns the output below, "main" is returned based on the ref line.
|
||||||
|
//
|
||||||
|
// ref: refs/heads/main HEAD
|
||||||
|
// ...
|
||||||
|
std::optional<std::string> readHead(const Path & path)
|
||||||
|
{
|
||||||
|
auto [exit_code, output] = runProgram(RunOptions {
|
||||||
|
.program = "git",
|
||||||
|
.args = {"ls-remote", "--symref", path},
|
||||||
|
});
|
||||||
|
if (exit_code != 0) {
|
||||||
|
return std::nullopt;
|
||||||
|
}
|
||||||
|
|
||||||
|
std::string_view line = output;
|
||||||
|
line = line.substr(0, line.find("\n"));
|
||||||
|
if (const auto parseResult = git::parseLsRemoteLine(line)) {
|
||||||
|
switch (parseResult->kind) {
|
||||||
|
case git::LsRemoteRefLine::Kind::Symbolic:
|
||||||
|
debug("resolved HEAD ref '%s' for repo '%s'", parseResult->target, path);
|
||||||
|
break;
|
||||||
|
case git::LsRemoteRefLine::Kind::Object:
|
||||||
|
debug("resolved HEAD rev '%s' for repo '%s'", parseResult->target, path);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
return parseResult->target;
|
||||||
|
}
|
||||||
|
return std::nullopt;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Persist the HEAD ref from the remote repo in the local cached repo.
|
||||||
|
bool storeCachedHead(const std::string& actualUrl, const std::string& headRef)
|
||||||
|
{
|
||||||
|
Path cacheDir = getCachePath(actualUrl);
|
||||||
|
try {
|
||||||
|
runProgram("git", true, { "-C", cacheDir, "symbolic-ref", "--", "HEAD", headRef });
|
||||||
|
} catch (ExecError &e) {
|
||||||
|
if (!WIFEXITED(e.status)) throw;
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
/* No need to touch refs/HEAD, because `git symbolic-ref` updates the mtime. */
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
std::optional<std::string> readHeadCached(const std::string& actualUrl)
|
||||||
|
{
|
||||||
|
// Create a cache path to store the branch of the HEAD ref. Append something
|
||||||
|
// in front of the URL to prevent collision with the repository itself.
|
||||||
|
Path cacheDir = getCachePath(actualUrl);
|
||||||
|
Path headRefFile = cacheDir + "/HEAD";
|
||||||
|
|
||||||
|
time_t now = time(0);
|
||||||
|
struct stat st;
|
||||||
|
std::optional<std::string> cachedRef;
|
||||||
|
if (stat(headRefFile.c_str(), &st) == 0) {
|
||||||
|
cachedRef = readHead(cacheDir);
|
||||||
|
if (cachedRef != std::nullopt &&
|
||||||
|
*cachedRef != gitInitialBranch &&
|
||||||
|
isCacheFileWithinTtl(now, st)) {
|
||||||
|
debug("using cached HEAD ref '%s' for repo '%s'", *cachedRef, actualUrl);
|
||||||
|
return cachedRef;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
auto ref = readHead(actualUrl);
|
||||||
|
if (ref) {
|
||||||
|
return ref;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (cachedRef) {
|
||||||
|
// If the cached git ref is expired in fetch() below, and the 'git fetch'
|
||||||
|
// fails, it falls back to continuing with the most recent version.
|
||||||
|
// This function must behave the same way, so we return the expired
|
||||||
|
// cached ref here.
|
||||||
|
warn("could not get HEAD ref for repository '%s'; using expired cached ref '%s'", actualUrl, *cachedRef);
|
||||||
|
return *cachedRef;
|
||||||
|
}
|
||||||
|
|
||||||
|
return std::nullopt;
|
||||||
|
}
|
||||||
|
|
||||||
|
bool isNotDotGitDirectory(const Path & path)
|
||||||
{
|
{
|
||||||
return baseNameOf(path) != ".git";
|
return baseNameOf(path) != ".git";
|
||||||
}
|
}
|
||||||
|
|
||||||
|
struct WorkdirInfo
|
||||||
|
{
|
||||||
|
bool clean = false;
|
||||||
|
bool hasHead = false;
|
||||||
|
};
|
||||||
|
|
||||||
|
// Returns whether a git workdir is clean and has commits.
|
||||||
|
WorkdirInfo getWorkdirInfo(const Input & input, const Path & workdir)
|
||||||
|
{
|
||||||
|
const bool submodules = maybeGetBoolAttr(input.attrs, "submodules").value_or(false);
|
||||||
|
std::string gitDir(".git");
|
||||||
|
|
||||||
|
auto env = getEnv();
|
||||||
|
// Set LC_ALL to C: because we rely on the error messages from git rev-parse to determine what went wrong
|
||||||
|
// that way unknown errors can lead to a failure instead of continuing through the wrong code path
|
||||||
|
env["LC_ALL"] = "C";
|
||||||
|
|
||||||
|
/* Check whether HEAD points to something that looks like a commit,
|
||||||
|
since that is the reference we want to use later on. */
|
||||||
|
auto result = runProgram(RunOptions {
|
||||||
|
.program = "git",
|
||||||
|
.args = { "-C", workdir, "--git-dir", gitDir, "rev-parse", "--verify", "--no-revs", "HEAD^{commit}" },
|
||||||
|
.environment = env,
|
||||||
|
.mergeStderrToStdout = true
|
||||||
|
});
|
||||||
|
auto exitCode = WEXITSTATUS(result.first);
|
||||||
|
auto errorMessage = result.second;
|
||||||
|
|
||||||
|
if (errorMessage.find("fatal: not a git repository") != std::string::npos) {
|
||||||
|
throw Error("'%s' is not a Git repository", workdir);
|
||||||
|
} else if (errorMessage.find("fatal: Needed a single revision") != std::string::npos) {
|
||||||
|
// indicates that the repo does not have any commits
|
||||||
|
// we want to proceed and will consider it dirty later
|
||||||
|
} else if (exitCode != 0) {
|
||||||
|
// any other errors should lead to a failure
|
||||||
|
throw Error("getting the HEAD of the Git tree '%s' failed with exit code %d:\n%s", workdir, exitCode, errorMessage);
|
||||||
|
}
|
||||||
|
|
||||||
|
bool clean = false;
|
||||||
|
bool hasHead = exitCode == 0;
|
||||||
|
|
||||||
|
try {
|
||||||
|
if (hasHead) {
|
||||||
|
// Using git diff is preferable to lower-level operations here,
|
||||||
|
// because it's conceptually simpler and we only need the exit code anyway.
|
||||||
|
auto gitDiffOpts = Strings({ "-C", workdir, "diff", "HEAD", "--quiet"});
|
||||||
|
if (!submodules) {
|
||||||
|
// Changes in submodules should only make the tree dirty
|
||||||
|
// when those submodules will be copied as well.
|
||||||
|
gitDiffOpts.emplace_back("--ignore-submodules");
|
||||||
|
}
|
||||||
|
gitDiffOpts.emplace_back("--");
|
||||||
|
runProgram("git", true, gitDiffOpts);
|
||||||
|
|
||||||
|
clean = true;
|
||||||
|
}
|
||||||
|
} catch (ExecError & e) {
|
||||||
|
if (!WIFEXITED(e.status) || WEXITSTATUS(e.status) != 1) throw;
|
||||||
|
}
|
||||||
|
|
||||||
|
return WorkdirInfo { .clean = clean, .hasHead = hasHead };
|
||||||
|
}
|
||||||
|
|
||||||
|
std::pair<StorePath, Input> fetchFromWorkdir(ref<Store> store, Input & input, const Path & workdir, const WorkdirInfo & workdirInfo)
|
||||||
|
{
|
||||||
|
const bool submodules = maybeGetBoolAttr(input.attrs, "submodules").value_or(false);
|
||||||
|
|
||||||
|
if (!fetchSettings.allowDirty)
|
||||||
|
throw Error("Git tree '%s' is dirty", workdir);
|
||||||
|
|
||||||
|
if (fetchSettings.warnDirty)
|
||||||
|
warn("Git tree '%s' is dirty", workdir);
|
||||||
|
|
||||||
|
auto gitOpts = Strings({ "-C", workdir, "ls-files", "-z" });
|
||||||
|
if (submodules)
|
||||||
|
gitOpts.emplace_back("--recurse-submodules");
|
||||||
|
|
||||||
|
auto files = tokenizeString<std::set<std::string>>(
|
||||||
|
runProgram("git", true, gitOpts), "\0"s);
|
||||||
|
|
||||||
|
Path actualPath(absPath(workdir));
|
||||||
|
|
||||||
|
PathFilter filter = [&](const Path & p) -> bool {
|
||||||
|
assert(hasPrefix(p, actualPath));
|
||||||
|
std::string file(p, actualPath.size() + 1);
|
||||||
|
|
||||||
|
auto st = lstat(p);
|
||||||
|
|
||||||
|
if (S_ISDIR(st.st_mode)) {
|
||||||
|
auto prefix = file + "/";
|
||||||
|
auto i = files.lower_bound(prefix);
|
||||||
|
return i != files.end() && hasPrefix(*i, prefix);
|
||||||
|
}
|
||||||
|
|
||||||
|
return files.count(file);
|
||||||
|
};
|
||||||
|
|
||||||
|
auto storePath = store->addToStore(input.getName(), actualPath, FileIngestionMethod::Recursive, htSHA256, filter);
|
||||||
|
|
||||||
|
// FIXME: maybe we should use the timestamp of the last
|
||||||
|
// modified dirty file?
|
||||||
|
input.attrs.insert_or_assign(
|
||||||
|
"lastModified",
|
||||||
|
workdirInfo.hasHead ? std::stoull(runProgram("git", true, { "-C", actualPath, "log", "-1", "--format=%ct", "--no-show-signature", "HEAD" })) : 0);
|
||||||
|
|
||||||
|
return {std::move(storePath), input};
|
||||||
|
}
|
||||||
|
} // end namespace
|
||||||
|
|
||||||
struct GitInputScheme : InputScheme
|
struct GitInputScheme : InputScheme
|
||||||
{
|
{
|
||||||
std::optional<Input> inputFromURL(const ParsedURL & url) override
|
std::optional<Input> inputFromURL(const ParsedURL & url) override
|
||||||
|
@ -150,13 +365,14 @@ struct GitInputScheme : InputScheme
|
||||||
{
|
{
|
||||||
auto sourcePath = getSourcePath(input);
|
auto sourcePath = getSourcePath(input);
|
||||||
assert(sourcePath);
|
assert(sourcePath);
|
||||||
|
auto gitDir = ".git";
|
||||||
|
|
||||||
runProgram("git", true,
|
runProgram("git", true,
|
||||||
{ "-C", *sourcePath, "add", "--force", "--intent-to-add", "--", std::string(file) });
|
{ "-C", *sourcePath, "--git-dir", gitDir, "add", "--force", "--intent-to-add", "--", std::string(file) });
|
||||||
|
|
||||||
if (commitMsg)
|
if (commitMsg)
|
||||||
runProgram("git", true,
|
runProgram("git", true,
|
||||||
{ "-C", *sourcePath, "commit", std::string(file), "-m", *commitMsg });
|
{ "-C", *sourcePath, "--git-dir", gitDir, "commit", std::string(file), "-m", *commitMsg });
|
||||||
}
|
}
|
||||||
|
|
||||||
std::pair<bool, std::string> getActualUrl(const Input & input) const
|
std::pair<bool, std::string> getActualUrl(const Input & input) const
|
||||||
|
@ -175,6 +391,7 @@ struct GitInputScheme : InputScheme
|
||||||
std::pair<StorePath, Input> fetch(ref<Store> store, const Input & _input) override
|
std::pair<StorePath, Input> fetch(ref<Store> store, const Input & _input) override
|
||||||
{
|
{
|
||||||
Input input(_input);
|
Input input(_input);
|
||||||
|
auto gitDir = ".git";
|
||||||
|
|
||||||
std::string name = input.getName();
|
std::string name = input.getName();
|
||||||
|
|
||||||
|
@ -223,106 +440,16 @@ struct GitInputScheme : InputScheme
|
||||||
auto [isLocal, actualUrl_] = getActualUrl(input);
|
auto [isLocal, actualUrl_] = getActualUrl(input);
|
||||||
auto actualUrl = actualUrl_; // work around clang bug
|
auto actualUrl = actualUrl_; // work around clang bug
|
||||||
|
|
||||||
// If this is a local directory and no ref or revision is
|
/* If this is a local directory and no ref or revision is given,
|
||||||
// given, then allow the use of an unclean working tree.
|
allow fetching directly from a dirty workdir. */
|
||||||
if (!input.getRef() && !input.getRev() && isLocal) {
|
if (!input.getRef() && !input.getRev() && isLocal) {
|
||||||
bool clean = false;
|
auto workdirInfo = getWorkdirInfo(input, actualUrl);
|
||||||
|
if (!workdirInfo.clean) {
|
||||||
auto env = getEnv();
|
return fetchFromWorkdir(store, input, actualUrl, workdirInfo);
|
||||||
// Set LC_ALL to C: because we rely on the error messages from git rev-parse to determine what went wrong
|
|
||||||
// that way unknown errors can lead to a failure instead of continuing through the wrong code path
|
|
||||||
env["LC_ALL"] = "C";
|
|
||||||
|
|
||||||
/* Check whether HEAD points to something that looks like a commit,
|
|
||||||
since that is the reference we want to use later on. */
|
|
||||||
auto result = runProgram(RunOptions {
|
|
||||||
.program = "git",
|
|
||||||
.args = { "-C", actualUrl, "--git-dir=.git", "rev-parse", "--verify", "--no-revs", "HEAD^{commit}" },
|
|
||||||
.environment = env,
|
|
||||||
.mergeStderrToStdout = true
|
|
||||||
});
|
|
||||||
auto exitCode = WEXITSTATUS(result.first);
|
|
||||||
auto errorMessage = result.second;
|
|
||||||
|
|
||||||
if (errorMessage.find("fatal: not a git repository") != std::string::npos) {
|
|
||||||
throw Error("'%s' is not a Git repository", actualUrl);
|
|
||||||
} else if (errorMessage.find("fatal: Needed a single revision") != std::string::npos) {
|
|
||||||
// indicates that the repo does not have any commits
|
|
||||||
// we want to proceed and will consider it dirty later
|
|
||||||
} else if (exitCode != 0) {
|
|
||||||
// any other errors should lead to a failure
|
|
||||||
throw Error("getting the HEAD of the Git tree '%s' failed with exit code %d:\n%s", actualUrl, exitCode, errorMessage);
|
|
||||||
}
|
|
||||||
|
|
||||||
bool hasHead = exitCode == 0;
|
|
||||||
try {
|
|
||||||
if (hasHead) {
|
|
||||||
// Using git diff is preferable to lower-level operations here,
|
|
||||||
// because it's conceptually simpler and we only need the exit code anyway.
|
|
||||||
auto gitDiffOpts = Strings({ "-C", actualUrl, "diff", "HEAD", "--quiet"});
|
|
||||||
if (!submodules) {
|
|
||||||
// Changes in submodules should only make the tree dirty
|
|
||||||
// when those submodules will be copied as well.
|
|
||||||
gitDiffOpts.emplace_back("--ignore-submodules");
|
|
||||||
}
|
|
||||||
gitDiffOpts.emplace_back("--");
|
|
||||||
runProgram("git", true, gitDiffOpts);
|
|
||||||
|
|
||||||
clean = true;
|
|
||||||
}
|
|
||||||
} catch (ExecError & e) {
|
|
||||||
if (!WIFEXITED(e.status) || WEXITSTATUS(e.status) != 1) throw;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!clean) {
|
|
||||||
|
|
||||||
/* This is an unclean working tree. So copy all tracked files. */
|
|
||||||
|
|
||||||
if (!fetchSettings.allowDirty)
|
|
||||||
throw Error("Git tree '%s' is dirty", actualUrl);
|
|
||||||
|
|
||||||
if (fetchSettings.warnDirty)
|
|
||||||
warn("Git tree '%s' is dirty", actualUrl);
|
|
||||||
|
|
||||||
auto gitOpts = Strings({ "-C", actualUrl, "ls-files", "-z" });
|
|
||||||
if (submodules)
|
|
||||||
gitOpts.emplace_back("--recurse-submodules");
|
|
||||||
|
|
||||||
auto files = tokenizeString<std::set<std::string>>(
|
|
||||||
runProgram("git", true, gitOpts), "\0"s);
|
|
||||||
|
|
||||||
Path actualPath(absPath(actualUrl));
|
|
||||||
|
|
||||||
PathFilter filter = [&](const Path & p) -> bool {
|
|
||||||
assert(hasPrefix(p, actualPath));
|
|
||||||
std::string file(p, actualPath.size() + 1);
|
|
||||||
|
|
||||||
auto st = lstat(p);
|
|
||||||
|
|
||||||
if (S_ISDIR(st.st_mode)) {
|
|
||||||
auto prefix = file + "/";
|
|
||||||
auto i = files.lower_bound(prefix);
|
|
||||||
return i != files.end() && hasPrefix(*i, prefix);
|
|
||||||
}
|
|
||||||
|
|
||||||
return files.count(file);
|
|
||||||
};
|
|
||||||
|
|
||||||
auto storePath = store->addToStore(input.getName(), actualPath, FileIngestionMethod::Recursive, htSHA256, filter);
|
|
||||||
|
|
||||||
// FIXME: maybe we should use the timestamp of the last
|
|
||||||
// modified dirty file?
|
|
||||||
input.attrs.insert_or_assign(
|
|
||||||
"lastModified",
|
|
||||||
hasHead ? std::stoull(runProgram("git", true, { "-C", actualPath, "log", "-1", "--format=%ct", "--no-show-signature", "HEAD" })) : 0);
|
|
||||||
|
|
||||||
return {std::move(storePath), input};
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!input.getRef()) input.attrs.insert_or_assign("ref", isLocal ? readHead(actualUrl) : "master");
|
const Attrs unlockedAttrs({
|
||||||
|
|
||||||
Attrs unlockedAttrs({
|
|
||||||
{"type", cacheType},
|
{"type", cacheType},
|
||||||
{"name", name},
|
{"name", name},
|
||||||
{"url", actualUrl},
|
{"url", actualUrl},
|
||||||
|
@ -332,14 +459,30 @@ struct GitInputScheme : InputScheme
|
||||||
Path repoDir;
|
Path repoDir;
|
||||||
|
|
||||||
if (isLocal) {
|
if (isLocal) {
|
||||||
|
if (!input.getRef()) {
|
||||||
|
auto head = readHead(actualUrl);
|
||||||
|
if (!head) {
|
||||||
|
warn("could not read HEAD ref from repo at '%s', using 'master'", actualUrl);
|
||||||
|
head = "master";
|
||||||
|
}
|
||||||
|
input.attrs.insert_or_assign("ref", *head);
|
||||||
|
}
|
||||||
|
|
||||||
if (!input.getRev())
|
if (!input.getRev())
|
||||||
input.attrs.insert_or_assign("rev",
|
input.attrs.insert_or_assign("rev",
|
||||||
Hash::parseAny(chomp(runProgram("git", true, { "-C", actualUrl, "rev-parse", *input.getRef() })), htSHA1).gitRev());
|
Hash::parseAny(chomp(runProgram("git", true, { "-C", actualUrl, "--git-dir", gitDir, "rev-parse", *input.getRef() })), htSHA1).gitRev());
|
||||||
|
|
||||||
repoDir = actualUrl;
|
repoDir = actualUrl;
|
||||||
|
|
||||||
} else {
|
} else {
|
||||||
|
const bool useHeadRef = !input.getRef();
|
||||||
|
if (useHeadRef) {
|
||||||
|
auto head = readHeadCached(actualUrl);
|
||||||
|
if (!head) {
|
||||||
|
warn("could not read HEAD ref from repo at '%s', using 'master'", actualUrl);
|
||||||
|
head = "master";
|
||||||
|
}
|
||||||
|
input.attrs.insert_or_assign("ref", *head);
|
||||||
|
}
|
||||||
|
|
||||||
if (auto res = getCache()->lookup(store, unlockedAttrs)) {
|
if (auto res = getCache()->lookup(store, unlockedAttrs)) {
|
||||||
auto rev2 = Hash::parseAny(getStrAttr(res->first, "rev"), htSHA1);
|
auto rev2 = Hash::parseAny(getStrAttr(res->first, "rev"), htSHA1);
|
||||||
|
@ -349,8 +492,9 @@ struct GitInputScheme : InputScheme
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
Path cacheDir = getCacheDir() + "/nix/gitv3/" + hashString(htSHA256, actualUrl).to_string(Base32, false);
|
Path cacheDir = getCachePath(actualUrl);
|
||||||
repoDir = cacheDir;
|
repoDir = cacheDir;
|
||||||
|
gitDir = ".";
|
||||||
|
|
||||||
createDirs(dirOf(cacheDir));
|
createDirs(dirOf(cacheDir));
|
||||||
PathLocks cacheDirLock({cacheDir + ".lock"});
|
PathLocks cacheDirLock({cacheDir + ".lock"});
|
||||||
|
@ -371,7 +515,7 @@ struct GitInputScheme : InputScheme
|
||||||
repo. */
|
repo. */
|
||||||
if (input.getRev()) {
|
if (input.getRev()) {
|
||||||
try {
|
try {
|
||||||
runProgram("git", true, { "-C", repoDir, "cat-file", "-e", input.getRev()->gitRev() });
|
runProgram("git", true, { "-C", repoDir, "--git-dir", gitDir, "cat-file", "-e", input.getRev()->gitRev() });
|
||||||
doFetch = false;
|
doFetch = false;
|
||||||
} catch (ExecError & e) {
|
} catch (ExecError & e) {
|
||||||
if (WIFEXITED(e.status)) {
|
if (WIFEXITED(e.status)) {
|
||||||
|
@ -388,7 +532,7 @@ struct GitInputScheme : InputScheme
|
||||||
git fetch to update the local ref to the remote ref. */
|
git fetch to update the local ref to the remote ref. */
|
||||||
struct stat st;
|
struct stat st;
|
||||||
doFetch = stat(localRefFile.c_str(), &st) != 0 ||
|
doFetch = stat(localRefFile.c_str(), &st) != 0 ||
|
||||||
(uint64_t) st.st_mtime + settings.tarballTtl <= (uint64_t) now;
|
!isCacheFileWithinTtl(now, st);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -406,19 +550,16 @@ struct GitInputScheme : InputScheme
|
||||||
: ref == "HEAD"
|
: ref == "HEAD"
|
||||||
? *ref
|
? *ref
|
||||||
: "refs/heads/" + *ref;
|
: "refs/heads/" + *ref;
|
||||||
runProgram("git", true, { "-C", repoDir, "fetch", "--quiet", "--force", "--", actualUrl, fmt("%s:%s", fetchRef, fetchRef) });
|
runProgram("git", true, { "-C", repoDir, "--git-dir", gitDir, "fetch", "--quiet", "--force", "--", actualUrl, fmt("%s:%s", fetchRef, fetchRef) });
|
||||||
} catch (Error & e) {
|
} catch (Error & e) {
|
||||||
if (!pathExists(localRefFile)) throw;
|
if (!pathExists(localRefFile)) throw;
|
||||||
warn("could not update local clone of Git repository '%s'; continuing with the most recent version", actualUrl);
|
warn("could not update local clone of Git repository '%s'; continuing with the most recent version", actualUrl);
|
||||||
}
|
}
|
||||||
|
|
||||||
struct timeval times[2];
|
if (!touchCacheFile(localRefFile, now))
|
||||||
times[0].tv_sec = now;
|
warn("could not update mtime for file '%s': %s", localRefFile, strerror(errno));
|
||||||
times[0].tv_usec = 0;
|
if (useHeadRef && !storeCachedHead(actualUrl, *input.getRef()))
|
||||||
times[1].tv_sec = now;
|
warn("could not update cached head '%s' for '%s'", *input.getRef(), actualUrl);
|
||||||
times[1].tv_usec = 0;
|
|
||||||
|
|
||||||
utimes(localRefFile.c_str(), times);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!input.getRev())
|
if (!input.getRev())
|
||||||
|
@ -427,7 +568,7 @@ struct GitInputScheme : InputScheme
|
||||||
// cache dir lock is removed at scope end; we will only use read-only operations on specific revisions in the remainder
|
// cache dir lock is removed at scope end; we will only use read-only operations on specific revisions in the remainder
|
||||||
}
|
}
|
||||||
|
|
||||||
bool isShallow = chomp(runProgram("git", true, { "-C", repoDir, "rev-parse", "--is-shallow-repository" })) == "true";
|
bool isShallow = chomp(runProgram("git", true, { "-C", repoDir, "--git-dir", gitDir, "rev-parse", "--is-shallow-repository" })) == "true";
|
||||||
|
|
||||||
if (isShallow && !shallow)
|
if (isShallow && !shallow)
|
||||||
throw Error("'%s' is a shallow Git repository, but a non-shallow repository is needed", actualUrl);
|
throw Error("'%s' is a shallow Git repository, but a non-shallow repository is needed", actualUrl);
|
||||||
|
@ -447,7 +588,7 @@ struct GitInputScheme : InputScheme
|
||||||
|
|
||||||
auto result = runProgram(RunOptions {
|
auto result = runProgram(RunOptions {
|
||||||
.program = "git",
|
.program = "git",
|
||||||
.args = { "-C", repoDir, "cat-file", "commit", input.getRev()->gitRev() },
|
.args = { "-C", repoDir, "--git-dir", gitDir, "cat-file", "commit", input.getRev()->gitRev() },
|
||||||
.mergeStderrToStdout = true
|
.mergeStderrToStdout = true
|
||||||
});
|
});
|
||||||
if (WEXITSTATUS(result.first) == 128
|
if (WEXITSTATUS(result.first) == 128
|
||||||
|
@ -486,7 +627,7 @@ struct GitInputScheme : InputScheme
|
||||||
auto source = sinkToSource([&](Sink & sink) {
|
auto source = sinkToSource([&](Sink & sink) {
|
||||||
runProgram2({
|
runProgram2({
|
||||||
.program = "git",
|
.program = "git",
|
||||||
.args = { "-C", repoDir, "archive", input.getRev()->gitRev() },
|
.args = { "-C", repoDir, "--git-dir", gitDir, "archive", input.getRev()->gitRev() },
|
||||||
.standardOut = &sink
|
.standardOut = &sink
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
@ -496,7 +637,7 @@ struct GitInputScheme : InputScheme
|
||||||
|
|
||||||
auto storePath = store->addToStore(name, tmpDir, FileIngestionMethod::Recursive, htSHA256, filter);
|
auto storePath = store->addToStore(name, tmpDir, FileIngestionMethod::Recursive, htSHA256, filter);
|
||||||
|
|
||||||
auto lastModified = std::stoull(runProgram("git", true, { "-C", repoDir, "log", "-1", "--format=%ct", "--no-show-signature", input.getRev()->gitRev() }));
|
auto lastModified = std::stoull(runProgram("git", true, { "-C", repoDir, "--git-dir", gitDir, "log", "-1", "--format=%ct", "--no-show-signature", input.getRev()->gitRev() }));
|
||||||
|
|
||||||
Attrs infoAttrs({
|
Attrs infoAttrs({
|
||||||
{"rev", input.getRev()->gitRev()},
|
{"rev", input.getRev()->gitRev()},
|
||||||
|
@ -505,7 +646,7 @@ struct GitInputScheme : InputScheme
|
||||||
|
|
||||||
if (!shallow)
|
if (!shallow)
|
||||||
infoAttrs.insert_or_assign("revCount",
|
infoAttrs.insert_or_assign("revCount",
|
||||||
std::stoull(runProgram("git", true, { "-C", repoDir, "rev-list", "--count", input.getRev()->gitRev() })));
|
std::stoull(runProgram("git", true, { "-C", repoDir, "--git-dir", gitDir, "rev-list", "--count", input.getRev()->gitRev() })));
|
||||||
|
|
||||||
if (!_input.getRev())
|
if (!_input.getRev())
|
||||||
getCache()->add(
|
getCache()->add(
|
||||||
|
|
|
@ -4,7 +4,7 @@
|
||||||
#include "store-api.hh"
|
#include "store-api.hh"
|
||||||
#include "types.hh"
|
#include "types.hh"
|
||||||
#include "url-parts.hh"
|
#include "url-parts.hh"
|
||||||
|
#include "git.hh"
|
||||||
#include "fetchers.hh"
|
#include "fetchers.hh"
|
||||||
#include "fetch-settings.hh"
|
#include "fetch-settings.hh"
|
||||||
|
|
||||||
|
@ -243,7 +243,10 @@ struct GitHubInputScheme : GitArchiveInputScheme
|
||||||
Hash getRevFromRef(nix::ref<Store> store, const Input & input) const override
|
Hash getRevFromRef(nix::ref<Store> store, const Input & input) const override
|
||||||
{
|
{
|
||||||
auto host = maybeGetStrAttr(input.attrs, "host").value_or("github.com");
|
auto host = maybeGetStrAttr(input.attrs, "host").value_or("github.com");
|
||||||
auto url = fmt("https://api.%s/repos/%s/%s/commits/%s", // FIXME: check
|
auto url = fmt(
|
||||||
|
host == "github.com"
|
||||||
|
? "https://api.%s/repos/%s/%s/commits/%s"
|
||||||
|
: "https://%s/api/v3/repos/%s/%s/commits/%s",
|
||||||
host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), *input.getRef());
|
host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), *input.getRef());
|
||||||
|
|
||||||
Headers headers = makeHeadersWithAuthTokens(host);
|
Headers headers = makeHeadersWithAuthTokens(host);
|
||||||
|
@ -262,7 +265,10 @@ struct GitHubInputScheme : GitArchiveInputScheme
|
||||||
// FIXME: use regular /archive URLs instead? api.github.com
|
// FIXME: use regular /archive URLs instead? api.github.com
|
||||||
// might have stricter rate limits.
|
// might have stricter rate limits.
|
||||||
auto host = maybeGetStrAttr(input.attrs, "host").value_or("github.com");
|
auto host = maybeGetStrAttr(input.attrs, "host").value_or("github.com");
|
||||||
auto url = fmt("https://api.%s/repos/%s/%s/tarball/%s", // FIXME: check if this is correct for self hosted instances
|
auto url = fmt(
|
||||||
|
host == "github.com"
|
||||||
|
? "https://api.%s/repos/%s/%s/tarball/%s"
|
||||||
|
: "https://%s/api/v3/repos/%s/%s/tarball/%s",
|
||||||
host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"),
|
host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"),
|
||||||
input.getRev()->to_string(Base16, false));
|
input.getRev()->to_string(Base16, false));
|
||||||
|
|
||||||
|
@ -383,35 +389,31 @@ struct SourceHutInputScheme : GitArchiveInputScheme
|
||||||
std::string line;
|
std::string line;
|
||||||
getline(is, line);
|
getline(is, line);
|
||||||
|
|
||||||
auto ref_index = line.find("ref: ");
|
auto remoteLine = git::parseLsRemoteLine(line);
|
||||||
if (ref_index == std::string::npos) {
|
if (!remoteLine) {
|
||||||
throw BadURL("in '%d', couldn't resolve HEAD ref '%d'", input.to_string(), ref);
|
throw BadURL("in '%d', couldn't resolve HEAD ref '%d'", input.to_string(), ref);
|
||||||
}
|
}
|
||||||
|
ref_uri = remoteLine->target;
|
||||||
ref_uri = line.substr(ref_index+5, line.length()-1);
|
} else {
|
||||||
} else
|
|
||||||
ref_uri = fmt("refs/(heads|tags)/%s", ref);
|
ref_uri = fmt("refs/(heads|tags)/%s", ref);
|
||||||
|
}
|
||||||
|
|
||||||
auto file = store->toRealPath(
|
auto file = store->toRealPath(
|
||||||
downloadFile(store, fmt("%s/info/refs", base_url), "source", false, headers).storePath);
|
downloadFile(store, fmt("%s/info/refs", base_url), "source", false, headers).storePath);
|
||||||
std::ifstream is(file);
|
std::ifstream is(file);
|
||||||
|
|
||||||
std::string line;
|
std::string line;
|
||||||
std::string id;
|
std::optional<std::string> id;
|
||||||
while(getline(is, line)) {
|
while(!id && getline(is, line)) {
|
||||||
// Append $ to avoid partial name matches
|
auto parsedLine = git::parseLsRemoteLine(line);
|
||||||
std::regex pattern(fmt("%s$", ref_uri));
|
if (parsedLine && parsedLine->reference == ref_uri)
|
||||||
|
id = parsedLine->target;
|
||||||
if (std::regex_search(line, pattern)) {
|
|
||||||
id = line.substr(0, line.find('\t'));
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if(id.empty())
|
if(!id)
|
||||||
throw BadURL("in '%d', couldn't find ref '%d'", input.to_string(), ref);
|
throw BadURL("in '%d', couldn't find ref '%d'", input.to_string(), ref);
|
||||||
|
|
||||||
auto rev = Hash::parseAny(id, htSHA1);
|
auto rev = Hash::parseAny(*id, htSHA1);
|
||||||
debug("HEAD revision for '%s' is %s", fmt("%s/%s", base_url, ref), rev.gitRev());
|
debug("HEAD revision for '%s' is %s", fmt("%s/%s", base_url, ref), rev.gitRev());
|
||||||
return rev;
|
return rev;
|
||||||
}
|
}
|
||||||
|
|
|
@ -786,8 +786,7 @@ void runPostBuildHook(
|
||||||
Store & store,
|
Store & store,
|
||||||
Logger & logger,
|
Logger & logger,
|
||||||
const StorePath & drvPath,
|
const StorePath & drvPath,
|
||||||
StorePathSet outputPaths
|
const StorePathSet & outputPaths)
|
||||||
)
|
|
||||||
{
|
{
|
||||||
auto hook = settings.postBuildHook;
|
auto hook = settings.postBuildHook;
|
||||||
if (hook == "")
|
if (hook == "")
|
||||||
|
@ -906,7 +905,7 @@ void DerivationGoal::buildDone()
|
||||||
auto builtOutputs = registerOutputs();
|
auto builtOutputs = registerOutputs();
|
||||||
|
|
||||||
StorePathSet outputPaths;
|
StorePathSet outputPaths;
|
||||||
for (auto & [_, output] : buildResult.builtOutputs)
|
for (auto & [_, output] : builtOutputs)
|
||||||
outputPaths.insert(output.outPath);
|
outputPaths.insert(output.outPath);
|
||||||
runPostBuildHook(
|
runPostBuildHook(
|
||||||
worker.store,
|
worker.store,
|
||||||
|
@@ -985,21 +984,28 @@ void DerivationGoal::resolvedFinished()
     realWantedOutputs = resolvedDrv.outputNames();

     for (auto & wantedOutput : realWantedOutputs) {
-        assert(initialOutputs.count(wantedOutput) != 0);
-        assert(resolvedHashes.count(wantedOutput) != 0);
-        auto realisation = resolvedResult.builtOutputs.at(
-            DrvOutput { resolvedHashes.at(wantedOutput), wantedOutput });
+        auto initialOutput = get(initialOutputs, wantedOutput);
+        auto resolvedHash = get(resolvedHashes, wantedOutput);
+        if ((!initialOutput) || (!resolvedHash))
+            throw Error(
+                "derivation '%s' doesn't have expected output '%s' (derivation-goal.cc/resolvedFinished,resolve)",
+                worker.store.printStorePath(drvPath), wantedOutput);
+        auto realisation = get(resolvedResult.builtOutputs, DrvOutput { *resolvedHash, wantedOutput });
+        if (!realisation)
+            throw Error(
+                "derivation '%s' doesn't have expected output '%s' (derivation-goal.cc/resolvedFinished,realisation)",
+                worker.store.printStorePath(resolvedDrvGoal->drvPath), wantedOutput);
         if (drv->type().isPure()) {
-            auto newRealisation = realisation;
-            newRealisation.id = DrvOutput { initialOutputs.at(wantedOutput).outputHash, wantedOutput };
+            auto newRealisation = *realisation;
+            newRealisation.id = DrvOutput { initialOutput->outputHash, wantedOutput };
             newRealisation.signatures.clear();
             if (!drv->type().isFixed())
-                newRealisation.dependentRealisations = drvOutputReferences(worker.store, *drv, realisation.outPath);
+                newRealisation.dependentRealisations = drvOutputReferences(worker.store, *drv, realisation->outPath);
             signRealisation(newRealisation);
             worker.store.registerDrvOutput(newRealisation);
         }
-        outputPaths.insert(realisation.outPath);
-        builtOutputs.emplace(realisation.id, realisation);
+        outputPaths.insert(realisation->outPath);
+        builtOutputs.emplace(realisation->id, *realisation);
     }

     runPostBuildHook(
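The hunks above and below repeatedly replace throwing std::map::at() lookups with the nullable get() helper that this commit adds to util.hh (see that hunk further down), so callers can raise a domain-specific error instead of std::out_of_range. A minimal self-contained sketch of the pattern, with a made-up map and keys used purely for illustration:

#include <cassert>
#include <map>
#include <string>

// Same shape as the helper introduced in util.hh: return a pointer
// instead of throwing, so the caller decides how to handle a miss.
template<class T>
const typename T::mapped_type * get(const T & map, const typename T::key_type & key)
{
    auto i = map.find(key);
    if (i == map.end()) return nullptr;
    return &i->second;
}

int main()
{
    std::map<std::string, int> m{{"out", 1}};
    assert(get(m, "out") && *get(m, "out") == 1);  // present: non-null pointer
    assert(get(m, "bin") == nullptr);              // absent: nullptr, no exception
}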
@@ -1295,7 +1301,11 @@ std::pair<bool, DrvOutputs> DerivationGoal::checkPathValidity()
     DrvOutputs validOutputs;

     for (auto & i : queryPartialDerivationOutputMap()) {
-        InitialOutput & info = initialOutputs.at(i.first);
+        auto initialOutput = get(initialOutputs, i.first);
+        if (!initialOutput)
+            // this is an invalid output, gets catched with (!wantedOutputsLeft.empty())
+            continue;
+        auto & info = *initialOutput;
         info.wanted = wantOutput(i.first, wantedOutputs);
         if (info.wanted)
             wantedOutputsLeft.erase(i.first);
@@ -1310,7 +1320,7 @@ std::pair<bool, DrvOutputs> DerivationGoal::checkPathValidity()
                 : PathStatus::Corrupt,
             };
         }
-        auto drvOutput = DrvOutput{initialOutputs.at(i.first).outputHash, i.first};
+        auto drvOutput = DrvOutput{info.outputHash, i.first};
         if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations)) {
             if (auto real = worker.store.queryRealisation(drvOutput)) {
                 info.known = {
@@ -14,6 +14,7 @@
 #include "worker-protocol.hh"
 #include "topo-sort.hh"
 #include "callback.hh"
+#include "json-utils.hh"

 #include <regex>
 #include <queue>
@@ -56,8 +57,6 @@
 #include <pwd.h>
 #include <grp.h>

-#include <nlohmann/json.hpp>
-
 namespace nix {

 void handleDiffHook(
@@ -482,7 +481,7 @@ void LocalDerivationGoal::startBuilder()
        temporary build directory. The text files have the format used
        by `nix-store --register-validity'. However, the deriver
        fields are left empty. */
-    auto s = get(drv->env, "exportReferencesGraph").value_or("");
+    auto s = getOr(drv->env, "exportReferencesGraph", "");
     Strings ss = tokenizeString<Strings>(s);
     if (ss.size() % 2 != 0)
         throw BuildError("odd number of tokens in 'exportReferencesGraph': '%1%'", s);
@@ -989,7 +988,7 @@ void LocalDerivationGoal::initTmpDir() {
        there is no size constraint). */
     if (!parsedDrv->getStructuredAttrs()) {

-        StringSet passAsFile = tokenizeString<StringSet>(get(drv->env, "passAsFile").value_or(""));
+        StringSet passAsFile = tokenizeString<StringSet>(getOr(drv->env, "passAsFile", ""));
         for (auto & i : drv->env) {
             if (passAsFile.find(i.first) == passAsFile.end()) {
                 env[i.first] = i.second;
@@ -2128,12 +2127,22 @@ DrvOutputs LocalDerivationGoal::registerOutputs()
     std::map<std::string, std::variant<AlreadyRegistered, PerhapsNeedToRegister>> outputReferencesIfUnregistered;
     std::map<std::string, struct stat> outputStats;
     for (auto & [outputName, _] : drv->outputs) {
-        auto actualPath = toRealPathChroot(worker.store.printStorePath(scratchOutputs.at(outputName)));
+        auto scratchOutput = get(scratchOutputs, outputName);
+        if (!scratchOutput)
+            throw BuildError(
+                "builder for '%s' has no scratch output for '%s'",
+                worker.store.printStorePath(drvPath), outputName);
+        auto actualPath = toRealPathChroot(worker.store.printStorePath(*scratchOutput));

         outputsToSort.insert(outputName);

         /* Updated wanted info to remove the outputs we definitely don't need to register */
-        auto & initialInfo = initialOutputs.at(outputName);
+        auto initialOutput = get(initialOutputs, outputName);
+        if (!initialOutput)
+            throw BuildError(
+                "builder for '%s' has no initial output for '%s'",
+                worker.store.printStorePath(drvPath), outputName);
+        auto & initialInfo = *initialOutput;

         /* Don't register if already valid, and not checking */
         initialInfo.wanted = buildMode == bmCheck
@@ -2185,6 +2194,11 @@ DrvOutputs LocalDerivationGoal::registerOutputs()

     auto sortedOutputNames = topoSort(outputsToSort,
         {[&](const std::string & name) {
+            auto orifu = get(outputReferencesIfUnregistered, name);
+            if (!orifu)
+                throw BuildError(
+                    "no output reference for '%s' in build of '%s'",
+                    name, worker.store.printStorePath(drvPath));
             return std::visit(overloaded {
                 /* Since we'll use the already installed versions of these, we
                    can treat them as leaves and ignore any references they
@@ -2199,7 +2213,7 @@ DrvOutputs LocalDerivationGoal::registerOutputs()
                         referencedOutputs.insert(o);
                     return referencedOutputs;
                 },
-            }, outputReferencesIfUnregistered.at(name));
+            }, *orifu);
         }},
         {[&](const std::string & path, const std::string & parent) {
             // TODO with more -vvvv also show the temporary paths for manual inspection.
@@ -2213,9 +2227,10 @@ DrvOutputs LocalDerivationGoal::registerOutputs()
     OutputPathMap finalOutputs;

     for (auto & outputName : sortedOutputNames) {
-        auto output = drv->outputs.at(outputName);
-        auto & scratchPath = scratchOutputs.at(outputName);
-        auto actualPath = toRealPathChroot(worker.store.printStorePath(scratchPath));
+        auto output = get(drv->outputs, outputName);
+        auto scratchPath = get(scratchOutputs, outputName);
+        assert(output && scratchPath);
+        auto actualPath = toRealPathChroot(worker.store.printStorePath(*scratchPath));

         auto finish = [&](StorePath finalStorePath) {
             /* Store the final path */
@@ -2223,10 +2238,13 @@ DrvOutputs LocalDerivationGoal::registerOutputs()
             /* The rewrite rule will be used in downstream outputs that refer to
                use. This is why the topological sort is essential to do first
                before this for loop. */
-            if (scratchPath != finalStorePath)
-                outputRewrites[std::string { scratchPath.hashPart() }] = std::string { finalStorePath.hashPart() };
+            if (*scratchPath != finalStorePath)
+                outputRewrites[std::string { scratchPath->hashPart() }] = std::string { finalStorePath.hashPart() };
         };

+        auto orifu = get(outputReferencesIfUnregistered, outputName);
+        assert(orifu);
+
         std::optional<StorePathSet> referencesOpt = std::visit(overloaded {
             [&](const AlreadyRegistered & skippedFinalPath) -> std::optional<StorePathSet> {
                 finish(skippedFinalPath.path);
@@ -2235,7 +2253,7 @@ DrvOutputs LocalDerivationGoal::registerOutputs()
             [&](const PerhapsNeedToRegister & r) -> std::optional<StorePathSet> {
                 return r.refs;
             },
-        }, outputReferencesIfUnregistered.at(outputName));
+        }, *orifu);

         if (!referencesOpt)
             continue;
@@ -2268,25 +2286,29 @@ DrvOutputs LocalDerivationGoal::registerOutputs()
         for (auto & r : references) {
             auto name = r.name();
             auto origHash = std::string { r.hashPart() };
-            if (r == scratchPath)
+            if (r == *scratchPath) {
                 res.first = true;
-            else if (outputRewrites.count(origHash) == 0)
-                res.second.insert(r);
-            else {
-                std::string newRef = outputRewrites.at(origHash);
+            } else if (auto outputRewrite = get(outputRewrites, origHash)) {
+                std::string newRef = *outputRewrite;
                 newRef += '-';
                 newRef += name;
                 res.second.insert(StorePath { newRef });
+            } else {
+                res.second.insert(r);
             }
         }
         return res;
     };

     auto newInfoFromCA = [&](const DerivationOutput::CAFloating outputHash) -> ValidPathInfo {
-        auto & st = outputStats.at(outputName);
+        auto st = get(outputStats, outputName);
+        if (!st)
+            throw BuildError(
+                "output path %1% without valid stats info",
+                actualPath);
         if (outputHash.method == FileIngestionMethod::Flat) {
             /* The output path should be a regular file without execute permission. */
-            if (!S_ISREG(st.st_mode) || (st.st_mode & S_IXUSR) != 0)
+            if (!S_ISREG(st->st_mode) || (st->st_mode & S_IXUSR) != 0)
                 throw BuildError(
                     "output path '%1%' should be a non-executable regular file "
                     "since recursive hashing is not enabled (outputHashMode=flat)",
@@ -2294,7 +2316,7 @@ DrvOutputs LocalDerivationGoal::registerOutputs()
         }
         rewriteOutput();
         /* FIXME optimize and deduplicate with addToStore */
-        std::string oldHashPart { scratchPath.hashPart() };
+        std::string oldHashPart { scratchPath->hashPart() };
         HashModuloSink caSink { outputHash.hashType, oldHashPart };
         switch (outputHash.method) {
         case FileIngestionMethod::Recursive:
@@ -2313,7 +2335,7 @@ DrvOutputs LocalDerivationGoal::registerOutputs()
             outputPathName(drv->name, outputName),
             refs.second,
             refs.first);
-        if (scratchPath != finalPath) {
+        if (*scratchPath != finalPath) {
             // Also rewrite the output path
             auto source = sinkToSource([&](Sink & nextSink) {
                 StringSink sink;
|
||||||
auto requiredFinalPath = output.path;
|
auto requiredFinalPath = output.path;
|
||||||
/* Preemptively add rewrite rule for final hash, as that is
|
/* Preemptively add rewrite rule for final hash, as that is
|
||||||
what the NAR hash will use rather than normalized-self references */
|
what the NAR hash will use rather than normalized-self references */
|
||||||
if (scratchPath != requiredFinalPath)
|
if (*scratchPath != requiredFinalPath)
|
||||||
outputRewrites.insert_or_assign(
|
outputRewrites.insert_or_assign(
|
||||||
std::string { scratchPath.hashPart() },
|
std::string { scratchPath->hashPart() },
|
||||||
std::string { requiredFinalPath.hashPart() });
|
std::string { requiredFinalPath.hashPart() });
|
||||||
rewriteOutput();
|
rewriteOutput();
|
||||||
auto narHashAndSize = hashPath(htSHA256, actualPath);
|
auto narHashAndSize = hashPath(htSHA256, actualPath);
|
||||||
|
@ -2409,7 +2431,7 @@ DrvOutputs LocalDerivationGoal::registerOutputs()
|
||||||
});
|
});
|
||||||
},
|
},
|
||||||
|
|
||||||
}, output.raw());
|
}, output->raw());
|
||||||
|
|
||||||
/* FIXME: set proper permissions in restorePath() so
|
/* FIXME: set proper permissions in restorePath() so
|
||||||
we don't have to do another traversal. */
|
we don't have to do another traversal. */
|
||||||
|
@ -2425,7 +2447,7 @@ DrvOutputs LocalDerivationGoal::registerOutputs()
|
||||||
derivations. */
|
derivations. */
|
||||||
PathLocks dynamicOutputLock;
|
PathLocks dynamicOutputLock;
|
||||||
dynamicOutputLock.setDeletion(true);
|
dynamicOutputLock.setDeletion(true);
|
||||||
auto optFixedPath = output.path(worker.store, drv->name, outputName);
|
auto optFixedPath = output->path(worker.store, drv->name, outputName);
|
||||||
if (!optFixedPath ||
|
if (!optFixedPath ||
|
||||||
worker.store.printStorePath(*optFixedPath) != finalDestPath)
|
worker.store.printStorePath(*optFixedPath) != finalDestPath)
|
||||||
{
|
{
|
||||||
|
@ -2491,11 +2513,10 @@ DrvOutputs LocalDerivationGoal::registerOutputs()
|
||||||
|
|
||||||
/* For debugging, print out the referenced and unreferenced paths. */
|
/* For debugging, print out the referenced and unreferenced paths. */
|
||||||
for (auto & i : inputPaths) {
|
for (auto & i : inputPaths) {
|
||||||
auto j = references.find(i);
|
if (references.count(i))
|
||||||
if (j == references.end())
|
|
||||||
debug("unreferenced input: '%1%'", worker.store.printStorePath(i));
|
|
||||||
else
|
|
||||||
debug("referenced input: '%1%'", worker.store.printStorePath(i));
|
debug("referenced input: '%1%'", worker.store.printStorePath(i));
|
||||||
|
else
|
||||||
|
debug("unreferenced input: '%1%'", worker.store.printStorePath(i));
|
||||||
}
|
}
|
||||||
|
|
||||||
if (curRound == nrRounds) {
|
if (curRound == nrRounds) {
|
||||||
|
@@ -2612,9 +2633,11 @@ DrvOutputs LocalDerivationGoal::registerOutputs()
     DrvOutputs builtOutputs;

     for (auto & [outputName, newInfo] : infos) {
+        auto oldinfo = get(initialOutputs, outputName);
+        assert(oldinfo);
         auto thisRealisation = Realisation {
             .id = DrvOutput {
-                initialOutputs.at(outputName).outputHash,
+                oldinfo->outputHash,
                 outputName
             },
             .outPath = newInfo.path
@@ -2710,9 +2733,10 @@ void LocalDerivationGoal::checkOutputs(const std::map<std::string, ValidPathInfo
             for (auto & i : *value) {
                 if (worker.store.isStorePath(i))
                     spec.insert(worker.store.parseStorePath(i));
-                else if (outputs.count(i))
-                    spec.insert(outputs.at(i).path);
-                else throw BuildError("derivation contains an illegal reference specifier '%s'", i);
+                else if (auto output = get(outputs, i))
+                    spec.insert(output->path);
+                else
+                    throw BuildError("derivation contains an illegal reference specifier '%s'", i);
             }

             auto used = recursive
@@ -2751,24 +2775,18 @@ void LocalDerivationGoal::checkOutputs(const std::map<std::string, ValidPathInfo
     };

     if (auto structuredAttrs = parsedDrv->getStructuredAttrs()) {
-        auto outputChecks = structuredAttrs->find("outputChecks");
-        if (outputChecks != structuredAttrs->end()) {
-            auto output = outputChecks->find(outputName);
-
-            if (output != outputChecks->end()) {
+        if (auto outputChecks = get(*structuredAttrs, "outputChecks")) {
+            if (auto output = get(*outputChecks, outputName)) {
                 Checks checks;

-                auto maxSize = output->find("maxSize");
-                if (maxSize != output->end())
+                if (auto maxSize = get(*output, "maxSize"))
                     checks.maxSize = maxSize->get<uint64_t>();

-                auto maxClosureSize = output->find("maxClosureSize");
-                if (maxClosureSize != output->end())
+                if (auto maxClosureSize = get(*output, "maxClosureSize"))
                     checks.maxClosureSize = maxClosureSize->get<uint64_t>();

-                auto get = [&](const std::string & name) -> std::optional<Strings> {
-                    auto i = output->find(name);
-                    if (i != output->end()) {
+                auto get_ = [&](const std::string & name) -> std::optional<Strings> {
+                    if (auto i = get(*output, name)) {
                         Strings res;
                         for (auto j = i->begin(); j != i->end(); ++j) {
                             if (!j->is_string())
@@ -2781,10 +2799,10 @@ void LocalDerivationGoal::checkOutputs(const std::map<std::string, ValidPathInfo
                     return {};
                 };

-                checks.allowedReferences = get("allowedReferences");
-                checks.allowedRequisites = get("allowedRequisites");
-                checks.disallowedReferences = get("disallowedReferences");
-                checks.disallowedRequisites = get("disallowedRequisites");
+                checks.allowedReferences = get_("allowedReferences");
+                checks.allowedRequisites = get_("allowedRequisites");
+                checks.disallowedReferences = get_("disallowedReferences");
+                checks.disallowedRequisites = get_("disallowedRequisites");

                 applyChecks(checks);
             }
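The checkOutputs hunk above reads the outputChecks limits out of the structured-attrs JSON through the same pointer-returning lookup, using the get() overloads for nlohmann::json added in json-utils.hh further down. A small stand-alone sketch of that lookup chain; the structured-attrs fragment is made up for illustration:

#include <cstdint>
#include <iostream>
#include <nlohmann/json.hpp>

// Pointer-returning lookup, same shape as the helper in json-utils.hh.
const nlohmann::json * get(const nlohmann::json & map, const std::string & key)
{
    auto i = map.find(key);
    if (i == map.end()) return nullptr;
    return &*i;
}

int main()
{
    // Hypothetical structured-attrs fragment, only for illustration.
    auto structuredAttrs = nlohmann::json::parse(R"({
        "outputChecks": { "out": { "maxSize": 1048576 } }
    })");

    if (auto outputChecks = get(structuredAttrs, "outputChecks"))
        if (auto output = get(*outputChecks, "out"))
            if (auto maxSize = get(*output, "maxSize"))
                std::cout << "maxSize = " << maxSize->get<uint64_t>() << "\n";
}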
@@ -350,7 +350,7 @@ void Worker::waitForInput()
        become `available'. Note that `available' (i.e., non-blocking)
        includes EOF. */
     std::vector<struct pollfd> pollStatus;
-    std::map <int, int> fdToPollStatus;
+    std::map<int, size_t> fdToPollStatus;
     for (auto & i : children) {
         for (auto & j : i.fds) {
             pollStatus.push_back((struct pollfd) { .fd = j, .events = POLLIN });
@@ -380,7 +380,10 @@ void Worker::waitForInput()
             std::set<int> fds2(j->fds);
             std::vector<unsigned char> buffer(4096);
             for (auto & k : fds2) {
-                if (pollStatus.at(fdToPollStatus.at(k)).revents) {
+                const auto fdPollStatusId = get(fdToPollStatus, k);
+                assert(fdPollStatusId);
+                assert(*fdPollStatusId < pollStatus.size());
+                if (pollStatus.at(*fdPollStatusId).revents) {
                     ssize_t rd = ::read(k, buffer.data(), buffer.size());
                     // FIXME: is there a cleaner way to handle pt close
                     // than EIO? Is this even standard?
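The waitForInput change above keys the pollfd vector by an fd-to-index map and asserts the index is in range before reading revents. A compact sketch of that bookkeeping against a single pipe, independent of the Worker class and written only to illustrate the idea:

#include <cassert>
#include <map>
#include <poll.h>
#include <unistd.h>
#include <vector>

int main()
{
    int fds[2];
    assert(pipe(fds) == 0);
    assert(write(fds[1], "x", 1) == 1);  // make the read end readable

    // One pollfd per fd, plus a map from fd to its index in the vector,
    // mirroring the bookkeeping in Worker::waitForInput().
    std::vector<struct pollfd> pollStatus;
    std::map<int, size_t> fdToPollStatus;
    fdToPollStatus[fds[0]] = pollStatus.size();
    struct pollfd p;
    p.fd = fds[0];
    p.events = POLLIN;
    p.revents = 0;
    pollStatus.push_back(p);

    assert(poll(pollStatus.data(), pollStatus.size(), 1000) == 1);

    auto i = fdToPollStatus.find(fds[0]);
    assert(i != fdToPollStatus.end() && i->second < pollStatus.size());
    assert(pollStatus.at(i->second).revents & POLLIN);  // data is ready

    close(fds[0]);
    close(fds[1]);
}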
@@ -93,8 +93,9 @@ static void createLinks(State & state, const Path & srcDir, const Path & dstDir,
             auto prevPriority = state.priorities[dstFile];
             if (prevPriority == priority)
                 throw Error(
-                        "packages '%1%' and '%2%' have the same priority %3%; "
+                        "files '%1%' and '%2%' have the same priority %3%; "
                         "use 'nix-env --set-flag priority NUMBER INSTALLED_PKGNAME' "
+                        "or type 'nix profile install --help' if using 'nix profile' to find out how"
                         "to change the priority of one of the conflicting packages"
                         " (0 being the highest priority)",
                         srcFile, readLink(dstFile), priority);
@@ -24,7 +24,7 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData)

     Path storePath = getAttr("out");
     auto mainUrl = getAttr("url");
-    bool unpack = get(drv.env, "unpack").value_or("") == "1";
+    bool unpack = getOr(drv.env, "unpack", "") == "1";

     /* Note: have to use a fresh fileTransfer here because we're in
        a forked process. */
@@ -661,8 +661,10 @@ DrvHash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOut
         if (res.kind == DrvHash::Kind::Deferred)
             kind = DrvHash::Kind::Deferred;
         for (auto & outputName : inputOutputs) {
-            const auto h = res.hashes.at(outputName);
-            inputs2[h.to_string(Base16, false)].insert(outputName);
+            const auto h = get(res.hashes, outputName);
+            if (!h)
+                throw Error("no hash for output '%s' of derivation '%s'", outputName, drv.name);
+            inputs2[h->to_string(Base16, false)].insert(outputName);
         }
     }

@@ -836,8 +838,11 @@ static void rewriteDerivation(Store & store, BasicDerivation & drv, const String
     auto hashModulo = hashDerivationModulo(store, Derivation(drv), true);
     for (auto & [outputName, output] : drv.outputs) {
         if (std::holds_alternative<DerivationOutput::Deferred>(output.raw())) {
-            auto & h = hashModulo.hashes.at(outputName);
-            auto outPath = store.makeOutputPath(outputName, h, drv.name);
+            auto h = get(hashModulo.hashes, outputName);
+            if (!h)
+                throw Error("derivation '%s' output '%s' has no hash (derivations.cc/rewriteDerivation)",
+                    drv.name, outputName);
+            auto outPath = store.makeOutputPath(outputName, *h, drv.name);
             drv.env[outputName] = store.printStorePath(outPath);
             output = DerivationOutput::InputAddressed {
                 .path = std::move(outPath),
@@ -4,6 +4,8 @@

 #include <nlohmann/json.hpp>

+#include <optional>
+
 namespace nix {

 nlohmann::json DerivedPath::Opaque::toJSON(ref<Store> store) const {
@@ -17,12 +19,12 @@ nlohmann::json DerivedPath::Built::toJSON(ref<Store> store) const {
     res["drvPath"] = store->printStorePath(drvPath);
     // Fallback for the input-addressed derivation case: We expect to always be
     // able to print the output paths, so let’s do it
-    auto knownOutputs = store->queryPartialDerivationOutputMap(drvPath);
+    const auto knownOutputs = store->queryPartialDerivationOutputMap(drvPath);
     for (const auto& output : outputs) {
-        if (knownOutputs.at(output))
-            res["outputs"][output] = store->printStorePath(knownOutputs.at(output).value());
-        else
-            res["outputs"][output] = nullptr;
+        auto knownOutput = get(knownOutputs, output);
+        res["outputs"][output] = (knownOutput && *knownOutput)
+            ? store->printStorePath(**knownOutput)
+            : nullptr;
     }
     return res;
 }
@@ -123,10 +125,15 @@ RealisedPath::Set BuiltPath::toRealisedPaths(Store & store) const
         for (auto& [outputName, outputPath] : p.outputs) {
             if (settings.isExperimentalFeatureEnabled(
                     Xp::CaDerivations)) {
+                auto drvOutput = get(drvHashes, outputName);
+                if (!drvOutput)
+                    throw Error(
+                        "the derivation '%s' has unrealised output '%s' (derived-path.cc/toRealisedPaths)",
+                        store.printStorePath(p.drvPath), outputName);
                 auto thisRealisation = store.queryRealisation(
-                    DrvOutput{drvHashes.at(outputName), outputName});
-                assert(thisRealisation); // We’ve built it, so we must h
-                                         // ve the realisation
+                    DrvOutput{*drvOutput, outputName});
+                assert(thisRealisation); // We’ve built it, so we must
+                                         // have the realisation
                 res.insert(*thisRealisation);
             } else {
                 res.insert(outputPath);
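DerivedPath::Built::toJSON above now emits a store path for each output whose path is known and JSON null otherwise. A small sketch of that output shape built with nlohmann::json; the store paths below are invented purely for illustration:

#include <iostream>
#include <optional>
#include <string>
#include <nlohmann/json.hpp>

int main()
{
    // Mirrors the "outputs" shape: a known output maps to its store path,
    // an unknown one to JSON null. Paths are made up for the example.
    std::optional<std::string> outPath = "/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-hello";
    std::optional<std::string> binPath; // not statically known

    nlohmann::json res;
    res["drvPath"] = "/nix/store/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb-hello.drv";
    res["outputs"]["out"] = outPath ? nlohmann::json(*outPath) : nlohmann::json(nullptr);
    res["outputs"]["bin"] = binPath ? nlohmann::json(*binPath) : nlohmann::json(nullptr);

    std::cout << res.dump(2) << "\n";  // "bin" prints as null
}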
@@ -692,10 +692,10 @@ struct curlFileTransfer : public FileTransfer
 #if ENABLE_S3
         auto [bucketName, key, params] = parseS3Uri(request.uri);

-        std::string profile = get(params, "profile").value_or("");
-        std::string region = get(params, "region").value_or(Aws::Region::US_EAST_1);
-        std::string scheme = get(params, "scheme").value_or("");
-        std::string endpoint = get(params, "endpoint").value_or("");
+        std::string profile = getOr(params, "profile", "");
+        std::string region = getOr(params, "region", Aws::Region::US_EAST_1);
+        std::string scheme = getOr(params, "scheme", "");
+        std::string endpoint = getOr(params, "endpoint", "");

         S3Helper s3Helper(profile, region, scheme, endpoint);

@@ -718,7 +718,11 @@ void LocalStore::checkDerivationOutputs(const StorePath & drvPath, const Derivat
                 // somewhat expensive so we do lazily
                 hashesModulo = hashDerivationModulo(*this, drv, true);
             }
-            StorePath recomputed = makeOutputPath(i.first, hashesModulo->hashes.at(i.first), drvName);
+            auto currentOutputHash = get(hashesModulo->hashes, i.first);
+            if (!currentOutputHash)
+                throw Error("derivation '%s' has unexpected output '%s' (local-store / hashesModulo) named '%s'",
+                    printStorePath(drvPath), printStorePath(doia.path), i.first);
+            StorePath recomputed = makeOutputPath(i.first, *currentOutputHash, drvName);
             if (doia.path != recomputed)
                 throw Error("derivation '%s' has incorrect output '%s', should be '%s'",
                     printStorePath(drvPath), printStorePath(doia.path), printStorePath(recomputed));
@@ -278,11 +278,16 @@ std::map<DrvOutput, StorePath> drvOutputReferences(
     std::set<Realisation> inputRealisations;

     for (const auto & [inputDrv, outputNames] : drv.inputDrvs) {
-        auto outputHashes =
+        const auto outputHashes =
             staticOutputHashes(store, store.readDerivation(inputDrv));
         for (const auto & outputName : outputNames) {
+            auto outputHash = get(outputHashes, outputName);
+            if (!outputHash)
+                throw Error(
+                    "output '%s' of derivation '%s' isn't realised", outputName,
+                    store.printStorePath(inputDrv));
             auto thisRealisation = store.queryRealisation(
-                DrvOutput{outputHashes.at(outputName), outputName});
+                DrvOutput{*outputHash, outputName});
             if (!thisRealisation)
                 throw Error(
                     "output '%s' of derivation '%s' isn't built", outputName,
@@ -1,5 +1,8 @@
 #include "path-with-outputs.hh"
 #include "store-api.hh"
+#include "nlohmann/json.hpp"
+
+#include <regex>

 namespace nix {

@@ -68,4 +71,57 @@ StorePathWithOutputs followLinksToStorePathWithOutputs(const Store & store, std:
     return StorePathWithOutputs { store.followLinksToStorePath(path), std::move(outputs) };
 }

+std::pair<std::string, OutputsSpec> parseOutputsSpec(const std::string & s)
+{
+    static std::regex regex(R"((.*)\^((\*)|([a-z]+(,[a-z]+)*)))");
+
+    std::smatch match;
+    if (!std::regex_match(s, match, regex))
+        return {s, DefaultOutputs()};
+
+    if (match[3].matched)
+        return {match[1], AllOutputs()};
+
+    return {match[1], tokenizeString<OutputNames>(match[4].str(), ",")};
+}
+
+std::string printOutputsSpec(const OutputsSpec & outputsSpec)
+{
+    if (std::get_if<DefaultOutputs>(&outputsSpec))
+        return "";
+
+    if (std::get_if<AllOutputs>(&outputsSpec))
+        return "^*";
+
+    if (auto outputNames = std::get_if<OutputNames>(&outputsSpec))
+        return "^" + concatStringsSep(",", *outputNames);
+
+    assert(false);
+}
+
+void to_json(nlohmann::json & json, const OutputsSpec & outputsSpec)
+{
+    if (std::get_if<DefaultOutputs>(&outputsSpec))
+        json = nullptr;
+
+    else if (std::get_if<AllOutputs>(&outputsSpec))
+        json = std::vector<std::string>({"*"});
+
+    else if (auto outputNames = std::get_if<OutputNames>(&outputsSpec))
+        json = *outputNames;
+}
+
+void from_json(const nlohmann::json & json, OutputsSpec & outputsSpec)
+{
+    if (json.is_null())
+        outputsSpec = DefaultOutputs();
+    else {
+        auto names = json.get<OutputNames>();
+        if (names == OutputNames({"*"}))
+            outputsSpec = AllOutputs();
+        else
+            outputsSpec = names;
+    }
+}
+
 }
@@ -4,6 +4,7 @@

 #include "path.hh"
 #include "derived-path.hh"
+#include "nlohmann/json_fwd.hpp"

 namespace nix {

@@ -32,4 +33,25 @@ StorePathWithOutputs parsePathWithOutputs(const Store & store, std::string_view

 StorePathWithOutputs followLinksToStorePathWithOutputs(const Store & store, std::string_view pathWithOutputs);

+typedef std::set<std::string> OutputNames;
+
+struct AllOutputs {
+    bool operator < (const AllOutputs & _) const { return false; }
+};
+
+struct DefaultOutputs {
+    bool operator < (const DefaultOutputs & _) const { return false; }
+};
+
+typedef std::variant<DefaultOutputs, AllOutputs, OutputNames> OutputsSpec;
+
+/* Parse a string of the form 'prefix^output1,...outputN' or
+   'prefix^*', returning the prefix and the outputs spec. */
+std::pair<std::string, OutputsSpec> parseOutputsSpec(const std::string & s);
+
+std::string printOutputsSpec(const OutputsSpec & outputsSpec);
+
+void to_json(nlohmann::json &, const OutputsSpec &);
+void from_json(const nlohmann::json &, OutputsSpec &);
+
 }
@@ -853,15 +853,15 @@ std::vector<BuildResult> RemoteStore::buildPathsWithResults(

                 OutputPathMap outputs;
                 auto drv = evalStore->readDerivation(bfd.drvPath);
-                auto outputHashes = staticOutputHashes(*evalStore, drv); // FIXME: expensive
-                auto drvOutputs = drv.outputsAndOptPaths(*this);
+                const auto outputHashes = staticOutputHashes(*evalStore, drv); // FIXME: expensive
+                const auto drvOutputs = drv.outputsAndOptPaths(*this);
                 for (auto & output : bfd.outputs) {
-                    if (!outputHashes.count(output))
+                    auto outputHash = get(outputHashes, output);
+                    if (!outputHash)
                         throw Error(
                             "the derivation '%s' doesn't have an output named '%s'",
                             printStorePath(bfd.drvPath), output);
-                    auto outputId =
-                        DrvOutput{outputHashes.at(output), output};
+                    auto outputId = DrvOutput{ *outputHash, output };
                     if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations)) {
                         auto realisation =
                             queryRealisation(outputId);
@@ -874,13 +874,14 @@ std::vector<BuildResult> RemoteStore::buildPathsWithResults(
                     } else {
                         // If ca-derivations isn't enabled, assume that
                         // the output path is statically known.
-                        assert(drvOutputs.count(output));
-                        assert(drvOutputs.at(output).second);
+                        const auto drvOutput = get(drvOutputs, output);
+                        assert(drvOutput);
+                        assert(drvOutput->second);
                         res.builtOutputs.emplace(
                             outputId,
                             Realisation {
                                 .id = outputId,
-                                .outPath = *drvOutputs.at(output).second
+                                .outPath = *drvOutput->second,
                             });
                     }
                 }
@@ -5,6 +5,7 @@
 #include "ref.hh"

 #include <optional>
+#include <string>

 namespace Aws { namespace Client { class ClientConfiguration; } }
 namespace Aws { namespace S3 { class S3Client; } }
@@ -1314,7 +1314,7 @@ static bool isNonUriPath(const std::string & spec) {
 std::shared_ptr<Store> openFromNonUri(const std::string & uri, const Store::Params & params)
 {
     if (uri == "" || uri == "auto") {
-        auto stateDir = get(params, "state").value_or(settings.nixStateDir);
+        auto stateDir = getOr(params, "state", settings.nixStateDir);
         if (access(stateDir.c_str(), R_OK | W_OK) == 0)
             return std::make_shared<LocalStore>(params);
         else if (pathExists(settings.nixDaemonSocketFile))
src/libstore/tests/path-with-outputs.cc (new file)
@@ -0,0 +1,46 @@
+#include "path-with-outputs.hh"
+
+#include <gtest/gtest.h>
+
+namespace nix {
+
+TEST(parseOutputsSpec, basic)
+{
+    {
+        auto [prefix, outputsSpec] = parseOutputsSpec("foo");
+        ASSERT_EQ(prefix, "foo");
+        ASSERT_TRUE(std::get_if<DefaultOutputs>(&outputsSpec));
+    }
+
+    {
+        auto [prefix, outputsSpec] = parseOutputsSpec("foo^*");
+        ASSERT_EQ(prefix, "foo");
+        ASSERT_TRUE(std::get_if<AllOutputs>(&outputsSpec));
+    }
+
+    {
+        auto [prefix, outputsSpec] = parseOutputsSpec("foo^out");
+        ASSERT_EQ(prefix, "foo");
+        ASSERT_TRUE(std::get<OutputNames>(outputsSpec) == OutputNames({"out"}));
+    }
+
+    {
+        auto [prefix, outputsSpec] = parseOutputsSpec("foo^out,bin");
+        ASSERT_EQ(prefix, "foo");
+        ASSERT_TRUE(std::get<OutputNames>(outputsSpec) == OutputNames({"out", "bin"}));
+    }
+
+    {
+        auto [prefix, outputsSpec] = parseOutputsSpec("foo^bar^out,bin");
+        ASSERT_EQ(prefix, "foo^bar");
+        ASSERT_TRUE(std::get<OutputNames>(outputsSpec) == OutputNames({"out", "bin"}));
+    }
+
+    {
+        auto [prefix, outputsSpec] = parseOutputsSpec("foo^&*()");
+        ASSERT_EQ(prefix, "foo^&*()");
+        ASSERT_TRUE(std::get_if<DefaultOutputs>(&outputsSpec));
+    }
+}
+
+}
@@ -35,7 +35,9 @@ const std::optional<ExperimentalFeature> parseExperimentalFeature(const std::str

 std::string_view showExperimentalFeature(const ExperimentalFeature feature)
 {
-    return stringifiedXpFeatures.at(feature);
+    const auto ret = get(stringifiedXpFeatures, feature);
+    assert(ret);
+    return *ret;
 }

 std::set<ExperimentalFeature> parseFeatures(const std::set<std::string> & rawFeatures)
@@ -58,11 +60,13 @@ std::ostream & operator <<(std::ostream & str, const ExperimentalFeature & featu
     return str << showExperimentalFeature(feature);
 }

-void to_json(nlohmann::json& j, const ExperimentalFeature& feature) {
+void to_json(nlohmann::json & j, const ExperimentalFeature & feature)
+{
     j = showExperimentalFeature(feature);
 }

-void from_json(const nlohmann::json& j, ExperimentalFeature& feature) {
+void from_json(const nlohmann::json & j, ExperimentalFeature & feature)
+{
     const std::string input = j;
     const auto parsed = parseExperimentalFeature(input);
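Free to_json/from_json functions like the ones reformatted above (and the new ones for OutputsSpec earlier in this diff) are picked up by nlohmann::json through argument-dependent lookup, so the type converts to and from JSON implicitly. A sketch of that mechanism with a toy enum standing in for ExperimentalFeature; the names are made up for illustration:

#include <cassert>
#include <string>
#include <nlohmann/json.hpp>

namespace demo {

// A toy enum standing in for ExperimentalFeature.
enum class Colour { Red, Green };

// Free to_json/from_json overloads in the type's namespace are all
// nlohmann::json needs for implicit conversion (ADL serializer).
void to_json(nlohmann::json & j, const Colour & c)
{
    j = (c == Colour::Red) ? "red" : "green";
}

void from_json(const nlohmann::json & j, Colour & c)
{
    c = (j.get<std::string>() == "red") ? Colour::Red : Colour::Green;
}

}

int main()
{
    nlohmann::json j = demo::Colour::Red;   // uses to_json
    assert(j.get<std::string>() == "red");
    auto c = j.get<demo::Colour>();         // uses from_json
    assert(c == demo::Colour::Red);
}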
src/libutil/git.cc (new file)
@@ -0,0 +1,25 @@
+#include "git.hh"
+
+#include <regex>
+
+namespace nix {
+namespace git {
+
+std::optional<LsRemoteRefLine> parseLsRemoteLine(std::string_view line)
+{
+    const static std::regex line_regex("^(ref: *)?([^\\s]+)(?:\\t+(.*))?$");
+    std::match_results<std::string_view::const_iterator> match;
+    if (!std::regex_match(line.cbegin(), line.cend(), match, line_regex))
+        return std::nullopt;
+
+    return LsRemoteRefLine {
+        .kind = match[1].length() == 0
+            ? LsRemoteRefLine::Kind::Object
+            : LsRemoteRefLine::Kind::Symbolic,
+        .target = match[2],
+        .reference = match[3].length() == 0 ? std::nullopt : std::optional<std::string>{ match[3] }
+    };
+}
+
+}
+}
src/libutil/git.hh (new file)
@@ -0,0 +1,40 @@
+#pragma once
+
+#include <string>
+#include <string_view>
+#include <optional>
+
+namespace nix {
+
+namespace git {
+
+// A line from the output of `git ls-remote --symref`.
+//
+// These can be of two kinds:
+//
+// - Symbolic references of the form
+//
+//     ref: {target} {reference}
+//
+//   where {target} is itself a reference and {reference} is optional
+//
+// - Object references of the form
+//
+//     {target} {reference}
+//
+//   where {target} is a commit id and {reference} is mandatory
+struct LsRemoteRefLine {
+    enum struct Kind {
+        Symbolic,
+        Object
+    };
+    Kind kind;
+    std::string target;
+    std::optional<std::string> reference;
+};
+
+std::optional<LsRemoteRefLine> parseLsRemoteLine(std::string_view line);
+
+}
+
+}
src/libutil/json-utils.hh (new file)
@@ -0,0 +1,21 @@
+#pragma once
+
+#include <nlohmann/json.hpp>
+
+namespace nix {
+
+const nlohmann::json * get(const nlohmann::json & map, const std::string & key)
+{
+    auto i = map.find(key);
+    if (i == map.end()) return nullptr;
+    return &*i;
+}
+
+nlohmann::json * get(nlohmann::json & map, const std::string & key)
+{
+    auto i = map.find(key);
+    if (i == map.end()) return nullptr;
+    return &*i;
+}
+
+}
src/libutil/tests/git.cc (new file)
@@ -0,0 +1,33 @@
+#include "git.hh"
+#include <gtest/gtest.h>
+
+namespace nix {
+
+    TEST(GitLsRemote, parseSymrefLineWithReference) {
+        auto line = "ref: refs/head/main	HEAD";
+        auto res = git::parseLsRemoteLine(line);
+        ASSERT_TRUE(res.has_value());
+        ASSERT_EQ(res->kind, git::LsRemoteRefLine::Kind::Symbolic);
+        ASSERT_EQ(res->target, "refs/head/main");
+        ASSERT_EQ(res->reference, "HEAD");
+    }
+
+    TEST(GitLsRemote, parseSymrefLineWithNoReference) {
+        auto line = "ref: refs/head/main";
+        auto res = git::parseLsRemoteLine(line);
+        ASSERT_TRUE(res.has_value());
+        ASSERT_EQ(res->kind, git::LsRemoteRefLine::Kind::Symbolic);
+        ASSERT_EQ(res->target, "refs/head/main");
+        ASSERT_EQ(res->reference, std::nullopt);
+    }
+
+    TEST(GitLsRemote, parseObjectRefLine) {
+        auto line = "abc123 refs/head/main";
+        auto res = git::parseLsRemoteLine(line);
+        ASSERT_TRUE(res.has_value());
+        ASSERT_EQ(res->kind, git::LsRemoteRefLine::Kind::Object);
+        ASSERT_EQ(res->target, "abc123");
+        ASSERT_EQ(res->reference, "refs/head/main");
+    }
+}
@@ -548,7 +548,7 @@ namespace nix {

     TEST(get, emptyContainer) {
         StringMap s = { };
-        auto expected = std::nullopt;
+        auto expected = nullptr;

         ASSERT_EQ(get(s, "one"), expected);
     }
@@ -559,7 +559,23 @@ namespace nix {
         s["two"] = "er";
         auto expected = "yi";

-        ASSERT_EQ(get(s, "one"), expected);
+        ASSERT_EQ(*get(s, "one"), expected);
+    }
+
+    TEST(getOr, emptyContainer) {
+        StringMap s = { };
+        auto expected = "yi";
+
+        ASSERT_EQ(getOr(s, "one", "yi"), expected);
+    }
+
+    TEST(getOr, getFromContainer) {
+        StringMap s;
+        s["one"] = "yi";
+        s["two"] = "er";
+        auto expected = "yi";
+
+        ASSERT_EQ(getOr(s, "one", "nope"), expected);
     }

     /* ----------------------------------------------------------------------------
@@ -1588,7 +1588,6 @@ std::string stripIndentation(std::string_view s)

 //////////////////////////////////////////////////////////////////////

-
 static Sync<std::pair<unsigned short, unsigned short>> windowSize{{0, 0}};


@@ -543,13 +543,31 @@ std::string stripIndentation(std::string_view s);

 /* Get a value for the specified key from an associate container. */
 template <class T>
-std::optional<typename T::mapped_type> get(const T & map, const typename T::key_type & key)
+const typename T::mapped_type * get(const T & map, const typename T::key_type & key)
 {
     auto i = map.find(key);
-    if (i == map.end()) return {};
-    return std::optional<typename T::mapped_type>(i->second);
+    if (i == map.end()) return nullptr;
+    return &i->second;
 }

+template <class T>
+typename T::mapped_type * get(T & map, const typename T::key_type & key)
+{
+    auto i = map.find(key);
+    if (i == map.end()) return nullptr;
+    return &i->second;
+}
+
+/* Get a value for the specified key from an associate container, or a default value if the key isn't present. */
+template <class T>
+const typename T::mapped_type & getOr(T & map,
+    const typename T::key_type & key,
+    const typename T::mapped_type & defaultValue)
+{
+    auto i = map.find(key);
+    if (i == map.end()) return defaultValue;
+    return i->second;
+}
+
 /* Remove and return the first item from a container. */
 template <class T>
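The new getOr() helper above returns the mapped value or a caller-supplied default, which is what lets call sites such as getOr(drv.env, "unpack", "") drop the get(...).value_or(...) dance. A brief usage sketch; the map and keys below are illustrative only:

#include <cassert>
#include <map>
#include <string>

// Same shape as the getOr helper added to util.hh.
template<class T>
const typename T::mapped_type & getOr(T & map,
    const typename T::key_type & key,
    const typename T::mapped_type & defaultValue)
{
    auto i = map.find(key);
    if (i == map.end()) return defaultValue;
    return i->second;
}

int main()
{
    std::map<std::string, std::string> env{{"unpack", "1"}};
    assert(getOr(env, "unpack", "") == "1");            // key present: its value
    assert(getOr(env, "someOtherKey", "fallback") == "fallback");  // key absent: the default
}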
src/nix-build/nix-build.cc (Executable file → Normal file)
@@ -440,7 +440,7 @@ static void main_nix_build(int argc, char * * argv)
     env["NIX_STORE"] = store->storeDir;
     env["NIX_BUILD_CORES"] = std::to_string(settings.buildCores);

-    auto passAsFile = tokenizeString<StringSet>(get(drv.env, "passAsFile").value_or(""));
+    auto passAsFile = tokenizeString<StringSet>(getOr(drv.env, "passAsFile", ""));

     bool keepTmp = false;
     int fileNr = 0;
@@ -543,8 +543,6 @@ static void main_nix_build(int argc, char * * argv)

         restoreProcessContext();

-        logger->stop();
-
         execvp(shell->c_str(), argPtrs.data());

         throw SysError("executing shell '%s'", *shell);
@@ -603,8 +601,6 @@ static void main_nix_build(int argc, char * * argv)
             outPaths.push_back(outputPath);
     }

-    logger->stop();
-
     for (auto & path : outPaths)
         std::cout << store->printStorePath(path) << '\n';
 }
@@ -1489,8 +1489,6 @@ static int main_nix_env(int argc, char * * argv)

         globals.state->printStats();

-        logger->stop();
-
         return 0;
     }
 }
@@ -1095,8 +1095,6 @@ static int main_nix_store(int argc, char * * argv)

         op(opFlags, opArgs);

-        logger->stop();
-
         return 0;
     }
 }
@@ -66,7 +66,7 @@ UnresolvedApp Installable::toApp(EvalState & state)

     auto type = cursor->getAttr("type")->getString();

-    std::string expected = !attrPath.empty() && attrPath[0] == "apps" ? "app" : "derivation";
+    std::string expected = !attrPath.empty() && state.symbols[attrPath[0]] == "apps" ? "app" : "derivation";
     if (type != expected)
         throw Error("attribute '%s' should have type '%s'", cursor->getAttrPathStr(), expected);

@@ -85,11 +85,11 @@ UnresolvedApp Installable::toApp(EvalState & state)

     else if (type == "derivation") {
         auto drvPath = cursor->forceDerivation();
-        auto outPath = cursor->getAttr("outPath")->getString();
-        auto outputName = cursor->getAttr("outputName")->getString();
-        auto name = cursor->getAttr("name")->getString();
+        auto outPath = cursor->getAttr(state.sOutPath)->getString();
+        auto outputName = cursor->getAttr(state.sOutputName)->getString();
+        auto name = cursor->getAttr(state.sName)->getString();
         auto aPname = cursor->maybeGetAttr("pname");
-        auto aMeta = cursor->maybeGetAttr("meta");
+        auto aMeta = cursor->maybeGetAttr(state.sMeta);
         auto aMainProgram = aMeta ? aMeta->maybeGetAttr("mainProgram") : nullptr;
         auto mainProgram =
             aMainProgram
@@ -75,10 +75,10 @@ struct CmdBundle : InstallableCommand

         auto val = installable->toValue(*evalState).first;

-        auto [bundlerFlakeRef, bundlerName] = parseFlakeRefWithFragment(bundler, absPath("."));
+        auto [bundlerFlakeRef, bundlerName, outputsSpec] = parseFlakeRefWithFragmentAndOutputsSpec(bundler, absPath("."));
         const flake::LockFlags lockFlags{ .writeLockFile = false };
         InstallableFlake bundler{this,
-            evalState, std::move(bundlerFlakeRef), bundlerName,
+            evalState, std::move(bundlerFlakeRef), bundlerName, outputsSpec,
             {"bundlers." + settings.thisSystem.get() + ".default",
             "defaultBundler." + settings.thisSystem.get()
             },
@@ -18,6 +18,9 @@ struct DevelopSettings : Config
     Setting<std::string> bashPrompt{this, "", "bash-prompt",
         "The bash prompt (`PS1`) in `nix develop` shells."};

+    Setting<std::string> bashPromptPrefix{this, "", "bash-prompt-prefix",
+        "Prefix prepended to the `PS1` environment variable in `nix develop` shells."};
+
     Setting<std::string> bashPromptSuffix{this, "", "bash-prompt-suffix",
         "Suffix appended to the `PS1` environment variable in `nix develop` shells."};
 };
@@ -482,6 +485,9 @@ struct CmdDevelop : Common, MixEnvironment
             if (developSettings.bashPrompt != "")
                 script += fmt("[ -n \"$PS1\" ] && PS1=%s;\n",
                     shellEscape(developSettings.bashPrompt.get()));
+            if (developSettings.bashPromptPrefix != "")
+                script += fmt("[ -n \"$PS1\" ] && PS1=%s\"$PS1\";\n",
+                    shellEscape(developSettings.bashPromptPrefix.get()));
             if (developSettings.bashPromptSuffix != "")
                 script += fmt("[ -n \"$PS1\" ] && PS1+=%s;\n",
                     shellEscape(developSettings.bashPromptSuffix.get()));
@@ -507,13 +513,25 @@ struct CmdDevelop : Common, MixEnvironment
                 state,
                 installable->nixpkgsFlakeRef(),
                 "bashInteractive",
+                DefaultOutputs(),
                 Strings{},
                 Strings{"legacyPackages." + settings.thisSystem.get() + "."},
                 nixpkgsLockFlags);

-            shell = store->printStorePath(
-                Installable::toStorePath(getEvalStore(), store, Realise::Outputs, OperateOn::Output, bashInstallable))
-                + "/bin/bash";
+            bool found = false;
+
+            for (auto & path : Installable::toStorePaths(getEvalStore(), store, Realise::Outputs, OperateOn::Output, {bashInstallable})) {
+                auto s = store->printStorePath(path) + "/bin/bash";
+                if (pathExists(s)) {
+                    shell = s;
+                    found = true;
+                    break;
+                }
+            }
+
+            if (!found)
+                throw Error("package 'nixpkgs#bashInteractive' does not provide a 'bin/bash'");
+
         } catch (Error &) {
             ignoreException();
         }
@@ -80,8 +80,8 @@ initialised by `stdenv` and exits. This build environment can be
 recorded into a profile using `--profile`.

 The prompt used by the `bash` shell can be customised by setting the
-`bash-prompt` and `bash-prompt-suffix` settings in `nix.conf` or in
-the flake's `nixConfig` attribute.
+`bash-prompt`, `bash-prompt-prefix`, and `bash-prompt-suffix` settings in
+`nix.conf` or in the flake's `nixConfig` attribute.

 # Flake output attributes

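As a quick illustration of the new `bash-prompt-prefix` setting (a sketch only: the prefix string, package, and paths below are hypothetical, and the exact prompt rendering depends on the `PS1` your shell starts with):

```console
$ echo 'bash-prompt-prefix = [nix develop] ' >> ~/.config/nix/nix.conf
$ nix develop nixpkgs#hello
[nix develop] $
```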
@@ -311,7 +311,7 @@ struct CmdFlakeCheck : FlakeCommand
 return state->positions[p];
 };

-auto argHasName = [&] (SymbolIdx arg, std::string_view expected) {
+auto argHasName = [&] (Symbol arg, std::string_view expected) {
 std::string_view name = state->symbols[arg];
 return
 name == expected
@@ -509,7 +509,7 @@ struct CmdFlakeCheck : FlakeCommand

 std::string_view replacement =
 name == "defaultPackage" ? "packages.<system>.default" :
-name == "defaultApps" ? "apps.<system>.default" :
+name == "defaultApp" ? "apps.<system>.default" :
 name == "defaultTemplate" ? "templates.default" :
 name == "defaultBundler" ? "bundlers.<system>.default" :
 name == "overlay" ? "overlays.default" :
@@ -724,7 +724,7 @@ struct CmdFlakeInitCommon : virtual Args, EvalCommand
 auto [templateFlakeRef, templateName] = parseFlakeRefWithFragment(templateUrl, absPath("."));

 auto installable = InstallableFlake(nullptr,
-evalState, std::move(templateFlakeRef), templateName,
+evalState, std::move(templateFlakeRef), templateName, DefaultOutputs(),
 defaultTemplateAttrPaths,
 defaultTemplateAttrPathsPrefixes,
 lockFlags);
@@ -986,8 +986,11 @@ struct CmdFlakeShow : FlakeCommand, MixJSON
 {
 auto j = nlohmann::json::object();

+auto attrPathS = state->symbols.resolve(attrPath);
+
 Activity act(*logger, lvlInfo, actUnknown,
-fmt("evaluating '%s'", concatStringsSep(".", attrPath)));
+fmt("evaluating '%s'", concatStringsSep(".", attrPathS)));

 try {
 auto recurse = [&]()
 {
@@ -995,24 +998,25 @@ struct CmdFlakeShow : FlakeCommand, MixJSON
 logger->cout("%s", headerPrefix);
 auto attrs = visitor.getAttrs();
 for (const auto & [i, attr] : enumerate(attrs)) {
+const auto & attrName = state->symbols[attr];
 bool last = i + 1 == attrs.size();
-auto visitor2 = visitor.getAttr(attr);
+auto visitor2 = visitor.getAttr(attrName);
 auto attrPath2(attrPath);
 attrPath2.push_back(attr);
 auto j2 = visit(*visitor2, attrPath2,
-fmt(ANSI_GREEN "%s%s" ANSI_NORMAL ANSI_BOLD "%s" ANSI_NORMAL, nextPrefix, last ? treeLast : treeConn, attr),
+fmt(ANSI_GREEN "%s%s" ANSI_NORMAL ANSI_BOLD "%s" ANSI_NORMAL, nextPrefix, last ? treeLast : treeConn, attrName),
 nextPrefix + (last ? treeNull : treeLine));
-if (json) j.emplace(attr, std::move(j2));
+if (json) j.emplace(attrName, std::move(j2));
 }
 };

 auto showDerivation = [&]()
 {
-auto name = visitor.getAttr("name")->getString();
+auto name = visitor.getAttr(state->sName)->getString();
 if (json) {
 std::optional<std::string> description;
-if (auto aMeta = visitor.maybeGetAttr("meta")) {
-if (auto aDescription = aMeta->maybeGetAttr("description"))
+if (auto aMeta = visitor.maybeGetAttr(state->sMeta)) {
+if (auto aDescription = aMeta->maybeGetAttr(state->sDescription))
 description = aDescription->getString();
 }
 j.emplace("type", "derivation");
@@ -1022,10 +1026,10 @@ struct CmdFlakeShow : FlakeCommand, MixJSON
 } else {
 logger->cout("%s: %s '%s'",
 headerPrefix,
-attrPath.size() == 2 && attrPath[0] == "devShell" ? "development environment" :
-attrPath.size() >= 2 && attrPath[0] == "devShells" ? "development environment" :
-attrPath.size() == 3 && attrPath[0] == "checks" ? "derivation" :
-attrPath.size() >= 1 && attrPath[0] == "hydraJobs" ? "derivation" :
+attrPath.size() == 2 && attrPathS[0] == "devShell" ? "development environment" :
+attrPath.size() >= 2 && attrPathS[0] == "devShells" ? "development environment" :
+attrPath.size() == 3 && attrPathS[0] == "checks" ? "derivation" :
+attrPath.size() >= 1 && attrPathS[0] == "hydraJobs" ? "derivation" :
 "package",
 name);
 }
@@ -1033,27 +1037,27 @@ struct CmdFlakeShow : FlakeCommand, MixJSON

 if (attrPath.size() == 0
 || (attrPath.size() == 1 && (
-attrPath[0] == "defaultPackage"
-|| attrPath[0] == "devShell"
-|| attrPath[0] == "formatter"
-|| attrPath[0] == "nixosConfigurations"
-|| attrPath[0] == "nixosModules"
-|| attrPath[0] == "defaultApp"
-|| attrPath[0] == "templates"
-|| attrPath[0] == "overlays"))
+attrPathS[0] == "defaultPackage"
+|| attrPathS[0] == "devShell"
+|| attrPathS[0] == "formatter"
+|| attrPathS[0] == "nixosConfigurations"
+|| attrPathS[0] == "nixosModules"
+|| attrPathS[0] == "defaultApp"
+|| attrPathS[0] == "templates"
+|| attrPathS[0] == "overlays"))
 || ((attrPath.size() == 1 || attrPath.size() == 2)
-&& (attrPath[0] == "checks"
-|| attrPath[0] == "packages"
-|| attrPath[0] == "devShells"
-|| attrPath[0] == "apps"))
+&& (attrPathS[0] == "checks"
+|| attrPathS[0] == "packages"
+|| attrPathS[0] == "devShells"
+|| attrPathS[0] == "apps"))
 )
 {
 recurse();
 }

 else if (
-(attrPath.size() == 2 && (attrPath[0] == "defaultPackage" || attrPath[0] == "devShell" || attrPath[0] == "formatter"))
-|| (attrPath.size() == 3 && (attrPath[0] == "checks" || attrPath[0] == "packages" || attrPath[0] == "devShells"))
+(attrPath.size() == 2 && (attrPathS[0] == "defaultPackage" || attrPathS[0] == "devShell" || attrPathS[0] == "formatter"))
+|| (attrPath.size() == 3 && (attrPathS[0] == "checks" || attrPathS[0] == "packages" || attrPathS[0] == "devShells"))
 )
 {
 if (visitor.isDerivation())
@@ -1062,19 +1066,23 @@ struct CmdFlakeShow : FlakeCommand, MixJSON
 throw Error("expected a derivation");
 }

-else if (attrPath.size() > 0 && attrPath[0] == "hydraJobs") {
+else if (attrPath.size() > 0 && attrPathS[0] == "hydraJobs") {
 if (visitor.isDerivation())
 showDerivation();
 else
 recurse();
 }

-else if (attrPath.size() > 0 && attrPath[0] == "legacyPackages") {
+else if (attrPath.size() > 0 && attrPathS[0] == "legacyPackages") {
 if (attrPath.size() == 1)
 recurse();
-else if (!showLegacy)
-logger->warn(fmt("%s: " ANSI_WARNING "omitted" ANSI_NORMAL " (use '--legacy' to show)", headerPrefix));
+else if (!showLegacy){
+if (!json)
+logger->cout(fmt("%s " ANSI_WARNING "omitted" ANSI_NORMAL " (use '--legacy' to show)", headerPrefix));
 else {
+logger->warn(fmt("%s omitted (use '--legacy' to show)", concatStringsSep(".", attrPathS)));
+}
+} else {
 if (visitor.isDerivation())
 showDerivation();
 else if (attrPath.size() <= 2)
@@ -1084,8 +1092,8 @@ struct CmdFlakeShow : FlakeCommand, MixJSON
 }

 else if (
-(attrPath.size() == 2 && attrPath[0] == "defaultApp") ||
-(attrPath.size() == 3 && attrPath[0] == "apps"))
+(attrPath.size() == 2 && attrPathS[0] == "defaultApp") ||
+(attrPath.size() == 3 && attrPathS[0] == "apps"))
 {
 auto aType = visitor.maybeGetAttr("type");
 if (!aType || aType->getString() != "app")
@@ -1098,8 +1106,8 @@ struct CmdFlakeShow : FlakeCommand, MixJSON
 }

 else if (
-(attrPath.size() == 1 && attrPath[0] == "defaultTemplate") ||
-(attrPath.size() == 2 && attrPath[0] == "templates"))
+(attrPath.size() == 1 && attrPathS[0] == "defaultTemplate") ||
+(attrPath.size() == 2 && attrPathS[0] == "templates"))
 {
 auto description = visitor.getAttr("description")->getString();
 if (json) {
@@ -1112,11 +1120,11 @@ struct CmdFlakeShow : FlakeCommand, MixJSON

 else {
 auto [type, description] =
-(attrPath.size() == 1 && attrPath[0] == "overlay")
-|| (attrPath.size() == 2 && attrPath[0] == "overlays") ? std::make_pair("nixpkgs-overlay", "Nixpkgs overlay") :
-attrPath.size() == 2 && attrPath[0] == "nixosConfigurations" ? std::make_pair("nixos-configuration", "NixOS configuration") :
-(attrPath.size() == 1 && attrPath[0] == "nixosModule")
-|| (attrPath.size() == 2 && attrPath[0] == "nixosModules") ? std::make_pair("nixos-module", "NixOS module") :
+(attrPath.size() == 1 && attrPathS[0] == "overlay")
+|| (attrPath.size() == 2 && attrPathS[0] == "overlays") ? std::make_pair("nixpkgs-overlay", "Nixpkgs overlay") :
+attrPath.size() == 2 && attrPathS[0] == "nixosConfigurations" ? std::make_pair("nixos-configuration", "NixOS configuration") :
+(attrPath.size() == 1 && attrPathS[0] == "nixosModule")
+|| (attrPath.size() == 2 && attrPathS[0] == "nixosModules") ? std::make_pair("nixos-module", "NixOS module") :
 std::make_pair("unknown", "unknown");
 if (json) {
 j.emplace("type", type);
@@ -1125,7 +1133,7 @@ struct CmdFlakeShow : FlakeCommand, MixJSON
 }
 }
 } catch (EvalError & e) {
-if (!(attrPath.size() > 0 && attrPath[0] == "legacyPackages"))
+if (!(attrPath.size() > 0 && attrPathS[0] == "legacyPackages"))
 throw;
 }

@@ -153,7 +153,7 @@ Currently the `type` attribute can be one of the following:
 git(+http|+https|+ssh|+git|+file|):(//<server>)?<path>(\?<params>)?
 ```

-The `ref` attribute defaults to `master`.
+The `ref` attribute defaults to resolving the `HEAD` reference.

 The `rev` attribute must denote a commit that exists in the branch
 or tag specified by the `ref` attribute, since Nix doesn't do a full
@@ -161,6 +161,11 @@ Currently the `type` attribute can be one of the following:
 doesn't allow fetching a `rev` without a known `ref`). The default
 is the commit currently pointed to by `ref`.

+When `git+file` is used without specifying `ref` or `rev`, files are
+fetched directly from the local `path` as long as they have been added
+to the Git repository. If there are uncommitted changes, the reference
+is treated as dirty and a warning is printed.
+
 For example, the following are valid Git flake references:

 * `git+https://example.org/my/repo`
@@ -326,9 +331,10 @@ The following attributes are supported in `flake.nix`:

 * `nixConfig`: a set of `nix.conf` options to be set when evaluating any
 part of a flake. In the interests of security, only a small set of
-whitelisted options (currently `bash-prompt`, `bash-prompt-suffix`,
-and `flake-registry`) are allowed to be set without confirmation so long as
-`accept-flake-config` is not set in the global configuration.
+whitelisted options (currently `bash-prompt`, `bash-prompt-prefix`,
+`bash-prompt-suffix`, and `flake-registry`) are allowed to be set without
+confirmation so long as `accept-flake-config` is not set in the global
+configuration.

 ## Flake inputs

@@ -26,7 +26,8 @@ struct CmdFmt : SourceExprCommand {

 Strings getDefaultFlakeAttrPathPrefixes() override { return Strings{}; }

-void run(ref<Store> store) {
+void run(ref<Store> store) override
+{
 auto evalState = getEvalState();
 auto evalStore = getEvalStore();
@@ -261,6 +261,8 @@ void mainWrapped(int argc, char * * argv)
 }
 #endif

+Finally f([] { logger->stop(); });
+
 programPath = argv[0];
 auto programName = std::string(baseNameOf(programPath));

@@ -279,8 +281,6 @@ void mainWrapped(int argc, char * * argv)
 verbosity = lvlInfo;
 }

-Finally f([] { logger->stop(); });
-
 NixArgs args;

 if (argc == 2 && std::string(argv[1]) == "__dump-args") {
@@ -146,6 +146,51 @@ For most commands, if no installable is specified, the default is `.`,
 i.e. Nix will operate on the default flake output attribute of the
 flake in the current directory.

+## Derivation output selection
+
+Derivations can have multiple outputs, each corresponding to a
+different store path. For instance, a package can have a `bin` output
+that contains programs, and a `dev` output that provides development
+artifacts like C/C++ header files. The outputs on which `nix` commands
+operate are determined as follows:
+
+* You can explicitly specify the desired outputs using the syntax
+*installable*`^`*output1*`,`*...*`,`*outputN*. For example, you can
+obtain the `dev` and `static` outputs of the `glibc` package:
+
+```console
+# nix build 'nixpkgs#glibc^dev,static'
+# ls ./result-dev/include/ ./result-static/lib/
+…
+```
+
+* You can also specify that *all* outputs should be used using the
+syntax *installable*`^*`. For example, the following shows the size
+of all outputs of the `glibc` package in the binary cache:
+
+```console
+# nix path-info -S --eval-store auto --store https://cache.nixos.org 'nixpkgs#glibc^*'
+/nix/store/g02b1lpbddhymmcjb923kf0l7s9nww58-glibc-2.33-123 33208200
+/nix/store/851dp95qqiisjifi639r0zzg5l465ny4-glibc-2.33-123-bin 36142896
+/nix/store/kdgs3q6r7xdff1p7a9hnjr43xw2404z7-glibc-2.33-123-debug 155787312
+/nix/store/n4xa8h6pbmqmwnq0mmsz08l38abb06zc-glibc-2.33-123-static 42488328
+/nix/store/q6580lr01jpcsqs4r5arlh4ki2c1m9rv-glibc-2.33-123-dev 44200560
+```
+
+* If you didn't specify the desired outputs, but the derivation has an
+attribute `meta.outputsToInstall`, Nix will use those outputs. For
+example, since the package `nixpkgs#libxml2` has this attribute:
+
+```console
+# nix eval 'nixpkgs#libxml2.meta.outputsToInstall'
+[ "bin" "man" ]
+```
+
+a command like `nix shell nixpkgs#libxml2` will provide only those
+two outputs by default.
+
+* Otherwise, Nix will use all outputs of the derivation.
+
 # Nix stores

 Most `nix` subcommands operate on a *Nix store*.
@@ -20,6 +20,13 @@ R""(
 # nix profile install nixpkgs/d73407e8e6002646acfdef0e39ace088bacc83da#hello
 ```

+* Install a specific output of a package:
+
+```console
+# nix profile install nixpkgs#bash^man
+```
+
 # Description

 This command adds *installables* to a Nix profile.
@@ -22,13 +22,13 @@ struct ProfileElementSource
 // FIXME: record original attrpath.
 FlakeRef resolvedRef;
 std::string attrPath;
-// FIXME: output names
+OutputsSpec outputs;

 bool operator < (const ProfileElementSource & other) const
 {
 return
-std::pair(originalRef.to_string(), attrPath) <
-std::pair(other.originalRef.to_string(), other.attrPath);
+std::tuple(originalRef.to_string(), attrPath, outputs) <
+std::tuple(other.originalRef.to_string(), other.attrPath, other.outputs);
 }
 };

@@ -37,12 +37,12 @@ struct ProfileElement
 StorePathSet storePaths;
 std::optional<ProfileElementSource> source;
 bool active = true;
-// FIXME: priority
+int priority = 5;

 std::string describe() const
 {
 if (source)
-return fmt("%s#%s", source->originalRef, source->attrPath);
+return fmt("%s#%s%s", source->originalRef, source->attrPath, printOutputsSpec(source->outputs));
 StringSet names;
 for (auto & path : storePaths)
 names.insert(DrvName(path.name()).name);
@@ -67,7 +67,6 @@ struct ProfileElement
 ref<Store> store,
 const BuiltPaths & builtPaths)
 {
-// FIXME: respect meta.outputsToInstall
 storePaths.clear();
 for (auto & buildable : builtPaths) {
 std::visit(overloaded {
@@ -117,11 +116,15 @@ struct ProfileManifest
 for (auto & p : e["storePaths"])
 element.storePaths.insert(state.store->parseStorePath((std::string) p));
 element.active = e["active"];
+if(e.contains("priority")) {
+element.priority = e["priority"];
+}
 if (e.value(sUrl, "") != "") {
 element.source = ProfileElementSource {
 parseFlakeRef(e[sOriginalUrl]),
 parseFlakeRef(e[sUrl]),
-e["attrPath"]
+e["attrPath"],
+e["outputs"].get<OutputsSpec>()
 };
 }
 elements.emplace_back(std::move(element));
@@ -153,10 +156,12 @@ struct ProfileManifest
 nlohmann::json obj;
 obj["storePaths"] = paths;
 obj["active"] = element.active;
+obj["priority"] = element.priority;
 if (element.source) {
 obj["originalUrl"] = element.source->originalRef.to_string();
 obj["url"] = element.source->resolvedRef.to_string();
 obj["attrPath"] = element.source->attrPath;
+obj["outputs"] = element.source->outputs;
 }
 array.push_back(obj);
 }
@@ -176,7 +181,7 @@ struct ProfileManifest
 for (auto & element : elements) {
 for (auto & path : element.storePaths) {
 if (element.active)
-pkgs.emplace_back(store->printStorePath(path), true, 5);
+pkgs.emplace_back(store->printStorePath(path), true, element.priority);
 references.insert(path);
 }
 }
@@ -258,6 +263,16 @@ builtPathsPerInstallable(

 struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile
 {
+std::optional<int> priority;
+CmdProfileInstall() {
+addFlag({
+.longName = "priority",
+.description = "The priority of the package to install.",
+.labels = {"priority"},
+.handler = {&priority},
+});
+};
+
 std::string description() override
 {
 return "install a package into a profile";
@@ -281,6 +296,8 @@ struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile
 for (auto & installable : installables) {
 ProfileElement element;
+
+

 if (auto installable2 = std::dynamic_pointer_cast<InstallableFlake>(installable)) {
 // FIXME: make build() return this?
 auto [attrPath, resolvedRef, drv] = installable2->toDerivation();
@@ -288,8 +305,17 @@ struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile
 installable2->flakeRef,
 resolvedRef,
 attrPath,
+installable2->outputsSpec
 };
+
+if(drv.priority) {
+element.priority = *drv.priority;
 }
+}
+
+if(priority) { // if --priority was specified we want to override the priority of the installable
+element.priority = *priority;
+};

 element.updateStorePaths(getEvalStore(), store, builtPaths[installable.get()]);

@@ -444,6 +470,7 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf
 getEvalState(),
 FlakeRef(element.source->originalRef),
 "",
+element.source->outputs,
 Strings{element.source->attrPath},
 Strings{},
 lockFlags);
@@ -459,6 +486,7 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf
 installable->flakeRef,
 resolvedRef,
 attrPath,
+installable->outputsSpec
 };

 installables.push_back(installable);
@@ -514,8 +542,8 @@ struct CmdProfileList : virtual EvalCommand, virtual StoreCommand, MixDefaultPro
 for (size_t i = 0; i < manifest.elements.size(); ++i) {
 auto & element(manifest.elements[i]);
 logger->cout("%d %s %s %s", i,
-element.source ? element.source->originalRef.to_string() + "#" + element.source->attrPath : "-",
-element.source ? element.source->resolvedRef.to_string() + "#" + element.source->attrPath : "-",
+element.source ? element.source->originalRef.to_string() + "#" + element.source->attrPath + printOutputsSpec(element.source->outputs) : "-",
+element.source ? element.source->resolvedRef.to_string() + "#" + element.source->attrPath + printOutputsSpec(element.source->outputs) : "-",
 concatStringsSep(" ", store->printStorePathSet(element.storePaths)));
 }
 }
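Taken together, these changes let an install override the new default element priority of 5, with a smaller number winning file conflicts in the profile. A minimal illustration of the new flag (the package name is only an example, not taken from the patch):

```console
$ nix profile install nixpkgs#hello --priority 4
```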
@@ -73,7 +73,7 @@ struct NixRepl
 void initEnv();
 void reloadFiles();
 void addAttrsToScope(Value & attrs);
-void addVarToScope(const SymbolIdx name, Value & v);
+void addVarToScope(const Symbol name, Value & v);
 Expr * parseString(std::string s);
 void evalString(std::string s, Value & v);

@@ -711,7 +711,7 @@ void NixRepl::addAttrsToScope(Value & attrs)
 }


-void NixRepl::addVarToScope(const SymbolIdx name, Value & v)
+void NixRepl::addVarToScope(const Symbol name, Value & v)
 {
 if (displ >= envSize)
 throw Error("environment full; cannot add more variables");
@@ -77,13 +77,15 @@ struct CmdSearch : InstallableCommand, MixJSON

 visit = [&](eval_cache::AttrCursor & cursor, const std::vector<Symbol> & attrPath, bool initialRecurse)
 {
+auto attrPathS = state->symbols.resolve(attrPath);
+
 Activity act(*logger, lvlInfo, actUnknown,
-fmt("evaluating '%s'", concatStringsSep(".", attrPath)));
+fmt("evaluating '%s'", concatStringsSep(".", attrPathS)));
 try {
 auto recurse = [&]()
 {
 for (const auto & attr : cursor.getAttrs()) {
-auto cursor2 = cursor.getAttr(attr);
+auto cursor2 = cursor.getAttr(state->symbols[attr]);
 auto attrPath2(attrPath);
 attrPath2.push_back(attr);
 visit(*cursor2, attrPath2, false);
@@ -91,13 +93,13 @@ struct CmdSearch : InstallableCommand, MixJSON
 };

 if (cursor.isDerivation()) {
-DrvName name(cursor.getAttr("name")->getString());
+DrvName name(cursor.getAttr(state->sName)->getString());

-auto aMeta = cursor.maybeGetAttr("meta");
-auto aDescription = aMeta ? aMeta->maybeGetAttr("description") : nullptr;
+auto aMeta = cursor.maybeGetAttr(state->sMeta);
+auto aDescription = aMeta ? aMeta->maybeGetAttr(state->sDescription) : nullptr;
 auto description = aDescription ? aDescription->getString() : "";
 std::replace(description.begin(), description.end(), '\n', ' ');
-auto attrPath2 = concatStringsSep(".", attrPath);
+auto attrPath2 = concatStringsSep(".", attrPathS);

 std::vector<std::smatch> attrPathMatches;
 std::vector<std::smatch> descriptionMatches;
@@ -146,21 +148,21 @@ struct CmdSearch : InstallableCommand, MixJSON

 else if (
 attrPath.size() == 0
-|| (attrPath[0] == "legacyPackages" && attrPath.size() <= 2)
-|| (attrPath[0] == "packages" && attrPath.size() <= 2))
+|| (attrPathS[0] == "legacyPackages" && attrPath.size() <= 2)
+|| (attrPathS[0] == "packages" && attrPath.size() <= 2))
 recurse();

 else if (initialRecurse)
 recurse();

-else if (attrPath[0] == "legacyPackages" && attrPath.size() > 2) {
-auto attr = cursor.maybeGetAttr("recurseForDerivations");
+else if (attrPathS[0] == "legacyPackages" && attrPath.size() > 2) {
+auto attr = cursor.maybeGetAttr(state->sRecurseForDerivations);
 if (attr && attr->getBool())
 recurse();
 }

 } catch (EvalError & e) {
-if (!(attrPath.size() > 0 && attrPath[0] == "legacyPackages"))
+if (!(attrPath.size() > 0 && attrPathS[0] == "legacyPackages"))
 throw;
 }
 };
@@ -176,7 +176,7 @@ int main(int argc, char ** argv)
 impurePaths.insert(argv[2]);
 else {
 auto drv = store->derivationFromPath(store->parseStorePath(argv[1]));
-impurePaths = tokenizeString<StringSet>(get(drv.env, "__impureHostDeps").value_or(""));
+impurePaths = tokenizeString<StringSet>(getOr(drv.env, "__impureHostDeps", ""));
 impurePaths.insert("/usr/lib/libSystem.dylib");
 }

@@ -34,7 +34,7 @@ outPath=$(readlink -f $TEST_ROOT/result)

 grep 'FOO BAR BAZ' $TEST_ROOT/machine0/$outPath

-testPrintOutPath=$(nix build -L -v -f $file --print-out-paths --max-jobs 0 \
+testPrintOutPath=$(nix build -L -v -f $file --no-link --print-out-paths --max-jobs 0 \
 --arg busybox $busybox \
 --store $TEST_ROOT/machine0 \
 --builders "$(join_by '; ' "${builders[@]}")"
@@ -72,6 +72,7 @@ fi

 # Behavior of keep-failed
 out="$(nix-build 2>&1 failing.nix \
+--no-out-link \
 --builders "$(join_by '; ' "${builders[@]}")" \
 --keep-failed \
 --store $TEST_ROOT/machine0 \
@@ -2,15 +2,10 @@ source common.sh

 clearStore

-# Make sure that 'nix build' only returns the outputs we asked for.
-nix build -f multiple-outputs.nix --json a --no-link | jq --exit-status '
-(.[0] |
-(.drvPath | match(".*multiple-outputs-a.drv")) and
-(.outputs | keys | length == 1) and
-(.outputs.first | match(".*multiple-outputs-a-first")))
-'
+set -o pipefail

-nix build -f multiple-outputs.nix --json a.all b.all --no-link | jq --exit-status '
+# Make sure that 'nix build' returns all outputs by default.
+nix build -f multiple-outputs.nix --json a b --no-link | jq --exit-status '
 (.[0] |
 (.drvPath | match(".*multiple-outputs-a.drv")) and
 (.outputs | keys | length == 2) and
@@ -22,6 +17,52 @@ nix build -f multiple-outputs.nix --json a.all b.all --no-link | jq --exit-statu
 (.outputs.out | match(".*multiple-outputs-b")))
 '

+# Test output selection using the '^' syntax.
+nix build -f multiple-outputs.nix --json a^first --no-link | jq --exit-status '
+(.[0] |
+(.drvPath | match(".*multiple-outputs-a.drv")) and
+(.outputs | keys == ["first"]))
+'
+
+nix build -f multiple-outputs.nix --json a^second,first --no-link | jq --exit-status '
+(.[0] |
+(.drvPath | match(".*multiple-outputs-a.drv")) and
+(.outputs | keys == ["first", "second"]))
+'
+
+nix build -f multiple-outputs.nix --json 'a^*' --no-link | jq --exit-status '
+(.[0] |
+(.drvPath | match(".*multiple-outputs-a.drv")) and
+(.outputs | keys == ["first", "second"]))
+'
+
+# Test that 'outputsToInstall' is respected by default.
+nix build -f multiple-outputs.nix --json e --no-link | jq --exit-status '
+(.[0] |
+(.drvPath | match(".*multiple-outputs-e.drv")) and
+(.outputs | keys == ["a", "b"]))
+'
+
+# But not when it's overriden.
+nix build -f multiple-outputs.nix --json e^a --no-link | jq --exit-status '
+(.[0] |
+(.drvPath | match(".*multiple-outputs-e.drv")) and
+(.outputs | keys == ["a"]))
+'
+
+nix build -f multiple-outputs.nix --json 'e^*' --no-link | jq --exit-status '
+(.[0] |
+(.drvPath | match(".*multiple-outputs-e.drv")) and
+(.outputs | keys == ["a", "b", "c"]))
+'
+
+# Make sure that `--impure` works (regression test for https://github.com/NixOS/nix/issues/6488)
+nix build --impure -f multiple-outputs.nix --json e --no-link | jq --exit-status '
+(.[0] |
+(.drvPath | match(".*multiple-outputs-e.drv")) and
+(.outputs | keys == ["a", "b"]))
+'
+
 testNormalization () {
 clearStore
 outPath=$(nix-build ./simple.nix --no-out-link)
@@ -25,7 +25,8 @@ buildDrvs --substitute --substituters $REMOTE_STORE --no-require-sigs -j0 transi
 # Check that the thing we’ve just substituted has its realisation stored
 nix realisation info --file ./content-addressed.nix transitivelyDependentCA
 # Check that its dependencies have it too
-nix realisation info --file ./content-addressed.nix dependentCA rootCA
+nix realisation info --file ./content-addressed.nix dependentCA
+# nix realisation info --file ./content-addressed.nix rootCA --outputs out

 # Same thing, but
 # 1. With non-ca derivations
@@ -157,11 +157,12 @@ expect() {
 local expected res
 expected="$1"
 shift
-set +e
-"$@"
-res="$?"
-set -e
-[[ $res -eq $expected ]]
+"$@" || res="$?"
+if [[ $res -ne $expected ]]; then
+echo "Expected '$expected' but got '$res' while running '$*'"
+return 1
+fi
+return 0
 }

 needLocalStore() {
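With the rewritten helper, a failing expectation now reports both exit codes instead of silently returning non-zero. An illustrative call (not part of the patch), using the message format from the new code:

```console
$ expect 0 false
Expected '0' but got '1' while running 'false'
```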
@@ -7,7 +7,7 @@ clearStore
 clearCacheCache

 # Initialize binary cache.
-nonCaPath=$(nix build --json --file ./dependencies.nix | jq -r .[].outputs.out)
+nonCaPath=$(nix build --json --file ./dependencies.nix --no-link | jq -r .[].outputs.out)
 caPath=$(nix store make-content-addressed --json $nonCaPath | jq -r '.rewrites | map(.) | .[]')
 nix copy --to file://$cacheDir $nonCaPath

@@ -161,6 +161,14 @@ path4=$(nix eval --impure --raw --expr "(builtins.fetchGit $repo).outPath")
 [[ $(cat $path4/hello) = dev ]]
 [[ $path3 = $path4 ]]

+# Using remote path with branch other than 'master' should fetch the HEAD revision.
+# (--tarball-ttl 0 to prevent using the cached repo above)
+export _NIX_FORCE_HTTP=1
+path4=$(nix eval --tarball-ttl 0 --impure --raw --expr "(builtins.fetchGit $repo).outPath")
+[[ $(cat $path4/hello) = dev ]]
+[[ $path3 = $path4 ]]
+unset _NIX_FORCE_HTTP
+
 # Confirm same as 'dev' branch
 path5=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = $repo; ref = \"dev\"; }).outPath")
 [[ $path3 = $path5 ]]
@@ -31,7 +31,14 @@ flakeFollowsE=$TEST_ROOT/follows/flakeA/flakeE
 for repo in $flake1Dir $flake2Dir $flake3Dir $flake7Dir $templatesDir $nonFlakeDir $flakeA $flakeB $flakeFollowsA; do
 rm -rf $repo $repo.tmp
 mkdir -p $repo
-git -C $repo init
+
+# Give one repo a non-master initial branch.
+extraArgs=
+if [[ $repo == $flake2Dir ]]; then
+extraArgs="--initial-branch=main"
+fi
+
+git -C $repo init $extraArgs
 git -C $repo config user.email "foobar@example.com"
 git -C $repo config user.name "Foobar"
 done
@@ -156,6 +163,7 @@ nix build -o $TEST_ROOT/result --expr "(builtins.getFlake \"git+file://$flake1Di
 # But should succeed in impure mode.
 (! nix build -o $TEST_ROOT/result flake2#bar --impure)
 nix build -o $TEST_ROOT/result flake2#bar --impure --no-write-lock-file
+nix eval --expr "builtins.getFlake \"$flake2Dir\"" --impure

 # Building a local flake with an unlocked dependency should fail with --no-update-lock-file.
 nix build -o $TEST_ROOT/result $flake2Dir#bar --no-update-lock-file 2>&1 | grep 'requires lock file changes'
@@ -4,6 +4,7 @@ export TEST_VAR=foo # for eval-okay-getenv.nix
 export NIX_REMOTE=dummy://

 nix-instantiate --eval -E 'builtins.trace "Hello" 123' 2>&1 | grep -q Hello
+nix-instantiate --eval -E 'builtins.addErrorContext "Hello" 123' 2>&1
 (! nix-instantiate --show-trace --eval -E 'builtins.addErrorContext "Hello" 123' 2>&1 | grep -q Hello)
 nix-instantiate --show-trace --eval -E 'builtins.addErrorContext "Hello" (throw "Foo")' 2>&1 | grep -q Hello

@@ -14,7 +15,7 @@ fail=0
 for i in lang/parse-fail-*.nix; do
 echo "parsing $i (should fail)";
 i=$(basename $i .nix)
-if nix-instantiate --parse - < lang/$i.nix; then
+if ! expect 1 nix-instantiate --parse - < lang/$i.nix; then
 echo "FAIL: $i shouldn't parse"
 fail=1
 fi
@@ -23,7 +24,7 @@ done
 for i in lang/parse-okay-*.nix; do
 echo "parsing $i (should succeed)";
 i=$(basename $i .nix)
-if ! nix-instantiate --parse - < lang/$i.nix > lang/$i.out; then
+if ! expect 0 nix-instantiate --parse - < lang/$i.nix > lang/$i.out; then
 echo "FAIL: $i should parse"
 fail=1
 fi
@@ -32,7 +33,7 @@ done
 for i in lang/eval-fail-*.nix; do
 echo "evaluating $i (should fail)";
 i=$(basename $i .nix)
-if nix-instantiate --eval lang/$i.nix; then
+if ! expect 1 nix-instantiate --eval lang/$i.nix; then
 echo "FAIL: $i shouldn't evaluate"
 fail=1
 fi
@@ -47,7 +48,7 @@ for i in lang/eval-okay-*.nix; do
 if test -e lang/$i.flags; then
 flags=$(cat lang/$i.flags)
 fi
-if ! NIX_PATH=lang/dir3:lang/dir4 nix-instantiate $flags --eval --strict lang/$i.nix > lang/$i.out; then
+if ! expect 0 env NIX_PATH=lang/dir3:lang/dir4 nix-instantiate $flags --eval --strict lang/$i.nix > lang/$i.out; then
 echo "FAIL: $i should evaluate"
 fail=1
 elif ! diff lang/$i.out lang/$i.exp; then
@@ -57,7 +58,7 @@ for i in lang/eval-okay-*.nix; do
 fi

 if test -e lang/$i.exp.xml; then
-if ! nix-instantiate --eval --xml --no-location --strict \
+if ! expect 0 nix-instantiate --eval --xml --no-location --strict \
 lang/$i.nix > lang/$i.out.xml; then
 echo "FAIL: $i should evaluate"
 fail=1
@@ -1,36 +1,35 @@
-# RFC 7159, section 13.
 builtins.fromJSON
 ''
 {
-"Image": {
-"Width": 800,
-"Height": 600,
-"Title": "View from 15th Floor",
-"Thumbnail": {
-"Url": "http://www.example.com/image/481989943",
-"Height": 125,
-"Width": 100
+"Video": {
+"Title": "The Penguin Chronicles",
+"Width": 1920,
+"Height": 1080,
+"EmbeddedData": [3.14159, 23493,null, true ,false, -10],
+"Thumb": {
+"Url": "http://www.example.com/video/5678931",
+"Width": 200,
+"Height": 250
 },
-"Animated" : false,
-"IDs": [116, 943, 234, 38793, true ,false,null, -100],
-"Latitude": 37.7668,
-"Longitude": -122.3959
+"Subtitle" : false,
+"Latitude": 46.2051,
+"Longitude": 6.0723
 }
 }
 ''
 ==
-{ Image =
-{ Width = 800;
-Height = 600;
-Title = "View from 15th Floor";
-Thumbnail =
-{ Url = http://www.example.com/image/481989943;
-Height = 125;
-Width = 100;
+{ Video =
+{ Title = "The Penguin Chronicles";
+Width = 1920;
+Height = 1080;
+EmbeddedData = [ 3.14159 23493 null true false (0-10) ];
+Thumb =
+{ Url = "http://www.example.com/video/5678931";
+Width = 200;
+Height = 250;
 };
-Animated = false;
-IDs = [ 116 943 234 38793 true false null (0-100) ];
-Latitude = 37.7668;
-Longitude = -122.3959;
+Subtitle = false;
+Latitude = 46.2051;
+Longitude = 6.0723;
 };
 }
@@ -80,4 +80,11 @@ rec {
 '';
 }).a;

+e = mkDerivation {
+name = "multiple-outputs-e";
+outputs = [ "a" "b" "c" ];
+meta.outputsToInstall = [ "a" "b" ];
+buildCommand = "mkdir $a $b $c";
+};
+
 }
@@ -17,6 +17,7 @@ cat > $flake1Dir/flake.nix <<EOF
 outputs = { self }: with import ./config.nix; rec {
 packages.$system.default = mkDerivation {
 name = "profile-test-\${builtins.readFile ./version}";
+outputs = [ "out" "man" "dev" ];
 builder = builtins.toFile "builder.sh"
 ''
 mkdir -p \$out/bin
@@ -26,10 +27,13 @@ cat > $flake1Dir/flake.nix <<EOF
 EOF
 chmod +x \$out/bin/hello
 echo DONE
+mkdir -p \$man/share/man
+mkdir -p \$dev/include
 '';
 __contentAddressed = import ./ca.nix;
 outputHashMode = "recursive";
 outputHashAlgo = "sha256";
+meta.outputsToInstall = [ "out" "man" ];
 };
 };
 }
@@ -46,6 +50,8 @@ nix-env -f ./user-envs.nix -i foo-1.0
 nix profile list | grep '0 - - .*-foo-1.0'
 nix profile install $flake1Dir -L
 [[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello World" ]]
+[ -e $TEST_HOME/.nix-profile/share/man ]
+(! [ -e $TEST_HOME/.nix-profile/include ])
 nix profile history
 nix profile history | grep "packages.$system.default: ∅ -> 1.0"
 nix profile diff-closures | grep 'env-manifest.nix: ε → ∅'
@@ -55,7 +61,7 @@ printf NixOS > $flake1Dir/who
 printf 2.0 > $flake1Dir/version
 nix profile upgrade 1
 [[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello NixOS" ]]
-nix profile history | grep "packages.$system.default: 1.0 -> 2.0"
+nix profile history | grep "packages.$system.default: 1.0, 1.0-man -> 2.0, 2.0-man"

 # Test 'history', 'diff-closures'.
 nix profile diff-closures
@@ -86,7 +92,7 @@ nix profile wipe-history
 printf true > $flake1Dir/ca.nix
 printf 3.0 > $flake1Dir/version
 nix profile upgrade 0
-nix profile history | grep "packages.$system.default: 1.0 -> 3.0"
+nix profile history | grep "packages.$system.default: 1.0, 1.0-man -> 3.0, 3.0-man"

 # Test new install of CA package.
 nix profile remove 0
@@ -95,3 +101,40 @@ printf Utrecht > $flake1Dir/who
 nix profile install $flake1Dir
 [[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello Utrecht" ]]
 [[ $(nix path-info --json $(realpath $TEST_HOME/.nix-profile/bin/hello) | jq -r .[].ca) =~ fixed:r:sha256: ]]
+
+# Override the outputs.
+nix profile remove 0 1
+nix profile install "$flake1Dir^*"
+[[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello Utrecht" ]]
+[ -e $TEST_HOME/.nix-profile/share/man ]
+[ -e $TEST_HOME/.nix-profile/include ]
+
+printf Nix > $flake1Dir/who
+nix profile upgrade 0
+[[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello Nix" ]]
+[ -e $TEST_HOME/.nix-profile/share/man ]
+[ -e $TEST_HOME/.nix-profile/include ]
+
+nix profile remove 0
+nix profile install "$flake1Dir^man"
+(! [ -e $TEST_HOME/.nix-profile/bin/hello ])
+[ -e $TEST_HOME/.nix-profile/share/man ]
+(! [ -e $TEST_HOME/.nix-profile/include ])
+
+# test priority
+nix profile remove 0
+
+# Make another flake.
+flake2Dir=$TEST_ROOT/flake2
+printf World > $flake1Dir/who
+cp -r $flake1Dir $flake2Dir
+printf World2 > $flake2Dir/who
+
+nix profile install $flake1Dir
+[[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello World" ]]
+nix profile install $flake2Dir --priority 100
+[[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello World" ]]
+nix profile install $flake2Dir --priority 0
+[[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello World2" ]]
+# nix profile install $flake1Dir --priority 100
+# [[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello World" ]]
@@ -9,12 +9,12 @@ echo 'require-sigs = false' >> $NIX_CONF_DIR/nix.conf

 restartDaemon

-# Build the dependencies and push them to the remote store
+# Build the dependencies and push them to the remote store.
 nix-build -o $TEST_ROOT/result dependencies.nix --post-build-hook $PWD/push-to-store.sh

 clearStore

-# Ensure that we the remote store contains both the runtime and buildtime
-# closure of what we've just built
+# Ensure that the remote store contains both the runtime and build-time
+# closure of what we've just built.
 nix copy --from "$REMOTE_STORE" --no-require-sigs -f dependencies.nix
 nix copy --from "$REMOTE_STORE" --no-require-sigs -f dependencies.nix input1_drv
@@ -1,6 +1,10 @@
 #!/bin/sh

 set -x
+set -e

+[ -n "$OUT_PATHS" ]
+[ -n "$DRV_PATH" ]
+
 echo Pushing "$OUT_PATHS" to "$REMOTE_STORE"
 printf "%s" "$DRV_PATH" | xargs nix copy --to "$REMOTE_STORE" --no-require-sigs
@@ -3,15 +3,24 @@ with import ./config.nix;
 {
 hello = mkDerivation {
 name = "hello";
+outputs = [ "out" "dev" ];
+meta.outputsToInstall = [ "out" ];
 buildCommand =
 ''
-mkdir -p $out/bin
+mkdir -p $out/bin $dev/bin
+
 cat > $out/bin/hello <<EOF
 #! ${shell}
 who=\$1
 echo "Hello \''${who:-World} from $out/bin/hello"
 EOF
 chmod +x $out/bin/hello
+
+cat > $dev/bin/hello2 <<EOF
+#! ${shell}
+echo "Hello2"
+EOF
+chmod +x $dev/bin/hello2
 '';
 };
 }
@@ -6,6 +6,10 @@ clearCache
 nix shell -f shell-hello.nix hello -c hello | grep 'Hello World'
 nix shell -f shell-hello.nix hello -c hello NixOS | grep 'Hello NixOS'

+# Test output selection.
+nix shell -f shell-hello.nix hello^dev -c hello2 | grep 'Hello2'
+nix shell -f shell-hello.nix 'hello^*' -c hello2 | grep 'Hello2'
+
 if ! canUseSandbox; then exit 99; fi

 chmod -R u+w $TEST_ROOT/store0 || true