diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml index b04723b95..37966bab2 100644 --- a/.github/workflows/backport.yml +++ b/.github/workflows/backport.yml @@ -21,7 +21,7 @@ jobs: fetch-depth: 0 - name: Create backport PRs # should be kept in sync with `version` - uses: zeebe-io/backport-action@v1.2.0 + uses: zeebe-io/backport-action@v1.3.0 with: # Config README: https://github.com/zeebe-io/backport-action#backport-action github_token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c06c77043..0f1f6d43f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -19,7 +19,10 @@ jobs: - uses: actions/checkout@v3 with: fetch-depth: 0 - - uses: cachix/install-nix-action@v20 + - uses: cachix/install-nix-action@v21 + with: + # The sandbox would otherwise be disabled by default on Darwin + extra_nix_config: "sandbox = true" - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV - uses: cachix/cachix-action@v12 if: needs.check_secrets.outputs.cachix == 'true' @@ -58,7 +61,7 @@ jobs: with: fetch-depth: 0 - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV - - uses: cachix/install-nix-action@v20 + - uses: cachix/install-nix-action@v21 with: install_url: https://releases.nixos.org/nix/nix-2.13.3/install - uses: cachix/cachix-action@v12 @@ -79,7 +82,7 @@ jobs: steps: - uses: actions/checkout@v3 - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV - - uses: cachix/install-nix-action@v20 + - uses: cachix/install-nix-action@v21 with: install_url: '${{needs.installer.outputs.installerURL}}' install_options: "--tarball-url-prefix https://${{ env.CACHIX_NAME }}.cachix.org/serve" @@ -106,7 +109,7 @@ jobs: - uses: actions/checkout@v3 with: fetch-depth: 0 - - uses: cachix/install-nix-action@v20 + - uses: cachix/install-nix-action@v21 with: install_url: https://releases.nixos.org/nix/nix-2.13.3/install - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV diff --git a/.gitignore b/.gitignore index 8ceff4ef2..7ae1071d0 100644 --- a/.gitignore +++ b/.gitignore @@ -51,6 +51,8 @@ perl/Makefile.config /src/nix/nix +/src/nix/doc + # /src/nix-env/ /src/nix-env/nix-env @@ -85,6 +87,7 @@ perl/Makefile.config /tests/shell.drv /tests/config.nix /tests/ca/config.nix +/tests/dyn-drv/config.nix /tests/repl-result-out # /tests/lang/ diff --git a/.version b/.version index 752490696..d76bd2ba3 100644 --- a/.version +++ b/.version @@ -1 +1 @@ -2.16.0 +2.17.0 diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 1b0ecaf36..57a949906 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -30,6 +30,7 @@ Check out the [security policy](https://github.com/NixOS/nix/security/policy). You can use [labels](https://github.com/NixOS/nix/labels) to filter for relevant topics. 2. Search for related issues that cover what you're going to work on. It could help to mention there that you will work on the issue. + Pull requests addressing issues labeled ["idea approved"](https://github.com/NixOS/nix/labels/idea%20approved) are especially welcomed by maintainers and will receive prioritised review. 3. Check the [Nix reference manual](https://nixos.org/manual/nix/unstable/contributing/hacking.html) for information on building Nix and running its tests. 
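The `extra_nix_config: "sandbox = true"` added to the CI workflow above maps onto the ordinary `sandbox` setting in `nix.conf`, which the workflow comment notes is disabled by default on Darwin. A minimal sketch of enabling the same thing locally, assuming a multi-user installation with `/etc/nix/nix.conf`; the `launchctl` restart step is illustrative for macOS:

```console
# Enable the build sandbox (off by default on Darwin)
$ echo "sandbox = true" | sudo tee -a /etc/nix/nix.conf
# Restart the daemon so the new setting takes effect
$ sudo launchctl kickstart -k system/org.nixos.nix-daemon
```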
diff --git a/doc/manual/generate-builtins.nix b/doc/manual/generate-builtins.nix index 115bb3f94..71f96153f 100644 --- a/doc/manual/generate-builtins.nix +++ b/doc/manual/generate-builtins.nix @@ -1,8 +1,12 @@ -builtinsDump: +let + inherit (builtins) concatStringsSep attrNames; +in + +builtinsInfo: let showBuiltin = name: let - inherit (builtinsDump.${name}) doc args; + inherit (builtinsInfo.${name}) doc args; in ''
@@ -14,7 +18,7 @@ let ''; - listArgs = args: builtins.concatStringsSep " " (map (s: "${s}") args); + listArgs = args: concatStringsSep " " (map (s: "${s}") args); in -with builtins; concatStringsSep "\n" (map showBuiltin (attrNames builtinsDump)) +concatStringsSep "\n" (map showBuiltin (attrNames builtinsInfo)) diff --git a/doc/manual/generate-manpage.nix b/doc/manual/generate-manpage.nix index d04eecf55..fb34898f3 100644 --- a/doc/manual/generate-manpage.nix +++ b/doc/manual/generate-manpage.nix @@ -1,10 +1,16 @@ -cliDumpStr: +let + inherit (builtins) + attrNames attrValues fromJSON listToAttrs mapAttrs + concatStringsSep concatMap length lessThan replaceStrings sort; + inherit (import ./utils.nix) concatStrings optionalString filterAttrs trim squash unique showSettings; +in -with builtins; -with import ./utils.nix; +commandDump: let + commandInfo = fromJSON commandDump; + showCommand = { command, details, filename, toplevel }: let @@ -96,7 +102,7 @@ let ${option.description} ''; - categories = sort builtins.lessThan (unique (map (cmd: cmd.category) (attrValues allOptions))); + categories = sort lessThan (unique (map (cmd: cmd.category) (attrValues allOptions))); in concatStrings (map showCategory categories); in squash result; @@ -117,13 +123,11 @@ let }; in [ cmd ] ++ concatMap subcommand (attrNames details.commands or {}); - cliDump = builtins.fromJSON cliDumpStr; - manpages = processCommand { command = "nix"; - details = cliDump.args; + details = commandInfo.args; filename = "nix"; - toplevel = cliDump.args; + toplevel = commandInfo.args; }; tableOfContents = let @@ -143,6 +147,6 @@ let ${showSettings { useAnchors = false; } settings} ''; - in concatStrings (attrValues (mapAttrs showStore cliDump.stores)); + in concatStrings (attrValues (mapAttrs showStore commandInfo.stores)); in (listToAttrs manpages) // { "SUMMARY.md" = tableOfContents; } diff --git a/doc/manual/local.mk b/doc/manual/local.mk index 63e7e61e4..b4b7283ef 100644 --- a/doc/manual/local.mk +++ b/doc/manual/local.mk @@ -11,6 +11,7 @@ man-pages := $(foreach n, \ nix-prefetch-url.1 nix-channel.1 \ nix-hash.1 nix-copy-closure.1 \ nix.conf.5 nix-daemon.8 \ + nix-profiles.5 \ , $(d)/$(n)) # man pages for subcommands @@ -85,6 +86,12 @@ $(d)/nix.conf.5: $(d)/src/command-ref/conf-file.md $(trace-gen) lowdown -sT man --nroff-nolinks -M section=5 $^.tmp -o $@ @rm $^.tmp +$(d)/nix-profiles.5: $(d)/src/command-ref/files/profiles.md + @printf "Title: %s\n\n" "$$(basename $@ .5)" > $^.tmp + @cat $^ >> $^.tmp + $(trace-gen) lowdown -sT man --nroff-nolinks -M section=5 $^.tmp -o $@ + @rm $^.tmp + $(d)/src/SUMMARY.md: $(d)/src/SUMMARY.md.in $(d)/src/command-ref/new-cli $(d)/src/contributing/experimental-feature-descriptions.md @cp $< $@ @$(call process-includes,$@,$@) diff --git a/doc/manual/redirects.js b/doc/manual/redirects.js index 69f75d3a0..5cd6fdea2 100644 --- a/doc/manual/redirects.js +++ b/doc/manual/redirects.js @@ -338,6 +338,9 @@ const redirects = { "strings": "#string", "lists": "#list", "attribute-sets": "#attribute-set" + }, + "installation/installing-binary.html": { + "uninstalling": "uninstall.html" } }; diff --git a/doc/manual/src/SUMMARY.md.in b/doc/manual/src/SUMMARY.md.in index f783d5908..69c721b57 100644 --- a/doc/manual/src/SUMMARY.md.in +++ b/doc/manual/src/SUMMARY.md.in @@ -15,6 +15,7 @@ - [Multi-User Mode](installation/multi-user.md) - [Environment Variables](installation/env-variables.md) - [Upgrading Nix](installation/upgrading.md) + - [Uninstalling Nix](installation/uninstall.md) - [Package 
Management](package-management/package-management.md) - [Basic Package Management](package-management/basic-package-mgmt.md) - [Profiles](package-management/profiles.md) @@ -91,6 +92,11 @@ {{#include ./command-ref/new-cli/SUMMARY.md}} - [Files](command-ref/files.md) - [nix.conf](command-ref/conf-file.md) + - [Profiles](command-ref/files/profiles.md) + - [manifest.nix](command-ref/files/manifest.nix.md) + - [manifest.json](command-ref/files/manifest.json.md) + - [Channels](command-ref/files/channels.md) + - [Default Nix expression](command-ref/files/default-nix-expression.md) - [Architecture](architecture/architecture.md) - [Glossary](glossary.md) - [Contributing](contributing/contributing.md) @@ -99,6 +105,7 @@ - [CLI guideline](contributing/cli-guideline.md) - [Release Notes](release-notes/release-notes.md) - [Release X.Y (202?-??-??)](release-notes/rl-next.md) + - [Release 2.16 (2023-05-31)](release-notes/rl-2.16.md) - [Release 2.15 (2023-04-11)](release-notes/rl-2.15.md) - [Release 2.14 (2023-02-28)](release-notes/rl-2.14.md) - [Release 2.13 (2023-01-17)](release-notes/rl-2.13.md) diff --git a/doc/manual/src/advanced-topics/diff-hook.md b/doc/manual/src/advanced-topics/diff-hook.md index 4a742c160..207aad3b8 100644 --- a/doc/manual/src/advanced-topics/diff-hook.md +++ b/doc/manual/src/advanced-topics/diff-hook.md @@ -48,13 +48,13 @@ If the build passes and is deterministic, Nix will exit with a status code of 0: ```console -$ nix-build ./deterministic.nix -A stable +$ nix-build ./deterministic.nix --attr stable this derivation will be built: /nix/store/z98fasz2jqy9gs0xbvdj939p27jwda38-stable.drv building '/nix/store/z98fasz2jqy9gs0xbvdj939p27jwda38-stable.drv'... /nix/store/yyxlzw3vqaas7wfp04g0b1xg51f2czgq-stable -$ nix-build ./deterministic.nix -A stable --check +$ nix-build ./deterministic.nix --attr stable --check checking outputs of '/nix/store/z98fasz2jqy9gs0xbvdj939p27jwda38-stable.drv'... /nix/store/yyxlzw3vqaas7wfp04g0b1xg51f2czgq-stable ``` @@ -63,13 +63,13 @@ If the build is not deterministic, Nix will exit with a status code of 1: ```console -$ nix-build ./deterministic.nix -A unstable +$ nix-build ./deterministic.nix --attr unstable this derivation will be built: /nix/store/cgl13lbj1w368r5z8gywipl1ifli7dhk-unstable.drv building '/nix/store/cgl13lbj1w368r5z8gywipl1ifli7dhk-unstable.drv'... /nix/store/krpqk0l9ib0ibi1d2w52z293zw455cap-unstable -$ nix-build ./deterministic.nix -A unstable --check +$ nix-build ./deterministic.nix --attr unstable --check checking outputs of '/nix/store/cgl13lbj1w368r5z8gywipl1ifli7dhk-unstable.drv'... error: derivation '/nix/store/cgl13lbj1w368r5z8gywipl1ifli7dhk-unstable.drv' may not be deterministic: output '/nix/store/krpqk0l9ib0ibi1d2w52z293zw455cap-unstable' differs @@ -89,7 +89,7 @@ Using `--check` with `--keep-failed` will cause Nix to keep the second build's output in a special, `.check` path: ```console -$ nix-build ./deterministic.nix -A unstable --check --keep-failed +$ nix-build ./deterministic.nix --attr unstable --check --keep-failed checking outputs of '/nix/store/cgl13lbj1w368r5z8gywipl1ifli7dhk-unstable.drv'... 
note: keeping build directory '/tmp/nix-build-unstable.drv-0' error: derivation '/nix/store/cgl13lbj1w368r5z8gywipl1ifli7dhk-unstable.drv' may diff --git a/doc/manual/src/advanced-topics/distributed-builds.md b/doc/manual/src/advanced-topics/distributed-builds.md index fefd10100..73a113d35 100644 --- a/doc/manual/src/advanced-topics/distributed-builds.md +++ b/doc/manual/src/advanced-topics/distributed-builds.md @@ -38,11 +38,9 @@ contains Nix. > **Warning** > -> If you are building via the Nix daemon, it is the Nix daemon user -> account (that is, `root`) that should have SSH access to the remote -> machine. If you can’t or don’t want to configure `root` to be able to -> access to remote machine, you can use a private Nix store instead by -> passing e.g. `--store ~/my-nix`. +> If you are building via the Nix daemon, it is the Nix daemon user account (that is, `root`) that should have SSH access to a user (not necessarily `root`) on the remote machine. +> +> If you can’t or don’t want to configure `root` to be able to access the remote machine, you can use a private Nix store instead by passing e.g. `--store ~/my-nix` when running a Nix command from the local machine. The list of remote machines can be specified on the command line or in the Nix configuration file. The former is convenient for testing. For diff --git a/doc/manual/src/advanced-topics/post-build-hook.md b/doc/manual/src/advanced-topics/post-build-hook.md index 1479cc3a4..a251dec48 100644 --- a/doc/manual/src/advanced-topics/post-build-hook.md +++ b/doc/manual/src/advanced-topics/post-build-hook.md @@ -90,7 +90,7 @@ Then, restart the `nix-daemon`. Build any derivation, for example: ```console -$ nix-build -E '(import {}).writeText "example" (builtins.toString builtins.currentTime)' +$ nix-build --expr '(import {}).writeText "example" (builtins.toString builtins.currentTime)' this derivation will be built: /nix/store/s4pnfbkalzy5qz57qs6yybna8wylkig6-example.drv building '/nix/store/s4pnfbkalzy5qz57qs6yybna8wylkig6-example.drv'... diff --git a/doc/manual/src/command-ref/env-common.md b/doc/manual/src/command-ref/env-common.md index bf00be84f..b4a9bb2a9 100644 --- a/doc/manual/src/command-ref/env-common.md +++ b/doc/manual/src/command-ref/env-common.md @@ -71,9 +71,12 @@ Most Nix commands interpret the following environment variables: Settings are separated by the newline character. - [`NIX_USER_CONF_FILES`](#env-NIX_USER_CONF_FILES)\ - Overrides the location of the user Nix configuration files to load - from (defaults to the XDG spec locations). The variable is treated - as a list separated by the `:` token. + Overrides the location of the Nix user configuration files to load from. + + The default are the locations according to the [XDG Base Directory Specification]. + See the [XDG Base Directories](#xdg-base-directories) sub-section for details. + + The variable is treated as a list separated by the `:` token. - [`TMPDIR`](#env-TMPDIR)\ Use the specified directory to store temporary files. In particular, @@ -103,15 +106,19 @@ Most Nix commands interpret the following environment variables: 384 MiB. Setting it to a low value reduces memory consumption, but will increase runtime due to the overhead of garbage collection. -## XDG Base Directory +## XDG Base Directories -New Nix commands conform to the [XDG Base Directory Specification], and use the following environment variables to determine locations of various state and configuration files: +Nix follows the [XDG Base Directory Specification]. 
+ +For backwards compatibility, Nix commands will follow the standard only when [`use-xdg-base-directories`] is enabled. +[New Nix commands](@docroot@/command-ref/new-cli/nix.md) (experimental) conform to the standard by default. + +The following environment variables are used to determine locations of various state and configuration files: - [`XDG_CONFIG_HOME`]{#env-XDG_CONFIG_HOME} (default `~/.config`) - [`XDG_STATE_HOME`]{#env-XDG_STATE_HOME} (default `~/.local/state`) - [`XDG_CACHE_HOME`]{#env-XDG_CACHE_HOME} (default `~/.cache`) -Classic Nix commands can also be made to follow this standard using the [`use-xdg-base-directories`] configuration option. [XDG Base Directory Specification]: https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html -[`use-xdg-base-directories`]: @docroot@/command-ref/conf-file.md#conf-use-xdg-base-directories \ No newline at end of file +[`use-xdg-base-directories`]: @docroot@/command-ref/conf-file.md#conf-use-xdg-base-directories diff --git a/doc/manual/src/command-ref/files/channels.md b/doc/manual/src/command-ref/files/channels.md new file mode 100644 index 000000000..7b1f27128 --- /dev/null +++ b/doc/manual/src/command-ref/files/channels.md @@ -0,0 +1,26 @@ +## Channels + +A directory containing symlinks to Nix channels, managed by [`nix-channel`]: + +- `$XDG_STATE_HOME/nix/profiles/channels` for regular users +- `$NIX_STATE_DIR/profiles/per-user/root/channels` for `root` + +[`nix-channel`] uses a [profile](@docroot@/command-ref/files/profiles.md) to store channels. +This profile contains symlinks to the contents of those channels. + +## Subscribed channels + +The list of subscribed channels is stored in + +- `~/.nix-channels` +- `$XDG_STATE_HOME/nix/channels` if [`use-xdg-base-directories`] is set to `true` + +in the following format: + +``` + +... +``` + +[`nix-channel`]: @docroot@/command-ref/nix-channel.md +[`use-xdg-base-directories`]: @docroot@/command-ref/conf-file.md#conf-use-xdg-base-directories diff --git a/doc/manual/src/command-ref/files/default-nix-expression.md b/doc/manual/src/command-ref/files/default-nix-expression.md new file mode 100644 index 000000000..620f7035c --- /dev/null +++ b/doc/manual/src/command-ref/files/default-nix-expression.md @@ -0,0 +1,52 @@ +## Default Nix expression + +The source for the default [Nix expressions](@docroot@/language/index.md) used by [`nix-env`]: + +- `~/.nix-defexpr` +- `$XDG_STATE_HOME/nix/defexpr` if [`use-xdg-base-directories`] is set to `true`. + +It is loaded as follows: + +- If the default expression is a file, it is loaded as a Nix expression. +- If the default expression is a directory containing a `default.nix` file, that `default.nix` file is loaded as a Nix expression. +- If the default expression is a directory without a `default.nix` file, then its contents (both files and subdirectories) are loaded as Nix expressions. + The expressions are combined into a single attribute set, each expression under an attribute with the same name as the original file or subdirectory. + Subdirectories without a `default.nix` file are traversed recursively in search of more Nix expressions, but the names of these intermediate directories are not added to the attribute paths of the default Nix expression. + +Then, the resulting expression is interpreted like this: + +- If the expression is an attribute set, it is used as the default Nix expression. +- If the expression is a function, an empty set is passed as argument and the return value is used as the default Nix expression. 
+ + +For example, if the default expression contains two files, `foo.nix` and `bar.nix`, then the default Nix expression will be equivalent to + +```nix +{ + foo = import ~/.nix-defexpr/foo.nix; + bar = import ~/.nix-defexpr/bar.nix; +} +``` + +The file [`manifest.nix`](@docroot@/command-ref/files/manifest.nix.md) is always ignored. + +The command [`nix-channel`] places a symlink to the user's current [channels profile](@docroot@/command-ref/files/channels.md) in this directory. +This makes all subscribed channels available as attributes in the default expression. + +## User channel link + +A symlink that ensures that [`nix-env`] can find your channels: + +- `~/.nix-defexpr/channels` +- `$XDG_STATE_HOME/defexpr/channels` if [`use-xdg-base-directories`] is set to `true`. + +This symlink points to: + +- `$XDG_STATE_HOME/profiles/channels` for regular users +- `$NIX_STATE_DIR/profiles/per-user/root/channels` for `root` + +In a multi-user installation, you may also have `~/.nix-defexpr/channels_root`, which links to the channels of the root user.[`nix-env`]: ../nix-env.md + +[`nix-env`]: @docroot@/command-ref/nix-env.md +[`nix-channel`]: @docroot@/command-ref/nix-channel.md +[`use-xdg-base-directories`]: @docroot@/command-ref/conf-file.md#conf-use-xdg-base-directories diff --git a/doc/manual/src/command-ref/files/manifest.json.md b/doc/manual/src/command-ref/files/manifest.json.md new file mode 100644 index 000000000..bcfe7373d --- /dev/null +++ b/doc/manual/src/command-ref/files/manifest.json.md @@ -0,0 +1,45 @@ +## `manifest.json` + +The manifest file records the provenance of the packages that are installed in a [profile](./profiles.md) managed by [`nix profile`](@docroot@/command-ref/new-cli/nix3-profile.md) (experimental). + +Here is an example of what the file might look like after installing `zoom-us` from Nixpkgs: + +```json +{ + "version": 1, + "elements": [ + { + "active": true, + "attrPath": "legacyPackages.x86_64-linux.zoom-us", + "originalUrl": "flake:nixpkgs", + "storePaths": [ + "/nix/store/wbhg2ga8f3h87s9h5k0slxk0m81m4cxl-zoom-us-5.3.469451.0927" + ], + "uri": "github:NixOS/nixpkgs/13d0c311e3ae923a00f734b43fd1d35b47d8943a" + }, + … + ] +} +``` + +Each object in the array `elements` denotes an installed package and +has the following fields: + +* `originalUrl`: The [flake reference](@docroot@/command-ref/new-cli/nix3-flake.md) specified by + the user at the time of installation (e.g. `nixpkgs`). This is also + the flake reference that will be used by `nix profile upgrade`. + +* `uri`: The locked flake reference to which `originalUrl` resolved. + +* `attrPath`: The flake output attribute that provided this + package. Note that this is not necessarily the attribute that the + user specified, but the one resulting from applying the default + attribute paths and prefixes; for instance, `hello` might resolve to + `packages.x86_64-linux.hello` and the empty string to + `packages.x86_64-linux.default`. + +* `storePath`: The paths in the Nix store containing the package. + +* `active`: Whether the profile contains symlinks to the files of this + package. If set to false, the package is kept in the Nix store, but + is not "visible" in the profile's symlink tree. 
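As a rough illustration of how these fields can be consumed, the manifest is ordinary JSON and can be queried with standard tooling; a sketch assuming `jq` is installed and the profile is reachable via `~/.nix-profile`:

```console
# List each installed element's attribute path and whether it is active
$ jq -r '.elements[] | "\(.attrPath) active=\(.active)"' ~/.nix-profile/manifest.json
legacyPackages.x86_64-linux.zoom-us active=true
```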
diff --git a/doc/manual/src/command-ref/files/manifest.nix.md b/doc/manual/src/command-ref/files/manifest.nix.md new file mode 100644 index 000000000..d7d1b605b --- /dev/null +++ b/doc/manual/src/command-ref/files/manifest.nix.md @@ -0,0 +1,128 @@ +## `manifest.nix` + +The manifest file records the provenance of the packages that are installed in a [profile](./profiles.md) managed by [`nix-env`](@docroot@/command-ref/nix-env.md). + +Here is an example of how this file might look like after installing `hello` from Nixpkgs: + +```nix +[{ + meta = { + available = true; + broken = false; + changelog = + "https://git.savannah.gnu.org/cgit/hello.git/plain/NEWS?h=v2.12.1"; + description = "A program that produces a familiar, friendly greeting"; + homepage = "https://www.gnu.org/software/hello/manual/"; + insecure = false; + license = { + deprecated = false; + free = true; + fullName = "GNU General Public License v3.0 or later"; + redistributable = true; + shortName = "gpl3Plus"; + spdxId = "GPL-3.0-or-later"; + url = "https://spdx.org/licenses/GPL-3.0-or-later.html"; + }; + longDescription = '' + GNU Hello is a program that prints "Hello, world!" when you run it. + It is fully customizable. + ''; + maintainers = [{ + email = "edolstra+nixpkgs@gmail.com"; + github = "edolstra"; + githubId = 1148549; + name = "Eelco Dolstra"; + }]; + name = "hello-2.12.1"; + outputsToInstall = [ "out" ]; + platforms = [ + "i686-cygwin" + "x86_64-cygwin" + "x86_64-darwin" + "i686-darwin" + "aarch64-darwin" + "armv7a-darwin" + "i686-freebsd13" + "x86_64-freebsd13" + "aarch64-genode" + "i686-genode" + "x86_64-genode" + "x86_64-solaris" + "js-ghcjs" + "aarch64-linux" + "armv5tel-linux" + "armv6l-linux" + "armv7a-linux" + "armv7l-linux" + "i686-linux" + "m68k-linux" + "microblaze-linux" + "microblazeel-linux" + "mipsel-linux" + "mips64el-linux" + "powerpc64-linux" + "powerpc64le-linux" + "riscv32-linux" + "riscv64-linux" + "s390-linux" + "s390x-linux" + "x86_64-linux" + "mmix-mmixware" + "aarch64-netbsd" + "armv6l-netbsd" + "armv7a-netbsd" + "armv7l-netbsd" + "i686-netbsd" + "m68k-netbsd" + "mipsel-netbsd" + "powerpc-netbsd" + "riscv32-netbsd" + "riscv64-netbsd" + "x86_64-netbsd" + "aarch64_be-none" + "aarch64-none" + "arm-none" + "armv6l-none" + "avr-none" + "i686-none" + "microblaze-none" + "microblazeel-none" + "msp430-none" + "or1k-none" + "m68k-none" + "powerpc-none" + "powerpcle-none" + "riscv32-none" + "riscv64-none" + "rx-none" + "s390-none" + "s390x-none" + "vc4-none" + "x86_64-none" + "i686-openbsd" + "x86_64-openbsd" + "x86_64-redox" + "wasm64-wasi" + "wasm32-wasi" + "x86_64-windows" + "i686-windows" + ]; + position = + "/nix/store/7niq32w715567hbph0q13m5lqna64c1s-nixos-unstable.tar.gz/nixos-unstable.tar.gz/pkgs/applications/misc/hello/default.nix:34"; + unfree = false; + unsupported = false; + }; + name = "hello-2.12.1"; + out = { + outPath = "/nix/store/260q5867crm1xjs4khgqpl6vr9kywql1-hello-2.12.1"; + }; + outPath = "/nix/store/260q5867crm1xjs4khgqpl6vr9kywql1-hello-2.12.1"; + outputs = [ "out" ]; + system = "x86_64-linux"; + type = "derivation"; +}] +``` + +Each element in this list corresponds to an installed package. +It incorporates some attributes of the original derivation, including `meta`, `name`, `out`, `outPath`, `outputs`, `system`. +This information is used by Nix for querying and updating the package. 
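Because `manifest.nix` is a plain Nix expression (a list of attribute sets), it can be inspected directly with the evaluator; a sketch assuming a `nix-env`-managed profile reachable via `~/.nix-profile`, with output corresponding to the `hello` example above:

```console
# Print the names of the packages recorded in the manifest
$ nix-instantiate --eval --strict --expr 'map (p: p.name) (import ~/.nix-profile/manifest.nix)'
[ "hello-2.12.1" ]
```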
diff --git a/doc/manual/src/command-ref/files/profiles.md b/doc/manual/src/command-ref/files/profiles.md new file mode 100644 index 000000000..b5c737880 --- /dev/null +++ b/doc/manual/src/command-ref/files/profiles.md @@ -0,0 +1,74 @@ +## Profiles + +A directory that contains links to profiles managed by [`nix-env`] and [`nix profile`]: + +- `$XDG_STATE_HOME/nix/profiles` for regular users +- `$NIX_STATE_DIR/profiles/per-user/root` if the user is `root` + +A profile is a directory of symlinks to files in the Nix store. + +### Filesystem layout + +Profiles are versioned as follows. When using a profile named *path*, *path* is a symlink to *path*`-`*N*`-link`, where *N* is the version of the profile. +In turn, *path*`-`*N*`-link` is a symlink to a path in the Nix store. +For example: + +```console +$ ls -l ~alice/.local/state/nix/profiles/profile* +lrwxrwxrwx 1 alice users 14 Nov 25 14:35 /home/alice/.local/state/nix/profiles/profile -> profile-7-link +lrwxrwxrwx 1 alice users 51 Oct 28 16:18 /home/alice/.local/state/nix/profiles/profile-5-link -> /nix/store/q69xad13ghpf7ir87h0b2gd28lafjj1j-profile +lrwxrwxrwx 1 alice users 51 Oct 29 13:20 /home/alice/.local/state/nix/profiles/profile-6-link -> /nix/store/6bvhpysd7vwz7k3b0pndn7ifi5xr32dg-profile +lrwxrwxrwx 1 alice users 51 Nov 25 14:35 /home/alice/.local/state/nix/profiles/profile-7-link -> /nix/store/mp0x6xnsg0b8qhswy6riqvimai4gm677-profile +``` + +Each of these symlinks is a root for the Nix garbage collector. + +The contents of the store path corresponding to each version of the +profile is a tree of symlinks to the files of the installed packages, +e.g. + +```console +$ ll -R ~eelco/.local/state/nix/profiles/profile-7-link/ +/home/eelco/.local/state/nix/profiles/profile-7-link/: +total 20 +dr-xr-xr-x 2 root root 4096 Jan 1 1970 bin +-r--r--r-- 2 root root 1402 Jan 1 1970 manifest.nix +dr-xr-xr-x 4 root root 4096 Jan 1 1970 share + +/home/eelco/.local/state/nix/profiles/profile-7-link/bin: +total 20 +lrwxrwxrwx 5 root root 79 Jan 1 1970 chromium -> /nix/store/ijm5k0zqisvkdwjkc77mb9qzb35xfi4m-chromium-86.0.4240.111/bin/chromium +lrwxrwxrwx 7 root root 87 Jan 1 1970 spotify -> /nix/store/w9182874m1bl56smps3m5zjj36jhp3rn-spotify-1.1.26.501.gbe11e53b-15/bin/spotify +lrwxrwxrwx 3 root root 79 Jan 1 1970 zoom-us -> /nix/store/wbhg2ga8f3h87s9h5k0slxk0m81m4cxl-zoom-us-5.3.469451.0927/bin/zoom-us + +/home/eelco/.local/state/nix/profiles/profile-7-link/share/applications: +total 12 +lrwxrwxrwx 4 root root 120 Jan 1 1970 chromium-browser.desktop -> /nix/store/4cf803y4vzfm3gyk3vzhzb2327v0kl8a-chromium-unwrapped-86.0.4240.111/share/applications/chromium-browser.desktop +lrwxrwxrwx 7 root root 110 Jan 1 1970 spotify.desktop -> /nix/store/w9182874m1bl56smps3m5zjj36jhp3rn-spotify-1.1.26.501.gbe11e53b-15/share/applications/spotify.desktop +lrwxrwxrwx 3 root root 107 Jan 1 1970 us.zoom.Zoom.desktop -> /nix/store/wbhg2ga8f3h87s9h5k0slxk0m81m4cxl-zoom-us-5.3.469451.0927/share/applications/us.zoom.Zoom.desktop + +… +``` + +Each profile version contains a manifest file: +- [`manifest.nix`](@docroot@/command-ref/files/manifest.nix.md) used by [`nix-env`](@docroot@/command-ref/nix-env.md). +- [`manifest.json`](@docroot@/command-ref/files/manifest.json.md) used by [`nix profile`](@docroot@/command-ref/new-cli/nix3-profile.md) (experimental). + +## User profile link + +A symbolic link to the user's current profile: + +- `~/.nix-profile` +- `$XDG_STATE_HOME/nix/profile` if [`use-xdg-base-directories`] is set to `true`. 
+ +By default, this symlink points to: + +- `$XDG_STATE_HOME/nix/profiles/profile` for regular users +- `$NIX_STATE_DIR/profiles/per-user/root/profile` for `root` + +The `PATH` environment variable should include `/bin` subdirectory of the profile link (e.g. `~/.nix-profile/bin`) for the user environment to be visible to the user. +The [installer](@docroot@/installation/installing-binary.md) sets this up by default, unless you enable [`use-xdg-base-directories`]. + +[`nix-env`]: @docroot@/command-ref/nix-env.md +[`nix profile`]: @docroot@/command-ref/new-cli/nix3-profile.md +[`use-xdg-base-directories`]: @docroot@/command-ref/conf-file.md#conf-use-xdg-base-directories diff --git a/doc/manual/src/command-ref/nix-build.md b/doc/manual/src/command-ref/nix-build.md index 44de4cf53..f70bbd7f2 100644 --- a/doc/manual/src/command-ref/nix-build.md +++ b/doc/manual/src/command-ref/nix-build.md @@ -76,7 +76,7 @@ except for `--arg` and `--attr` / `-A` which are passed to `nix-instantiate`. # Examples ```console -$ nix-build '' -A firefox +$ nix-build '' --attr firefox store derivation is /nix/store/qybprl8sz2lc...-firefox-1.5.0.7.drv /nix/store/d18hyl92g30l...-firefox-1.5.0.7 @@ -91,7 +91,7 @@ If a derivation has multiple outputs, `nix-build` will build the default (first) output. You can also build all outputs: ```console -$ nix-build '' -A openssl.all +$ nix-build '' --attr openssl.all ``` This will create a symlink for each output named `result-outputname`. @@ -101,7 +101,7 @@ outputs `out`, `bin` and `man`, `nix-build` will create symlinks specific output: ```console -$ nix-build '' -A openssl.man +$ nix-build '' --attr openssl.man ``` This will create a symlink `result-man`. @@ -109,7 +109,7 @@ This will create a symlink `result-man`. Build a Nix expression given on the command line: ```console -$ nix-build -E 'with import { }; runCommand "foo" { } "echo bar > $out"' +$ nix-build --expr 'with import { }; runCommand "foo" { } "echo bar > $out"' $ cat ./result bar ``` @@ -118,5 +118,5 @@ Build the GNU Hello package from the latest revision of the master branch of Nixpkgs: ```console -$ nix-build https://github.com/NixOS/nixpkgs/archive/master.tar.gz -A hello +$ nix-build https://github.com/NixOS/nixpkgs/archive/master.tar.gz --attr hello ``` diff --git a/doc/manual/src/command-ref/nix-channel.md b/doc/manual/src/command-ref/nix-channel.md index 2027cc98d..a210583ae 100644 --- a/doc/manual/src/command-ref/nix-channel.md +++ b/doc/manual/src/command-ref/nix-channel.md @@ -22,6 +22,9 @@ This command has the following operations: channels. If *name* is omitted, it defaults to the last component of *url*, with the suffixes `-stable` or `-unstable` removed. + A channel URL must point to a directory containing a file `nixexprs.tar.gz`. + At the top level, that tarball must contain a single directory with a `default.nix` file that serves as the channel’s entry point. + - `--remove` *name*\ Removes the channel named *name* from the list of subscribed channels. @@ -49,6 +52,12 @@ The list of subscribed channels is stored in `~/.nix-channels`. {{#include ./env-common.md}} +# Files + +`nix-channel` operates on the following files. 
+ +{{#include ./files/channels.md}} + # Examples To subscribe to the Nixpkgs channel and install the GNU Hello package: @@ -56,45 +65,18 @@ To subscribe to the Nixpkgs channel and install the GNU Hello package: ```console $ nix-channel --add https://nixos.org/channels/nixpkgs-unstable $ nix-channel --update -$ nix-env -iA nixpkgs.hello +$ nix-env --install --attr nixpkgs.hello ``` You can revert channel updates using `--rollback`: ```console -$ nix-instantiate --eval -E '(import {}).lib.version' +$ nix-instantiate --eval --expr '(import {}).lib.version' "14.04.527.0e935f1" $ nix-channel --rollback switching from generation 483 to 482 -$ nix-instantiate --eval -E '(import {}).lib.version' +$ nix-instantiate --eval --expr '(import {}).lib.version' "14.04.526.dbadfad" ``` - -# Files - - - `${XDG_STATE_HOME-$HOME/.local/state}/nix/profiles/channels`\ - `nix-channel` uses a `nix-env` profile to keep track of previous - versions of the subscribed channels. Every time you run `nix-channel - --update`, a new channel generation (that is, a symlink to the - channel Nix expressions in the Nix store) is created. This enables - `nix-channel --rollback` to revert to previous versions. - - - `~/.nix-defexpr/channels`\ - This is a symlink to - `${XDG_STATE_HOME-$HOME/.local/state}/nix/profiles/channels`. It ensures that - `nix-env` can find your channels. In a multi-user installation, you - may also have `~/.nix-defexpr/channels_root`, which links to the - channels of the root user. - -# Channel format - -A channel URL should point to a directory containing the following -files: - - - `nixexprs.tar.xz`\ - A tarball containing Nix expressions and files referenced by them - (such as build scripts and patches). At the top level, the tarball - should contain a single directory. That directory must contain a - file `default.nix` that serves as the channel’s “entry point”. diff --git a/doc/manual/src/command-ref/nix-copy-closure.md b/doc/manual/src/command-ref/nix-copy-closure.md index 0801e8246..fbf6828da 100644 --- a/doc/manual/src/command-ref/nix-copy-closure.md +++ b/doc/manual/src/command-ref/nix-copy-closure.md @@ -87,5 +87,5 @@ environment: ```console $ nix-copy-closure --from alice@itchy.labs \ /nix/store/0dj0503hjxy5mbwlafv1rsbdiyx1gkdy-subversion-1.4.4 -$ nix-env -i /nix/store/0dj0503hjxy5mbwlafv1rsbdiyx1gkdy-subversion-1.4.4 +$ nix-env --install /nix/store/0dj0503hjxy5mbwlafv1rsbdiyx1gkdy-subversion-1.4.4 ``` diff --git a/doc/manual/src/command-ref/nix-env.md b/doc/manual/src/command-ref/nix-env.md index 42b5bca77..941723216 100644 --- a/doc/manual/src/command-ref/nix-env.md +++ b/doc/manual/src/command-ref/nix-env.md @@ -49,7 +49,7 @@ These pages can be viewed offline: # Selectors -Several commands, such as `nix-env -q` and `nix-env -i`, take a list of +Several commands, such as `nix-env --query ` and `nix-env --install `, take a list of arguments that specify the packages on which to operate. These are extended regular expressions that must match the entire name of the package. (For details on regular expressions, see **regex**(7).) The match is @@ -83,46 +83,8 @@ match. Here are some examples: # Files - - `~/.nix-defexpr`\ - The source for the default Nix expressions used by the - `--install`, `--upgrade`, and `--query --available` operations to - obtain derivations. The `--file` option may be used to override - this default. +`nix-env` operates on the following files. - If `~/.nix-defexpr` is a file, it is loaded as a Nix expression. 
If - the expression is a set, it is used as the default Nix expression. - If the expression is a function, an empty set is passed as argument - and the return value is used as the default Nix expression. +{{#include ./files/default-nix-expression.md}} - If `~/.nix-defexpr` is a directory containing a `default.nix` file, - that file is loaded as in the above paragraph. - - If `~/.nix-defexpr` is a directory without a `default.nix` file, - then its contents (both files and subdirectories) are loaded as Nix - expressions. The expressions are combined into a single set, each - expression under an attribute with the same name as the original - file or subdirectory. - - For example, if `~/.nix-defexpr` contains two files, `foo.nix` and - `bar.nix`, then the default Nix expression will essentially be - - ```nix - { - foo = import ~/.nix-defexpr/foo.nix; - bar = import ~/.nix-defexpr/bar.nix; - } - ``` - - The file `manifest.nix` is always ignored. Subdirectories without a - `default.nix` file are traversed recursively in search of more Nix - expressions, but the names of these intermediate directories are not - added to the attribute paths of the default Nix expression. - - The command `nix-channel` places symlinks to the downloaded Nix - expressions from each subscribed channel in this directory. - - - `~/.nix-profile`\ - A symbolic link to the user's current profile. By default, this - symlink points to `prefix/var/nix/profiles/default`. The `PATH` - environment variable should include `~/.nix-profile/bin` for the - user environment to be visible to the user. +{{#include ./files/profiles.md}} diff --git a/doc/manual/src/command-ref/nix-env/delete-generations.md b/doc/manual/src/command-ref/nix-env/delete-generations.md index 6f0af5384..92cb7f0d9 100644 --- a/doc/manual/src/command-ref/nix-env/delete-generations.md +++ b/doc/manual/src/command-ref/nix-env/delete-generations.md @@ -41,6 +41,6 @@ $ nix-env --delete-generations 30d ``` ```console -$ nix-env -p other_profile --delete-generations old +$ nix-env --profile other_profile --delete-generations old ``` diff --git a/doc/manual/src/command-ref/nix-env/install.md b/doc/manual/src/command-ref/nix-env/install.md index d754accfe..ad179cbc7 100644 --- a/doc/manual/src/command-ref/nix-env/install.md +++ b/doc/manual/src/command-ref/nix-env/install.md @@ -36,7 +36,7 @@ a number of possible ways: then the derivation with the highest version will be installed. You can force the installation of multiple derivations with the same - name by being specific about the versions. For instance, `nix-env -i + name by being specific about the versions. For instance, `nix-env --install gcc-3.3.6 gcc-4.1.1` will install both version of GCC (and will probably cause a user environment conflict\!). @@ -44,7 +44,7 @@ a number of possible ways: paths* that select attributes from the top-level Nix expression. This is faster than using derivation names and unambiguous. To find out the attribute paths of available - packages, use `nix-env -qaP`. + packages, use `nix-env --query --available --attr-path `. - If `--from-profile` *path* is given, *args* is a set of names denoting installed store paths in the profile *path*. This is an @@ -87,7 +87,7 @@ a number of possible ways: - `--remove-all` / `-r`\ Remove all previously installed packages first. This is equivalent - to running `nix-env -e '.*'` first, except that everything happens + to running `nix-env --uninstall '.*'` first, except that everything happens in a single transaction. 
{{#include ./opt-common.md}} @@ -103,9 +103,9 @@ a number of possible ways: To install a package using a specific attribute path from the active Nix expression: ```console -$ nix-env -iA gcc40mips +$ nix-env --install --attr gcc40mips installing `gcc-4.0.2' -$ nix-env -iA xorg.xorgserver +$ nix-env --install --attr xorg.xorgserver installing `xorg-server-1.2.0' ``` @@ -133,32 +133,32 @@ installing `gcc-3.3.2' To install all derivations in the Nix expression `foo.nix`: ```console -$ nix-env -f ~/foo.nix -i '.*' +$ nix-env --file ~/foo.nix --install '.*' ``` To copy the store path with symbolic name `gcc` from another profile: ```console -$ nix-env -i --from-profile /nix/var/nix/profiles/foo gcc +$ nix-env --install --from-profile /nix/var/nix/profiles/foo gcc ``` To install a specific [store derivation] (typically created by `nix-instantiate`): ```console -$ nix-env -i /nix/store/fibjb1bfbpm5mrsxc4mh2d8n37sxh91i-gcc-3.4.3.drv +$ nix-env --install /nix/store/fibjb1bfbpm5mrsxc4mh2d8n37sxh91i-gcc-3.4.3.drv ``` To install a specific output path: ```console -$ nix-env -i /nix/store/y3cgx0xj1p4iv9x0pnnmdhr8iyg741vk-gcc-3.4.3 +$ nix-env --install /nix/store/y3cgx0xj1p4iv9x0pnnmdhr8iyg741vk-gcc-3.4.3 ``` To install from a Nix expression specified on the command-line: ```console -$ nix-env -f ./foo.nix -i -E \ +$ nix-env --file ./foo.nix --install --expr \ 'f: (f {system = "i686-linux";}).subversionWithJava' ``` @@ -170,7 +170,7 @@ function defined in `./foo.nix`. A dry-run tells you which paths will be downloaded or built from source: ```console -$ nix-env -f '' -iA hello --dry-run +$ nix-env --file '' --install --attr hello --dry-run (dry run; not doing anything) installing ‘hello-2.10’ this path will be fetched (0.04 MiB download, 0.19 MiB unpacked): @@ -182,6 +182,6 @@ To install Firefox from the latest revision in the Nixpkgs/NixOS 14.12 channel: ```console -$ nix-env -f https://github.com/NixOS/nixpkgs/archive/nixos-14.12.tar.gz -iA firefox +$ nix-env --file https://github.com/NixOS/nixpkgs/archive/nixos-14.12.tar.gz --install --attr firefox ``` diff --git a/doc/manual/src/command-ref/nix-env/query.md b/doc/manual/src/command-ref/nix-env/query.md index 18f0ee210..c9b4d8513 100644 --- a/doc/manual/src/command-ref/nix-env/query.md +++ b/doc/manual/src/command-ref/nix-env/query.md @@ -137,7 +137,7 @@ derivation is shown unless `--no-name` is specified. To show installed packages: ```console -$ nix-env -q +$ nix-env --query bison-1.875c docbook-xml-4.2 firefox-1.0.4 @@ -149,7 +149,7 @@ ORBit2-2.8.3 To show available packages: ```console -$ nix-env -qa +$ nix-env --query --available firefox-1.0.7 GConf-2.4.0.1 MPlayer-1.0pre7 @@ -160,7 +160,7 @@ ORBit2-2.8.3 To show the status of available packages: ```console -$ nix-env -qas +$ nix-env --query --available --status -P- firefox-1.0.7 (not installed but present) --S GConf-2.4.0.1 (not present, but there is a substitute for fast installation) --S MPlayer-1.0pre3 (i.e., this is not the installed MPlayer, even though the version is the same!) @@ -171,14 +171,14 @@ IP- ORBit2-2.8.3 (installed and by definition present) To show available packages in the Nix expression `foo.nix`: ```console -$ nix-env -f ./foo.nix -qa +$ nix-env --file ./foo.nix --query --available foo-1.2.3 ``` To compare installed versions to what’s available: ```console -$ nix-env -qc +$ nix-env --query --compare-versions ... acrobat-reader-7.0 - ? 
(package is not available at all) autoconf-2.59 = 2.59 (same version) @@ -189,7 +189,7 @@ firefox-1.0.4 < 1.0.7 (a more recent version is available) To show all packages with “`zip`” in the name: ```console -$ nix-env -qa '.*zip.*' +$ nix-env --query --available '.*zip.*' bzip2-1.0.6 gzip-1.6 zip-3.0 @@ -199,7 +199,7 @@ zip-3.0 To show all packages with “`firefox`” or “`chromium`” in the name: ```console -$ nix-env -qa '.*(firefox|chromium).*' +$ nix-env --query --available '.*(firefox|chromium).*' chromium-37.0.2062.94 chromium-beta-38.0.2125.24 firefox-32.0.3 @@ -210,6 +210,6 @@ firefox-with-plugins-13.0.1 To show all packages in the latest revision of the Nixpkgs repository: ```console -$ nix-env -f https://github.com/NixOS/nixpkgs/archive/master.tar.gz -qa +$ nix-env --file https://github.com/NixOS/nixpkgs/archive/master.tar.gz --query --available ``` diff --git a/doc/manual/src/command-ref/nix-env/set-flag.md b/doc/manual/src/command-ref/nix-env/set-flag.md index 63f0a0ff9..e04b22a91 100644 --- a/doc/manual/src/command-ref/nix-env/set-flag.md +++ b/doc/manual/src/command-ref/nix-env/set-flag.md @@ -46,16 +46,16 @@ To prevent the currently installed Firefox from being upgraded: $ nix-env --set-flag keep true firefox ``` -After this, `nix-env -u` will ignore Firefox. +After this, `nix-env --upgrade ` will ignore Firefox. To disable the currently installed Firefox, then install a new Firefox while the old remains part of the profile: ```console -$ nix-env -q +$ nix-env --query firefox-2.0.0.9 (the current one) -$ nix-env --preserve-installed -i firefox-2.0.0.11 +$ nix-env --preserve-installed --install firefox-2.0.0.11 installing `firefox-2.0.0.11' building path(s) `/nix/store/myy0y59q3ig70dgq37jqwg1j0rsapzsl-user-environment' collision between `/nix/store/...-firefox-2.0.0.11/bin/firefox' @@ -65,10 +65,10 @@ collision between `/nix/store/...-firefox-2.0.0.11/bin/firefox' $ nix-env --set-flag active false firefox setting flag on `firefox-2.0.0.9' -$ nix-env --preserve-installed -i firefox-2.0.0.11 +$ nix-env --preserve-installed --install firefox-2.0.0.11 installing `firefox-2.0.0.11' -$ nix-env -q +$ nix-env --query firefox-2.0.0.11 (the enabled one) firefox-2.0.0.9 (the disabled one) ``` diff --git a/doc/manual/src/command-ref/nix-env/set.md b/doc/manual/src/command-ref/nix-env/set.md index c1cf75739..b9950eeab 100644 --- a/doc/manual/src/command-ref/nix-env/set.md +++ b/doc/manual/src/command-ref/nix-env/set.md @@ -25,6 +25,6 @@ The following updates a profile such that its current generation will contain just Firefox: ```console -$ nix-env -p /nix/var/nix/profiles/browser --set firefox +$ nix-env --profile /nix/var/nix/profiles/browser --set firefox ``` diff --git a/doc/manual/src/command-ref/nix-env/switch-generation.md b/doc/manual/src/command-ref/nix-env/switch-generation.md index e550325c4..38cf0534d 100644 --- a/doc/manual/src/command-ref/nix-env/switch-generation.md +++ b/doc/manual/src/command-ref/nix-env/switch-generation.md @@ -27,7 +27,7 @@ Switching will fail if the specified generation does not exist. # Examples ```console -$ nix-env -G 42 +$ nix-env --switch-generation 42 switching from generation 50 to 42 ``` diff --git a/doc/manual/src/command-ref/nix-env/switch-profile.md b/doc/manual/src/command-ref/nix-env/switch-profile.md index b389e4140..5ae2fdced 100644 --- a/doc/manual/src/command-ref/nix-env/switch-profile.md +++ b/doc/manual/src/command-ref/nix-env/switch-profile.md @@ -22,5 +22,5 @@ the symlink `~/.nix-profile` is made to point to *path*. 
# Examples ```console -$ nix-env -S ~/my-profile +$ nix-env --switch-profile ~/my-profile ``` diff --git a/doc/manual/src/command-ref/nix-env/uninstall.md b/doc/manual/src/command-ref/nix-env/uninstall.md index e9ec8a15e..734cc7675 100644 --- a/doc/manual/src/command-ref/nix-env/uninstall.md +++ b/doc/manual/src/command-ref/nix-env/uninstall.md @@ -24,5 +24,5 @@ designated by the symbolic names *drvnames* are removed. ```console $ nix-env --uninstall gcc -$ nix-env -e '.*' (remove everything) +$ nix-env --uninstall '.*' (remove everything) ``` diff --git a/doc/manual/src/command-ref/nix-env/upgrade.md b/doc/manual/src/command-ref/nix-env/upgrade.md index f88ffcbee..322dfbda2 100644 --- a/doc/manual/src/command-ref/nix-env/upgrade.md +++ b/doc/manual/src/command-ref/nix-env/upgrade.md @@ -76,21 +76,21 @@ version is installed. # Examples ```console -$ nix-env --upgrade -A nixpkgs.gcc +$ nix-env --upgrade --attr nixpkgs.gcc upgrading `gcc-3.3.1' to `gcc-3.4' ``` When there are no updates available, nothing will happen: ```console -$ nix-env --upgrade -A nixpkgs.pan +$ nix-env --upgrade --attr nixpkgs.pan ``` Using `-A` is preferred when possible, as it is faster and unambiguous but it is also possible to upgrade to a specific version by matching the derivation name: ```console -$ nix-env -u gcc-3.3.2 --always +$ nix-env --upgrade gcc-3.3.2 --always upgrading `gcc-3.4' to `gcc-3.3.2' ``` @@ -98,7 +98,7 @@ To try to upgrade everything (matching packages based on the part of the derivation name without version): ```console -$ nix-env -u +$ nix-env --upgrade upgrading `hello-2.1.2' to `hello-2.1.3' upgrading `mozilla-1.2' to `mozilla-1.4' ``` diff --git a/doc/manual/src/command-ref/nix-instantiate.md b/doc/manual/src/command-ref/nix-instantiate.md index e55fb2afd..e1b4a3e80 100644 --- a/doc/manual/src/command-ref/nix-instantiate.md +++ b/doc/manual/src/command-ref/nix-instantiate.md @@ -88,7 +88,7 @@ Instantiate [store derivation]s from a Nix expression, and build them using `nix $ nix-instantiate test.nix (instantiate) /nix/store/cigxbmvy6dzix98dxxh9b6shg7ar5bvs-perl-BerkeleyDB-0.26.drv -$ nix-store -r $(nix-instantiate test.nix) (build) +$ nix-store --realise $(nix-instantiate test.nix) (build) ... /nix/store/qhqk4n8ci095g3sdp93x7rgwyh9rdvgk-perl-BerkeleyDB-0.26 (output path) @@ -100,30 +100,30 @@ dr-xr-xr-x 2 eelco users 4096 1970-01-01 01:00 lib You can also give a Nix expression on the command line: ```console -$ nix-instantiate -E 'with import { }; hello' +$ nix-instantiate --expr 'with import { }; hello' /nix/store/j8s4zyv75a724q38cb0r87rlczaiag4y-hello-2.8.drv ``` This is equivalent to: ```console -$ nix-instantiate '' -A hello +$ nix-instantiate '' --attr hello ``` Parsing and evaluating Nix expressions: ```console -$ nix-instantiate --parse -E '1 + 2' +$ nix-instantiate --parse --expr '1 + 2' 1 + 2 ``` ```console -$ nix-instantiate --eval -E '1 + 2' +$ nix-instantiate --eval --expr '1 + 2' 3 ``` ```console -$ nix-instantiate --eval --xml -E '1 + 2' +$ nix-instantiate --eval --xml --expr '1 + 2' @@ -133,7 +133,7 @@ $ nix-instantiate --eval --xml -E '1 + 2' The difference between non-strict and strict evaluation: ```console -$ nix-instantiate --eval --xml -E 'rec { x = "foo"; y = x; }' +$ nix-instantiate --eval --xml --expr 'rec { x = "foo"; y = x; }' ... @@ -148,7 +148,7 @@ Note that `y` is left unevaluated (the XML representation doesn’t attempt to show non-normal forms). 
```console -$ nix-instantiate --eval --xml --strict -E 'rec { x = "foo"; y = x; }' +$ nix-instantiate --eval --xml --strict --expr 'rec { x = "foo"; y = x; }' ... diff --git a/doc/manual/src/command-ref/nix-shell.md b/doc/manual/src/command-ref/nix-shell.md index 576e5ba0b..195b72be5 100644 --- a/doc/manual/src/command-ref/nix-shell.md +++ b/doc/manual/src/command-ref/nix-shell.md @@ -89,7 +89,7 @@ All options not listed here are passed to `nix-store - `--packages` / `-p` *packages*…\ Set up an environment in which the specified packages are present. The command line arguments are interpreted as attribute names inside - the Nix Packages collection. Thus, `nix-shell -p libjpeg openjdk` + the Nix Packages collection. Thus, `nix-shell --packages libjpeg openjdk` will start a shell in which the packages denoted by the attribute names `libjpeg` and `openjdk` are present. @@ -118,7 +118,7 @@ To build the dependencies of the package Pan, and start an interactive shell in which to build it: ```console -$ nix-shell '' -A pan +$ nix-shell '' --attr pan [nix-shell]$ eval ${unpackPhase:-unpackPhase} [nix-shell]$ cd $sourceRoot [nix-shell]$ eval ${patchPhase:-patchPhase} @@ -137,7 +137,7 @@ To clear the environment first, and do some additional automatic initialisation of the interactive shell: ```console -$ nix-shell '' -A pan --pure \ +$ nix-shell '' --attr pan --pure \ --command 'export NIX_DEBUG=1; export NIX_CORES=8; return' ``` @@ -146,13 +146,13 @@ Nix expressions can also be given on the command line using the `-E` and packages `sqlite` and `libX11`: ```console -$ nix-shell -E 'with import { }; runCommand "dummy" { buildInputs = [ sqlite xorg.libX11 ]; } ""' +$ nix-shell --expr 'with import { }; runCommand "dummy" { buildInputs = [ sqlite xorg.libX11 ]; } ""' ``` A shorter way to do the same is: ```console -$ nix-shell -p sqlite xorg.libX11 +$ nix-shell --packages sqlite xorg.libX11 [nix-shell]$ echo $NIX_LDFLAGS … -L/nix/store/j1zg5v…-sqlite-3.8.0.2/lib -L/nix/store/0gmcz9…-libX11-1.6.1/lib … ``` @@ -162,7 +162,7 @@ the `buildInputs = [ ... ]` shown above, not only package names. So the following is also legal: ```console -$ nix-shell -p sqlite 'git.override { withManual = false; }' +$ nix-shell --packages sqlite 'git.override { withManual = false; }' ``` The `-p` flag looks up Nixpkgs in the Nix search path. You can override @@ -171,7 +171,7 @@ gives you a shell containing the Pan package from a specific revision of Nixpkgs: ```console -$ nix-shell -p pan -I nixpkgs=https://github.com/NixOS/nixpkgs/archive/8a3eea054838b55aca962c3fbde9c83c102b8bf2.tar.gz +$ nix-shell --packages pan -I nixpkgs=https://github.com/NixOS/nixpkgs/archive/8a3eea054838b55aca962c3fbde9c83c102b8bf2.tar.gz [nix-shell:~]$ pan --version Pan 0.139 @@ -185,7 +185,7 @@ done by starting the script with the following lines: ```bash #! /usr/bin/env nix-shell -#! nix-shell -i real-interpreter -p packages +#! nix-shell -i real-interpreter --packages packages ``` where *real-interpreter* is the “real” script interpreter that will be @@ -202,7 +202,7 @@ For example, here is a Python script that depends on Python and the ```python #! /usr/bin/env nix-shell -#! nix-shell -i python -p python pythonPackages.prettytable +#! nix-shell -i python --packages python pythonPackages.prettytable import prettytable @@ -217,7 +217,7 @@ requires Perl and the `HTML::TokeParser::Simple` and `LWP` packages: ```perl #! /usr/bin/env nix-shell -#! nix-shell -i perl -p perl perlPackages.HTMLTokeParserSimple perlPackages.LWP +#! 
nix-shell -i perl --packages perl perlPackages.HTMLTokeParserSimple perlPackages.LWP use HTML::TokeParser::Simple; @@ -235,7 +235,7 @@ package like Terraform: ```bash #! /usr/bin/env nix-shell -#! nix-shell -i bash -p "terraform.withPlugins (plugins: [ plugins.openstack ])" +#! nix-shell -i bash --packages "terraform.withPlugins (plugins: [ plugins.openstack ])" terraform apply ``` @@ -251,7 +251,7 @@ branch): ```haskell #! /usr/bin/env nix-shell -#! nix-shell -i runghc -p "haskellPackages.ghcWithPackages (ps: [ps.download-curl ps.tagsoup])" +#! nix-shell -i runghc --packages "haskellPackages.ghcWithPackages (ps: [ps.download-curl ps.tagsoup])" #! nix-shell -I nixpkgs=https://github.com/NixOS/nixpkgs/archive/nixos-20.03.tar.gz import Network.Curl.Download diff --git a/doc/manual/src/command-ref/nix-store/dump.md b/doc/manual/src/command-ref/nix-store/dump.md index 62656d599..c2f3c42ef 100644 --- a/doc/manual/src/command-ref/nix-store/dump.md +++ b/doc/manual/src/command-ref/nix-store/dump.md @@ -23,7 +23,7 @@ produce the same NAR archive. For instance, directory entries are always sorted so that the actual on-disk order doesn’t influence the result. This means that the cryptographic hash of a NAR dump of a path is usable as a fingerprint of the contents of the path. Indeed, -the hashes of store paths stored in Nix’s database (see `nix-store -q +the hashes of store paths stored in Nix’s database (see `nix-store --query --hash`) are SHA-256 hashes of the NAR dump of each store path. NAR archives support filenames of unlimited length and 64-bit file diff --git a/doc/manual/src/command-ref/nix-store/export.md b/doc/manual/src/command-ref/nix-store/export.md index aeea38636..1bc46f53b 100644 --- a/doc/manual/src/command-ref/nix-store/export.md +++ b/doc/manual/src/command-ref/nix-store/export.md @@ -31,7 +31,7 @@ To copy a whole closure, do something like: ```console -$ nix-store --export $(nix-store -qR paths) > out +$ nix-store --export $(nix-store --query --requisites paths) > out ``` To import the whole closure again, run: diff --git a/doc/manual/src/command-ref/nix-store/opt-common.md b/doc/manual/src/command-ref/nix-store/opt-common.md index bf6566555..dd9a6bf21 100644 --- a/doc/manual/src/command-ref/nix-store/opt-common.md +++ b/doc/manual/src/command-ref/nix-store/opt-common.md @@ -11,7 +11,7 @@ The following options are allowed for all `nix-store` operations, but may not al be created in `/nix/var/nix/gcroots/auto/`. For instance, ```console - $ nix-store --add-root /home/eelco/bla/result -r ... + $ nix-store --add-root /home/eelco/bla/result --realise ... $ ls -l /nix/var/nix/gcroots/auto lrwxrwxrwx 1 ... 2005-03-13 21:10 dn54lcypm8f8... -> /home/eelco/bla/result diff --git a/doc/manual/src/command-ref/nix-store/query.md b/doc/manual/src/command-ref/nix-store/query.md index 9f7dbd3e8..cd45a4932 100644 --- a/doc/manual/src/command-ref/nix-store/query.md +++ b/doc/manual/src/command-ref/nix-store/query.md @@ -145,7 +145,7 @@ Print the closure (runtime dependencies) of the `svn` program in the current user environment: ```console -$ nix-store -qR $(which svn) +$ nix-store --query --requisites $(which svn) /nix/store/5mbglq5ldqld8sj57273aljwkfvj22mc-subversion-1.1.4 /nix/store/9lz9yc6zgmc0vlqmn2ipcpkjlmbi51vv-glibc-2.3.4 ... 
@@ -154,7 +154,7 @@ $ nix-store -qR $(which svn) Print the build-time dependencies of `svn`: ```console -$ nix-store -qR $(nix-store -qd $(which svn)) +$ nix-store --query --requisites $(nix-store --query --deriver $(which svn)) /nix/store/02iizgn86m42q905rddvg4ja975bk2i4-grep-2.5.1.tar.bz2.drv /nix/store/07a2bzxmzwz5hp58nf03pahrv2ygwgs3-gcc-wrapper.sh /nix/store/0ma7c9wsbaxahwwl04gbw3fcd806ski4-glibc-2.3.4.drv @@ -168,7 +168,7 @@ the derivation (`-qd`), not the closure of the output path that contains Show the build-time dependencies as a tree: ```console -$ nix-store -q --tree $(nix-store -qd $(which svn)) +$ nix-store --query --tree $(nix-store --query --deriver $(which svn)) /nix/store/7i5082kfb6yjbqdbiwdhhza0am2xvh6c-subversion-1.1.4.drv +---/nix/store/d8afh10z72n8l1cr5w42366abiblgn54-builder.sh +---/nix/store/fmzxmpjx2lh849ph0l36snfj9zdibw67-bash-3.0.drv @@ -180,7 +180,7 @@ $ nix-store -q --tree $(nix-store -qd $(which svn)) Show all paths that depend on the same OpenSSL library as `svn`: ```console -$ nix-store -q --referrers $(nix-store -q --binding openssl $(nix-store -qd $(which svn))) +$ nix-store --query --referrers $(nix-store --query --binding openssl $(nix-store --query --deriver $(which svn))) /nix/store/23ny9l9wixx21632y2wi4p585qhva1q8-sylpheed-1.0.0 /nix/store/5mbglq5ldqld8sj57273aljwkfvj22mc-subversion-1.1.4 /nix/store/dpmvp969yhdqs7lm2r1a3gng7pyq6vy4-subversion-1.1.3 @@ -191,7 +191,7 @@ Show all paths that directly or indirectly depend on the Glibc (C library) used by `svn`: ```console -$ nix-store -q --referrers-closure $(ldd $(which svn) | grep /libc.so | awk '{print $3}') +$ nix-store --query --referrers-closure $(ldd $(which svn) | grep /libc.so | awk '{print $3}') /nix/store/034a6h4vpz9kds5r6kzb9lhh81mscw43-libgnomeprintui-2.8.2 /nix/store/15l3yi0d45prm7a82pcrknxdh6nzmxza-gawk-3.1.4 ... @@ -204,7 +204,7 @@ Make a picture of the runtime dependency graph of the current user environment: ```console -$ nix-store -q --graph ~/.nix-profile | dot -Tps > graph.ps +$ nix-store --query --graph ~/.nix-profile | dot -Tps > graph.ps $ gv graph.ps ``` @@ -212,7 +212,7 @@ Show every garbage collector root that points to a store path that depends on `svn`: ```console -$ nix-store -q --roots $(which svn) +$ nix-store --query --roots $(which svn) /nix/var/nix/profiles/default-81-link /nix/var/nix/profiles/default-82-link /home/eelco/.local/state/nix/profiles/profile-97-link diff --git a/doc/manual/src/command-ref/nix-store/read-log.md b/doc/manual/src/command-ref/nix-store/read-log.md index 4a88e9382..d1ff17891 100644 --- a/doc/manual/src/command-ref/nix-store/read-log.md +++ b/doc/manual/src/command-ref/nix-store/read-log.md @@ -27,7 +27,7 @@ substitute, then the log is unavailable. 
# Example ```console -$ nix-store -l $(which ktorrent) +$ nix-store --read-log $(which ktorrent) building /nix/store/dhc73pvzpnzxhdgpimsd9sw39di66ph1-ktorrent-2.2.1 unpacking sources unpacking source archive /nix/store/p8n1jpqs27mgkjw07pb5269717nzf5f8-ktorrent-2.2.1.tar.gz diff --git a/doc/manual/src/command-ref/nix-store/realise.md b/doc/manual/src/command-ref/nix-store/realise.md index f61a20100..6b50d2145 100644 --- a/doc/manual/src/command-ref/nix-store/realise.md +++ b/doc/manual/src/command-ref/nix-store/realise.md @@ -99,7 +99,7 @@ This operation is typically used to build [store derivation]s produced by [store derivation]: @docroot@/glossary.md#gloss-store-derivation ```console -$ nix-store -r $(nix-instantiate ./test.nix) +$ nix-store --realise $(nix-instantiate ./test.nix) /nix/store/31axcgrlbfsxzmfff1gyj1bf62hvkby2-aterm-2.3.1 ``` @@ -108,7 +108,7 @@ This is essentially what [`nix-build`](@docroot@/command-ref/nix-build.md) does. To test whether a previously-built derivation is deterministic: ```console -$ nix-build '' -A hello --check -K +$ nix-build '' --attr hello --check -K ``` Use [`nix-store --read-log`](./read-log.md) to show the stderr and stdout of a build: diff --git a/doc/manual/src/command-ref/nix-store/verify-path.md b/doc/manual/src/command-ref/nix-store/verify-path.md index 59ffe92a3..927201599 100644 --- a/doc/manual/src/command-ref/nix-store/verify-path.md +++ b/doc/manual/src/command-ref/nix-store/verify-path.md @@ -24,6 +24,6 @@ path has changed, and 1 otherwise. To verify the integrity of the `svn` command and all its dependencies: ```console -$ nix-store --verify-path $(nix-store -qR $(which svn)) +$ nix-store --verify-path $(nix-store --query --requisites $(which svn)) ``` diff --git a/doc/manual/src/command-ref/opt-common.md b/doc/manual/src/command-ref/opt-common.md index 7a012250d..54c0a1d0d 100644 --- a/doc/manual/src/command-ref/opt-common.md +++ b/doc/manual/src/command-ref/opt-common.md @@ -162,11 +162,11 @@ Most Nix commands accept the following command-line options: }: ... ``` - So if you call this Nix expression (e.g., when you do `nix-env -iA + So if you call this Nix expression (e.g., when you do `nix-env --install --attr pkgname`), the function will be called automatically using the value [`builtins.currentSystem`](@docroot@/language/builtins.md) for the `system` argument. You can override this using `--arg`, e.g., - `nix-env -iA pkgname --arg system \"i686-freebsd\"`. (Note that + `nix-env --install --attr pkgname --arg system \"i686-freebsd\"`. (Note that since the argument is a Nix string literal, you have to escape the quotes.) @@ -199,7 +199,7 @@ Most Nix commands accept the following command-line options: For `nix-shell`, this option is commonly used to give you a shell in which you can build the packages returned by the expression. If you want to get a shell which contain the *built* packages ready for - use, give your expression to the `nix-shell -p` convenience flag + use, give your expression to the `nix-shell --packages ` convenience flag instead. 
- [`-I`](#opt-I) *path*\ diff --git a/doc/manual/src/contributing/hacking.md b/doc/manual/src/contributing/hacking.md index ca69f076a..b954a2167 100644 --- a/doc/manual/src/contributing/hacking.md +++ b/doc/manual/src/contributing/hacking.md @@ -77,7 +77,7 @@ $ nix-shell To get a shell with one of the other [supported compilation environments](#compilation-environments): ```console -$ nix-shell -A devShells.x86_64-linux.native-clang11StdenvPackages +$ nix-shell --attr devShells.x86_64-linux.native-clang11StdenvPackages ``` > **Note** @@ -139,7 +139,7 @@ $ nix build .#packages.aarch64-linux.default for flake-enabled Nix, or ```console -$ nix-build -A packages.aarch64-linux.default +$ nix-build --attr packages.aarch64-linux.default ``` for classic Nix. @@ -166,7 +166,7 @@ $ nix build .#nix-ccacheStdenv for flake-enabled Nix, or ```console -$ nix-build -A nix-ccacheStdenv +$ nix-build --attr nix-ccacheStdenv ``` for classic Nix. diff --git a/doc/manual/src/glossary.md b/doc/manual/src/glossary.md index eeb19ad50..e142bd415 100644 --- a/doc/manual/src/glossary.md +++ b/doc/manual/src/glossary.md @@ -101,11 +101,8 @@ derivation. - [output-addressed store object]{#gloss-output-addressed-store-object}\ - A store object whose store path hashes its content. This - includes derivations, the outputs of - [content-addressed derivations](#gloss-content-addressed-derivation), - and the outputs of - [fixed-output derivations](#gloss-fixed-output-derivation). + A [store object] whose [store path] is determined by its contents. + This includes derivations, the outputs of [content-addressed derivations](#gloss-content-addressed-derivation), and the outputs of [fixed-output derivations](#gloss-fixed-output-derivation). - [substitute]{#gloss-substitute}\ A substitute is a command invocation stored in the [Nix database] that @@ -163,7 +160,7 @@ build-time dependencies, while the closure of its output path is equivalent to its runtime dependencies. For correct deployment it is necessary to deploy whole closures, since otherwise at runtime - files could be missing. The command `nix-store -qR` prints out + files could be missing. The command `nix-store --query --requisites ` prints out closures of store paths. As an example, if the [store object] at path `P` contains a [reference] diff --git a/doc/manual/src/installation/installing-binary.md b/doc/manual/src/installation/installing-binary.md index 525654d35..ffabb250a 100644 --- a/doc/manual/src/installation/installing-binary.md +++ b/doc/manual/src/installation/installing-binary.md @@ -47,12 +47,6 @@ The install script will modify the first writable file from amongst `NIX_INSTALLER_NO_MODIFY_PROFILE` environment variable before executing the install script to disable this behaviour. -You can uninstall Nix simply by running: - -```console -$ rm -rf /nix -``` - # Multi User Installation The multi-user Nix installation creates system users, and a system @@ -84,155 +78,8 @@ The installer will modify `/etc/bashrc`, and `/etc/zshrc` if they exist. The installer will first back up these files with a `.backup-before-nix` extension. The installer will also create `/etc/profile.d/nix.sh`. -## Uninstalling - -### Linux - -If you are on Linux with systemd: - -1. Remove the Nix daemon service: - - ```console - sudo systemctl stop nix-daemon.service - sudo systemctl disable nix-daemon.socket nix-daemon.service - sudo systemctl daemon-reload - ``` - -1. 
Remove systemd service files: - - ```console - sudo rm /etc/systemd/system/nix-daemon.service /etc/systemd/system/nix-daemon.socket - ``` - -1. The installer script uses systemd-tmpfiles to create the socket directory. - You may also want to remove the configuration for that: - - ```console - sudo rm /etc/tmpfiles.d/nix-daemon.conf - ``` - -Remove files created by Nix: - -```console -sudo rm -rf /nix /etc/nix /etc/profile/nix.sh ~root/.nix-profile ~root/.nix-defexpr ~root/.nix-channels ~/.nix-profile ~/.nix-defexpr ~/.nix-channels -``` - -Remove build users and their group: - -```console -for i in $(seq 1 32); do - sudo userdel nixbld$i -done -sudo groupdel nixbld -``` - -There may also be references to Nix in - -- `/etc/profile` -- `/etc/bashrc` -- `/etc/zshrc` - -which you may remove. - -### macOS - -1. Edit `/etc/zshrc`, `/etc/bashrc`, and `/etc/bash.bashrc` to remove the lines sourcing - `nix-daemon.sh`, which should look like this: - - ```bash - # Nix - if [ -e '/nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh' ]; then - . '/nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh' - fi - # End Nix - ``` - - If these files haven't been altered since installing Nix you can simply put - the backups back in place: - - ```console - sudo mv /etc/zshrc.backup-before-nix /etc/zshrc - sudo mv /etc/bashrc.backup-before-nix /etc/bashrc - sudo mv /etc/bash.bashrc.backup-before-nix /etc/bash.bashrc - ``` - - This will stop shells from sourcing the file and bringing everything you - installed using Nix in scope. - -2. Stop and remove the Nix daemon services: - - ```console - sudo launchctl unload /Library/LaunchDaemons/org.nixos.nix-daemon.plist - sudo rm /Library/LaunchDaemons/org.nixos.nix-daemon.plist - sudo launchctl unload /Library/LaunchDaemons/org.nixos.darwin-store.plist - sudo rm /Library/LaunchDaemons/org.nixos.darwin-store.plist - ``` - - This stops the Nix daemon and prevents it from being started next time you - boot the system. - -3. Remove the `nixbld` group and the `_nixbuildN` users: - - ```console - sudo dscl . -delete /Groups/nixbld - for u in $(sudo dscl . -list /Users | grep _nixbld); do sudo dscl . -delete /Users/$u; done - ``` - - This will remove all the build users that no longer serve a purpose. - -4. Edit fstab using `sudo vifs` to remove the line mounting the Nix Store - volume on `/nix`, which looks like - `UUID= /nix apfs rw,noauto,nobrowse,suid,owners` or - `LABEL=Nix\040Store /nix apfs rw,nobrowse`. This will prevent automatic - mounting of the Nix Store volume. - -5. Edit `/etc/synthetic.conf` to remove the `nix` line. If this is the only - line in the file you can remove it entirely, `sudo rm /etc/synthetic.conf`. - This will prevent the creation of the empty `/nix` directory to provide a - mountpoint for the Nix Store volume. - -6. Remove the files Nix added to your system: - - ```console - sudo rm -rf /etc/nix /var/root/.nix-profile /var/root/.nix-defexpr /var/root/.nix-channels ~/.nix-profile ~/.nix-defexpr ~/.nix-channels - ``` - - This gets rid of any data Nix may have created except for the store which is - removed next. - -7. Remove the Nix Store volume: - - ```console - sudo diskutil apfs deleteVolume /nix - ``` - - This will remove the Nix Store volume and everything that was added to the - store. - - If the output indicates that the command couldn't remove the volume, you should - make sure you don't have an _unmounted_ Nix Store volume. 
Look for a - "Nix Store" volume in the output of the following command: - - ```console - diskutil list - ``` - - If you _do_ see a "Nix Store" volume, delete it by re-running the diskutil - deleteVolume command, but replace `/nix` with the store volume's `diskXsY` - identifier. - -> **Note** -> -> After you complete the steps here, you will still have an empty `/nix` -> directory. This is an expected sign of a successful uninstall. The empty -> `/nix` directory will disappear the next time you reboot. -> -> You do not have to reboot to finish uninstalling Nix. The uninstall is -> complete. macOS (Catalina+) directly controls root directories and its -> read-only root will prevent you from manually deleting the empty `/nix` -> mountpoint. - # macOS Installation + []{#sect-macos-installation-change-store-prefix}[]{#sect-macos-installation-encrypted-volume}[]{#sect-macos-installation-symlink}[]{#sect-macos-installation-recommended-notes} @@ -281,19 +128,16 @@ this to run the installer, but it may help if you run into trouble: # Installing a pinned Nix version from a URL -NixOS.org hosts version-specific installation URLs for all Nix versions -since 1.11.16, at `https://releases.nixos.org/nix/nix-version/install`. +Version-specific installation URLs for all Nix versions +since 1.11.16 can be found at [releases.nixos.org](https://releases.nixos.org/?prefix=nix/). +The corresponding SHA-256 hash can be found in the directory for the given version. -These install scripts can be used the same as the main NixOS.org -installation script: +These install scripts can be used the same as usual: ```console -$ curl -L https://nixos.org/nix/install | sh +$ curl -L https://releases.nixos.org/nix/nix-/install | sh ``` -In the same directory of the install script are sha256 sums, and gpg -signature files. - # Installing from a binary tarball You can also download a binary tarball that contains Nix and all its diff --git a/doc/manual/src/installation/uninstall.md b/doc/manual/src/installation/uninstall.md new file mode 100644 index 000000000..9ead5e53c --- /dev/null +++ b/doc/manual/src/installation/uninstall.md @@ -0,0 +1,148 @@ +# Uninstalling Nix + +## Single User + +If you have a [single-user installation](./installing-binary.md#single-user-installation) of Nix, uninstall it by running: + +```console +$ rm -rf /nix +``` + +## Multi User + +Removing a [multi-user installation](./installing-binary.md#multi-user-installation) of Nix is more involved, and depends on the operating system. + +### Linux + +If you are on Linux with systemd: + +1. Remove the Nix daemon service: + + ```console + sudo systemctl stop nix-daemon.service + sudo systemctl disable nix-daemon.socket nix-daemon.service + sudo systemctl daemon-reload + ``` + +Remove files created by Nix: + +```console +sudo rm -rf /etc/nix /etc/profile.d/nix.sh /etc/tmpfiles.d/nix-daemon.conf /nix ~root/.nix-channels ~root/.nix-defexpr ~root/.nix-profile +``` + +Remove build users and their group: + +```console +for i in $(seq 1 32); do + sudo userdel nixbld$i +done +sudo groupdel nixbld +``` + +There may also be references to Nix in + +- `/etc/bash.bashrc` +- `/etc/bashrc` +- `/etc/profile` +- `/etc/zsh/zshrc` +- `/etc/zshrc` + +which you may remove. + +### macOS + +1. Edit `/etc/zshrc`, `/etc/bashrc`, and `/etc/bash.bashrc` to remove the lines sourcing `nix-daemon.sh`, which should look like this: + + ```bash + # Nix + if [ -e '/nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh' ]; then + . 
'/nix/var/nix/profiles/default/etc/profile.d/nix-daemon.sh' + fi + # End Nix + ``` + + If these files haven't been altered since installing Nix you can simply put + the backups back in place: + + ```console + sudo mv /etc/zshrc.backup-before-nix /etc/zshrc + sudo mv /etc/bashrc.backup-before-nix /etc/bashrc + sudo mv /etc/bash.bashrc.backup-before-nix /etc/bash.bashrc + ``` + + This will stop shells from sourcing the file and bringing everything you + installed using Nix in scope. + +2. Stop and remove the Nix daemon services: + + ```console + sudo launchctl unload /Library/LaunchDaemons/org.nixos.nix-daemon.plist + sudo rm /Library/LaunchDaemons/org.nixos.nix-daemon.plist + sudo launchctl unload /Library/LaunchDaemons/org.nixos.darwin-store.plist + sudo rm /Library/LaunchDaemons/org.nixos.darwin-store.plist + ``` + + This stops the Nix daemon and prevents it from being started next time you + boot the system. + +3. Remove the `nixbld` group and the `_nixbuildN` users: + + ```console + sudo dscl . -delete /Groups/nixbld + for u in $(sudo dscl . -list /Users | grep _nixbld); do sudo dscl . -delete /Users/$u; done + ``` + + This will remove all the build users that no longer serve a purpose. + +4. Edit fstab using `sudo vifs` to remove the line mounting the Nix Store + volume on `/nix`, which looks like + `UUID= /nix apfs rw,noauto,nobrowse,suid,owners` or + `LABEL=Nix\040Store /nix apfs rw,nobrowse`. This will prevent automatic + mounting of the Nix Store volume. + +5. Edit `/etc/synthetic.conf` to remove the `nix` line. If this is the only + line in the file you can remove it entirely, `sudo rm /etc/synthetic.conf`. + This will prevent the creation of the empty `/nix` directory to provide a + mountpoint for the Nix Store volume. + +6. Remove the files Nix added to your system: + + ```console + sudo rm -rf /etc/nix /var/root/.nix-profile /var/root/.nix-defexpr /var/root/.nix-channels ~/.nix-profile ~/.nix-defexpr ~/.nix-channels + ``` + + This gets rid of any data Nix may have created except for the store which is + removed next. + +7. Remove the Nix Store volume: + + ```console + sudo diskutil apfs deleteVolume /nix + ``` + + This will remove the Nix Store volume and everything that was added to the + store. + + If the output indicates that the command couldn't remove the volume, you should + make sure you don't have an _unmounted_ Nix Store volume. Look for a + "Nix Store" volume in the output of the following command: + + ```console + diskutil list + ``` + + If you _do_ see a "Nix Store" volume, delete it by re-running the diskutil + deleteVolume command, but replace `/nix` with the store volume's `diskXsY` + identifier. + +> **Note** +> +> After you complete the steps here, you will still have an empty `/nix` +> directory. This is an expected sign of a successful uninstall. The empty +> `/nix` directory will disappear the next time you reboot. +> +> You do not have to reboot to finish uninstalling Nix. The uninstall is +> complete. macOS (Catalina+) directly controls root directories and its +> read-only root will prevent you from manually deleting the empty `/nix` +> mountpoint. 
+ diff --git a/doc/manual/src/installation/upgrading.md b/doc/manual/src/installation/upgrading.md index 24efc4681..6d09f54d8 100644 --- a/doc/manual/src/installation/upgrading.md +++ b/doc/manual/src/installation/upgrading.md @@ -2,13 +2,13 @@ Multi-user Nix users on macOS can upgrade Nix by running: `sudo -i sh -c 'nix-channel --update && -nix-env -iA nixpkgs.nix && +nix-env --install --attr nixpkgs.nix && launchctl remove org.nixos.nix-daemon && launchctl load /Library/LaunchDaemons/org.nixos.nix-daemon.plist'` Single-user installations of Nix should run this: `nix-channel --update; -nix-env -iA nixpkgs.nix nixpkgs.cacert` +nix-env --install --attr nixpkgs.nix nixpkgs.cacert` Multi-user Nix users on Linux should run this with sudo: `nix-channel ---update; nix-env -iA nixpkgs.nix nixpkgs.cacert; systemctl +--update; nix-env --install --attr nixpkgs.nix nixpkgs.cacert; systemctl daemon-reload; systemctl restart nix-daemon` diff --git a/doc/manual/src/introduction.md b/doc/manual/src/introduction.md index b54346db8..76489bc1b 100644 --- a/doc/manual/src/introduction.md +++ b/doc/manual/src/introduction.md @@ -76,7 +76,7 @@ there after an upgrade. This means that you can _roll back_ to the old version: ```console -$ nix-env --upgrade -A nixpkgs.some-package +$ nix-env --upgrade --attr nixpkgs.some-package $ nix-env --rollback ``` @@ -122,7 +122,7 @@ Nix expressions generally describe how to build a package from source, so an installation action like ```console -$ nix-env --install -A nixpkgs.firefox +$ nix-env --install --attr nixpkgs.firefox ``` _could_ cause quite a bit of build activity, as not only Firefox but @@ -158,7 +158,7 @@ Pan newsreader, as described by [its Nix expression](https://github.com/NixOS/nixpkgs/blob/master/pkgs/applications/networking/newsreaders/pan/default.nix): ```console -$ nix-shell '' -A pan +$ nix-shell '' --attr pan ``` You’re then dropped into a shell where you can edit, build and test diff --git a/doc/manual/src/language/builtin-constants.md b/doc/manual/src/language/builtin-constants.md index 78d066a82..e6bc7e915 100644 --- a/doc/manual/src/language/builtin-constants.md +++ b/doc/manual/src/language/builtin-constants.md @@ -1,20 +1,43 @@ # Built-in Constants -Here are the constants built into the Nix expression evaluator: +These constants are built into the Nix language evaluator: - - `builtins`\ - The set `builtins` contains all the built-in functions and values. - You can use `builtins` to test for the availability of features in - the Nix installation, e.g., - - ```nix - if builtins ? getEnv then builtins.getEnv "PATH" else "" - ``` - - This allows a Nix expression to fall back gracefully on older Nix - installations that don’t have the desired built-in function. +- [`builtins`]{#builtins-builtins} (attribute set) - - [`builtins.currentSystem`]{#builtins-currentSystem}\ - The built-in value `currentSystem` evaluates to the Nix platform - identifier for the Nix installation on which the expression is being - evaluated, such as `"i686-linux"` or `"x86_64-darwin"`. + Contains all the [built-in functions](./builtins.md) and values, in order to avoid polluting the global scope. + + Since built-in functions were added over time, [testing for attributes](./operators.md#has-attribute) in `builtins` can be used for graceful fallback on older Nix installations: + + ```nix + if builtins ? 
getEnv then builtins.getEnv "PATH" else "" + ``` + +- [`builtins.currentSystem`]{#builtins-currentSystem} (string) + + The built-in value `currentSystem` evaluates to the Nix platform + identifier for the Nix installation on which the expression is being + evaluated, such as `"i686-linux"` or `"x86_64-darwin"`. + + Not available in [pure evaluation mode](@docroot@/command-ref/conf-file.md#conf-pure-eval). + +- [`builtins.currentTime`]{#builtins-currentTime} (integer) + + Return the [Unix time](https://en.wikipedia.org/wiki/Unix_time) at first evaluation. + Repeated references to that name will re-use the initially obtained value. + + Example: + + ```console + $ nix repl + Welcome to Nix 2.15.1 Type :? for help. + + nix-repl> builtins.currentTime + 1683705525 + + nix-repl> builtins.currentTime + 1683705525 + ``` + + The [store path](@docroot@/glossary.md#gloss-store-path) of a derivation depending on `currentTime` will differ for each evaluation. + + Not available in [pure evaluation mode](@docroot@/command-ref/conf-file.md#conf-pure-eval). diff --git a/doc/manual/src/language/builtins-prefix.md b/doc/manual/src/language/builtins-prefix.md index c631a8453..35e3dccc3 100644 --- a/doc/manual/src/language/builtins-prefix.md +++ b/doc/manual/src/language/builtins-prefix.md @@ -1,16 +1,16 @@ # Built-in Functions -This section lists the functions built into the Nix expression -evaluator. (The built-in function `derivation` is discussed above.) -Some built-ins, such as `derivation`, are always in scope of every Nix -expression; you can just access them right away. But to prevent -polluting the namespace too much, most built-ins are not in -scope. Instead, you can access them through the `builtins` built-in -value, which is a set that contains all built-in functions and values. -For instance, `derivation` is also available as `builtins.derivation`. +This section lists the functions built into the Nix language evaluator. +All built-in functions are available through the global [`builtins`](./builtin-constants.md#builtins-builtins) constant. + +For convenience, some built-ins are can be accessed directly: + +- [`derivation`](#builtins-derivation) +- [`import`](#builtins-import) +- [`abort`](#builtins-abort) +- [`throw`](#builtins-throw)
 <dl>
-  <dt><code>derivation <var>attrs</var></code>;
-  <code>builtins.derivation <var>attrs</var></code></dt>
+  <dt><code>derivation <var>attrs</var></code></dt>
   <dd><p><var>derivation</var> is described in its own section.</p></dd>
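The hunks above for `builtin-constants.md` and `builtins-prefix.md` distinguish the few built-ins that are in global scope from everything else, which is only reachable through `builtins`. A minimal sketch of that distinction (an editorial illustration, not part of the patch; `attrNames` and `getEnv` are arbitrary picks, and the `builtins ? getEnv` fallback mirrors the example in the patched text):

```nix
# `throw` is in global scope; `attrNames` and `getEnv` are only
# available as attributes of `builtins`.
let
  names = builtins.attrNames { x = 1; y = 2; };   # [ "x" "y" ]
  home = if builtins ? getEnv then builtins.getEnv "HOME" else "";
in
  if names == [ "x" "y" ] then home else throw "unexpected attribute names"
```

Evaluating this with `nix-instantiate --eval` should print the value of `$HOME`.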
diff --git a/doc/manual/src/language/constructs.md b/doc/manual/src/language/constructs.md index 1c01f2cc7..c53eb8889 100644 --- a/doc/manual/src/language/constructs.md +++ b/doc/manual/src/language/constructs.md @@ -2,8 +2,11 @@ ## Recursive sets -Recursive sets are just normal sets, but the attributes can refer to -each other. For example, +Recursive sets are like normal [attribute sets](./values.md#attribute-set), but the attributes can refer to each other. + +> *rec-attrset* = `rec {` [ *name* `=` *expr* `;` `]`... `}` + +Example: ```nix rec { @@ -12,7 +15,9 @@ rec { }.x ``` -evaluates to `123`. Note that without `rec` the binding `x = y;` would +This evaluates to `123`. + +Note that without `rec` the binding `x = y;` would refer to the variable `y` in the surrounding scope, if one exists, and would be invalid if no such variable exists. That is, in a normal (non-recursive) set, attributes are not added to the lexical scope; in a @@ -33,7 +38,10 @@ will crash with an `infinite recursion encountered` error message. ## Let-expressions A let-expression allows you to define local variables for an expression. -For instance, + +> *let-in* = `let` [ *identifier* = *expr* ]... `in` *expr* + +Example: ```nix let @@ -42,18 +50,19 @@ let in x + y ``` -evaluates to `"foobar"`. +This evaluates to `"foobar"`. ## Inheriting attributes -When defining a set or in a let-expression it is often convenient to -copy variables from the surrounding lexical scope (e.g., when you want -to propagate attributes). This can be shortened using the `inherit` -keyword. For instance, +When defining an [attribute set](./values.md#attribute-set) or in a [let-expression](#let-expressions) it is often convenient to copy variables from the surrounding lexical scope (e.g., when you want to propagate attributes). +This can be shortened using the `inherit` keyword. + +Example: ```nix let x = 123; in -{ inherit x; +{ + inherit x; y = 456; } ``` @@ -62,15 +71,23 @@ is equivalent to ```nix let x = 123; in -{ x = x; +{ + x = x; y = 456; } ``` -and both evaluate to `{ x = 123; y = 456; }`. (Note that this works -because `x` is added to the lexical scope by the `let` construct.) It is -also possible to inherit attributes from another set. For instance, in -this fragment from `all-packages.nix`, +and both evaluate to `{ x = 123; y = 456; }`. + +> **Note** +> +> This works because `x` is added to the lexical scope by the `let` construct. + +It is also possible to inherit attributes from another attribute set. + +Example: + +In this fragment from `all-packages.nix`, ```nix graphviz = (import ../tools/graphics/graphviz) { diff --git a/doc/manual/src/language/operators.md b/doc/manual/src/language/operators.md index a07d976ad..f8382ae19 100644 --- a/doc/manual/src/language/operators.md +++ b/doc/manual/src/language/operators.md @@ -35,17 +35,14 @@ ## Attribute selection +> *attrset* `.` *attrpath* \[ `or` *expr* \] + Select the attribute denoted by attribute path *attrpath* from [attribute set] *attrset*. -If the attribute doesn’t exist, return *value* if provided, otherwise abort evaluation. +If the attribute doesn’t exist, return the *expr* after `or` if provided, otherwise abort evaluation. - +An attribute path is a dot-separated list of [attribute names](./values.md#attribute-set). -An attribute path is a dot-separated list of attribute names. -An attribute name can be an identifier or a string. - -> *attrpath* = *name* [ `.` *name* ]... 
\ -> *name* = *identifier* | *string* \ -> *identifier* ~ `[a-zA-Z_][a-zA-Z0-9_'-]*` +> *attrpath* = *name* [ `.` *name* ]... [Attribute selection]: #attribute-selection diff --git a/doc/manual/src/language/values.md b/doc/manual/src/language/values.md index c85124278..2ae3e143a 100644 --- a/doc/manual/src/language/values.md +++ b/doc/manual/src/language/values.md @@ -164,9 +164,17 @@ Note that lists are only lazy in values, and they are strict in length. An attribute set is a collection of name-value-pairs (called *attributes*) enclosed in curly brackets (`{ }`). +An attribute name can be an identifier or a [string](#string). +An identifier must start with a letter (`a-z`, `A-Z`) or underscore (`_`), and can otherwise contain letters (`a-z`, `A-Z`), numbers (`0-9`), underscores (`_`), apostrophes (`'`), or dashes (`-`). + +> *name* = *identifier* | *string* \ +> *identifier* ~ `[a-zA-Z_][a-zA-Z0-9_'-]*` + Names and values are separated by an equal sign (`=`). Each value is an arbitrary expression terminated by a semicolon (`;`). +> *attrset* = `{` [ *name* `=` *expr* `;` `]`... `}` + Attributes can appear in any order. An attribute name may only occur once. @@ -182,21 +190,29 @@ Example: This defines a set with attributes named `x`, `text`, `y`. -Attributes can be selected from a set using the `.` operator. For -instance, +Attributes can be accessed with the [`.` operator](./operators.md#attribute-selection). + +Example: ```nix { a = "Foo"; b = "Bar"; }.a ``` -evaluates to `"Foo"`. It is possible to provide a default value in an -attribute selection using the `or` keyword. For example, +This evaluates to `"Foo"`. + +It is possible to provide a default value in an attribute selection using the `or` keyword. + +Example: ```nix { a = "Foo"; b = "Bar"; }.c or "Xyzzy" ``` -will evaluate to `"Xyzzy"` because there is no `c` attribute in the set. +```nix +{ a = "Foo"; b = "Bar"; }.c.d.e.f.g or "Xyzzy" +``` + +will both evaluate to `"Xyzzy"` because there is no `c` attribute in the set. You can use arbitrary double-quoted strings as attribute names: diff --git a/doc/manual/src/package-management/basic-package-mgmt.md b/doc/manual/src/package-management/basic-package-mgmt.md index 5f1d7a89c..6b86e763e 100644 --- a/doc/manual/src/package-management/basic-package-mgmt.md +++ b/doc/manual/src/package-management/basic-package-mgmt.md @@ -47,7 +47,7 @@ $ nix-channel --update You can view the set of available packages in Nixpkgs: ```console -$ nix-env -qaP +$ nix-env --query --available --attr-path nixpkgs.aterm aterm-2.2 nixpkgs.bash bash-3.0 nixpkgs.binutils binutils-2.15 @@ -65,7 +65,7 @@ If you downloaded Nixpkgs yourself, or if you checked it out from GitHub, then you need to pass the path to your Nixpkgs tree using the `-f` flag: ```console -$ nix-env -qaPf /path/to/nixpkgs +$ nix-env --query --available --attr-path --file /path/to/nixpkgs aterm aterm-2.2 bash bash-3.0 … @@ -77,7 +77,7 @@ Nixpkgs. 
You can filter the packages by name: ```console -$ nix-env -qaP firefox +$ nix-env --query --available --attr-path firefox nixpkgs.firefox-esr firefox-91.3.0esr nixpkgs.firefox firefox-94.0.1 ``` @@ -85,7 +85,7 @@ nixpkgs.firefox firefox-94.0.1 and using regular expressions: ```console -$ nix-env -qaP 'firefox.*' +$ nix-env --query --available --attr-path 'firefox.*' ``` It is also possible to see the *status* of available packages, i.e., @@ -93,7 +93,7 @@ whether they are installed into the user environment and/or present in the system: ```console -$ nix-env -qaPs +$ nix-env --query --available --attr-path --status … -PS nixpkgs.bash bash-3.0 --S nixpkgs.binutils binutils-2.15 @@ -110,10 +110,10 @@ which is Nix’s mechanism for doing binary deployment. It just means that Nix knows that it can fetch a pre-built package from somewhere (typically a network server) instead of building it locally. -You can install a package using `nix-env -iA`. For instance, +You can install a package using `nix-env --install --attr `. For instance, ```console -$ nix-env -iA nixpkgs.subversion +$ nix-env --install --attr nixpkgs.subversion ``` will install the package called `subversion` from `nixpkgs` channel (which is, of course, the @@ -143,14 +143,14 @@ instead of the attribute path, as `nix-env` does not record which attribute was used for installing: ```console -$ nix-env -e subversion +$ nix-env --uninstall subversion ``` Upgrading to a new version is just as easy. If you have a new release of Nix Packages, you can do: ```console -$ nix-env -uA nixpkgs.subversion +$ nix-env --upgrade --attr nixpkgs.subversion ``` This will *only* upgrade Subversion if there is a “newer” version in the @@ -163,15 +163,15 @@ whatever version is in the Nix expressions, use `-i` instead of `-u`; You can also upgrade all packages for which there are newer versions: ```console -$ nix-env -u +$ nix-env --upgrade ``` Sometimes it’s useful to be able to ask what `nix-env` would do, without actually doing it. 
For instance, to find out what packages would be -upgraded by `nix-env -u`, you can do +upgraded by `nix-env --upgrade `, you can do ```console -$ nix-env -u --dry-run +$ nix-env --upgrade --dry-run (dry run; not doing anything) upgrading `libxslt-1.1.0' to `libxslt-1.1.10' upgrading `graphviz-1.10' to `graphviz-1.12' diff --git a/doc/manual/src/package-management/binary-cache-substituter.md b/doc/manual/src/package-management/binary-cache-substituter.md index 5befad9f8..855eaf470 100644 --- a/doc/manual/src/package-management/binary-cache-substituter.md +++ b/doc/manual/src/package-management/binary-cache-substituter.md @@ -9,7 +9,7 @@ The daemon that handles binary cache requests via HTTP, `nix-serve`, is not part of the Nix distribution, but you can install it from Nixpkgs: ```console -$ nix-env -iA nixpkgs.nix-serve +$ nix-env --install --attr nixpkgs.nix-serve ``` You can then start the server, listening for HTTP connections on @@ -35,7 +35,7 @@ On the client side, you can tell Nix to use your binary cache using `--substituters`, e.g.: ```console -$ nix-env -iA nixpkgs.firefox --substituters http://avalon:8080/ +$ nix-env --install --attr nixpkgs.firefox --substituters http://avalon:8080/ ``` The option `substituters` tells Nix to use this binary cache in diff --git a/doc/manual/src/package-management/channels.md b/doc/manual/src/package-management/channels.md index 93c8b41a6..8e4da180b 100644 --- a/doc/manual/src/package-management/channels.md +++ b/doc/manual/src/package-management/channels.md @@ -43,7 +43,7 @@ operations (via the symlink `~/.nix-defexpr/channels`). Consequently, you can then say ```console -$ nix-env -u +$ nix-env --upgrade ``` to upgrade all packages in your profile to the latest versions available diff --git a/doc/manual/src/package-management/copy-closure.md b/doc/manual/src/package-management/copy-closure.md index d3fac4d76..14326298b 100644 --- a/doc/manual/src/package-management/copy-closure.md +++ b/doc/manual/src/package-management/copy-closure.md @@ -15,7 +15,7 @@ With `nix-store path (that is, the path and all its dependencies) to a file, and then unpack that file into another Nix store. For example, - $ nix-store --export $(nix-store -qR $(type -p firefox)) > firefox.closure + $ nix-store --export $(nix-store --query --requisites $(type -p firefox)) > firefox.closure writes the closure of Firefox to a file. You can then copy this file to another machine and install the closure: @@ -27,7 +27,7 @@ store are ignored. It is also possible to pipe the export into another command, e.g. to copy and install a closure directly to/on another machine: - $ nix-store --export $(nix-store -qR $(type -p firefox)) | bzip2 | \ + $ nix-store --export $(nix-store --query --requisites $(type -p firefox)) | bzip2 | \ ssh alice@itchy.example.org "bunzip2 | nix-store --import" However, `nix-copy-closure` is generally more efficient because it only diff --git a/doc/manual/src/package-management/profiles.md b/doc/manual/src/package-management/profiles.md index d1a2580d4..1d9e672a8 100644 --- a/doc/manual/src/package-management/profiles.md +++ b/doc/manual/src/package-management/profiles.md @@ -39,7 +39,7 @@ just Subversion 1.1.2 (arrows in the figure indicate symlinks). This would be what we would obtain if we had done ```console -$ nix-env -iA nixpkgs.subversion +$ nix-env --install --attr nixpkgs.subversion ``` on a set of Nix expressions that contained Subversion 1.1.2. @@ -54,7 +54,7 @@ environment is generated based on the current one. 
For instance, generation 43 was created from generation 42 when we did ```console -$ nix-env -iA nixpkgs.subversion nixpkgs.firefox +$ nix-env --install --attr nixpkgs.subversion nixpkgs.firefox ``` on a set of Nix expressions that contained Firefox and a new version of @@ -127,7 +127,7 @@ All `nix-env` operations work on the profile pointed to by (abbreviation `-p`): ```console -$ nix-env -p /nix/var/nix/profiles/other-profile -iA nixpkgs.subversion +$ nix-env --profile /nix/var/nix/profiles/other-profile --install --attr nixpkgs.subversion ``` This will *not* change the `~/.nix-profile` symlink. diff --git a/doc/manual/src/package-management/ssh-substituter.md b/doc/manual/src/package-management/ssh-substituter.md index c59933f61..7014c3cc8 100644 --- a/doc/manual/src/package-management/ssh-substituter.md +++ b/doc/manual/src/package-management/ssh-substituter.md @@ -6,7 +6,7 @@ automatically fetching any store paths in Firefox’s closure if they are available on the server `avalon`: ```console -$ nix-env -iA nixpkgs.firefox --substituters ssh://alice@avalon +$ nix-env --install --attr nixpkgs.firefox --substituters ssh://alice@avalon ``` This works similar to the binary cache substituter that Nix usually @@ -25,7 +25,7 @@ You can also copy the closure of some store path, without installing it into your profile, e.g. ```console -$ nix-store -r /nix/store/m85bxg…-firefox-34.0.5 --substituters +$ nix-store --realise /nix/store/m85bxg…-firefox-34.0.5 --substituters ssh://alice@avalon ``` diff --git a/doc/manual/src/release-notes/rl-2.16.md b/doc/manual/src/release-notes/rl-2.16.md new file mode 100644 index 000000000..97b40d0b8 --- /dev/null +++ b/doc/manual/src/release-notes/rl-2.16.md @@ -0,0 +1,8 @@ +# Release 2.16 (2023-05-31) + +* Speed-up of downloads from binary caches. + The number of parallel downloads (also known as substitutions) has been separated from the [`--max-jobs` setting](../command-ref/conf-file.md#conf-max-jobs). + The new setting is called [`max-substitution-jobs`](../command-ref/conf-file.md#conf-max-substitution-jobs). + The number of parallel downloads is now set to 16 by default (previously, the default was 1 due to the coupling to build jobs). + +* The function [`builtins.replaceStrings`](@docroot@/language/builtins.md#builtins-replaceStrings) is now lazy in the value of its second argument `to`. That is, `to` is only evaluated when its corresponding pattern in `from` is matched in the string `s`. 
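The `builtins.replaceStrings` entry in the 2.16 release notes above is easy to demonstrate. A small sketch (editorial, not part of the patch), assuming the laziness described in the note:

```nix
# "b" never matches in the input, so under the 2.16 semantics the
# replacement for it is never evaluated and the `throw` is never hit.
builtins.replaceStrings [ "a" "b" ] [ "x" (throw "not needed") ] "a a a"
```

This evaluates to `"x x x"`; with the previous, eager behaviour the unused replacement would have been evaluated as well.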
diff --git a/docker.nix b/docker.nix index 52199af66..bd16b71cd 100644 --- a/docker.nix +++ b/docker.nix @@ -190,6 +190,12 @@ let cp -a ${rootEnv}/* $out/ ln -s ${manifest} $out/manifest.nix ''; + flake-registry-path = if (flake-registry == null) then + null + else if (builtins.readFileType (toString flake-registry)) == "directory" then + "${flake-registry}/flake-registry.json" + else + flake-registry; in pkgs.runCommand "base-system" { @@ -202,7 +208,7 @@ let ]; allowSubstitutes = false; preferLocalBuild = true; - } '' + } ('' env set -x mkdir -p $out/etc @@ -249,15 +255,15 @@ let ln -s ${pkgs.coreutils}/bin/env $out/usr/bin/env ln -s ${pkgs.bashInteractive}/bin/bash $out/bin/sh - '' + (lib.optionalString (flake-registry != null) '' + '' + (lib.optionalString (flake-registry-path != null) '' nixCacheDir="/root/.cache/nix" mkdir -p $out$nixCacheDir globalFlakeRegistryPath="$nixCacheDir/flake-registry.json" - ln -s ${flake-registry}/flake-registry.json $out$globalFlakeRegistryPath + ln -s ${flake-registry-path} $out$globalFlakeRegistryPath mkdir -p $out/nix/var/nix/gcroots/auto rootName=$(${pkgs.nix}/bin/nix --extra-experimental-features nix-command hash file --type sha1 --base32 <(echo -n $globalFlakeRegistryPath)) ln -s $globalFlakeRegistryPath $out/nix/var/nix/gcroots/auto/$rootName - ''); + '')); in pkgs.dockerTools.buildLayeredImageWithNixDb { diff --git a/maintainers/README.md b/maintainers/README.md index 618bfb4e4..d13349438 100644 --- a/maintainers/README.md +++ b/maintainers/README.md @@ -42,12 +42,12 @@ The team meets twice a week: - Discussion meeting: [Fridays 13:00-14:00 CET](https://calendar.google.com/calendar/event?eid=MHNtOGVuNWtrZXNpZHR2bW1sM3QyN2ZjaGNfMjAyMjExMjVUMTIwMDAwWiBiOW81MmZvYnFqYWs4b3E4bGZraGczdDBxZ0Bn) - 1. Triage issues and pull requests from the _No Status_ column (30 min) - 2. Discuss issues and pull requests from the _To discuss_ column (30 min) + 1. Triage issues and pull requests from the [No Status](#no-status) column (30 min) + 2. Discuss issues and pull requests from the [To discuss](#to-discuss) column (30 min) - Work meeting: [Mondays 13:00-15:00 CET](https://calendar.google.com/calendar/event?eid=NTM1MG1wNGJnOGpmOTZhYms3bTB1bnY5cWxfMjAyMjExMjFUMTIwMDAwWiBiOW81MmZvYnFqYWs4b3E4bGZraGczdDBxZ0Bn) - 1. Code review on pull requests from _In review_. + 1. Code review on pull requests from [In review](#in-review). 2. Other chores and tasks. Meeting notes are collected on a [collaborative scratchpad](https://pad.lassul.us/Cv7FpYx-Ri-4VjUykQOLAw), and published on Discourse under the [Nix category](https://discourse.nixos.org/c/dev/nix/50). @@ -58,64 +58,74 @@ The team uses a [GitHub project board](https://github.com/orgs/NixOS/projects/19 Items on the board progress through the following states: -- No Status +### No Status - During the discussion meeting, the team triages new items. - To be considered, issues and pull requests must have a high-level description to provide the whole team with the necessary context at a glance. +During the discussion meeting, the team triages new items. +To be considered, issues and pull requests must have a high-level description to provide the whole team with the necessary context at a glance. - On every meeting, at least one item from each of the following categories is inspected: +On every meeting, at least one item from each of the following categories is inspected: - 1. [critical](https://github.com/NixOS/nix/labels/critical) - 2. [security](https://github.com/NixOS/nix/labels/security) - 3. 
[regression](https://github.com/NixOS/nix/labels/regression) - 4. [bug](https://github.com/NixOS/nix/issues?q=is%3Aopen+label%3Abug+sort%3Areactions-%2B1-desc) - 5. [tests of existing functionality](https://github.com/NixOS/nix/issues?q=is%3Aopen+label%3Atests+-label%3Afeature+sort%3Areactions-%2B1-desc) +1. [critical](https://github.com/NixOS/nix/labels/critical) +2. [security](https://github.com/NixOS/nix/labels/security) +3. [regression](https://github.com/NixOS/nix/labels/regression) +4. [bug](https://github.com/NixOS/nix/issues?q=is%3Aopen+label%3Abug+sort%3Areactions-%2B1-desc) +5. [tests of existing functionality](https://github.com/NixOS/nix/issues?q=is%3Aopen+label%3Atests+-label%3Afeature+sort%3Areactions-%2B1-desc) - - [oldest pull requests](https://github.com/NixOS/nix/pulls?q=is%3Apr+is%3Aopen+sort%3Acreated-asc) - - [most popular pull requests](https://github.com/NixOS/nix/pulls?q=is%3Apr+is%3Aopen+sort%3Areactions-%2B1-desc) - - [oldest issues](https://github.com/NixOS/nix/issues?q=is%3Aissue+is%3Aopen+sort%3Acreated-asc) - - [most popular issues](https://github.com/NixOS/nix/issues?q=is%3Aissue+is%3Aopen+sort%3Areactions-%2B1-desc) +- [oldest pull requests](https://github.com/NixOS/nix/pulls?q=is%3Apr+is%3Aopen+sort%3Acreated-asc) +- [most popular pull requests](https://github.com/NixOS/nix/pulls?q=is%3Apr+is%3Aopen+sort%3Areactions-%2B1-desc) +- [oldest issues](https://github.com/NixOS/nix/issues?q=is%3Aissue+is%3Aopen+sort%3Acreated-asc) +- [most popular issues](https://github.com/NixOS/nix/issues?q=is%3Aissue+is%3Aopen+sort%3Areactions-%2B1-desc) - Team members can also add pull requests or issues they would like the whole team to consider. +Team members can also add pull requests or issues they would like the whole team to consider. +To ensure process quality and reliability, all non-trivial pull requests must be triaged before merging. - If there is disagreement on the general idea behind an issue or pull request, it is moved to _To discuss_, otherwise to _In review_. +If there is disagreement on the general idea behind an issue or pull request, it is moved to [To discuss](#to-discuss). +Otherwise, the issue or pull request in questions get the label [`idea approved`](https://github.com/NixOS/nix/labels/idea%20approved). +For issues this means that an implementation is welcome and will be prioritised for review. +For pull requests this means that: +- Unfinished work is encouraged to be continued. +- A reviewer is assigned to take responsibility for getting the pull request merged. + The item is moved to the [Assigned](#assigned) column. +- If needed, the team can decide to do a collarorative review. + Then the item is moved to the [In review](#in-review) column, and review session is scheduled. - To ensure process quality and reliability, all non-trivial pull requests must be triaged before merging. - What constitutes a trivial pull request is up to maintainers' judgement. +What constitutes a trivial pull request is up to maintainers' judgement. -- To discuss +### To discuss - Pull requests and issues that are deemed important and controversial are discussed by the team during discussion meetings. +Pull requests and issues that are deemed important and controversial are discussed by the team during discussion meetings. - This may be where the merit of the change itself or the implementation strategy is contested by a team member. +This may be where the merit of the change itself or the implementation strategy is contested by a team member. 
- As a general guideline, the order of items is determined as follows: +As a general guideline, the order of items is determined as follows: - - Prioritise pull requests over issues +- Prioritise pull requests over issues - Contributors who took the time to implement concrete change proposals should not wait indefinitely. + Contributors who took the time to implement concrete change proposals should not wait indefinitely. - - Prioritise fixing bugs and testing over documentation, improvements or new features +- Prioritise fixing bugs and testing over documentation, improvements or new features - The team values stability and accessibility higher than raw functionality. + The team values stability and accessibility higher than raw functionality. - - Interleave issues and PRs +- Interleave issues and PRs - This way issues without attempts at a solution get a chance to get addressed. + This way issues without attempts at a solution get a chance to get addressed. -- In review +### In review - Pull requests in this column are reviewed together during work meetings. - This is both for spreading implementation knowledge and for establishing common values in code reviews. +Pull requests in this column are reviewed together during work meetings. +This is both for spreading implementation knowledge and for establishing common values in code reviews. - When the overall direction is agreed upon, even when further changes are required, the pull request is assigned to one team member. +When the overall direction is agreed upon, even when further changes are required, the pull request is assigned to one team member. -- Assigned for merging +### Assigned - One team member is assigned to each of these pull requests. - They will communicate with the authors, and make the final approval once all remaining issues are addressed. +One team member is assigned to each of these pull requests. +They will communicate with the authors, and make the final approval once all remaining issues are addressed. - If more substantive issues arise, the assignee can move the pull request back to _To discuss_ to involve the team again. +If more substantive issues arise, the assignee can move the pull request back to [To discuss](#to-discuss) or [In review](#in-review) to involve the team again. + +### Flowchart The process is illustrated in the following diagram: diff --git a/maintainers/release-process.md b/maintainers/release-process.md index ec9e96489..d85266b81 100644 --- a/maintainers/release-process.md +++ b/maintainers/release-process.md @@ -119,8 +119,7 @@ release: TODO: This script requires the right AWS credentials. Document. TODO: This script currently requires a - `/home/eelco/Dev/nix-pristine` and - `/home/eelco/Dev/nixpkgs-pristine`. + `/home/eelco/Dev/nix-pristine`. 
TODO: trigger nixos.org netlify: https://docs.netlify.com/configure-builds/build-hooks/ @@ -141,7 +140,7 @@ release: $ git checkout master $ git pull $ NEW_VERSION=2.13.0 - $ echo -n $NEW_VERSION > .version + $ echo $NEW_VERSION > .version $ git checkout -b bump-$NEW_VERSION $ git commit -a -m 'Bump version' $ git push --set-upstream origin bump-$NEW_VERSION diff --git a/maintainers/upload-release.pl b/maintainers/upload-release.pl index 77469148a..ebc536f12 100755 --- a/maintainers/upload-release.pl +++ b/maintainers/upload-release.pl @@ -15,7 +15,6 @@ my $evalId = $ARGV[0] or die "Usage: $0 EVAL-ID\n"; my $releasesBucketName = "nix-releases"; my $channelsBucketName = "nix-channels"; -my $nixpkgsDir = "/home/eelco/Dev/nixpkgs-pristine"; my $TMPDIR = $ENV{'TMPDIR'} // "/tmp"; @@ -81,6 +80,38 @@ my $s3_us = Net::Amazon::S3->new( my $channelsBucket = $s3_us->bucket($channelsBucketName) or die; +sub getStorePath { + my ($jobName, $output) = @_; + my $buildInfo = decode_json(fetch("$evalUrl/job/$jobName", 'application/json')); + return $buildInfo->{buildoutputs}->{$output or "out"}->{path} or die "cannot get store path for '$jobName'"; +} + +sub copyManual { + my $manual = getStorePath("build.x86_64-linux", "doc"); + print "$manual\n"; + + my $manualNar = "$tmpDir/$releaseName-manual.nar.xz"; + print "$manualNar\n"; + + unless (-e $manualNar) { + system("NIX_REMOTE=$binaryCache nix store dump-path '$manual' | xz > '$manualNar'.tmp") == 0 + or die "unable to fetch $manual\n"; + rename("$manualNar.tmp", $manualNar) or die; + } + + unless (-e "$tmpDir/manual") { + system("xz -d < '$manualNar' | nix-store --restore $tmpDir/manual.tmp") == 0 + or die "unable to unpack $manualNar\n"; + rename("$tmpDir/manual.tmp/share/doc/nix/manual", "$tmpDir/manual") or die; + system("rm -rf '$tmpDir/manual.tmp'") == 0 or die; + } + + system("aws s3 sync '$tmpDir/manual' s3://$releasesBucketName/$releaseDir/manual") == 0 + or die "syncing manual to S3\n"; +} + +copyManual; + sub downloadFile { my ($jobName, $productNr, $dstName) = @_; @@ -180,9 +211,20 @@ if ($isLatest) { system("docker manifest push nixos/nix:latest") == 0 or die; } +# Upload nix-fallback-paths.nix. +write_file("$tmpDir/fallback-paths.nix", + "{\n" . + " x86_64-linux = \"" . getStorePath("build.x86_64-linux") . "\";\n" . + " i686-linux = \"" . getStorePath("build.i686-linux") . "\";\n" . + " aarch64-linux = \"" . getStorePath("build.aarch64-linux") . "\";\n" . + " x86_64-darwin = \"" . getStorePath("build.x86_64-darwin") . "\";\n" . + " aarch64-darwin = \"" . getStorePath("build.aarch64-darwin") . "\";\n" . + "}\n"); + # Upload release files to S3. for my $fn (glob "$tmpDir/*") { my $name = basename($fn); + next if $name eq "manual"; my $dstKey = "$releaseDir/" . $name; unless (defined $releasesBucket->head_key($dstKey)) { print STDERR "uploading $fn to s3://$releasesBucketName/$dstKey...\n"; @@ -190,8 +232,7 @@ for my $fn (glob "$tmpDir/*") { my $configuration = (); $configuration->{content_type} = "application/octet-stream"; - if ($fn =~ /.sha256|install/) { - # Text files + if ($fn =~ /.sha256|install|\.nix$/) { $configuration->{content_type} = "text/plain"; } @@ -200,28 +241,6 @@ for my $fn (glob "$tmpDir/*") { } } -# Update nix-fallback-paths.nix. 
-if ($isLatest) { - system("cd $nixpkgsDir && git pull") == 0 or die; - - sub getStorePath { - my ($jobName) = @_; - my $buildInfo = decode_json(fetch("$evalUrl/job/$jobName", 'application/json')); - return $buildInfo->{buildoutputs}->{out}->{path} or die "cannot get store path for '$jobName'"; - } - - write_file("$nixpkgsDir/nixos/modules/installer/tools/nix-fallback-paths.nix", - "{\n" . - " x86_64-linux = \"" . getStorePath("build.x86_64-linux") . "\";\n" . - " i686-linux = \"" . getStorePath("build.i686-linux") . "\";\n" . - " aarch64-linux = \"" . getStorePath("build.aarch64-linux") . "\";\n" . - " x86_64-darwin = \"" . getStorePath("build.x86_64-darwin") . "\";\n" . - " aarch64-darwin = \"" . getStorePath("build.aarch64-darwin") . "\";\n" . - "}\n"); - - system("cd $nixpkgsDir && git commit -a -m 'nix-fallback-paths.nix: Update to $version'") == 0 or die; -} - # Update the "latest" symlink. $channelsBucket->add_key( "nix-latest/install", "", diff --git a/mk/cxx-big-literal.mk b/mk/cxx-big-literal.mk new file mode 100644 index 000000000..85365df8e --- /dev/null +++ b/mk/cxx-big-literal.mk @@ -0,0 +1,5 @@ +%.gen.hh: % + @echo 'R"foo(' >> $@.tmp + $(trace-gen) cat $< >> $@.tmp + @echo ')foo"' >> $@.tmp + @mv $@.tmp $@ diff --git a/mk/lib.mk b/mk/lib.mk index 92f0983d5..34fa624d8 100644 --- a/mk/lib.mk +++ b/mk/lib.mk @@ -101,6 +101,7 @@ include mk/libraries.mk include mk/programs.mk include mk/patterns.mk include mk/templates.mk +include mk/cxx-big-literal.mk include mk/tests.mk diff --git a/scripts/install-multi-user.sh b/scripts/install-multi-user.sh index 7c66538b0..c11783158 100644 --- a/scripts/install-multi-user.sh +++ b/scripts/install-multi-user.sh @@ -246,8 +246,15 @@ printf -v _OLD_LINE_FMT "%b" $'\033[1;7;31m-'"$ESC ${RED}%L${ESC}" printf -v _NEW_LINE_FMT "%b" $'\033[1;7;32m+'"$ESC ${GREEN}%L${ESC}" _diff() { + # macOS Ventura doesn't ship with GNU diff. Print similar output except + # without +/- markers or dimming + if diff --version | grep -q "Apple diff"; then + printf -v CHANGED_GROUP_FORMAT "%b" "${GREEN}%>${RED}%<${ESC}" + diff --changed-group-format="$CHANGED_GROUP_FORMAT" "$@" + else # simple colorized diff comatible w/ pre `--color` versions - diff --unchanged-group-format="$_UNCHANGED_GRP_FMT" --old-line-format="$_OLD_LINE_FMT" --new-line-format="$_NEW_LINE_FMT" --unchanged-line-format=" %L" "$@" + diff --unchanged-group-format="$_UNCHANGED_GRP_FMT" --old-line-format="$_OLD_LINE_FMT" --new-line-format="$_NEW_LINE_FMT" --unchanged-line-format=" %L" "$@" + fi } confirm_rm() { @@ -873,7 +880,7 @@ configure_shell_profile() { fi done - task "Setting up shell profiles for Fish with with ${PROFILE_FISH_SUFFIX} inside ${PROFILE_FISH_PREFIXES[*]}" + task "Setting up shell profiles for Fish with ${PROFILE_FISH_SUFFIX} inside ${PROFILE_FISH_PREFIXES[*]}" for fish_prefix in "${PROFILE_FISH_PREFIXES[@]}"; do if [ ! 
-d "$fish_prefix" ]; then # this specific prefix (ie: /etc/fish) is very likely to exist diff --git a/scripts/install-systemd-multi-user.sh b/scripts/install-systemd-multi-user.sh index 7dd567747..07b34033a 100755 --- a/scripts/install-systemd-multi-user.sh +++ b/scripts/install-systemd-multi-user.sh @@ -92,7 +92,7 @@ poly_configure_nix_daemon_service() { task "Setting up the nix-daemon systemd service" _sudo "to create the nix-daemon tmpfiles config" \ - ln -sfn /nix/var/nix/profiles/default/$TMPFILES_SRC $TMPFILES_DEST + ln -sfn "/nix/var/nix/profiles/default$TMPFILES_SRC" "$TMPFILES_DEST" _sudo "to run systemd-tmpfiles once to pick that path up" \ systemd-tmpfiles --create --prefix=/nix/var/nix diff --git a/src/build-remote/build-remote.cc b/src/build-remote/build-remote.cc index ce9c7f45a..323e04fdb 100644 --- a/src/build-remote/build-remote.cc +++ b/src/build-remote/build-remote.cc @@ -258,6 +258,8 @@ static int main_build_remote(int argc, char * * argv) connected: close(5); + assert(sshStore); + std::cerr << "# accept\n" << storeUri << "\n"; auto inputs = readStrings(source); @@ -286,23 +288,48 @@ connected: uploadLock = -1; auto drv = store->readDerivation(*drvPath); + + std::optional optResult; + + // If we don't know whether we are trusted (e.g. `ssh://` + // stores), we assume we are. This is necessary for backwards + // compat. + bool trustedOrLegacy = ({ + std::optional trusted = sshStore->isTrustedClient(); + !trusted || *trusted; + }); + + // See the very large comment in `case wopBuildDerivation:` in + // `src/libstore/daemon.cc` that explains the trust model here. + // + // This condition mirrors that: that code enforces the "rules" outlined there; + // we do the best we can given those "rules". + if (trustedOrLegacy || drv.type().isCA()) { + // Hijack the inputs paths of the derivation to include all + // the paths that come from the `inputDrvs` set. We don’t do + // that for the derivations whose `inputDrvs` is empty + // because: + // + // 1. It’s not needed + // + // 2. Changing the `inputSrcs` set changes the associated + // output ids, which break CA derivations + if (!drv.inputDrvs.empty()) + drv.inputSrcs = store->parseStorePathSet(inputs); + optResult = sshStore->buildDerivation(*drvPath, (const BasicDerivation &) drv); + auto & result = *optResult; + if (!result.success()) + throw Error("build of '%s' on '%s' failed: %s", store->printStorePath(*drvPath), storeUri, result.errorMsg); + } else { + copyClosure(*store, *sshStore, StorePathSet {*drvPath}, NoRepair, NoCheckSigs, substitute); + auto res = sshStore->buildPathsWithResults({ DerivedPath::Built { *drvPath, OutputsSpec::All {} } }); + // One path to build should produce exactly one build result + assert(res.size() == 1); + optResult = std::move(res[0]); + } + + auto outputHashes = staticOutputHashes(*store, drv); - - // Hijack the inputs paths of the derivation to include all the paths - // that come from the `inputDrvs` set. - // We don’t do that for the derivations whose `inputDrvs` is empty - // because - // 1. It’s not needed - // 2. 
Changing the `inputSrcs` set changes the associated output ids, - // which break CA derivations - if (!drv.inputDrvs.empty()) - drv.inputSrcs = store->parseStorePathSet(inputs); - - auto result = sshStore->buildDerivation(*drvPath, drv); - - if (!result.success()) - throw Error("build of '%s' on '%s' failed: %s", store->printStorePath(*drvPath), storeUri, result.errorMsg); - std::set missingRealisations; StorePathSet missingPaths; if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations) && !drv.type().hasKnownOutputPaths()) { @@ -311,6 +338,8 @@ connected: auto thisOutputId = DrvOutput{ thisOutputHash, outputName }; if (!store->queryRealisation(thisOutputId)) { debug("missing output %s", outputName); + assert(optResult); + auto & result = *optResult; auto i = result.builtOutputs.find(outputName); assert(i != result.builtOutputs.end()); auto & newRealisation = i->second; diff --git a/src/libcmd/command.cc b/src/libcmd/command.cc index bedf11e2c..6c4648b34 100644 --- a/src/libcmd/command.cc +++ b/src/libcmd/command.cc @@ -121,6 +121,8 @@ ref EvalCommand::getEvalState() #endif ; + evalState->repair = repair; + if (startReplOnEvalErrors) { evalState->debugRepl = &AbstractNixRepl::runSimple; }; diff --git a/src/libcmd/common-eval-args.cc b/src/libcmd/common-eval-args.cc index 5b6477c82..ff3abd534 100644 --- a/src/libcmd/common-eval-args.cc +++ b/src/libcmd/common-eval-args.cc @@ -153,7 +153,7 @@ Bindings * MixEvalArgs::getAutoArgs(EvalState & state) for (auto & i : autoArgs) { auto v = state.allocValue(); if (i.second[0] == 'E') - state.mkThunk_(*v, state.parseExprFromString(i.second.substr(1), absPath("."))); + state.mkThunk_(*v, state.parseExprFromString(i.second.substr(1), state.rootPath(CanonPath::fromCwd()))); else v->mkString(((std::string_view) i.second).substr(1)); res.insert(state.symbols.create(i.first), v); @@ -161,19 +161,19 @@ Bindings * MixEvalArgs::getAutoArgs(EvalState & state) return res.finish(); } -Path lookupFileArg(EvalState & state, std::string_view s) +SourcePath lookupFileArg(EvalState & state, std::string_view s) { if (EvalSettings::isPseudoUrl(s)) { auto storePath = fetchers::downloadTarball( state.store, EvalSettings::resolvePseudoUrl(s), "source", false).first.storePath; - return state.store->toRealPath(storePath); + return state.rootPath(CanonPath(state.store->toRealPath(storePath))); } else if (hasPrefix(s, "flake:")) { experimentalFeatureSettings.require(Xp::Flakes); auto flakeRef = parseFlakeRef(std::string(s.substr(6)), {}, true, false); auto storePath = flakeRef.resolve(state.store).fetchTree(state.store).first.storePath; - return state.store->toRealPath(storePath); + return state.rootPath(CanonPath(state.store->toRealPath(storePath))); } else if (s.size() > 2 && s.at(0) == '<' && s.at(s.size() - 1) == '>') { @@ -182,7 +182,7 @@ Path lookupFileArg(EvalState & state, std::string_view s) } else - return absPath(std::string(s)); + return state.rootPath(CanonPath::fromCwd(s)); } } diff --git a/src/libcmd/common-eval-args.hh b/src/libcmd/common-eval-args.hh index b69db11dd..b65cb5b20 100644 --- a/src/libcmd/common-eval-args.hh +++ b/src/libcmd/common-eval-args.hh @@ -2,14 +2,16 @@ ///@file #include "args.hh" +#include "common-args.hh" namespace nix { class Store; class EvalState; class Bindings; +struct SourcePath; -struct MixEvalArgs : virtual Args +struct MixEvalArgs : virtual Args, virtual MixRepair { static constexpr auto category = "Common evaluation options"; @@ -25,6 +27,6 @@ private: std::map autoArgs; }; -Path lookupFileArg(EvalState & state, 
std::string_view s); +SourcePath lookupFileArg(EvalState & state, std::string_view s); } diff --git a/src/libcmd/editor-for.cc b/src/libcmd/editor-for.cc index f674f32bd..a17c6f12a 100644 --- a/src/libcmd/editor-for.cc +++ b/src/libcmd/editor-for.cc @@ -3,8 +3,11 @@ namespace nix { -Strings editorFor(const Path & file, uint32_t line) +Strings editorFor(const SourcePath & file, uint32_t line) { + auto path = file.getPhysicalPath(); + if (!path) + throw Error("cannot open '%s' in an editor because it has no physical path", file); auto editor = getEnv("EDITOR").value_or("cat"); auto args = tokenizeString(editor); if (line > 0 && ( @@ -13,7 +16,7 @@ Strings editorFor(const Path & file, uint32_t line) editor.find("vim") != std::string::npos || editor.find("kak") != std::string::npos)) args.push_back(fmt("+%d", line)); - args.push_back(file); + args.push_back(path->abs()); return args; } diff --git a/src/libcmd/editor-for.hh b/src/libcmd/editor-for.hh index c8c4e9d9b..fbf4307c9 100644 --- a/src/libcmd/editor-for.hh +++ b/src/libcmd/editor-for.hh @@ -2,6 +2,7 @@ ///@file #include "types.hh" +#include "input-accessor.hh" namespace nix { @@ -9,6 +10,6 @@ namespace nix { * Helper function to generate args that invoke $EDITOR on * filename:lineno. */ -Strings editorFor(const Path & file, uint32_t line); +Strings editorFor(const SourcePath & file, uint32_t line); } diff --git a/src/libcmd/installable-attr-path.cc b/src/libcmd/installable-attr-path.cc index cf513126d..b35ca2910 100644 --- a/src/libcmd/installable-attr-path.cc +++ b/src/libcmd/installable-attr-path.cc @@ -46,7 +46,15 @@ std::pair InstallableAttrPath::toValue(EvalState & state) DerivedPathsWithInfo InstallableAttrPath::toDerivedPaths() { - auto v = toValue(*state).first; + auto [v, pos] = toValue(*state); + + if (std::optional derivedPathWithInfo = trySinglePathToDerivedPaths( + *v, + pos, + fmt("while evaluating the attribute '%s'", attrPath))) + { + return { *derivedPathWithInfo }; + } Bindings & autoArgs = *cmd.getAutoArgs(*state); diff --git a/src/libcmd/installable-flake.cc b/src/libcmd/installable-flake.cc index a3352af76..eb944240b 100644 --- a/src/libcmd/installable-flake.cc +++ b/src/libcmd/installable-flake.cc @@ -95,32 +95,13 @@ DerivedPathsWithInfo InstallableFlake::toDerivedPaths() // FIXME: use eval cache? 
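The hunks above move `lookupFileArg`, `evalFile`, and `editorFor` from plain `Path` over to `SourcePath`. A minimal sketch of how a command could chain them after this change; the wrapper function, the file name, and the already-initialized `ref<EvalState>` are illustrative assumptions, not code from this patch:

```cpp
#include "common-eval-args.hh"
#include "editor-for.hh"
#include "eval.hh"

using namespace nix;

void openInEditor(ref<EvalState> state)
{
    // lookupFileArg() now returns a SourcePath instead of a Path.
    SourcePath path = lookupFileArg(*state, "./default.nix");

    Value v;
    state->evalFile(path, v);   // evalFile() accepts the SourcePath directly

    // editorFor() takes a SourcePath too, and throws if the path has no
    // physical location (getPhysicalPath() returned std::nullopt).
    Strings cmd = editorFor(path, 1);
    // exec'ing the editor is elided here
}
```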
auto v = attr->forceValue(); - if (v.type() == nPath) { - PathSet context; - auto storePath = state->copyPathToStore(context, Path(v.path)); - return {{ - .path = DerivedPath::Opaque { - .path = std::move(storePath), - }, - .info = make_ref(), - }}; + if (std::optional derivedPathWithInfo = trySinglePathToDerivedPaths( + v, + noPos, + fmt("while evaluating the flake output attribute '%s'", attrPath))) + { + return { *derivedPathWithInfo }; } - - else if (v.type() == nString) { - PathSet context; - auto s = state->forceString(v, context, noPos, fmt("while evaluating the flake output attribute '%s'", attrPath)); - auto storePath = state->store->maybeParseStorePath(s); - if (storePath && context.count(std::string(s))) { - return {{ - .path = DerivedPath::Opaque { - .path = std::move(*storePath), - }, - .info = make_ref(), - }}; - } else - throw Error("flake output attribute '%s' evaluates to the string '%s' which is not a store path", attrPath, s); - } - else throw Error("flake output attribute '%s' is not a derivation or path", attrPath); } @@ -235,7 +216,7 @@ FlakeRef InstallableFlake::nixpkgsFlakeRef() const } } - return InstallableValue::nixpkgsFlakeRef(); + return defaultNixpkgsFlakeRef(); } } diff --git a/src/libcmd/installable-flake.hh b/src/libcmd/installable-flake.hh index afe64d977..7ac4358d2 100644 --- a/src/libcmd/installable-flake.hh +++ b/src/libcmd/installable-flake.hh @@ -67,9 +67,22 @@ struct InstallableFlake : InstallableValue std::shared_ptr getLockedFlake() const; - FlakeRef nixpkgsFlakeRef() const override; + FlakeRef nixpkgsFlakeRef() const; }; +/** + * Default flake ref for referring to Nixpkgs. For flakes that don't + * have their own Nixpkgs input, or other installables. + * + * It is a layer violation for Nix to know about Nixpkgs; currently just + * `nix develop` does. Be wary of using this / + * `InstallableFlake::nixpkgsFlakeRef` more places. + */ +static inline FlakeRef defaultNixpkgsFlakeRef() +{ + return FlakeRef::fromAttrs({{"type","indirect"}, {"id", "nixpkgs"}}); +} + ref openEvalCache( EvalState & state, std::shared_ptr lockedFlake); diff --git a/src/libcmd/installable-value.cc b/src/libcmd/installable-value.cc index 3a7ede4e2..1eff293cc 100644 --- a/src/libcmd/installable-value.cc +++ b/src/libcmd/installable-value.cc @@ -41,4 +41,26 @@ ref InstallableValue::require(ref installable) return ref { castedInstallable }; } +std::optional InstallableValue::trySinglePathToDerivedPaths(Value & v, const PosIdx pos, std::string_view errorCtx) +{ + if (v.type() == nPath) { + auto storePath = v.path().fetchToStore(state->store); + return {{ + .path = DerivedPath::Opaque { + .path = std::move(storePath), + }, + .info = make_ref(), + }}; + } + + else if (v.type() == nString) { + return {{ + .path = state->coerceToDerivedPath(pos, v, errorCtx), + .info = make_ref(), + }}; + } + + else return std::nullopt; +} + } diff --git a/src/libcmd/installable-value.hh b/src/libcmd/installable-value.hh index bfb3bfeed..3138ce8ec 100644 --- a/src/libcmd/installable-value.hh +++ b/src/libcmd/installable-value.hh @@ -96,13 +96,26 @@ struct InstallableValue : Installable UnresolvedApp toApp(EvalState & state); - virtual FlakeRef nixpkgsFlakeRef() const - { - return FlakeRef::fromAttrs({{"type","indirect"}, {"id", "nixpkgs"}}); - } - static InstallableValue & require(Installable & installable); static ref require(ref installable); + +protected: + + /** + * Handles either a plain path, or a string with a single string + * context elem in the right format. 
The latter case is handled by + * `EvalState::coerceToDerivedPath()`; see it for details. + * + * @param v Value that is hopefully a string or path per the above. + * + * @param pos Position of value to aid with diagnostics. + * + * @param errorCtx Arbitrary message for use in potential error message when something is wrong with `v`. + * + * @result A derived path (with empty info, for now) if the value + * matched the above criteria. + */ + std::optional trySinglePathToDerivedPaths(Value & v, const PosIdx pos, std::string_view errorCtx); }; } diff --git a/src/libcmd/installables.cc b/src/libcmd/installables.cc index 0a2fe0073..a2b882355 100644 --- a/src/libcmd/installables.cc +++ b/src/libcmd/installables.cc @@ -449,7 +449,7 @@ Installables SourceExprCommand::parseInstallables( else if (file) state->evalFile(lookupFileArg(*state, *file), *vFile); else { - auto e = state->parseExprFromString(*expr, absPath(".")); + auto e = state->parseExprFromString(*expr, state->rootPath(CanonPath::fromCwd())); state->eval(e, *vFile); } diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc index 80c08bf1c..4b160a100 100644 --- a/src/libcmd/repl.cc +++ b/src/libcmd/repl.cc @@ -55,8 +55,6 @@ struct NixRepl , gc #endif { - std::string curDir; - size_t debugTraceIndex; Strings loadedFiles; @@ -114,7 +112,6 @@ NixRepl::NixRepl(const Strings & searchPath, nix::ref store, refstaticBaseEnv.get())) , historyFile(getDataDir() + "/nix/repl-history") { - curDir = absPath("."); } @@ -594,14 +591,14 @@ bool NixRepl::processLine(std::string line) Value v; evalString(arg, v); - const auto [path, line] = [&] () -> std::pair { + const auto [path, line] = [&] () -> std::pair { if (v.type() == nPath || v.type() == nString) { - PathSet context; + NixStringContext context; auto path = state->coerceToPath(noPos, v, context, "while evaluating the filename to edit"); return {path, 0}; } else if (v.isLambda()) { auto pos = state->positions[v.lambda.fun->pos]; - if (auto path = std::get_if(&pos.origin)) + if (auto path = std::get_if(&pos.origin)) return {*path, pos.line}; else throw Error("'%s' cannot be shown in an editor", pos); @@ -876,8 +873,7 @@ void NixRepl::addVarToScope(const Symbol name, Value & v) Expr * NixRepl::parseString(std::string s) { - Expr * e = state->parseExprFromString(std::move(s), curDir, staticEnv); - return e; + return state->parseExprFromString(std::move(s), state->rootPath(CanonPath::fromCwd()), staticEnv); } @@ -925,7 +921,7 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int m break; case nPath: - str << ANSI_GREEN << v.path << ANSI_NORMAL; // !!! escaping? + str << ANSI_GREEN << v.path().to_string() << ANSI_NORMAL; // !!! escaping? 
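Both `SourceExprCommand::parseInstallables` and the REPL above now parse strings against `state->rootPath(CanonPath::fromCwd())` rather than `absPath(".")`. A small sketch of that pattern with an illustrative expression; `state` is assumed to be an `EvalState` already in scope:

```cpp
// Relative path literals inside the string are resolved against the
// SourcePath base, here the current working directory.
Expr * e = state.parseExprFromString(
    "import ./default.nix",
    state.rootPath(CanonPath::fromCwd()));

Value v;
state.eval(e, v);
```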
break; case nNull: @@ -940,7 +936,7 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int m if (isDrv) { str << "«derivation "; Bindings::iterator i = v.attrs->find(state->sDrvPath); - PathSet context; + NixStringContext context; if (i != v.attrs->end()) str << state->store->printStorePath(state->coerceToStorePath(i->pos, *i->value, context, "while evaluating the drvPath of a derivation")); else diff --git a/src/libexpr/attr-path.cc b/src/libexpr/attr-path.cc index 7c0705091..ab654c1b0 100644 --- a/src/libexpr/attr-path.cc +++ b/src/libexpr/attr-path.cc @@ -106,7 +106,7 @@ std::pair findAlongAttrPath(EvalState & state, const std::strin } -std::pair findPackageFilename(EvalState & state, Value & v, std::string what) +std::pair findPackageFilename(EvalState & state, Value & v, std::string what) { Value * v2; try { @@ -118,21 +118,25 @@ std::pair findPackageFilename(EvalState & state, Value & // FIXME: is it possible to extract the Pos object instead of doing this // toString + parsing? - auto pos = state.forceString(*v2, noPos, "while evaluating the 'meta.position' attribute of a derivation"); + NixStringContext context; + auto path = state.coerceToPath(noPos, *v2, context, "while evaluating the 'meta.position' attribute of a derivation"); - auto colon = pos.rfind(':'); - if (colon == std::string::npos) - throw ParseError("cannot parse meta.position attribute '%s'", pos); + auto fn = path.path.abs(); + + auto fail = [fn]() { + throw ParseError("cannot parse 'meta.position' attribute '%s'", fn); + }; - std::string filename(pos, 0, colon); - unsigned int lineno; try { - lineno = std::stoi(std::string(pos, colon + 1, std::string::npos)); + auto colon = fn.rfind(':'); + if (colon == std::string::npos) fail(); + std::string filename(fn, 0, colon); + auto lineno = std::stoi(std::string(fn, colon + 1, std::string::npos)); + return {CanonPath(fn.substr(0, colon)), lineno}; } catch (std::invalid_argument & e) { - throw ParseError("cannot parse line number '%s'", pos); + fail(); + abort(); } - - return { std::move(filename), lineno }; } diff --git a/src/libexpr/attr-path.hh b/src/libexpr/attr-path.hh index b2bfb5d04..eb00ffb93 100644 --- a/src/libexpr/attr-path.hh +++ b/src/libexpr/attr-path.hh @@ -20,7 +20,7 @@ std::pair findAlongAttrPath( /** * Heuristic to find the filename and lineno or a nix value. 
*/ -std::pair findPackageFilename(EvalState & state, Value & v, std::string what); +std::pair findPackageFilename(EvalState & state, Value & v, std::string what); std::vector parseAttrPath(EvalState & state, std::string_view s); diff --git a/src/libexpr/eval-cache.cc b/src/libexpr/eval-cache.cc index 1219b2471..9e734e654 100644 --- a/src/libexpr/eval-cache.cc +++ b/src/libexpr/eval-cache.cc @@ -47,7 +47,7 @@ struct AttrDb { auto state(_state->lock()); - Path cacheDir = getCacheDir() + "/nix/eval-cache-v4"; + Path cacheDir = getCacheDir() + "/nix/eval-cache-v5"; createDirs(cacheDir); Path dbPath = cacheDir + "/" + fingerprint.to_string(Base16, false) + ".sqlite"; @@ -300,7 +300,7 @@ struct AttrDb NixStringContext context; if (!queryAttribute.isNull(3)) for (auto & s : tokenizeString>(queryAttribute.getStr(3), ";")) - context.push_back(NixStringContextElem::parse(cfg, s)); + context.insert(NixStringContextElem::parse(s)); return {{rowId, string_t{queryAttribute.getStr(2), context}}}; } case AttrType::Bool: @@ -442,8 +442,10 @@ Value & AttrCursor::forceValue() if (v.type() == nString) cachedValue = {root->db->setString(getKey(), v.string.s, v.string.context), string_t{v.string.s, {}}}; - else if (v.type() == nPath) - cachedValue = {root->db->setString(getKey(), v.path), string_t{v.path, {}}}; + else if (v.type() == nPath) { + auto path = v.path().path; + cachedValue = {root->db->setString(getKey(), path.abs()), string_t{path.abs(), {}}}; + } else if (v.type() == nBool) cachedValue = {root->db->setBool(getKey(), v.boolean), v.boolean}; else if (v.type() == nInt) @@ -580,7 +582,7 @@ std::string AttrCursor::getString() if (v.type() != nString && v.type() != nPath) root->state.error("'%s' is not a string but %s", getAttrPathStr()).debugThrow(); - return v.type() == nString ? v.string.s : v.path; + return v.type() == nString ? v.string.s : v.path().to_string(); } string_t AttrCursor::getStringWithContext() @@ -619,10 +621,13 @@ string_t AttrCursor::getStringWithContext() auto & v = forceValue(); - if (v.type() == nString) - return {v.string.s, v.getContext(*root->state.store)}; + if (v.type() == nString) { + NixStringContext context; + copyContext(v, context); + return {v.string.s, std::move(context)}; + } else if (v.type() == nPath) - return {v.path, {}}; + return {v.path().to_string(), {}}; else root->state.error("'%s' is not a string but %s", getAttrPathStr()).debugThrow(); } diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 6668add8c..71fd6e6e4 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -4,6 +4,7 @@ #include "util.hh" #include "store-api.hh" #include "derivations.hh" +#include "downstream-placeholder.hh" #include "globals.hh" #include "eval-inline.hh" #include "filetransfer.hh" @@ -94,7 +95,6 @@ RootValue allocRootValue(Value * v) #endif } - void Value::print(const SymbolTable & symbols, std::ostream & str, std::set * seen) const { @@ -111,7 +111,7 @@ void Value::print(const SymbolTable & symbols, std::ostream & str, printLiteralString(str, string.s); break; case tPath: - str << path; // !!! escaping? + str << path().to_string(); // !!! escaping? break; case tNull: str << "null"; @@ -535,6 +535,7 @@ EvalState::EvalState( , sOutputSpecified(symbols.create("outputSpecified")) , repair(NoRepair) , emptyBindings(0) + , derivationInternal(rootPath(CanonPath("/builtin/derivation.nix"))) , store(store) , buildStore(buildStore ? 
buildStore : store) , debugRepl(nullptr) @@ -609,15 +610,14 @@ void EvalState::allowAndSetStorePathString(const StorePath & storePath, Value & { allowPath(storePath); - auto path = store->printStorePath(storePath); - v.mkString(path, PathSet({path})); + mkStorePathString(storePath, v); } -Path EvalState::checkSourcePath(const Path & path_) +SourcePath EvalState::checkSourcePath(const SourcePath & path_) { if (!allowedPaths) return path_; - auto i = resolvedPaths.find(path_); + auto i = resolvedPaths.find(path_.path.abs()); if (i != resolvedPaths.end()) return i->second; @@ -627,9 +627,9 @@ Path EvalState::checkSourcePath(const Path & path_) * attacker can't append ../../... to a path that would be in allowedPaths * and thus leak symlink targets. */ - Path abspath = canonPath(path_); + Path abspath = canonPath(path_.path.abs()); - if (hasPrefix(abspath, corepkgsPrefix)) return abspath; + if (hasPrefix(abspath, corepkgsPrefix)) return CanonPath(abspath); for (auto & i : *allowedPaths) { if (isDirOrInDir(abspath, i)) { @@ -647,11 +647,11 @@ Path EvalState::checkSourcePath(const Path & path_) /* Resolve symlinks. */ debug("checking access to '%s'", abspath); - Path path = canonPath(abspath, true); + SourcePath path = CanonPath(canonPath(abspath, true)); for (auto & i : *allowedPaths) { - if (isDirOrInDir(path, i)) { - resolvedPaths[path_] = path; + if (isDirOrInDir(path.path.abs(), i)) { + resolvedPaths.insert_or_assign(path_.path.abs(), path); return path; } } @@ -679,12 +679,12 @@ void EvalState::checkURI(const std::string & uri) /* If the URI is a path, then check it against allowedPaths as well. */ if (hasPrefix(uri, "/")) { - checkSourcePath(uri); + checkSourcePath(CanonPath(uri)); return; } if (hasPrefix(uri, "file://")) { - checkSourcePath(std::string(uri, 7)); + checkSourcePath(CanonPath(std::string(uri, 7))); return; } @@ -692,7 +692,7 @@ void EvalState::checkURI(const std::string & uri) } -Path EvalState::toRealPath(const Path & path, const PathSet & context) +Path EvalState::toRealPath(const Path & path, const NixStringContext & context) { // FIXME: check whether 'path' is in 'context'. 
return @@ -944,34 +944,34 @@ void Value::mkString(std::string_view s) } -static void copyContextToValue(Value & v, const PathSet & context) +static void copyContextToValue(Value & v, const NixStringContext & context) { if (!context.empty()) { size_t n = 0; v.string.context = (const char * *) allocBytes((context.size() + 1) * sizeof(char *)); for (auto & i : context) - v.string.context[n++] = dupString(i.c_str()); + v.string.context[n++] = dupString(i.to_string().c_str()); v.string.context[n] = 0; } } -void Value::mkString(std::string_view s, const PathSet & context) +void Value::mkString(std::string_view s, const NixStringContext & context) { mkString(s); copyContextToValue(*this, context); } -void Value::mkStringMove(const char * s, const PathSet & context) +void Value::mkStringMove(const char * s, const NixStringContext & context) { mkString(s); copyContextToValue(*this, context); } -void Value::mkPath(std::string_view s) +void Value::mkPath(const SourcePath & path) { - mkPath(makeImmutableString(s)); + mkPath(makeImmutableString(path.path.abs())); } @@ -1027,9 +1027,9 @@ void EvalState::mkThunk_(Value & v, Expr * expr) void EvalState::mkPos(Value & v, PosIdx p) { auto pos = positions[p]; - if (auto path = std::get_if(&pos.origin)) { + if (auto path = std::get_if(&pos.origin)) { auto attrs = buildBindings(3); - attrs.alloc(sFile).mkString(*path); + attrs.alloc(sFile).mkString(path->path.abs()); attrs.alloc(sLine).mkInt(pos.line); attrs.alloc(sColumn).mkInt(pos.column); v.mkAttrs(attrs); @@ -1038,6 +1038,37 @@ void EvalState::mkPos(Value & v, PosIdx p) } +void EvalState::mkStorePathString(const StorePath & p, Value & v) +{ + v.mkString( + store->printStorePath(p), + NixStringContext { + NixStringContextElem::Opaque { .path = p }, + }); +} + + +void EvalState::mkOutputString( + Value & value, + const StorePath & drvPath, + const std::string outputName, + std::optional optOutputPath) +{ + value.mkString( + optOutputPath + ? store->printStorePath(*std::move(optOutputPath)) + /* Downstream we would substitute this for an actual path once + we build the floating CA derivation */ + : DownstreamPlaceholder::unknownCaOutput(drvPath, outputName).render(), + NixStringContext { + NixStringContextElem::Built { + .drvPath = drvPath, + .output = outputName, + } + }); +} + + /* Create a thunk for the delayed computation of the given expression in the given environment. But if the expression is a variable, then look it up right away. This significantly reduces the number @@ -1085,7 +1116,7 @@ Value * ExprPath::maybeThunk(EvalState & state, Env & env) } -void EvalState::evalFile(const Path & path_, Value & v, bool mustBeTrivial) +void EvalState::evalFile(const SourcePath & path_, Value & v, bool mustBeTrivial) { auto path = checkSourcePath(path_); @@ -1095,7 +1126,7 @@ void EvalState::evalFile(const Path & path_, Value & v, bool mustBeTrivial) return; } - Path resolvedPath = resolveExprPath(path); + auto resolvedPath = resolveExprPath(path); if ((i = fileEvalCache.find(resolvedPath)) != fileEvalCache.end()) { v = i->second; return; @@ -1123,8 +1154,8 @@ void EvalState::resetFileCache() void EvalState::cacheFile( - const Path & path, - const Path & resolvedPath, + const SourcePath & path, + const SourcePath & resolvedPath, Expr * e, Value & v, bool mustBeTrivial) @@ -1138,7 +1169,7 @@ void EvalState::cacheFile( *e, this->baseEnv, e->getPos() ? 
static_cast>(positions[e->getPos()]) : nullptr, - "while evaluating the file '%1%':", resolvedPath) + "while evaluating the file '%1%':", resolvedPath.to_string()) : nullptr; // Enforce that 'flake.nix' is a direct attrset, not a @@ -1148,7 +1179,7 @@ void EvalState::cacheFile( error("file '%s' must be an attribute set", path).debugThrow(); eval(e, v); } catch (Error & e) { - addErrorTrace(e, "while evaluating the file '%1%':", resolvedPath); + addErrorTrace(e, "while evaluating the file '%1%':", resolvedPath.to_string()); throw; } @@ -1409,8 +1440,8 @@ void ExprSelect::eval(EvalState & state, Env & env, Value & v) } catch (Error & e) { if (pos2) { auto pos2r = state.positions[pos2]; - auto origin = std::get_if(&pos2r.origin); - if (!(origin && *origin == state.derivationNixPath)) + auto origin = std::get_if(&pos2r.origin); + if (!(origin && *origin == state.derivationInternal)) state.addErrorTrace(e, pos2, "while evaluating the attribute '%1%'", showAttrPath(state, env, attrPath)); } @@ -1900,7 +1931,7 @@ void EvalState::concatLists(Value & v, size_t nrLists, Value * * lists, const Po void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v) { - PathSet context; + NixStringContext context; std::vector s; size_t sSize = 0; NixInt n = 0; @@ -1983,7 +2014,7 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v) else if (firstType == nPath) { if (!context.empty()) state.error("a string that refers to a store path cannot be appended to a path").atPos(pos).withFrame(env, *this).debugThrow(); - v.mkPath(canonPath(str())); + v.mkPath(CanonPath(canonPath(str()))); } else v.mkStringMove(c_str(), context); } @@ -2109,26 +2140,15 @@ std::string_view EvalState::forceString(Value & v, const PosIdx pos, std::string } -void copyContext(const Value & v, PathSet & context) +void copyContext(const Value & v, NixStringContext & context) { if (v.string.context) for (const char * * p = v.string.context; *p; ++p) - context.insert(*p); + context.insert(NixStringContextElem::parse(*p)); } -NixStringContext Value::getContext(const Store & store) -{ - NixStringContext res; - assert(internalType == tString); - if (string.context) - for (const char * * p = string.context; *p; ++p) - res.push_back(NixStringContextElem::parse(store, *p)); - return res; -} - - -std::string_view EvalState::forceString(Value & v, PathSet & context, const PosIdx pos, std::string_view errorCtx) +std::string_view EvalState::forceString(Value & v, NixStringContext & context, const PosIdx pos, std::string_view errorCtx) { auto s = forceString(v, pos, errorCtx); copyContext(v, context); @@ -2158,7 +2178,7 @@ bool EvalState::isDerivation(Value & v) std::optional EvalState::tryAttrsToString(const PosIdx pos, Value & v, - PathSet & context, bool coerceMore, bool copyToStore) + NixStringContext & context, bool coerceMore, bool copyToStore) { auto i = v.attrs->find(sToString); if (i != v.attrs->end()) { @@ -2172,8 +2192,14 @@ std::optional EvalState::tryAttrsToString(const PosIdx pos, Value & return {}; } -BackedStringView EvalState::coerceToString(const PosIdx pos, Value &v, PathSet &context, - std::string_view errorCtx, bool coerceMore, bool copyToStore, bool canonicalizePath) +BackedStringView EvalState::coerceToString( + const PosIdx pos, + Value & v, + NixStringContext & context, + std::string_view errorCtx, + bool coerceMore, + bool copyToStore, + bool canonicalizePath) { forceValue(v, pos); @@ -2183,12 +2209,14 @@ BackedStringView EvalState::coerceToString(const PosIdx pos, Value &v, PathSet & } if (v.type() == 
nPath) { - BackedStringView path(PathView(v.path)); - if (canonicalizePath) - path = canonPath(*path); - if (copyToStore) - path = store->printStorePath(copyPathToStore(context, std::move(path).toOwned())); - return path; + return + !canonicalizePath && !copyToStore + ? // FIXME: hack to preserve path literals that end in a + // slash, as in /foo/${x}. + v._path + : copyToStore + ? store->printStorePath(copyPathToStore(context, v.path())) + : std::string(v.path().path.abs()); } if (v.type() == nAttrs) { @@ -2249,40 +2277,40 @@ BackedStringView EvalState::coerceToString(const PosIdx pos, Value &v, PathSet & } -StorePath EvalState::copyPathToStore(PathSet & context, const Path & path) +StorePath EvalState::copyPathToStore(NixStringContext & context, const SourcePath & path) { - if (nix::isDerivation(path)) + if (nix::isDerivation(path.path.abs())) error("file names are not allowed to end in '%1%'", drvExtension).debugThrow(); - auto dstPath = [&]() -> StorePath - { - auto i = srcToStore.find(path); - if (i != srcToStore.end()) return i->second; + auto i = srcToStore.find(path); - auto dstPath = settings.readOnlyMode - ? store->computeStorePathForPath(std::string(baseNameOf(path)), checkSourcePath(path)).first - : store->addToStore(std::string(baseNameOf(path)), checkSourcePath(path), FileIngestionMethod::Recursive, htSHA256, defaultPathFilter, repair); - allowPath(dstPath); - srcToStore.insert_or_assign(path, dstPath); - printMsg(lvlChatty, "copied source '%1%' -> '%2%'", path, store->printStorePath(dstPath)); - return dstPath; - }(); + auto dstPath = i != srcToStore.end() + ? i->second + : [&]() { + auto dstPath = path.fetchToStore(store, path.baseName(), nullptr, repair); + allowPath(dstPath); + srcToStore.insert_or_assign(path, dstPath); + printMsg(lvlChatty, "copied source '%1%' -> '%2%'", path, store->printStorePath(dstPath)); + return dstPath; + }(); - context.insert(store->printStorePath(dstPath)); + context.insert(NixStringContextElem::Opaque { + .path = dstPath + }); return dstPath; } -Path EvalState::coerceToPath(const PosIdx pos, Value & v, PathSet & context, std::string_view errorCtx) +SourcePath EvalState::coerceToPath(const PosIdx pos, Value & v, NixStringContext & context, std::string_view errorCtx) { auto path = coerceToString(pos, v, context, errorCtx, false, false, true).toOwned(); if (path == "" || path[0] != '/') error("string '%1%' doesn't represent an absolute path", path).withTrace(pos, errorCtx).debugThrow(); - return path; + return CanonPath(path); } -StorePath EvalState::coerceToStorePath(const PosIdx pos, Value & v, PathSet & context, std::string_view errorCtx) +StorePath EvalState::coerceToStorePath(const PosIdx pos, Value & v, NixStringContext & context, std::string_view errorCtx) { auto path = coerceToString(pos, v, context, errorCtx, false, false, true).toOwned(); if (auto storePath = store->maybeParseStorePath(path)) @@ -2291,6 +2319,80 @@ StorePath EvalState::coerceToStorePath(const PosIdx pos, Value & v, PathSet & co } +std::pair EvalState::coerceToDerivedPathUnchecked(const PosIdx pos, Value & v, std::string_view errorCtx) +{ + NixStringContext context; + auto s = forceString(v, context, pos, errorCtx); + auto csize = context.size(); + if (csize != 1) + error( + "string '%s' has %d entries in its context. 
It should only have exactly one entry", + s, csize) + .withTrace(pos, errorCtx).debugThrow(); + auto derivedPath = std::visit(overloaded { + [&](NixStringContextElem::Opaque && o) -> DerivedPath { + return DerivedPath::Opaque { + .path = std::move(o.path), + }; + }, + [&](NixStringContextElem::DrvDeep &&) -> DerivedPath { + error( + "string '%s' has a context which refers to a complete source and binary closure. This is not supported at this time", + s).withTrace(pos, errorCtx).debugThrow(); + }, + [&](NixStringContextElem::Built && b) -> DerivedPath { + return DerivedPath::Built { + .drvPath = std::move(b.drvPath), + .outputs = OutputsSpec::Names { std::move(b.output) }, + }; + }, + }, ((NixStringContextElem &&) *context.begin()).raw()); + return { + std::move(derivedPath), + std::move(s), + }; +} + + +DerivedPath EvalState::coerceToDerivedPath(const PosIdx pos, Value & v, std::string_view errorCtx) +{ + auto [derivedPath, s_] = coerceToDerivedPathUnchecked(pos, v, errorCtx); + auto s = s_; + std::visit(overloaded { + [&](const DerivedPath::Opaque & o) { + auto sExpected = store->printStorePath(o.path); + if (s != sExpected) + error( + "path string '%s' has context with the different path '%s'", + s, sExpected) + .withTrace(pos, errorCtx).debugThrow(); + }, + [&](const DerivedPath::Built & b) { + // TODO need derived path with single output to make this + // total. Will add as part of RFC 92 work and then this is + // cleaned up. + auto output = *std::get(b.outputs).begin(); + + auto drv = store->readDerivation(b.drvPath); + auto i = drv.outputs.find(output); + if (i == drv.outputs.end()) + throw Error("derivation '%s' does not have output '%s'", store->printStorePath(b.drvPath), output); + auto optOutputPath = i->second.path(*store, drv.name, output); + // This is testing for the case of CA derivations + auto sExpected = optOutputPath + ? store->printStorePath(*optOutputPath) + : DownstreamPlaceholder::unknownCaOutput(b.drvPath, output).render(); + if (s != sExpected) + error( + "string '%s' has context with the output '%s' from derivation '%s', but the string is not the right placeholder for this derivation output. 
It should be '%s'", + s, output, store->printStorePath(b.drvPath), sExpected) + .withTrace(pos, errorCtx).debugThrow(); + } + }, derivedPath.raw()); + return derivedPath; +} + + bool EvalState::eqValues(Value & v1, Value & v2, const PosIdx pos, std::string_view errorCtx) { forceValue(v1, noPos); @@ -2321,7 +2423,7 @@ bool EvalState::eqValues(Value & v1, Value & v2, const PosIdx pos, std::string_v return strcmp(v1.string.s, v2.string.s) == 0; case nPath: - return strcmp(v1.path, v2.path) == 0; + return strcmp(v1._path, v2._path) == 0; case nNull: return true; @@ -2448,8 +2550,8 @@ void EvalState::printStats() else obj["name"] = nullptr; if (auto pos = positions[fun->pos]) { - if (auto path = std::get_if(&pos.origin)) - obj["file"] = *path; + if (auto path = std::get_if(&pos.origin)) + obj["file"] = path->to_string(); obj["line"] = pos.line; obj["column"] = pos.column; } @@ -2463,8 +2565,8 @@ void EvalState::printStats() for (auto & i : attrSelects) { json obj = json::object(); if (auto pos = positions[i.first]) { - if (auto path = std::get_if(&pos.origin)) - obj["file"] = *path; + if (auto path = std::get_if(&pos.origin)) + obj["file"] = path->to_string(); obj["line"] = pos.line; obj["column"] = pos.column; } @@ -2489,7 +2591,7 @@ void EvalState::printStats() } -std::string ExternalValueBase::coerceToString(const Pos & pos, PathSet & context, bool copyMore, bool copyToStore) const +std::string ExternalValueBase::coerceToString(const Pos & pos, NixStringContext & context, bool copyMore, bool copyToStore) const { throw TypeError({ .msg = hintfmt("cannot coerce %1% to a string", showType()) @@ -2518,7 +2620,7 @@ Strings EvalSettings::getDefaultNixPath() { Strings res; auto add = [&](const Path & p, const std::string & s = std::string()) { - if (pathExists(p)) { + if (pathAccessible(p)) { if (s.empty()) { res.push_back(p); } else { diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index b3b985683..d6f4560a5 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -8,6 +8,7 @@ #include "symbol-table.hh" #include "config.hh" #include "experimental-features.hh" +#include "input-accessor.hh" #include #include @@ -20,6 +21,7 @@ namespace nix { class Store; class EvalState; class StorePath; +struct DerivedPath; enum RepairFlag : bool; @@ -56,20 +58,14 @@ void printEnvBindings(const SymbolTable & st, const StaticEnv & se, const Env & std::unique_ptr mapStaticEnvBindings(const SymbolTable & st, const StaticEnv & se, const Env & env); -void copyContext(const Value & v, PathSet & context); - - -/** - * Cache for calls to addToStore(); maps source paths to the store - * paths. - */ -typedef std::map SrcToStore; +void copyContext(const Value & v, NixStringContext & context); std::string printValue(const EvalState & state, const Value & v); std::ostream & operator << (std::ostream & os, const ValueType t); +// FIXME: maybe change this to an std::variant. 
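The new `coerceToDerivedPath()` above requires exactly one string-context element and yields either an opaque store path or a built output. A hedged sketch of how a caller might branch on the result; the error-context and log messages are made up for the example, while `state`, `pos`, and `v` are assumed to be in scope as in the surrounding code:

```cpp
DerivedPath p = state.coerceToDerivedPath(pos, v, "while evaluating an output path");

std::visit(overloaded {
    [&](const DerivedPath::Opaque & o) {
        // `v` was a literal store path carrying a single Opaque context element.
        debug("opaque store path %s", state.store->printStorePath(o.path));
    },
    [&](const DerivedPath::Built & b) {
        // `v` was an output placeholder carrying a single Built context element;
        // the derivation path and output name come from the context, not the string.
        debug("output of %s", state.store->printStorePath(b.drvPath));
    },
}, p.raw());
```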
typedef std::pair SearchPathElem; typedef std::list SearchPath; @@ -137,8 +133,6 @@ public: SymbolTable symbols; PosTable positions; - static inline std::string derivationNixPath = "//builtin/derivation.nix"; - const Symbol sWith, sOutPath, sDrvPath, sType, sMeta, sName, sValue, sSystem, sOverrides, sOutputs, sOutputName, sIgnoreNulls, sFile, sLine, sColumn, sFunctor, sToString, @@ -149,7 +143,6 @@ public: sDescription, sSelf, sEpsilon, sStartSet, sOperator, sKey, sPath, sPrefix, sOutputSpecified; - Symbol sDerivationNix; /** * If set, force copying files to the Nix store even if they @@ -165,6 +158,8 @@ public: Bindings emptyBindings; + const SourcePath derivationInternal; + /** * Store used to materialise .drv files. */ @@ -234,15 +229,18 @@ public: } private: - SrcToStore srcToStore; + + /* Cache for calls to addToStore(); maps source paths to the store + paths. */ + std::map srcToStore; /** * A cache from path names to parse trees. */ #if HAVE_BOEHMGC - typedef std::map, traceable_allocator>> FileParseCache; + typedef std::map, traceable_allocator>> FileParseCache; #else - typedef std::map FileParseCache; + typedef std::map FileParseCache; #endif FileParseCache fileParseCache; @@ -250,9 +248,9 @@ private: * A cache from path names to values. */ #if HAVE_BOEHMGC - typedef std::map, traceable_allocator>> FileEvalCache; + typedef std::map, traceable_allocator>> FileEvalCache; #else - typedef std::map FileEvalCache; + typedef std::map FileEvalCache; #endif FileEvalCache fileEvalCache; @@ -263,7 +261,7 @@ private: /** * Cache used by checkSourcePath(). */ - std::unordered_map resolvedPaths; + std::unordered_map resolvedPaths; /** * Cache used by prim_match(). @@ -294,6 +292,12 @@ public: SearchPath getSearchPath() { return searchPath; } + /** + * Return a `SourcePath` that refers to `path` in the root + * filesystem. + */ + SourcePath rootPath(CanonPath path); + /** * Allow access to a path. */ @@ -314,7 +318,7 @@ public: * Check whether access to a path is allowed and throw an error if * not. Otherwise return the canonicalised path. */ - Path checkSourcePath(const Path & path); + SourcePath checkSourcePath(const SourcePath & path); void checkURI(const std::string & uri); @@ -327,19 +331,19 @@ public: * intended to distinguish between import-from-derivation and * sources stored in the actual /nix/store. */ - Path toRealPath(const Path & path, const PathSet & context); + Path toRealPath(const Path & path, const NixStringContext & context); /** * Parse a Nix expression from the specified file. */ - Expr * parseExprFromFile(const Path & path); - Expr * parseExprFromFile(const Path & path, std::shared_ptr & staticEnv); + Expr * parseExprFromFile(const SourcePath & path); + Expr * parseExprFromFile(const SourcePath & path, std::shared_ptr & staticEnv); /** * Parse a Nix expression from the specified string. */ - Expr * parseExprFromString(std::string s, const Path & basePath, std::shared_ptr & staticEnv); - Expr * parseExprFromString(std::string s, const Path & basePath); + Expr * parseExprFromString(std::string s, const SourcePath & basePath, std::shared_ptr & staticEnv); + Expr * parseExprFromString(std::string s, const SourcePath & basePath); Expr * parseStdin(); @@ -348,14 +352,14 @@ public: * form. Optionally enforce that the top-level expression is * trivial (i.e. doesn't require arbitrary computation). 
*/ - void evalFile(const Path & path, Value & v, bool mustBeTrivial = false); + void evalFile(const SourcePath & path, Value & v, bool mustBeTrivial = false); /** * Like `evalFile`, but with an already parsed expression. */ void cacheFile( - const Path & path, - const Path & resolvedPath, + const SourcePath & path, + const SourcePath & resolvedPath, Expr * e, Value & v, bool mustBeTrivial = false); @@ -365,8 +369,8 @@ public: /** * Look up a file in the search path. */ - Path findFile(const std::string_view path); - Path findFile(SearchPath & searchPath, const std::string_view path, const PosIdx pos = noPos); + SourcePath findFile(const std::string_view path); + SourcePath findFile(SearchPath & searchPath, const std::string_view path, const PosIdx pos = noPos); /** * If the specified search path element is a URI, download it. @@ -423,7 +427,7 @@ public: */ void forceFunction(Value & v, const PosIdx pos, std::string_view errorCtx); std::string_view forceString(Value & v, const PosIdx pos, std::string_view errorCtx); - std::string_view forceString(Value & v, PathSet & context, const PosIdx pos, std::string_view errorCtx); + std::string_view forceString(Value & v, NixStringContext & context, const PosIdx pos, std::string_view errorCtx); std::string_view forceStringNoCtx(Value & v, const PosIdx pos, std::string_view errorCtx); [[gnu::noinline]] @@ -439,7 +443,7 @@ public: bool isDerivation(Value & v); std::optional tryAttrsToString(const PosIdx pos, Value & v, - PathSet & context, bool coerceMore = false, bool copyToStore = true); + NixStringContext & context, bool coerceMore = false, bool copyToStore = true); /** * String coercion. @@ -449,12 +453,12 @@ public: * booleans and lists to a string. If `copyToStore` is set, * referenced paths are copied to the Nix store as a side effect. */ - BackedStringView coerceToString(const PosIdx pos, Value & v, PathSet & context, + BackedStringView coerceToString(const PosIdx pos, Value & v, NixStringContext & context, std::string_view errorCtx, bool coerceMore = false, bool copyToStore = true, bool canonicalizePath = true); - StorePath copyPathToStore(PathSet & context, const Path & path); + StorePath copyPathToStore(NixStringContext & context, const SourcePath & path); /** * Path coercion. @@ -463,12 +467,34 @@ public: * path. The result is guaranteed to be a canonicalised, absolute * path. Nothing is copied to the store. */ - Path coerceToPath(const PosIdx pos, Value & v, PathSet & context, std::string_view errorCtx); + SourcePath coerceToPath(const PosIdx pos, Value & v, NixStringContext & context, std::string_view errorCtx); /** * Like coerceToPath, but the result must be a store path. */ - StorePath coerceToStorePath(const PosIdx pos, Value & v, PathSet & context, std::string_view errorCtx); + StorePath coerceToStorePath(const PosIdx pos, Value & v, NixStringContext & context, std::string_view errorCtx); + + /** + * Part of `coerceToDerivedPath()` without any store IO which is exposed for unit testing only. + */ + std::pair coerceToDerivedPathUnchecked(const PosIdx pos, Value & v, std::string_view errorCtx); + + /** + * Coerce to `DerivedPath`. + * + * Must be a string which is either a literal store path or a + * "placeholder (see `DownstreamPlaceholder`). + * + * Even more importantly, the string context must be exactly one + * element, which is either a `NixStringContextElem::Opaque` or + * `NixStringContextElem::Built`. (`NixStringContextEleme::DrvDeep` + * is not permitted). 
+ * + * The string is parsed based on the context --- the context is the + * source of truth, and ultimately tells us what we want, and then + * we ensure the string corresponds to it. + */ + DerivedPath coerceToDerivedPath(const PosIdx pos, Value & v, std::string_view errorCtx); public: @@ -525,7 +551,7 @@ private: char * text, size_t length, Pos::Origin origin, - Path basePath, + const SourcePath & basePath, std::shared_ptr & staticEnv); public: @@ -573,6 +599,37 @@ public: void mkThunk_(Value & v, Expr * expr); void mkPos(Value & v, PosIdx pos); + /** + * Create a string representing a store path. + * + * The string is the printed store path with a context containing a single + * `NixStringContextElem::Opaque` element of that store path. + */ + void mkStorePathString(const StorePath & storePath, Value & v); + + /** + * Create a string representing a `DerivedPath::Built`. + * + * The string is the printed store path with a context containing a single + * `NixStringContextElem::Built` element of the drv path and output name. + * + * @param value Value we are settings + * + * @param drvPath Path the drv whose output we are making a string for + * + * @param outputName Name of the output + * + * @param optOutputPath Optional output path for that string. Must + * be passed if and only if output store object is input-addressed. + * Will be printed to form string if passed, otherwise a placeholder + * will be used (see `DownstreamPlaceholder`). + */ + void mkOutputString( + Value & value, + const StorePath & drvPath, + const std::string outputName, + std::optional optOutputPath); + void concatLists(Value & v, size_t nrLists, Value * * lists, const PosIdx pos, std::string_view errorCtx); /** @@ -584,7 +641,7 @@ public: * Realise the given context, and return a mapping from the placeholders * used to construct the associated value to their final store path */ - [[nodiscard]] StringMap realiseContext(const PathSet & context); + [[nodiscard]] StringMap realiseContext(const NixStringContext & context); private: @@ -650,7 +707,7 @@ std::string showType(const Value & v); /** * If `path` refers to a directory, then append "/default.nix". 
*/ -Path resolveExprPath(Path path); +SourcePath resolveExprPath(const SourcePath & path); struct InvalidPathError : EvalError { @@ -688,7 +745,13 @@ struct EvalSettings : Config )"}; Setting pureEval{this, false, "pure-eval", - "Whether to restrict file system and network access to files specified by cryptographic hash."}; + R"( + Pure evaluation mode ensures that the result of Nix expressions is fully determined by explicitly declared inputs, and not influenced by external state: + + - Restrict file system and network access to files specified by cryptographic hash + - Disable [`bultins.currentSystem`](@docroot@/language/builtin-constants.md#builtins-currentSystem) and [`builtins.currentTime`](@docroot@/language/builtin-constants.md#builtins-currentTime) + )" + }; Setting enableImportFromDerivation{ this, true, "allow-import-from-derivation", diff --git a/src/libexpr/flake/flake.cc b/src/libexpr/flake/flake.cc index ac396236f..60bb6a71e 100644 --- a/src/libexpr/flake/flake.cc +++ b/src/libexpr/flake/flake.cc @@ -222,9 +222,9 @@ static Flake getFlake( throw Error("source tree referenced by '%s' does not contain a '%s/flake.nix' file", lockedRef, lockedRef.subdir); Value vInfo; - state.evalFile(flakeFile, vInfo, true); // FIXME: symlink attack + state.evalFile(CanonPath(flakeFile), vInfo, true); // FIXME: symlink attack - expectType(state, nAttrs, vInfo, state.positions.add({flakeFile}, 1, 1)); + expectType(state, nAttrs, vInfo, state.positions.add({CanonPath(flakeFile)}, 1, 1)); if (auto description = vInfo.attrs->get(state.sDescription)) { expectType(state, nString, *description->value, description->pos); @@ -265,7 +265,7 @@ static Flake getFlake( state.symbols[setting.name], std::string(state.forceStringNoCtx(*setting.value, setting.pos, ""))); else if (setting.value->type() == nPath) { - PathSet emptyContext = {}; + NixStringContext emptyContext = {}; flake.config.settings.emplace( state.symbols[setting.name], state.coerceToString(setting.pos, *setting.value, emptyContext, "", false, true, true) .toOwned()); @@ -745,7 +745,7 @@ void callFlake(EvalState & state, state.vCallFlake = allocRootValue(state.allocValue()); state.eval(state.parseExprFromString( #include "call-flake.nix.gen.hh" - , "/"), **state.vCallFlake); + , CanonPath::root), **state.vCallFlake); } state.callFunction(**state.vCallFlake, *vLocks, *vTmp1, noPos); diff --git a/src/libexpr/get-drvs.cc b/src/libexpr/get-drvs.cc index 1602fbffb..506a63677 100644 --- a/src/libexpr/get-drvs.cc +++ b/src/libexpr/get-drvs.cc @@ -71,7 +71,7 @@ std::optional DrvInfo::queryDrvPath() const { if (!drvPath && attrs) { Bindings::iterator i = attrs->find(state->sDrvPath); - PathSet context; + NixStringContext context; if (i == attrs->end()) drvPath = {std::nullopt}; else @@ -93,7 +93,7 @@ StorePath DrvInfo::queryOutPath() const { if (!outPath && attrs) { Bindings::iterator i = attrs->find(state->sOutPath); - PathSet context; + NixStringContext context; if (i != attrs->end()) outPath = state->coerceToStorePath(i->pos, *i->value, context, "while evaluating the output path of a derivation"); } @@ -124,7 +124,7 @@ DrvInfo::Outputs DrvInfo::queryOutputs(bool withPaths, bool onlyOutputsToInstall /* And evaluate its ‘outPath’ attribute. */ Bindings::iterator outPath = out->value->attrs->find(state->sOutPath); if (outPath == out->value->attrs->end()) continue; // FIXME: throw error? 
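A short sketch of the two string-construction helpers declared above; `storePath` and `drvPath` are placeholders standing in for real store paths, and `state` is an `EvalState`:

```cpp
Value vPath, vOut;

// String value whose context is a single Opaque element for storePath.
state.mkStorePathString(storePath, vPath);

// String value whose context is a single Built element for drvPath!out.
// Passing std::nullopt means the output path is unknown (floating CA
// output), so a DownstreamPlaceholder is rendered instead of a real path.
state.mkOutputString(vOut, drvPath, "out", std::nullopt);
```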
- PathSet context; + NixStringContext context; outputs.emplace(output, state->coerceToStorePath(outPath->pos, *outPath->value, context, "while evaluating an output path of a derivation")); } else outputs.emplace(output, std::nullopt); diff --git a/src/libexpr/nixexpr.cc b/src/libexpr/nixexpr.cc index 1557cbbeb..4566a1388 100644 --- a/src/libexpr/nixexpr.cc +++ b/src/libexpr/nixexpr.cc @@ -32,9 +32,9 @@ struct PosAdapter : AbstractPos // Get rid of the null terminators added by the parser. return std::string(s.source->c_str()); }, - [](const Path & path) -> std::optional { + [](const SourcePath & path) -> std::optional { try { - return readFile(path); + return path.readFile(); } catch (Error &) { return std::nullopt; } @@ -48,7 +48,7 @@ struct PosAdapter : AbstractPos [&](const Pos::none_tag &) { out << "«none»"; }, [&](const Pos::Stdin &) { out << "«stdin»"; }, [&](const Pos::String & s) { out << "«string»"; }, - [&](const Path & path) { out << path; } + [&](const SourcePath & path) { out << path; } }, origin); } }; diff --git a/src/libexpr/nixexpr.hh b/src/libexpr/nixexpr.hh index c2f817c9a..5ca3d1fa6 100644 --- a/src/libexpr/nixexpr.hh +++ b/src/libexpr/nixexpr.hh @@ -34,7 +34,7 @@ struct Pos struct Stdin { ref source; }; struct String { ref source; }; - typedef std::variant Origin; + typedef std::variant Origin; Origin origin; diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index 97e615c37..4d981712a 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -31,7 +31,7 @@ namespace nix { EvalState & state; SymbolTable & symbols; Expr * result; - Path basePath; + SourcePath basePath; PosTable::Origin origin; std::optional error; }; @@ -509,7 +509,7 @@ string_parts_interpolated path_start : PATH { - Path path(absPath({$1.p, $1.l}, data->basePath)); + Path path(absPath({$1.p, $1.l}, data->basePath.path.abs())); /* add back in the trailing '/' to the first segment */ if ($1.p[$1.l-1] == '/' && $1.l > 1) path += "/"; @@ -651,7 +651,7 @@ Expr * EvalState::parse( char * text, size_t length, Pos::Origin origin, - Path basePath, + const SourcePath & basePath, std::shared_ptr & staticEnv) { yyscan_t scanner; @@ -675,48 +675,36 @@ Expr * EvalState::parse( } -Path resolveExprPath(Path path) +SourcePath resolveExprPath(const SourcePath & path) { - assert(path[0] == '/'); - - unsigned int followCount = 0, maxFollow = 1024; - /* If `path' is a symlink, follow it. This is so that relative path references work. */ - struct stat st; - while (true) { - // Basic cycle/depth limit to avoid infinite loops. - if (++followCount >= maxFollow) - throw Error("too many symbolic links encountered while traversing the path '%s'", path); - st = lstat(path); - if (!S_ISLNK(st.st_mode)) break; - path = absPath(readLink(path), dirOf(path)); - } + auto path2 = path.resolveSymlinks(); /* If `path' refers to a directory, append `/default.nix'. 
*/ - if (S_ISDIR(st.st_mode)) - path = canonPath(path + "/default.nix"); + if (path2.lstat().type == InputAccessor::tDirectory) + return path2 + "default.nix"; - return path; + return path2; } -Expr * EvalState::parseExprFromFile(const Path & path) +Expr * EvalState::parseExprFromFile(const SourcePath & path) { return parseExprFromFile(path, staticBaseEnv); } -Expr * EvalState::parseExprFromFile(const Path & path, std::shared_ptr & staticEnv) +Expr * EvalState::parseExprFromFile(const SourcePath & path, std::shared_ptr & staticEnv) { - auto buffer = readFile(path); - // readFile should have left some extra space for terminators + auto buffer = path.readFile(); + // readFile hopefully have left some extra space for terminators buffer.append("\0\0", 2); - return parse(buffer.data(), buffer.size(), path, dirOf(path), staticEnv); + return parse(buffer.data(), buffer.size(), Pos::Origin(path), path.parent(), staticEnv); } -Expr * EvalState::parseExprFromString(std::string s_, const Path & basePath, std::shared_ptr & staticEnv) +Expr * EvalState::parseExprFromString(std::string s_, const SourcePath & basePath, std::shared_ptr & staticEnv) { auto s = make_ref(std::move(s_)); s->append("\0\0", 2); @@ -724,7 +712,7 @@ Expr * EvalState::parseExprFromString(std::string s_, const Path & basePath, std } -Expr * EvalState::parseExprFromString(std::string s, const Path & basePath) +Expr * EvalState::parseExprFromString(std::string s, const SourcePath & basePath) { return parseExprFromString(std::move(s), basePath, staticBaseEnv); } @@ -737,7 +725,7 @@ Expr * EvalState::parseStdin() // drainFD should have left some extra space for terminators buffer.append("\0\0", 2); auto s = make_ref(std::move(buffer)); - return parse(s->data(), s->size(), Pos::Stdin{.source = s}, absPath("."), staticBaseEnv); + return parse(s->data(), s->size(), Pos::Stdin{.source = s}, rootPath(CanonPath::fromCwd()), staticBaseEnv); } @@ -757,13 +745,13 @@ void EvalState::addToSearchPath(const std::string & s) } -Path EvalState::findFile(const std::string_view path) +SourcePath EvalState::findFile(const std::string_view path) { return findFile(searchPath, path); } -Path EvalState::findFile(SearchPath & searchPath, const std::string_view path, const PosIdx pos) +SourcePath EvalState::findFile(SearchPath & searchPath, const std::string_view path, const PosIdx pos) { for (auto & i : searchPath) { std::string suffix; @@ -779,11 +767,11 @@ Path EvalState::findFile(SearchPath & searchPath, const std::string_view path, c auto r = resolveSearchPathElem(i); if (!r.first) continue; Path res = r.second + suffix; - if (pathExists(res)) return canonPath(res); + if (pathExists(res)) return CanonPath(canonPath(res)); } if (hasPrefix(path, "nix/")) - return concatStrings(corepkgsPrefix, path.substr(4)); + return CanonPath(concatStrings(corepkgsPrefix, path.substr(4))); debugThrow(ThrownError({ .msg = hintfmt(evalSettings.pureEval diff --git a/src/libexpr/paths.cc b/src/libexpr/paths.cc new file mode 100644 index 000000000..1d690b722 --- /dev/null +++ b/src/libexpr/paths.cc @@ -0,0 +1,10 @@ +#include "eval.hh" + +namespace nix { + +SourcePath EvalState::rootPath(CanonPath path) +{ + return std::move(path); +} + +} diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 510f674eb..42efca4e7 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -1,5 +1,6 @@ #include "archive.hh" #include "derivations.hh" +#include "downstream-placeholder.hh" #include "eval-inline.hh" #include "eval.hh" #include "globals.hh" @@ -38,17 +39,16 @@ 
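A sketch of the reworked `resolveExprPath()` just above, using a made-up directory; `state` is assumed to be an `EvalState`:

```cpp
// Symlinks are resolved via SourcePath::resolveSymlinks(); for a directory,
// "default.nix" is appended.
SourcePath arg = state.rootPath(CanonPath("/home/alice/project"));
SourcePath resolved = resolveExprPath(arg);
// `resolved` is /home/alice/project/default.nix when `arg` is a directory,
// otherwise the symlink-resolved file itself.
Expr * e = state.parseExprFromFile(resolved);
```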
namespace nix { InvalidPathError::InvalidPathError(const Path & path) : EvalError("path '%s' is not valid", path), path(path) {} -StringMap EvalState::realiseContext(const PathSet & context) +StringMap EvalState::realiseContext(const NixStringContext & context) { std::vector drvs; StringMap res; - for (auto & c_ : context) { + for (auto & c : context) { auto ensureValid = [&](const StorePath & p) { if (!store->isValidPath(p)) debugThrowLastTrace(InvalidPathError(store->printStorePath(p))); }; - auto c = NixStringContextElem::parse(*store, c_); std::visit(overloaded { [&](const NixStringContextElem::Built & b) { drvs.push_back(DerivedPath::Built { @@ -88,7 +88,7 @@ StringMap EvalState::realiseContext(const PathSet & context) auto outputs = resolveDerivedPath(*store, drv); for (auto & [outputName, outputPath] : outputs) { res.insert_or_assign( - downstreamPlaceholder(*store, drv.drvPath, outputName), + DownstreamPlaceholder::unknownCaOutput(drv.drvPath, outputName).render(), store->printStorePath(outputPath) ); } @@ -110,16 +110,16 @@ struct RealisePathFlags { bool checkForPureEval = true; }; -static Path realisePath(EvalState & state, const PosIdx pos, Value & v, const RealisePathFlags flags = {}) +static SourcePath realisePath(EvalState & state, const PosIdx pos, Value & v, const RealisePathFlags flags = {}) { - PathSet context; + NixStringContext context; auto path = state.coerceToPath(noPos, v, context, "while realising the context of a path"); try { StringMap rewrites = state.realiseContext(context); - auto realPath = state.toRealPath(rewriteStrings(path, rewrites), context); + auto realPath = state.rootPath(CanonPath(state.toRealPath(rewriteStrings(path.path.abs(), rewrites), context))); return flags.checkForPureEval ? state.checkSourcePath(realPath) @@ -130,35 +130,31 @@ static Path realisePath(EvalState & state, const PosIdx pos, Value & v, const Re } } -/* Add and attribute to the given attribute map from the output name to - the output path, or a placeholder. - - Where possible the path is used, but for floating CA derivations we - may not know it. For sake of determinism we always assume we don't - and instead put in a place holder. In either case, however, the - string context will contain the drv path and output name, so - downstream derivations will have the proper dependency, and in - addition, before building, the placeholder will be rewritten to be - the actual path. - - The 'drv' and 'drvPath' outputs must correspond. */ +/** + * Add and attribute to the given attribute map from the output name to + * the output path, or a placeholder. + * + * Where possible the path is used, but for floating CA derivations we + * may not know it. For sake of determinism we always assume we don't + * and instead put in a place holder. In either case, however, the + * string context will contain the drv path and output name, so + * downstream derivations will have the proper dependency, and in + * addition, before building, the placeholder will be rewritten to be + * the actual path. + * + * The 'drv' and 'drvPath' outputs must correspond. + */ static void mkOutputString( EvalState & state, BindingsBuilder & attrs, const StorePath & drvPath, - const BasicDerivation & drv, const std::pair & o) { - auto optOutputPath = o.second.path(*state.store, drv.name, o.first); - attrs.alloc(o.first).mkString( - optOutputPath - ? 
state.store->printStorePath(*optOutputPath) - /* Downstream we would substitute this for an actual path once - we build the floating CA derivation */ - /* FIXME: we need to depend on the basic derivation, not - derivation */ - : downstreamPlaceholder(*state.store, drvPath, o.first), - {"!" + o.first + "!" + state.store->printStorePath(drvPath)}); + state.mkOutputString( + attrs.alloc(o.first), + drvPath, + o.first, + o.second.path(*state.store, Derivation::nameFromPath(drvPath), o.first)); } /* Load and evaluate an expression from path specified by the @@ -166,28 +162,30 @@ static void mkOutputString( static void import(EvalState & state, const PosIdx pos, Value & vPath, Value * vScope, Value & v) { auto path = realisePath(state, pos, vPath); + auto path2 = path.path.abs(); // FIXME auto isValidDerivationInStore = [&]() -> std::optional { - if (!state.store->isStorePath(path)) + if (!state.store->isStorePath(path2)) return std::nullopt; - auto storePath = state.store->parseStorePath(path); - if (!(state.store->isValidPath(storePath) && isDerivation(path))) + auto storePath = state.store->parseStorePath(path2); + if (!(state.store->isValidPath(storePath) && isDerivation(path2))) return std::nullopt; return storePath; }; - if (auto optStorePath = isValidDerivationInStore()) { - auto storePath = *optStorePath; - Derivation drv = state.store->readDerivation(storePath); + if (auto storePath = isValidDerivationInStore()) { + Derivation drv = state.store->readDerivation(*storePath); auto attrs = state.buildBindings(3 + drv.outputs.size()); - attrs.alloc(state.sDrvPath).mkString(path, {"=" + path}); + attrs.alloc(state.sDrvPath).mkString(path2, { + NixStringContextElem::DrvDeep { .drvPath = *storePath }, + }); attrs.alloc(state.sName).mkString(drv.env["name"]); auto & outputsVal = attrs.alloc(state.sOutputs); state.mkList(outputsVal, drv.outputs.size()); for (const auto & [i, o] : enumerate(drv.outputs)) { - mkOutputString(state, attrs, storePath, drv, o); + mkOutputString(state, attrs, *storePath, o); (outputsVal.listElems()[i] = state.allocValue())->mkString(o.first); } @@ -198,7 +196,7 @@ static void import(EvalState & state, const PosIdx pos, Value & vPath, Value * v state.vImportedDrvToDerivation = allocRootValue(state.allocValue()); state.eval(state.parseExprFromString( #include "imported-drv-to-derivation.nix.gen.hh" - , "/"), **state.vImportedDrvToDerivation); + , CanonPath::root), **state.vImportedDrvToDerivation); } state.forceFunction(**state.vImportedDrvToDerivation, pos, "while evaluating imported-drv-to-derivation.nix.gen.hh"); @@ -206,10 +204,10 @@ static void import(EvalState & state, const PosIdx pos, Value & vPath, Value * v state.forceAttrs(v, pos, "while calling imported-drv-to-derivation.nix.gen.hh"); } - else if (path == corepkgsPrefix + "fetchurl.nix") { + else if (path2 == corepkgsPrefix + "fetchurl.nix") { state.eval(state.parseExprFromString( #include "fetchurl.nix.gen.hh" - , "/"), v); + , CanonPath::root), v); } else { @@ -330,7 +328,7 @@ void prim_importNative(EvalState & state, const PosIdx pos, Value * * args, Valu std::string sym(state.forceStringNoCtx(*args[1], pos, "while evaluating the second argument passed to builtins.importNative")); - void *handle = dlopen(path.c_str(), RTLD_LAZY | RTLD_LOCAL); + void *handle = dlopen(path.path.c_str(), RTLD_LAZY | RTLD_LOCAL); if (!handle) state.debugThrowLastTrace(EvalError("could not open '%1%': %2%", path, dlerror())); @@ -358,7 +356,7 @@ void prim_exec(EvalState & state, const PosIdx pos, Value * * args, Value & v) auto 
count = args[0]->listSize(); if (count == 0) state.error("at least one argument to 'exec' required").atPos(pos).debugThrow(); - PathSet context; + NixStringContext context; auto program = state.coerceToString(pos, *elems[0], context, "while evaluating the first element of the argument passed to builtins.exec", false, false).toOwned(); @@ -378,7 +376,7 @@ void prim_exec(EvalState & state, const PosIdx pos, Value * * args, Value & v) auto output = runProgram(program, true, commandArgs); Expr * parsed; try { - parsed = state.parseExprFromString(std::move(output), "/"); + parsed = state.parseExprFromString(std::move(output), state.rootPath(CanonPath::root)); } catch (Error & e) { e.addTrace(state.positions[pos], "while parsing the output from '%1%'", program); throw; @@ -588,7 +586,7 @@ struct CompareValues case nString: return strcmp(v1->string.s, v2->string.s) < 0; case nPath: - return strcmp(v1->path, v2->path) < 0; + return strcmp(v1->_path, v2->_path) < 0; case nList: // Lexicographic comparison for (size_t i = 0;; i++) { @@ -700,12 +698,14 @@ static RegisterPrimOp primop_genericClosure(RegisterPrimOp::Info { .arity = 1, .doc = R"( Take an *attrset* with values named `startSet` and `operator` in order to - return a *list of attrsets* by starting with the `startSet`, recursively - applying the `operator` function to each element. The *attrsets* in the - `startSet` and produced by the `operator` must each contain value named - `key` which are comparable to each other. The result is produced by - repeatedly calling the operator for each element encountered with a - unique key, terminating when no new elements are produced. For example, + return a *list of attrsets* by starting with the `startSet` and recursively + applying the `operator` function to each `item`. The *attrsets* in the + `startSet` and the *attrsets* produced by `operator` must contain a value + named `key` which is comparable. The result is produced by calling `operator` + for each `item` with a value for `key` that has not been called yet including + newly produced `item`s. The function terminates when no new `item`s are + produced. The resulting *list of attrsets* contains only *attrsets* with a + unique key. 
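Before the Nix example that follows, a compact illustration of the work-list algorithm this paragraph describes may be useful. The following is a simplified, self-contained C++ sketch (an `Item` struct with an integer `key` is a stand-in; this is not the evaluator's implementation):

```cpp
#include <functional>
#include <iostream>
#include <set>
#include <vector>

struct Item { int key; };

// Work-list closure: call `op` once per unique key, feeding newly
// produced items back into the queue until nothing new appears.
std::vector<Item> genericClosure(
    std::vector<Item> startSet,
    std::function<std::vector<Item>(const Item &)> op)
{
    std::vector<Item> result;
    std::set<int> seen;
    std::vector<Item> todo = std::move(startSet);
    while (!todo.empty()) {
        Item item = todo.back();
        todo.pop_back();
        if (!seen.insert(item.key).second) continue; // key already handled
        result.push_back(item);
        for (auto & next : op(item)) todo.push_back(next);
    }
    return result;
}

int main()
{
    // Roughly analogous to:
    //   builtins.genericClosure {
    //     startSet = [ { key = 9; } ];
    //     operator = item: [ { key = item.key / 2; } ];
    //   }
    auto res = genericClosure({{9}}, [](const Item & i) {
        return std::vector<Item>{{i.key / 2}};
    });
    for (auto & i : res) std::cout << i.key << " "; // 9 4 2 1 0
    std::cout << "\n";
}
```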
For example, ``` builtins.genericClosure { @@ -768,7 +768,7 @@ static RegisterPrimOp primop_abort({ )", .fun = [](EvalState & state, const PosIdx pos, Value * * args, Value & v) { - PathSet context; + NixStringContext context; auto s = state.coerceToString(pos, *args[0], context, "while evaluating the error message passed to builtins.abort").toOwned(); state.debugThrowLastTrace(Abort("evaluation aborted with the following error message: '%1%'", s)); @@ -787,7 +787,7 @@ static RegisterPrimOp primop_throw({ )", .fun = [](EvalState & state, const PosIdx pos, Value * * args, Value & v) { - PathSet context; + NixStringContext context; auto s = state.coerceToString(pos, *args[0], context, "while evaluating the error message passed to builtin.throw").toOwned(); state.debugThrowLastTrace(ThrownError(s)); @@ -800,7 +800,7 @@ static void prim_addErrorContext(EvalState & state, const PosIdx pos, Value * * state.forceValue(*args[1], pos); v = *args[1]; } catch (Error & e) { - PathSet context; + NixStringContext context; auto message = state.coerceToString(pos, *args[0], context, "while evaluating the error message passed to builtins.addErrorContext", false, false).toOwned(); @@ -1086,13 +1086,13 @@ drvName, Bindings * attrs, Value & v) Derivation drv; drv.name = drvName; - PathSet context; + NixStringContext context; bool contentAddressed = false; bool isImpure = false; std::optional outputHash; std::string outputHashAlgo; - std::optional ingestionMethod; + std::optional ingestionMethod; StringSet outputs; outputs.insert("out"); @@ -1105,7 +1105,10 @@ drvName, Bindings * attrs, Value & v) auto handleHashMode = [&](const std::string_view s) { if (s == "recursive") ingestionMethod = FileIngestionMethod::Recursive; else if (s == "flat") ingestionMethod = FileIngestionMethod::Flat; - else + else if (s == "text") { + experimentalFeatureSettings.require(Xp::DynamicDerivations); + ingestionMethod = TextIngestionMethod {}; + } else state.debugThrowLastTrace(EvalError({ .msg = hintfmt("invalid value '%s' for 'outputHashMode' attribute", s), .errPos = state.positions[noPos] @@ -1149,16 +1152,14 @@ drvName, Bindings * attrs, Value & v) if (i->value->type() == nNull) continue; } - if (i->name == state.sContentAddressed) { - contentAddressed = state.forceBool(*i->value, noPos, context_below); - if (contentAddressed) - experimentalFeatureSettings.require(Xp::CaDerivations); + if (i->name == state.sContentAddressed && state.forceBool(*i->value, noPos, context_below)) { + contentAddressed = true; + experimentalFeatureSettings.require(Xp::CaDerivations); } - else if (i->name == state.sImpure) { - isImpure = state.forceBool(*i->value, noPos, context_below); - if (isImpure) - experimentalFeatureSettings.require(Xp::ImpureDerivations); + else if (i->name == state.sImpure && state.forceBool(*i->value, noPos, context_below)) { + isImpure = true; + experimentalFeatureSettings.require(Xp::ImpureDerivations); } /* The `args' attribute is special: it supplies the @@ -1232,8 +1233,7 @@ drvName, Bindings * attrs, Value & v) /* Everything in the context of the strings in the derivation attributes should be added as dependencies of the resulting derivation. */ - for (auto & c_ : context) { - auto c = NixStringContextElem::parse(*state.store, c_); + for (auto & c : context) { std::visit(overloaded { /* Since this allows the builder to gain access to every path in the dependency graph of the derivation (including @@ -1273,11 +1273,16 @@ drvName, Bindings * attrs, Value & v) })); /* Check whether the derivation name is valid. 
*/ - if (isDerivation(drvName)) + if (isDerivation(drvName) && + !(ingestionMethod == ContentAddressMethod { TextIngestionMethod { } } && + outputs.size() == 1 && + *(outputs.begin()) == "out")) + { state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("derivation names are not allowed to end in '%s'", drvExtension), + .msg = hintfmt("derivation names are allowed to end in '%s' only if they produce a single derivation file", drvExtension), .errPos = state.positions[noPos] })); + } if (outputHash) { /* Handle fixed-output derivations. @@ -1293,21 +1298,15 @@ drvName, Bindings * attrs, Value & v) auto h = newHashAllowEmpty(*outputHash, parseHashTypeOpt(outputHashAlgo)); auto method = ingestionMethod.value_or(FileIngestionMethod::Flat); - auto outPath = state.store->makeFixedOutputPath(drvName, FixedOutputInfo { - .hash = { - .method = method, - .hash = h, - }, - .references = {}, - }); - drv.env["out"] = state.store->printStorePath(outPath); - drv.outputs.insert_or_assign("out", - DerivationOutput::CAFixed { - .hash = FixedOutputHash { - .method = method, - .hash = std::move(h), - }, - }); + + DerivationOutput::CAFixed dof { + .ca = ContentAddress::fromParts( + std::move(method), + std::move(h)), + }; + + drv.env["out"] = state.store->printStorePath(dof.path(*state.store, drvName, "out")); + drv.outputs.insert_or_assign("out", std::move(dof)); } else if (contentAddressed || isImpure) { @@ -1325,13 +1324,13 @@ drvName, Bindings * attrs, Value & v) if (isImpure) drv.outputs.insert_or_assign(i, DerivationOutput::Impure { - .method = method, + .method = method.raw, .hashType = ht, }); else drv.outputs.insert_or_assign(i, DerivationOutput::CAFloating { - .method = method, + .method = method.raw, .hashType = ht, }); } @@ -1392,9 +1391,11 @@ drvName, Bindings * attrs, Value & v) } auto result = state.buildBindings(1 + drv.outputs.size()); - result.alloc(state.sDrvPath).mkString(drvPathS, {"=" + drvPathS}); + result.alloc(state.sDrvPath).mkString(drvPathS, { + NixStringContextElem::DrvDeep { .drvPath = drvPath }, + }); for (auto & i : drv.outputs) - mkOutputString(state, result, drvPath, drv, i); + mkOutputString(state, result, drvPath, i); v.mkAttrs(result); } @@ -1437,9 +1438,9 @@ static RegisterPrimOp primop_placeholder({ /* Convert the argument to a path. !!! obsolete? */ static void prim_toPath(EvalState & state, const PosIdx pos, Value * * args, Value & v) { - PathSet context; - Path path = state.coerceToPath(pos, *args[0], context, "while evaluating the first argument passed to builtins.toPath"); - v.mkString(canonPath(path), context); + NixStringContext context; + auto path = state.coerceToPath(pos, *args[0], context, "while evaluating the first argument passed to builtins.toPath"); + v.mkString(path.path.abs(), context); } static RegisterPrimOp primop_toPath({ @@ -1468,22 +1469,23 @@ static void prim_storePath(EvalState & state, const PosIdx pos, Value * * args, .errPos = state.positions[pos] })); - PathSet context; - Path path = state.checkSourcePath(state.coerceToPath(pos, *args[0], context, "while evaluating the first argument passed to builtins.storePath")); + NixStringContext context; + auto path = state.checkSourcePath(state.coerceToPath(pos, *args[0], context, "while evaluating the first argument passed to builtins.storePath")).path; /* Resolve symlinks in ‘path’, unless ‘path’ itself is a symlink directly in the store. The latter condition is necessary so e.g. nix-push does the right thing. 
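The new `"text"` value for `outputHashMode` and the relaxed check on derivation names ending in `.drv` both rely on `ContentAddressMethod` being a variant over `TextIngestionMethod` and `FileIngestionMethod`. A minimal, self-contained sketch of that mapping, using simplified stand-in types and a plain boolean in place of the real `Xp::DynamicDerivations` feature gate:

```cpp
#include <iostream>
#include <stdexcept>
#include <string_view>
#include <variant>

// Simplified stand-ins for the types used in the diff.
enum class FileIngestionMethod { Flat, Recursive };
struct TextIngestionMethod {};
using ContentAddressMethod = std::variant<TextIngestionMethod, FileIngestionMethod>;

// Stand-in for the experimental feature gate; the real code calls
// experimentalFeatureSettings.require(Xp::DynamicDerivations).
bool dynamicDerivationsEnabled = false;

ContentAddressMethod parseOutputHashMode(std::string_view s)
{
    if (s == "recursive") return FileIngestionMethod::Recursive;
    if (s == "flat") return FileIngestionMethod::Flat;
    if (s == "text") {
        if (!dynamicDerivationsEnabled)
            throw std::runtime_error("'text' requires the 'dynamic-derivations' experimental feature");
        return TextIngestionMethod {};
    }
    throw std::runtime_error("invalid value for 'outputHashMode' attribute");
}

int main()
{
    dynamicDerivationsEnabled = true;
    auto m = parseOutputHashMode("text");
    // Text ingestion is what allows a derivation name ending in ".drv"
    // with a single "out" output, per the relaxed check above.
    std::cout << (std::holds_alternative<TextIngestionMethod>(m) ? "text" : "file") << "\n";
}
```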
*/ - if (!state.store->isStorePath(path)) path = canonPath(path, true); - if (!state.store->isInStore(path)) + if (!state.store->isStorePath(path.abs())) + path = CanonPath(canonPath(path.abs(), true)); + if (!state.store->isInStore(path.abs())) state.debugThrowLastTrace(EvalError({ .msg = hintfmt("path '%1%' is not in the Nix store", path), .errPos = state.positions[pos] })); - auto path2 = state.store->toStorePath(path).first; + auto path2 = state.store->toStorePath(path.abs()).first; if (!settings.readOnlyMode) state.store->ensurePath(path2); - context.insert(state.store->printStorePath(path2)); - v.mkString(path, context); + context.insert(NixStringContextElem::Opaque { .path = path2 }); + v.mkString(path.abs(), context); } static RegisterPrimOp primop_storePath({ @@ -1499,7 +1501,7 @@ static RegisterPrimOp primop_storePath({ causes the path to be *copied* again to the Nix store, resulting in a new path (e.g. `/nix/store/ld01dnzc…-source-source`). - This function is not available in pure evaluation mode. + Not available in [pure evaluation mode](@docroot@/command-ref/conf-file.md#conf-pure-eval). )", .fun = prim_storePath, }); @@ -1514,7 +1516,7 @@ static void prim_pathExists(EvalState & state, const PosIdx pos, Value * * args, auto path = realisePath(state, pos, *args[0], { .checkForPureEval = false }); try { - v.mkBool(pathExists(state.checkSourcePath(path))); + v.mkBool(state.checkSourcePath(path).pathExists()); } catch (SysError & e) { /* Don't give away info from errors while canonicalising ‘path’ in restricted mode. */ @@ -1538,7 +1540,7 @@ static RegisterPrimOp primop_pathExists({ following the last slash. */ static void prim_baseNameOf(EvalState & state, const PosIdx pos, Value * * args, Value & v) { - PathSet context; + NixStringContext context; v.mkString(baseNameOf(*state.coerceToString(pos, *args[0], context, "while evaluating the first argument passed to builtins.baseNameOf", false, false)), context); @@ -1560,12 +1562,18 @@ static RegisterPrimOp primop_baseNameOf({ of the argument. */ static void prim_dirOf(EvalState & state, const PosIdx pos, Value * * args, Value & v) { - PathSet context; - auto path = state.coerceToString(pos, *args[0], context, - "while evaluating the first argument passed to builtins.dirOf", + state.forceValue(*args[0], pos); + if (args[0]->type() == nPath) { + auto path = args[0]->path(); + v.mkPath(path.path.isRoot() ? 
path : path.parent()); + } else { + NixStringContext context; + auto path = state.coerceToString(pos, *args[0], context, + "while evaluating the first argument passed to 'builtins.dirOf'", false, false); - auto dir = dirOf(*path); - if (args[0]->type() == nPath) v.mkPath(dir); else v.mkString(dir, context); + auto dir = dirOf(*path); + v.mkString(dir, context); + } } static RegisterPrimOp primop_dirOf({ @@ -1583,13 +1591,13 @@ static RegisterPrimOp primop_dirOf({ static void prim_readFile(EvalState & state, const PosIdx pos, Value * * args, Value & v) { auto path = realisePath(state, pos, *args[0]); - auto s = readFile(path); + auto s = path.readFile(); if (s.find((char) 0) != std::string::npos) state.debugThrowLastTrace(Error("the contents of the file '%1%' cannot be represented as a Nix string", path)); StorePathSet refs; - if (state.store->isInStore(path)) { + if (state.store->isInStore(path.path.abs())) { try { - refs = state.store->queryPathInfo(state.store->toStorePath(path).first)->references; + refs = state.store->queryPathInfo(state.store->toStorePath(path.path.abs()).first)->references; } catch (Error &) { // FIXME: should be InvalidPathError } // Re-scan references to filter down to just the ones that actually occur in the file. @@ -1597,7 +1605,12 @@ static void prim_readFile(EvalState & state, const PosIdx pos, Value * * args, V refsSink << s; refs = refsSink.getResultPaths(); } - auto context = state.store->printStorePathSet(refs); + NixStringContext context; + for (auto && p : std::move(refs)) { + context.insert(NixStringContextElem::Opaque { + .path = std::move((StorePath &&)p), + }); + } v.mkString(s, context); } @@ -1628,7 +1641,7 @@ static void prim_findFile(EvalState & state, const PosIdx pos, Value * * args, V i = getAttr(state, state.sPath, v2->attrs, "in an element of the __nixPath"); - PathSet context; + NixStringContext context; auto path = state.coerceToString(pos, *i->value, context, "while evaluating the `path` attribute of an element of the list passed to builtins.findFile", false, false).toOwned(); @@ -1670,7 +1683,7 @@ static void prim_hashFile(EvalState & state, const PosIdx pos, Value * * args, V auto path = realisePath(state, pos, *args[1]); - v.mkString(hashFile(*ht, path).to_string(Base16, false)); + v.mkString(hashString(*ht, path.readFile()).to_string(Base16, false)); } static RegisterPrimOp primop_hashFile({ @@ -1684,26 +1697,20 @@ static RegisterPrimOp primop_hashFile({ .fun = prim_hashFile, }); - -/* Stringize a directory entry enum. Used by `readFileType' and `readDir'. */ -static const char * dirEntTypeToString(unsigned char dtType) +static std::string_view fileTypeToString(InputAccessor::Type type) { - /* Enum DT_(DIR|LNK|REG|UNKNOWN) */ - switch(dtType) { - case DT_REG: return "regular"; break; - case DT_DIR: return "directory"; break; - case DT_LNK: return "symlink"; break; - default: return "unknown"; break; - } - return "unknown"; /* Unreachable */ + return + type == InputAccessor::Type::tRegular ? "regular" : + type == InputAccessor::Type::tDirectory ? "directory" : + type == InputAccessor::Type::tSymlink ? "symlink" : + "unknown"; } - static void prim_readFileType(EvalState & state, const PosIdx pos, Value * * args, Value & v) { auto path = realisePath(state, pos, *args[0]); /* Retrieve the directory entry type and stringize it. 
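The chained conditionals in the new `fileTypeToString` helper are easy to misread, so here is the same mapping as a standalone sketch; the enum is a local copy for illustration, the real one lives on `InputAccessor`:

```cpp
#include <iostream>
#include <string_view>

// Local copy of the enum for illustration; the real one is
// InputAccessor::Type in src/libfetchers/input-accessor.hh.
enum class Type { tRegular, tDirectory, tSymlink, tMisc };

std::string_view fileTypeToString(Type type)
{
    // Chained conditionals instead of the old DT_* switch: anything that
    // is not a regular file, directory, or symlink (sockets, device
    // nodes, ...) is reported as "unknown".
    return
        type == Type::tRegular   ? "regular" :
        type == Type::tDirectory ? "directory" :
        type == Type::tSymlink   ? "symlink" :
        "unknown";
}

int main()
{
    std::cout << fileTypeToString(Type::tMisc) << "\n"; // prints "unknown"
}
```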
*/ - v.mkString(dirEntTypeToString(getFileType(path))); + v.mkString(fileTypeToString(path.lstat().type)); } static RegisterPrimOp primop_readFileType({ @@ -1724,8 +1731,7 @@ static void prim_readDir(EvalState & state, const PosIdx pos, Value * * args, Va // Retrieve directory entries for all nodes in a directory. // This is similar to `getFileType` but is optimized to reduce system calls // on many systems. - DirEntries entries = readDirectory(path); - + auto entries = path.readDirectory(); auto attrs = state.buildBindings(entries.size()); // If we hit unknown directory entry types we may need to fallback to @@ -1734,22 +1740,21 @@ static void prim_readDir(EvalState & state, const PosIdx pos, Value * * args, Va // `builtins.readFileType` application. Value * readFileType = nullptr; - for (auto & ent : entries) { - auto & attr = attrs.alloc(ent.name); - if (ent.type == DT_UNKNOWN) { + for (auto & [name, type] : entries) { + auto & attr = attrs.alloc(name); + if (!type) { // Some filesystems or operating systems may not be able to return // detailed node info quickly in this case we produce a thunk to // query the file type lazily. auto epath = state.allocValue(); - Path path2 = path + "/" + ent.name; - epath->mkString(path2); + epath->mkPath(path + name); if (!readFileType) readFileType = &state.getBuiltin("readFileType"); attr.mkApp(readFileType, epath); } else { // This branch of the conditional is much more likely. // Here we just stringize the directory entry type. - attr.mkString(dirEntTypeToString(ent.type)); + attr.mkString(fileTypeToString(*type)); } } @@ -1787,7 +1792,7 @@ static RegisterPrimOp primop_readDir({ static void prim_toXML(EvalState & state, const PosIdx pos, Value * * args, Value & v) { std::ostringstream out; - PathSet context; + NixStringContext context; printValueAsXML(state, true, false, *args[0], out, context, pos); v.mkString(out.str(), context); } @@ -1895,7 +1900,7 @@ static RegisterPrimOp primop_toXML({ static void prim_toJSON(EvalState & state, const PosIdx pos, Value * * args, Value & v) { std::ostringstream out; - PathSet context; + NixStringContext context; printValueAsJSON(state, true, *args[0], pos, out, context); v.mkString(out.str(), context); } @@ -1945,22 +1950,23 @@ static RegisterPrimOp primop_fromJSON({ as an input by derivations. 
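The `readDir` rewrite above keeps its lazy fallback: entries whose type the filesystem did not report (`std::nullopt`) become thunks that apply `readFileType` only if the type is actually requested. A simplified, self-contained sketch of that deferral pattern, with `std::function` thunks standing in for Nix values and a made-up `statEntry` standing in for the expensive per-entry lookup:

```cpp
#include <functional>
#include <iostream>
#include <map>
#include <optional>
#include <string>

enum class Type { Regular, Directory, Symlink };

// Stand-in for what the filesystem reports: some entries come back
// without a type (std::nullopt), e.g. on filesystems without d_type.
using DirEntries = std::map<std::string, std::optional<Type>>;

// Stand-in for the per-entry lstat that the real code defers to
// builtins.readFileType.
Type statEntry(const std::string & name)
{
    std::cout << "stat(" << name << ")\n";
    return Type::Regular;
}

int main()
{
    DirEntries entries = {
        {"known.txt", Type::Regular},
        {"unknown.bin", std::nullopt},
    };

    // Known types are stringized immediately; unknown ones become thunks
    // that are only forced if somebody actually asks for the type.
    std::map<std::string, std::function<Type()>> attrs;
    for (auto & [name, type] : entries) {
        if (type)
            attrs[name] = [t = *type] { return t; };
        else
            attrs[name] = [name] { return statEntry(name); }; // deferred
    }

    // Only this call triggers the extra stat.
    attrs["unknown.bin"]();
}
```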
*/ static void prim_toFile(EvalState & state, const PosIdx pos, Value * * args, Value & v) { - PathSet context; + NixStringContext context; std::string name(state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.toFile")); std::string contents(state.forceString(*args[1], context, pos, "while evaluating the second argument passed to builtins.toFile")); StorePathSet refs; - for (auto path : context) { - if (path.at(0) != '/') + for (auto c : context) { + if (auto p = std::get_if(&c)) + refs.insert(p->path); + else state.debugThrowLastTrace(EvalError({ .msg = hintfmt( "in 'toFile': the file named '%1%' must not contain a reference " "to a derivation but contains (%2%)", - name, path), + name, c.to_string()), .errPos = state.positions[pos] })); - refs.insert(state.store->parseStorePath(path)); } auto storePath = settings.readOnlyMode @@ -2055,13 +2061,13 @@ static RegisterPrimOp primop_toFile({ static void addPath( EvalState & state, const PosIdx pos, - const std::string & name, + std::string_view name, Path path, Value * filterFun, FileIngestionMethod method, const std::optional expectedHash, Value & v, - const PathSet & context) + const NixStringContext & context) { try { // FIXME: handle CA derivation outputs (where path needs to @@ -2083,7 +2089,7 @@ static void addPath( path = evalSettings.pureEval && expectedHash ? path - : state.checkSourcePath(path); + : state.checkSourcePath(CanonPath(path)).path.abs(); PathFilter filter = filterFun ? ([&](const Path & path) { auto st = lstat(path); @@ -2135,10 +2141,11 @@ static void addPath( static void prim_filterSource(EvalState & state, const PosIdx pos, Value * * args, Value & v) { - PathSet context; - Path path = state.coerceToPath(pos, *args[1], context, "while evaluating the second argument (the path to filter) passed to builtins.filterSource"); + NixStringContext context; + auto path = state.coerceToPath(pos, *args[1], context, + "while evaluating the second argument (the path to filter) passed to builtins.filterSource"); state.forceFunction(*args[0], pos, "while evaluating the first argument passed to builtins.filterSource"); - addPath(state, pos, std::string(baseNameOf(path)), path, args[0], FileIngestionMethod::Recursive, std::nullopt, v, context); + addPath(state, pos, path.baseName(), path.path.abs(), args[0], FileIngestionMethod::Recursive, std::nullopt, v, context); } static RegisterPrimOp primop_filterSource({ @@ -2198,18 +2205,19 @@ static RegisterPrimOp primop_filterSource({ static void prim_path(EvalState & state, const PosIdx pos, Value * * args, Value & v) { - state.forceAttrs(*args[0], pos, "while evaluating the argument passed to builtins.path"); - Path path; + std::optional path; std::string name; Value * filterFun = nullptr; auto method = FileIngestionMethod::Recursive; std::optional expectedHash; - PathSet context; + NixStringContext context; + + state.forceAttrs(*args[0], pos, "while evaluating the argument passed to 'builtins.path'"); for (auto & attr : *args[0]->attrs) { auto n = state.symbols[attr.name]; if (n == "path") - path = state.coerceToPath(attr.pos, *attr.value, context, "while evaluating the `path` attribute passed to builtins.path"); + path.emplace(state.coerceToPath(attr.pos, *attr.value, context, "while evaluating the 'path' attribute passed to 'builtins.path'")); else if (attr.name == state.sName) name = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `name` attribute passed to builtins.path"); else if (n == "filter") @@ -2224,15 +2232,15 @@ static 
void prim_path(EvalState & state, const PosIdx pos, Value * * args, Value .errPos = state.positions[attr.pos] })); } - if (path.empty()) + if (!path) state.debugThrowLastTrace(EvalError({ .msg = hintfmt("missing required 'path' attribute in the first argument to builtins.path"), .errPos = state.positions[pos] })); if (name.empty()) - name = baseNameOf(path); + name = path->baseName(); - addPath(state, pos, name, path, filterFun, method, expectedHash, v, context); + addPath(state, pos, name, path->path.abs(), filterFun, method, expectedHash, v, context); } static RegisterPrimOp primop_path({ @@ -3538,7 +3546,7 @@ static RegisterPrimOp primop_lessThan({ `"/nix/store/whatever..."'. */ static void prim_toString(EvalState & state, const PosIdx pos, Value * * args, Value & v) { - PathSet context; + NixStringContext context; auto s = state.coerceToString(pos, *args[0], context, "while evaluating the first argument passed to builtins.toString", true, false); @@ -3577,7 +3585,7 @@ static void prim_substring(EvalState & state, const PosIdx pos, Value * * args, { int start = state.forceInt(*args[0], pos, "while evaluating the first argument (the start offset) passed to builtins.substring"); int len = state.forceInt(*args[1], pos, "while evaluating the second argument (the substring length) passed to builtins.substring"); - PathSet context; + NixStringContext context; auto s = state.coerceToString(pos, *args[2], context, "while evaluating the third argument (the string) passed to builtins.substring"); if (start < 0) @@ -3611,7 +3619,7 @@ static RegisterPrimOp primop_substring({ static void prim_stringLength(EvalState & state, const PosIdx pos, Value * * args, Value & v) { - PathSet context; + NixStringContext context; auto s = state.coerceToString(pos, *args[0], context, "while evaluating the argument passed to builtins.stringLength"); v.mkInt(s->size()); } @@ -3637,7 +3645,7 @@ static void prim_hashString(EvalState & state, const PosIdx pos, Value * * args, .errPos = state.positions[pos] })); - PathSet context; // discarded + NixStringContext context; // discarded auto s = state.forceString(*args[1], context, pos, "while evaluating the second argument passed to builtins.hashString"); v.mkString(hashString(*ht, s).to_string(Base16, false)); @@ -3683,7 +3691,7 @@ void prim_match(EvalState & state, const PosIdx pos, Value * * args, Value & v) auto regex = state.regexCache->get(re); - PathSet context; + NixStringContext context; const auto str = state.forceString(*args[1], context, pos, "while evaluating the second argument passed to builtins.match"); std::cmatch match; @@ -3763,7 +3771,7 @@ void prim_split(EvalState & state, const PosIdx pos, Value * * args, Value & v) auto regex = state.regexCache->get(re); - PathSet context; + NixStringContext context; const auto str = state.forceString(*args[1], context, pos, "while evaluating the second argument passed to builtins.split"); auto begin = std::cregex_iterator(str.begin(), str.end(), regex); @@ -3860,7 +3868,7 @@ static RegisterPrimOp primop_split({ static void prim_concatStringsSep(EvalState & state, const PosIdx pos, Value * * args, Value & v) { - PathSet context; + NixStringContext context; auto sep = state.forceString(*args[0], context, pos, "while evaluating the first argument (the separator string) passed to builtins.concatStringsSep"); state.forceList(*args[1], pos, "while evaluating the second argument (the list of strings to concat) passed to builtins.concatStringsSep"); @@ -3900,15 +3908,10 @@ static void prim_replaceStrings(EvalState & state, 
const PosIdx pos, Value * * a for (auto elem : args[0]->listItems()) from.emplace_back(state.forceString(*elem, pos, "while evaluating one of the strings to replace passed to builtins.replaceStrings")); - std::vector> to; - to.reserve(args[1]->listSize()); - for (auto elem : args[1]->listItems()) { - PathSet ctx; - auto s = state.forceString(*elem, ctx, pos, "while evaluating one of the replacement strings passed to builtins.replaceStrings"); - to.emplace_back(s, std::move(ctx)); - } + std::unordered_map cache; + auto to = args[1]->listItems(); - PathSet context; + NixStringContext context; auto s = state.forceString(*args[2], context, pos, "while evaluating the third argument passed to builtins.replaceStrings"); std::string res; @@ -3917,10 +3920,19 @@ static void prim_replaceStrings(EvalState & state, const PosIdx pos, Value * * a bool found = false; auto i = from.begin(); auto j = to.begin(); - for (; i != from.end(); ++i, ++j) + size_t j_index = 0; + for (; i != from.end(); ++i, ++j, ++j_index) if (s.compare(p, i->size(), *i) == 0) { found = true; - res += j->first; + auto v = cache.find(j_index); + if (v == cache.end()) { + NixStringContext ctx; + auto ts = state.forceString(**j, ctx, pos, "while evaluating one of the replacement strings passed to builtins.replaceStrings"); + v = (cache.emplace(j_index, ts)).first; + for (auto& path : ctx) + context.insert(path); + } + res += v->second; if (i->empty()) { if (p < s.size()) res += s[p]; @@ -3928,9 +3940,6 @@ static void prim_replaceStrings(EvalState & state, const PosIdx pos, Value * * a } else { p += i->size(); } - for (auto& path : j->second) - context.insert(path); - j->second.clear(); break; } if (!found) { @@ -3948,7 +3957,11 @@ static RegisterPrimOp primop_replaceStrings({ .args = {"from", "to", "s"}, .doc = R"( Given string *s*, replace every occurrence of the strings in *from* - with the corresponding string in *to*. For example, + with the corresponding string in *to*. + + The argument *to* is lazy, that is, it is only evaluated when its corresponding pattern in *from* is matched in the string *s* + + Example: ```nix builtins.replaceStrings ["oo" "a"] ["a" "i"] "foobar" @@ -4150,7 +4163,6 @@ void EvalState::createBaseEnv() /* Add a wrapper around the derivation primop that computes the `drvPath' and `outPath' attributes lazily. */ - sDerivationNix = symbols.create(derivationNixPath); auto vDerivation = allocValue(); addConstant("derivation", vDerivation); @@ -4167,7 +4179,7 @@ void EvalState::createBaseEnv() // the parser needs two NUL bytes as terminators; one of them // is implied by being a C string. 
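The `replaceStrings` change above makes the *to* list lazy and memoizes each replacement by index the first time its pattern matches, which is why the replacement strings are now forced inside the matching loop. A minimal sketch of that cache-by-index pattern, with `std::function` producers standing in for unevaluated Nix values:

```cpp
#include <functional>
#include <iostream>
#include <stdexcept>
#include <string>
#include <unordered_map>
#include <vector>

std::string replaceStrings(
    const std::vector<std::string> & from,
    const std::vector<std::function<std::string()>> & to, // lazy replacements
    const std::string & s)
{
    std::unordered_map<size_t, std::string> cache; // forced replacements, by index
    std::string res;
    for (size_t p = 0; p < s.size(); ) {
        bool found = false;
        for (size_t j = 0; j < from.size(); ++j) {
            if (s.compare(p, from[j].size(), from[j]) == 0) {
                auto it = cache.find(j);
                if (it == cache.end())
                    it = cache.emplace(j, to[j]()).first; // force at most once
                res += it->second;
                p += from[j].size();
                found = true;
                break;
            }
        }
        if (!found) res += s[p++];
    }
    return res;
}

int main()
{
    // The second replacement is never forced because "a" never matches.
    auto out = replaceStrings(
        {"oo", "a"},
        {[] { return std::string("a"); },
         []() -> std::string { throw std::runtime_error("forced!"); }},
        "foo");
    std::cout << out << "\n"; // prints "fa"
}
```

The second replacement in the example is never evaluated, mirroring the laziness the new documentation describes.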
"\0"; - eval(parse(code, sizeof(code), derivationNixPath, "/", staticBaseEnv), *vDerivation); + eval(parse(code, sizeof(code), derivationInternal, {CanonPath::root}, staticBaseEnv), *vDerivation); } diff --git a/src/libexpr/primops/context.cc b/src/libexpr/primops/context.cc index db43e5771..07bf400cf 100644 --- a/src/libexpr/primops/context.cc +++ b/src/libexpr/primops/context.cc @@ -7,7 +7,7 @@ namespace nix { static void prim_unsafeDiscardStringContext(EvalState & state, const PosIdx pos, Value * * args, Value & v) { - PathSet context; + NixStringContext context; auto s = state.coerceToString(pos, *args[0], context, "while evaluating the argument passed to builtins.unsafeDiscardStringContext"); v.mkString(*s); } @@ -17,7 +17,7 @@ static RegisterPrimOp primop_unsafeDiscardStringContext("__unsafeDiscardStringCo static void prim_hasContext(EvalState & state, const PosIdx pos, Value * * args, Value & v) { - PathSet context; + NixStringContext context; state.forceString(*args[0], context, pos, "while evaluating the argument passed to builtins.hasContext"); v.mkBool(!context.empty()); } @@ -33,17 +33,18 @@ static RegisterPrimOp primop_hasContext("__hasContext", 1, prim_hasContext); drv.inputDrvs. */ static void prim_unsafeDiscardOutputDependency(EvalState & state, const PosIdx pos, Value * * args, Value & v) { - PathSet context; + NixStringContext context; auto s = state.coerceToString(pos, *args[0], context, "while evaluating the argument passed to builtins.unsafeDiscardOutputDependency"); - PathSet context2; - for (auto && p : context) { - auto c = NixStringContextElem::parse(*state.store, p); + NixStringContext context2; + for (auto && c : context) { if (auto * ptr = std::get_if(&c)) { - context2.emplace(state.store->printStorePath(ptr->drvPath)); + context2.emplace(NixStringContextElem::Opaque { + .path = ptr->drvPath + }); } else { /* Can reuse original item */ - context2.emplace(std::move(p)); + context2.emplace(std::move(c)); } } @@ -79,22 +80,21 @@ static void prim_getContext(EvalState & state, const PosIdx pos, Value * * args, bool allOutputs = false; Strings outputs; }; - PathSet context; + NixStringContext context; state.forceString(*args[0], context, pos, "while evaluating the argument passed to builtins.getContext"); auto contextInfos = std::map(); - for (const auto & p : context) { - NixStringContextElem ctx = NixStringContextElem::parse(*state.store, p); + for (auto && i : context) { std::visit(overloaded { - [&](NixStringContextElem::DrvDeep & d) { - contextInfos[d.drvPath].allOutputs = true; + [&](NixStringContextElem::DrvDeep && d) { + contextInfos[std::move(d.drvPath)].allOutputs = true; }, - [&](NixStringContextElem::Built & b) { - contextInfos[b.drvPath].outputs.emplace_back(std::move(b.output)); + [&](NixStringContextElem::Built && b) { + contextInfos[std::move(b.drvPath)].outputs.emplace_back(std::move(b.output)); }, - [&](NixStringContextElem::Opaque & o) { - contextInfos[o.path].path = true; + [&](NixStringContextElem::Opaque && o) { + contextInfos[std::move(o.path)].path = true; }, - }, ctx.raw()); + }, ((NixStringContextElem &&) i).raw()); } auto attrs = state.buildBindings(contextInfos.size()); @@ -129,7 +129,7 @@ static RegisterPrimOp primop_getContext("__getContext", 1, prim_getContext); */ static void prim_appendContext(EvalState & state, const PosIdx pos, Value * * args, Value & v) { - PathSet context; + NixStringContext context; auto orig = state.forceString(*args[0], context, noPos, "while evaluating the first argument passed to builtins.appendContext"); 
state.forceAttrs(*args[1], pos, "while evaluating the second argument passed to builtins.appendContext"); @@ -143,13 +143,16 @@ static void prim_appendContext(EvalState & state, const PosIdx pos, Value * * ar .msg = hintfmt("context key '%s' is not a store path", name), .errPos = state.positions[i.pos] }); + auto namePath = state.store->parseStorePath(name); if (!settings.readOnlyMode) - state.store->ensurePath(state.store->parseStorePath(name)); + state.store->ensurePath(namePath); state.forceAttrs(*i.value, i.pos, "while evaluating the value of a string context"); auto iter = i.value->attrs->find(sPath); if (iter != i.value->attrs->end()) { if (state.forceBool(*iter->value, iter->pos, "while evaluating the `path` attribute of a string context")) - context.emplace(name); + context.emplace(NixStringContextElem::Opaque { + .path = namePath, + }); } iter = i.value->attrs->find(sAllOutputs); @@ -161,7 +164,9 @@ static void prim_appendContext(EvalState & state, const PosIdx pos, Value * * ar .errPos = state.positions[i.pos] }); } - context.insert(concatStrings("=", name)); + context.emplace(NixStringContextElem::DrvDeep { + .drvPath = namePath, + }); } } @@ -176,7 +181,10 @@ static void prim_appendContext(EvalState & state, const PosIdx pos, Value * * ar } for (auto elem : iter->value->listItems()) { auto outputName = state.forceStringNoCtx(*elem, iter->pos, "while evaluating an output name within a string context"); - context.insert(concatStrings("!", outputName, "!", name)); + context.emplace(NixStringContextElem::Built { + .drvPath = namePath, + .output = std::string { outputName }, + }); } } } diff --git a/src/libexpr/primops/fetchClosure.cc b/src/libexpr/primops/fetchClosure.cc index 0dfa97fa3..4cf1f1e0b 100644 --- a/src/libexpr/primops/fetchClosure.cc +++ b/src/libexpr/primops/fetchClosure.cc @@ -18,7 +18,7 @@ static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * arg const auto & attrName = state.symbols[attr.name]; if (attrName == "fromPath") { - PathSet context; + NixStringContext context; fromPath = state.coerceToStorePath(attr.pos, *attr.value, context, "while evaluating the 'fromPath' attribute passed to builtins.fetchClosure"); } @@ -27,7 +27,7 @@ static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * arg state.forceValue(*attr.value, attr.pos); toCA = true; if (attr.value->type() != nString || attr.value->string.s != std::string("")) { - PathSet context; + NixStringContext context; toPath = state.coerceToStorePath(attr.pos, *attr.value, context, "while evaluating the 'toPath' attribute passed to builtins.fetchClosure"); } @@ -114,8 +114,7 @@ static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * arg }); } - auto toPathS = state.store->printStorePath(*toPath); - v.mkString(toPathS, {toPathS}); + state.mkStorePathString(*toPath, v); } static RegisterPrimOp primop_fetchClosure({ diff --git a/src/libexpr/primops/fetchMercurial.cc b/src/libexpr/primops/fetchMercurial.cc index c41bd60b6..2c0d98e74 100644 --- a/src/libexpr/primops/fetchMercurial.cc +++ b/src/libexpr/primops/fetchMercurial.cc @@ -13,7 +13,7 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a std::optional rev; std::optional ref; std::string_view name = "source"; - PathSet context; + NixStringContext context; state.forceValue(*args[0], pos); @@ -73,8 +73,7 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a auto [tree, input2] = input.fetch(state.store); auto attrs2 = state.buildBindings(8); - 
auto storePath = state.store->printStorePath(tree.storePath); - attrs2.alloc(state.sOutPath).mkString(storePath, {storePath}); + state.mkStorePathString(tree.storePath, attrs2.alloc(state.sOutPath)); if (input2.getRef()) attrs2.alloc("branch").mkString(*input2.getRef()); // Backward compatibility: set 'rev' to diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc index 2e150c9d0..fe880aaa8 100644 --- a/src/libexpr/primops/fetchTree.cc +++ b/src/libexpr/primops/fetchTree.cc @@ -24,9 +24,8 @@ void emitTreeAttrs( auto attrs = state.buildBindings(8); - auto storePath = state.store->printStorePath(tree.storePath); - attrs.alloc(state.sOutPath).mkString(storePath, {storePath}); + state.mkStorePathString(tree.storePath, attrs.alloc(state.sOutPath)); // FIXME: support arbitrary input attributes. @@ -107,7 +106,7 @@ static void fetchTree( const FetchTreeParams & params = FetchTreeParams{} ) { fetchers::Input input; - PathSet context; + NixStringContext context; state.forceValue(*args[0], pos); @@ -287,9 +286,9 @@ static RegisterPrimOp primop_fetchurl({ .name = "__fetchurl", .args = {"url"}, .doc = R"( - Download the specified URL and return the path of the downloaded - file. This function is not available if [restricted evaluation - mode](../command-ref/conf-file.md) is enabled. + Download the specified URL and return the path of the downloaded file. + + Not available in [restricted evaluation mode](@docroot@/command-ref/conf-file.md#conf-restrict-eval). )", .fun = prim_fetchurl, }); @@ -339,8 +338,7 @@ static RegisterPrimOp primop_fetchTarball({ stdenv.mkDerivation { … } ``` - This function is not available if [restricted evaluation - mode](../command-ref/conf-file.md) is enabled. + Not available in [restricted evaluation mode](@docroot@/command-ref/conf-file.md#conf-restrict-eval). )", .fun = prim_fetchTarball, }); @@ -471,14 +469,9 @@ static RegisterPrimOp primop_fetchGit({ } ``` - > **Note** - > - > Nix will refetch the branch in accordance with - > the option `tarball-ttl`. + Nix will refetch the branch according to the [`tarball-ttl`](@docroot@/command-ref/conf-file.md#conf-tarball-ttl) setting. - > **Note** - > - > This behavior is disabled in *Pure evaluation mode*. + This behavior is disabled in [pure evaluation mode](@docroot@/command-ref/conf-file.md#conf-pure-eval). 
- To fetch the content of a checked-out work directory: diff --git a/src/libexpr/primops/fromTOML.cc b/src/libexpr/primops/fromTOML.cc index 8a5231781..e2a8b3c3a 100644 --- a/src/libexpr/primops/fromTOML.cc +++ b/src/libexpr/primops/fromTOML.cc @@ -3,6 +3,8 @@ #include "../../toml11/toml.hpp" +#include + namespace nix { static void prim_fromTOML(EvalState & state, const PosIdx pos, Value * * args, Value & val) @@ -58,8 +60,18 @@ static void prim_fromTOML(EvalState & state, const PosIdx pos, Value * * args, V case toml::value_t::offset_datetime: case toml::value_t::local_date: case toml::value_t::local_time: - // We fail since Nix doesn't have date and time types - throw std::runtime_error("Dates and times are not supported"); + { + if (experimentalFeatureSettings.isEnabled(Xp::ParseTomlTimestamps)) { + auto attrs = state.buildBindings(2); + attrs.alloc("_type").mkString("timestamp"); + std::ostringstream s; + s << t; + attrs.alloc("value").mkString(s.str()); + v.mkAttrs(attrs); + } else { + throw std::runtime_error("Dates and times are not supported"); + } + } break;; case toml::value_t::empty: v.mkNull(); diff --git a/src/libexpr/print.cc b/src/libexpr/print.cc index d08672cfc..53ba70bdd 100644 --- a/src/libexpr/print.cc +++ b/src/libexpr/print.cc @@ -1,4 +1,5 @@ #include "print.hh" +#include namespace nix { @@ -25,11 +26,26 @@ printLiteralBool(std::ostream & str, bool boolean) return str; } +// Returns `true' is a string is a reserved keyword which requires quotation +// when printing attribute set field names. +// +// This list should generally be kept in sync with `./lexer.l'. +// You can test if a keyword needs to be added by running: +// $ nix eval --expr '{ = 1; }' +// For example `or' doesn't need to be quoted. +bool isReservedKeyword(const std::string_view str) +{ + static const std::unordered_set reservedKeywords = { + "if", "then", "else", "assert", "with", "let", "in", "rec", "inherit" + }; + return reservedKeywords.contains(str); +} + std::ostream & printIdentifier(std::ostream & str, std::string_view s) { if (s.empty()) str << "\"\""; - else if (s == "if") // FIXME: handle other keywords + else if (isReservedKeyword(s)) str << '"' << s << '"'; else { char c = s[0]; @@ -50,10 +66,10 @@ printIdentifier(std::ostream & str, std::string_view s) { return str; } -// FIXME: keywords static bool isVarName(std::string_view s) { if (s.size() == 0) return false; + if (isReservedKeyword(s)) return false; char c = s[0]; if ((c >= '0' && c <= '9') || c == '-' || c == '\'') return false; for (auto & i : s) diff --git a/src/libexpr/print.hh b/src/libexpr/print.hh index f9cfc3964..3b72ae201 100644 --- a/src/libexpr/print.hh +++ b/src/libexpr/print.hh @@ -35,6 +35,12 @@ namespace nix { */ std::ostream & printAttributeName(std::ostream & o, std::string_view s); + /** + * Returns `true' is a string is a reserved keyword which requires quotation + * when printing attribute set field names. + */ + bool isReservedKeyword(const std::string_view str); + /** * Print a string as an identifier in the Nix expression language syntax. 
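The keyword handling added to `print.cc` replaces the old `s == "if"` special case with a reserved-word table that both `printIdentifier` and `isVarName` consult. A standalone sketch of that check, using the same keyword list as the diff (the quoting logic here is trimmed down for illustration and omits the character-class checks):

```cpp
#include <iostream>
#include <string>
#include <string_view>
#include <unordered_set>

// Same reserved-word list as the new isReservedKeyword in print.cc;
// the real list is kept in sync with the lexer.
bool isReservedKeyword(std::string_view str)
{
    static const std::unordered_set<std::string_view> reservedKeywords = {
        "if", "then", "else", "assert", "with", "let", "in", "rec", "inherit"
    };
    return reservedKeywords.contains(str);
}

// Sketch of how printIdentifier uses it: reserved words (and empty
// strings) are emitted quoted, ordinary identifiers are emitted as-is.
std::string printIdentifier(std::string_view s)
{
    if (s.empty() || isReservedKeyword(s))
        return "\"" + std::string(s) + "\"";
    return std::string(s);
}

int main()
{
    std::cout << printIdentifier("foo") << " " << printIdentifier("if") << "\n"; // foo "if"
}
```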
* diff --git a/src/libexpr/tests/derived-path.cc b/src/libexpr/tests/derived-path.cc new file mode 100644 index 000000000..8210efef2 --- /dev/null +++ b/src/libexpr/tests/derived-path.cc @@ -0,0 +1,65 @@ +#include +#include +#include + +#include "tests/derived-path.hh" +#include "tests/libexpr.hh" + +namespace nix { + +// Testing of trivial expressions +class DerivedPathExpressionTest : public LibExprTest {}; + +// FIXME: `RC_GTEST_FIXTURE_PROP` isn't calling `SetUpTestSuite` because it is +// no a real fixture. +// +// See https://github.com/emil-e/rapidcheck/blob/master/doc/gtest.md#rc_gtest_fixture_propfixture-name-args +TEST_F(DerivedPathExpressionTest, force_init) +{ +} + +RC_GTEST_FIXTURE_PROP( + DerivedPathExpressionTest, + prop_opaque_path_round_trip, + (const DerivedPath::Opaque & o)) +{ + auto * v = state.allocValue(); + state.mkStorePathString(o.path, *v); + auto d = state.coerceToDerivedPath(noPos, *v, ""); + RC_ASSERT(DerivedPath { o } == d); +} + +// TODO use DerivedPath::Built for parameter once it supports a single output +// path only. + +RC_GTEST_FIXTURE_PROP( + DerivedPathExpressionTest, + prop_built_path_placeholder_round_trip, + (const StorePath & drvPath, const StorePathName & outputName)) +{ + auto * v = state.allocValue(); + state.mkOutputString(*v, drvPath, outputName.name, std::nullopt); + auto [d, _] = state.coerceToDerivedPathUnchecked(noPos, *v, ""); + DerivedPath::Built b { + .drvPath = drvPath, + .outputs = OutputsSpec::Names { outputName.name }, + }; + RC_ASSERT(DerivedPath { b } == d); +} + +RC_GTEST_FIXTURE_PROP( + DerivedPathExpressionTest, + prop_built_path_out_path_round_trip, + (const StorePath & drvPath, const StorePathName & outputName, const StorePath & outPath)) +{ + auto * v = state.allocValue(); + state.mkOutputString(*v, drvPath, outputName.name, outPath); + auto [d, _] = state.coerceToDerivedPathUnchecked(noPos, *v, ""); + DerivedPath::Built b { + .drvPath = drvPath, + .outputs = OutputsSpec::Names { outputName.name }, + }; + RC_ASSERT(DerivedPath { b } == d); +} + +} /* namespace nix */ diff --git a/src/libexpr/tests/error_traces.cc b/src/libexpr/tests/error_traces.cc index 24e95ac39..285651256 100644 --- a/src/libexpr/tests/error_traces.cc +++ b/src/libexpr/tests/error_traces.cc @@ -171,7 +171,7 @@ namespace nix { hintfmt("value is %s while a string was expected", "an integer"), hintfmt("while evaluating one of the strings to replace passed to builtins.replaceStrings")); - ASSERT_TRACE2("replaceStrings [ \"old\" ] [ true ] {}", + ASSERT_TRACE2("replaceStrings [ \"oo\" ] [ true ] \"foo\"", TypeError, hintfmt("value is %s while a string was expected", "a Boolean"), hintfmt("while evaluating one of the replacement strings passed to builtins.replaceStrings")); diff --git a/src/libexpr/tests/json.cc b/src/libexpr/tests/json.cc index 411bc0ac3..7586bdd9b 100644 --- a/src/libexpr/tests/json.cc +++ b/src/libexpr/tests/json.cc @@ -8,7 +8,7 @@ namespace nix { protected: std::string getJSONValue(Value& value) { std::stringstream ss; - PathSet ps; + NixStringContext ps; printValueAsJSON(state, true, value, noPos, ss, ps); return ss.str(); } diff --git a/src/libexpr/tests/libexpr.hh b/src/libexpr/tests/libexpr.hh index 69c932f05..b8e65aafe 100644 --- a/src/libexpr/tests/libexpr.hh +++ b/src/libexpr/tests/libexpr.hh @@ -28,7 +28,7 @@ namespace nix { } Value eval(std::string input, bool forceValue = true) { Value v; - Expr * e = state.parseExprFromString(input, ""); + Expr * e = state.parseExprFromString(input, state.rootPath(CanonPath::root)); assert(e); 
state.eval(e, v); if (forceValue) diff --git a/src/libexpr/tests/local.mk b/src/libexpr/tests/local.mk index 3e5504f71..331a5ead6 100644 --- a/src/libexpr/tests/local.mk +++ b/src/libexpr/tests/local.mk @@ -12,7 +12,7 @@ libexpr-tests_SOURCES := \ $(wildcard $(d)/*.cc) \ $(wildcard $(d)/value/*.cc) -libexpr-tests_CXXFLAGS += -I src/libexpr -I src/libutil -I src/libstore -I src/libexpr/tests +libexpr-tests_CXXFLAGS += -I src/libexpr -I src/libutil -I src/libstore -I src/libexpr/tests -I src/libfetchers libexpr-tests_LIBS = libstore-tests libutils-tests libexpr libutil libstore libfetchers diff --git a/src/libexpr/tests/value/context.cc b/src/libexpr/tests/value/context.cc index 083359b7a..0d9381577 100644 --- a/src/libexpr/tests/value/context.cc +++ b/src/libexpr/tests/value/context.cc @@ -8,69 +8,62 @@ namespace nix { -// Testing of trivial expressions -struct NixStringContextElemTest : public LibExprTest { - const Store & store() const { - return *LibExprTest::store; - } -}; - -TEST_F(NixStringContextElemTest, empty_invalid) { +TEST(NixStringContextElemTest, empty_invalid) { EXPECT_THROW( - NixStringContextElem::parse(store(), ""), + NixStringContextElem::parse(""), BadNixStringContextElem); } -TEST_F(NixStringContextElemTest, single_bang_invalid) { +TEST(NixStringContextElemTest, single_bang_invalid) { EXPECT_THROW( - NixStringContextElem::parse(store(), "!"), + NixStringContextElem::parse("!"), BadNixStringContextElem); } -TEST_F(NixStringContextElemTest, double_bang_invalid) { +TEST(NixStringContextElemTest, double_bang_invalid) { EXPECT_THROW( - NixStringContextElem::parse(store(), "!!/"), + NixStringContextElem::parse("!!/"), BadStorePath); } -TEST_F(NixStringContextElemTest, eq_slash_invalid) { +TEST(NixStringContextElemTest, eq_slash_invalid) { EXPECT_THROW( - NixStringContextElem::parse(store(), "=/"), + NixStringContextElem::parse("=/"), BadStorePath); } -TEST_F(NixStringContextElemTest, slash_invalid) { +TEST(NixStringContextElemTest, slash_invalid) { EXPECT_THROW( - NixStringContextElem::parse(store(), "/"), + NixStringContextElem::parse("/"), BadStorePath); } -TEST_F(NixStringContextElemTest, opaque) { - std::string_view opaque = "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x"; - auto elem = NixStringContextElem::parse(store(), opaque); +TEST(NixStringContextElemTest, opaque) { + std::string_view opaque = "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x"; + auto elem = NixStringContextElem::parse(opaque); auto * p = std::get_if(&elem); ASSERT_TRUE(p); - ASSERT_EQ(p->path, store().parseStorePath(opaque)); - ASSERT_EQ(elem.to_string(store()), opaque); + ASSERT_EQ(p->path, StorePath { opaque }); + ASSERT_EQ(elem.to_string(), opaque); } -TEST_F(NixStringContextElemTest, drvDeep) { - std::string_view drvDeep = "=/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv"; - auto elem = NixStringContextElem::parse(store(), drvDeep); +TEST(NixStringContextElemTest, drvDeep) { + std::string_view drvDeep = "=g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv"; + auto elem = NixStringContextElem::parse(drvDeep); auto * p = std::get_if(&elem); ASSERT_TRUE(p); - ASSERT_EQ(p->drvPath, store().parseStorePath(drvDeep.substr(1))); - ASSERT_EQ(elem.to_string(store()), drvDeep); + ASSERT_EQ(p->drvPath, StorePath { drvDeep.substr(1) }); + ASSERT_EQ(elem.to_string(), drvDeep); } -TEST_F(NixStringContextElemTest, built) { - std::string_view built = "!foo!/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv"; - auto elem = NixStringContextElem::parse(store(), built); +TEST(NixStringContextElemTest, built) { + std::string_view built = 
"!foo!g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv"; + auto elem = NixStringContextElem::parse(built); auto * p = std::get_if(&elem); ASSERT_TRUE(p); ASSERT_EQ(p->output, "foo"); - ASSERT_EQ(p->drvPath, store().parseStorePath(built.substr(5))); - ASSERT_EQ(elem.to_string(store()), built); + ASSERT_EQ(p->drvPath, StorePath { built.substr(5) }); + ASSERT_EQ(elem.to_string(), built); } } @@ -102,13 +95,15 @@ Gen Arbitrary::arbitra Gen Arbitrary::arbitrary() { - switch (*gen::inRange(0, 2)) { + switch (*gen::inRange(0, std::variant_size_v)) { case 0: return gen::just(*gen::arbitrary()); case 1: return gen::just(*gen::arbitrary()); - default: + case 2: return gen::just(*gen::arbitrary()); + default: + assert(false); } } @@ -116,12 +111,12 @@ Gen Arbitrary::arbitrary() namespace nix { -RC_GTEST_FIXTURE_PROP( +RC_GTEST_PROP( NixStringContextElemTest, prop_round_rip, (const NixStringContextElem & o)) { - RC_ASSERT(o == NixStringContextElem::parse(store(), o.to_string(store()))); + RC_ASSERT(o == NixStringContextElem::parse(o.to_string())); } } diff --git a/src/libexpr/value-to-json.cc b/src/libexpr/value-to-json.cc index c35c876e3..4996a5bde 100644 --- a/src/libexpr/value-to-json.cc +++ b/src/libexpr/value-to-json.cc @@ -11,7 +11,7 @@ namespace nix { using json = nlohmann::json; json printValueAsJSON(EvalState & state, bool strict, - Value & v, const PosIdx pos, PathSet & context, bool copyToStore) + Value & v, const PosIdx pos, NixStringContext & context, bool copyToStore) { checkInterrupt(); @@ -36,9 +36,10 @@ json printValueAsJSON(EvalState & state, bool strict, case nPath: if (copyToStore) - out = state.store->printStorePath(state.copyPathToStore(context, v.path)); + out = state.store->printStorePath( + state.copyPathToStore(context, v.path())); else - out = v.path; + out = v.path().path.abs(); break; case nNull: @@ -94,13 +95,13 @@ json printValueAsJSON(EvalState & state, bool strict, } void printValueAsJSON(EvalState & state, bool strict, - Value & v, const PosIdx pos, std::ostream & str, PathSet & context, bool copyToStore) + Value & v, const PosIdx pos, std::ostream & str, NixStringContext & context, bool copyToStore) { str << printValueAsJSON(state, strict, v, pos, context, copyToStore); } json ExternalValueBase::printValueAsJSON(EvalState & state, bool strict, - PathSet & context, bool copyToStore) const + NixStringContext & context, bool copyToStore) const { state.debugThrowLastTrace(TypeError("cannot convert %1% to JSON", showType())); } diff --git a/src/libexpr/value-to-json.hh b/src/libexpr/value-to-json.hh index 713356c7f..47ac90313 100644 --- a/src/libexpr/value-to-json.hh +++ b/src/libexpr/value-to-json.hh @@ -11,9 +11,9 @@ namespace nix { nlohmann::json printValueAsJSON(EvalState & state, bool strict, - Value & v, const PosIdx pos, PathSet & context, bool copyToStore = true); + Value & v, const PosIdx pos, NixStringContext & context, bool copyToStore = true); void printValueAsJSON(EvalState & state, bool strict, - Value & v, const PosIdx pos, std::ostream & str, PathSet & context, bool copyToStore = true); + Value & v, const PosIdx pos, std::ostream & str, NixStringContext & context, bool copyToStore = true); } diff --git a/src/libexpr/value-to-xml.cc b/src/libexpr/value-to-xml.cc index 341c8922f..2539ad1c1 100644 --- a/src/libexpr/value-to-xml.cc +++ b/src/libexpr/value-to-xml.cc @@ -18,21 +18,21 @@ static XMLAttrs singletonAttrs(const std::string & name, const std::string & val static void printValueAsXML(EvalState & state, bool strict, bool location, - Value & v, XMLWriter & doc, 
PathSet & context, PathSet & drvsSeen, + Value & v, XMLWriter & doc, NixStringContext & context, PathSet & drvsSeen, const PosIdx pos); static void posToXML(EvalState & state, XMLAttrs & xmlAttrs, const Pos & pos) { - if (auto path = std::get_if(&pos.origin)) - xmlAttrs["path"] = *path; + if (auto path = std::get_if(&pos.origin)) + xmlAttrs["path"] = path->path.abs(); xmlAttrs["line"] = fmt("%1%", pos.line); xmlAttrs["column"] = fmt("%1%", pos.column); } static void showAttrs(EvalState & state, bool strict, bool location, - Bindings & attrs, XMLWriter & doc, PathSet & context, PathSet & drvsSeen) + Bindings & attrs, XMLWriter & doc, NixStringContext & context, PathSet & drvsSeen) { StringSet names; @@ -54,7 +54,7 @@ static void showAttrs(EvalState & state, bool strict, bool location, static void printValueAsXML(EvalState & state, bool strict, bool location, - Value & v, XMLWriter & doc, PathSet & context, PathSet & drvsSeen, + Value & v, XMLWriter & doc, NixStringContext & context, PathSet & drvsSeen, const PosIdx pos) { checkInterrupt(); @@ -78,7 +78,7 @@ static void printValueAsXML(EvalState & state, bool strict, bool location, break; case nPath: - doc.writeEmptyElement("path", singletonAttrs("value", v.path)); + doc.writeEmptyElement("path", singletonAttrs("value", v.path().to_string())); break; case nNull: @@ -166,7 +166,7 @@ static void printValueAsXML(EvalState & state, bool strict, bool location, void ExternalValueBase::printValueAsXML(EvalState & state, bool strict, - bool location, XMLWriter & doc, PathSet & context, PathSet & drvsSeen, + bool location, XMLWriter & doc, NixStringContext & context, PathSet & drvsSeen, const PosIdx pos) const { doc.writeEmptyElement("unevaluated"); @@ -174,7 +174,7 @@ void ExternalValueBase::printValueAsXML(EvalState & state, bool strict, void printValueAsXML(EvalState & state, bool strict, bool location, - Value & v, std::ostream & out, PathSet & context, const PosIdx pos) + Value & v, std::ostream & out, NixStringContext & context, const PosIdx pos) { XMLWriter doc(true, out); XMLOpenElement root(doc, "expr"); diff --git a/src/libexpr/value-to-xml.hh b/src/libexpr/value-to-xml.hh index ace7ead0f..6d702c0f2 100644 --- a/src/libexpr/value-to-xml.hh +++ b/src/libexpr/value-to-xml.hh @@ -10,6 +10,6 @@ namespace nix { void printValueAsXML(EvalState & state, bool strict, bool location, - Value & v, std::ostream & out, PathSet & context, const PosIdx pos); + Value & v, std::ostream & out, NixStringContext & context, const PosIdx pos); } diff --git a/src/libexpr/value.hh b/src/libexpr/value.hh index 7739f99df..89c0c36fd 100644 --- a/src/libexpr/value.hh +++ b/src/libexpr/value.hh @@ -5,6 +5,7 @@ #include "symbol-table.hh" #include "value/context.hh" +#include "input-accessor.hh" #if HAVE_BOEHMGC #include @@ -100,7 +101,7 @@ class ExternalValueBase * Coerce the value to a string. Defaults to uncoercable, i.e. throws an * error. */ - virtual std::string coerceToString(const Pos & pos, PathSet & context, bool copyMore, bool copyToStore) const; + virtual std::string coerceToString(const Pos & pos, NixStringContext & context, bool copyMore, bool copyToStore) const; /** * Compare to another value of the same type. Defaults to uncomparable, @@ -112,13 +113,13 @@ class ExternalValueBase * Print the value as JSON. Defaults to unconvertable, i.e. 
throws an error */ virtual nlohmann::json printValueAsJSON(EvalState & state, bool strict, - PathSet & context, bool copyToStore = true) const; + NixStringContext & context, bool copyToStore = true) const; /** * Print the value as XML. Defaults to unevaluated */ virtual void printValueAsXML(EvalState & state, bool strict, bool location, - XMLWriter & doc, PathSet & context, PathSet & drvsSeen, + XMLWriter & doc, NixStringContext & context, PathSet & drvsSeen, const PosIdx pos) const; virtual ~ExternalValueBase() @@ -188,7 +189,7 @@ public: const char * * context; // must be in sorted order } string; - const char * path; + const char * _path; Bindings * attrs; struct { size_t size; @@ -268,19 +269,24 @@ public: void mkString(std::string_view s); - void mkString(std::string_view s, const PathSet & context); + void mkString(std::string_view s, const NixStringContext & context); - void mkStringMove(const char * s, const PathSet & context); + void mkStringMove(const char * s, const NixStringContext & context); - inline void mkPath(const char * s) + inline void mkString(const Symbol & s) + { + mkString(((const std::string &) s).c_str()); + } + + void mkPath(const SourcePath & path); + + inline void mkPath(const char * path) { clearValue(); internalType = tPath; - path = s; + _path = path; } - void mkPath(std::string_view s); - inline void mkNull() { clearValue(); @@ -394,8 +400,6 @@ public: */ bool isTrivial() const; - NixStringContext getContext(const Store &); - auto listItems() { struct ListIterable @@ -423,6 +427,18 @@ public: auto begin = listElems(); return ConstListIterable { begin, begin + listSize() }; } + + SourcePath path() const + { + assert(internalType == tPath); + return SourcePath{CanonPath(_path)}; + } + + std::string_view str() const + { + assert(internalType == tString); + return std::string_view(string.s); + } }; diff --git a/src/libexpr/value/context.cc b/src/libexpr/value/context.cc index 61d9c53df..f76fc76e4 100644 --- a/src/libexpr/value/context.cc +++ b/src/libexpr/value/context.cc @@ -1,11 +1,10 @@ #include "value/context.hh" -#include "store-api.hh" #include namespace nix { -NixStringContextElem NixStringContextElem::parse(const Store & store, std::string_view s0) +NixStringContextElem NixStringContextElem::parse(std::string_view s0) { std::string_view s = s0; @@ -25,41 +24,41 @@ NixStringContextElem NixStringContextElem::parse(const Store & store, std::strin "String content element beginning with '!' 
should have a second '!'"); } return NixStringContextElem::Built { - .drvPath = store.parseStorePath(s.substr(index + 1)), + .drvPath = StorePath { s.substr(index + 1) }, .output = std::string(s.substr(0, index)), }; } case '=': { return NixStringContextElem::DrvDeep { - .drvPath = store.parseStorePath(s.substr(1)), + .drvPath = StorePath { s.substr(1) }, }; } default: { return NixStringContextElem::Opaque { - .path = store.parseStorePath(s), + .path = StorePath { s }, }; } } } -std::string NixStringContextElem::to_string(const Store & store) const { +std::string NixStringContextElem::to_string() const { return std::visit(overloaded { [&](const NixStringContextElem::Built & b) { std::string res; res += '!'; res += b.output; res += '!'; - res += store.printStorePath(b.drvPath); + res += b.drvPath.to_string(); return res; }, [&](const NixStringContextElem::DrvDeep & d) { std::string res; res += '='; - res += store.printStorePath(d.drvPath); + res += d.drvPath.to_string(); return res; }, [&](const NixStringContextElem::Opaque & o) { - return store.printStorePath(o.path); + return std::string { o.path.to_string() }; }, }, raw()); } diff --git a/src/libexpr/value/context.hh b/src/libexpr/value/context.hh index 8719602d8..287ae08a9 100644 --- a/src/libexpr/value/context.hh +++ b/src/libexpr/value/context.hh @@ -26,8 +26,6 @@ public: } }; -class Store; - /** * Plain opaque path to some store object. * @@ -80,12 +78,15 @@ struct NixStringContextElem : _NixStringContextElem_Raw { using DrvDeep = NixStringContextElem_DrvDeep; using Built = NixStringContextElem_Built; - inline const Raw & raw() const { + inline const Raw & raw() const & { return static_cast(*this); } - inline Raw & raw() { + inline Raw & raw() & { return static_cast(*this); } + inline Raw && raw() && { + return static_cast(*this); + } /** * Decode a context string, one of: @@ -93,10 +94,10 @@ struct NixStringContextElem : _NixStringContextElem_Raw { * - ‘=’ * - ‘!!’ */ - static NixStringContextElem parse(const Store & store, std::string_view s); - std::string to_string(const Store & store) const; + static NixStringContextElem parse(std::string_view s); + std::string to_string() const; }; -typedef std::vector NixStringContext; +typedef std::set NixStringContext; } diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index 1da8c9609..47282f6c4 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -62,6 +62,7 @@ std::optional readHead(const Path & path) .program = "git", // FIXME: use 'HEAD' to avoid returning all refs .args = {"ls-remote", "--symref", path}, + .isInteractive = true, }); if (status != 0) return std::nullopt; @@ -350,7 +351,7 @@ struct GitInputScheme : InputScheme args.push_back(destDir); - runProgram("git", true, args); + runProgram("git", true, args, {}, true); } std::optional getSourcePath(const Input & input) override @@ -555,7 +556,7 @@ struct GitInputScheme : InputScheme : ref == "HEAD" ? *ref : "refs/heads/" + *ref; - runProgram("git", true, { "-C", repoDir, "--git-dir", gitDir, "fetch", "--quiet", "--force", "--", actualUrl, fmt("%s:%s", fetchRef, fetchRef) }); + runProgram("git", true, { "-C", repoDir, "--git-dir", gitDir, "fetch", "--quiet", "--force", "--", actualUrl, fmt("%s:%s", fetchRef, fetchRef) }, {}, true); } catch (Error & e) { if (!pathExists(localRefFile)) throw; warn("could not update local clone of Git repository '%s'; continuing with the most recent version", actualUrl); @@ -622,7 +623,7 @@ struct GitInputScheme : InputScheme // everything to ensure we get the rev. 
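Since `NixStringContextElem::parse` and `to_string` no longer take a `Store`, the context encoding is now a pure string transformation over three forms: `<path>` for opaque paths, `=<drvpath>` for deep derivation references, and `!<output>!<drvpath>` for single outputs. A self-contained sketch of that encoding and its round trip, with plain strings standing in for `StorePath` and an invented path for the test input:

```cpp
#include <cassert>
#include <iostream>
#include <stdexcept>
#include <string>
#include <string_view>
#include <variant>

using StorePath = std::string; // stand-in for the real StorePath type

struct Opaque  { StorePath path; };
struct DrvDeep { StorePath drvPath; };
struct Built   { StorePath drvPath; std::string output; };
using ContextElem = std::variant<Opaque, DrvDeep, Built>;

// "!<output>!<drvpath>" -> Built, "=<drvpath>" -> DrvDeep, "<path>" -> Opaque
ContextElem parse(std::string_view s)
{
    if (s.empty()) throw std::runtime_error("empty context element");
    if (s[0] == '!') {
        auto index = s.find('!', 1);
        if (index == std::string_view::npos)
            throw std::runtime_error("expected a second '!'");
        return Built {
            .drvPath = StorePath(s.substr(index + 1)),
            .output = std::string(s.substr(1, index - 1)),
        };
    }
    if (s[0] == '=')
        return DrvDeep { .drvPath = StorePath(s.substr(1)) };
    return Opaque { .path = StorePath(s) };
}

std::string to_string(const ContextElem & c)
{
    if (auto * b = std::get_if<Built>(&c))
        return "!" + b->output + "!" + b->drvPath;
    if (auto * d = std::get_if<DrvDeep>(&c))
        return "=" + d->drvPath;
    return std::get<Opaque>(c).path;
}

int main()
{
    const std::string built = "!foo!g1w7hy3qexample-x.drv"; // invented base name
    assert(to_string(parse(built)) == built);
    std::cout << "round trip ok\n";
}
```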
Activity act(*logger, lvlTalkative, actUnknown, fmt("making temporary clone of '%s'", repoDir)); runProgram("git", true, { "-C", tmpDir, "fetch", "--quiet", "--force", - "--update-head-ok", "--", repoDir, "refs/*:refs/*" }); + "--update-head-ok", "--", repoDir, "refs/*:refs/*" }, {}, true); } runProgram("git", true, { "-C", tmpDir, "checkout", "--quiet", input.getRev()->gitRev() }); @@ -649,7 +650,7 @@ struct GitInputScheme : InputScheme { Activity act(*logger, lvlTalkative, actUnknown, fmt("fetching submodules of '%s'", actualUrl)); - runProgram("git", true, { "-C", tmpDir, "submodule", "--quiet", "update", "--init", "--recursive" }); + runProgram("git", true, { "-C", tmpDir, "submodule", "--quiet", "update", "--init", "--recursive" }, {}, true); } filter = isNotDotGitDirectory; diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc index 1ed09d30d..6c1d573ce 100644 --- a/src/libfetchers/github.cc +++ b/src/libfetchers/github.cc @@ -21,7 +21,7 @@ struct DownloadUrl }; // A github, gitlab, or sourcehut host -const static std::string hostRegexS = "[a-zA-Z0-9.]*"; // FIXME: check +const static std::string hostRegexS = "[a-zA-Z0-9.-]*"; // FIXME: check std::regex hostRegex(hostRegexS, std::regex::ECMAScript); struct GitArchiveInputScheme : InputScheme diff --git a/src/libfetchers/input-accessor.cc b/src/libfetchers/input-accessor.cc new file mode 100644 index 000000000..f37a8058b --- /dev/null +++ b/src/libfetchers/input-accessor.cc @@ -0,0 +1,106 @@ +#include "input-accessor.hh" +#include "store-api.hh" + +namespace nix { + +std::ostream & operator << (std::ostream & str, const SourcePath & path) +{ + str << path.to_string(); + return str; +} + +std::string_view SourcePath::baseName() const +{ + return path.baseName().value_or("source"); +} + +SourcePath SourcePath::parent() const +{ + auto p = path.parent(); + assert(p); + return std::move(*p); +} + +InputAccessor::Stat SourcePath::lstat() const +{ + auto st = nix::lstat(path.abs()); + return InputAccessor::Stat { + .type = + S_ISREG(st.st_mode) ? InputAccessor::tRegular : + S_ISDIR(st.st_mode) ? InputAccessor::tDirectory : + S_ISLNK(st.st_mode) ? InputAccessor::tSymlink : + InputAccessor::tMisc, + .isExecutable = S_ISREG(st.st_mode) && st.st_mode & S_IXUSR + }; +} + +std::optional SourcePath::maybeLstat() const +{ + // FIXME: merge these into one operation. + if (!pathExists()) + return {}; + return lstat(); +} + +InputAccessor::DirEntries SourcePath::readDirectory() const +{ + InputAccessor::DirEntries res; + for (auto & entry : nix::readDirectory(path.abs())) { + std::optional type; + switch (entry.type) { + case DT_REG: type = InputAccessor::Type::tRegular; break; + case DT_LNK: type = InputAccessor::Type::tSymlink; break; + case DT_DIR: type = InputAccessor::Type::tDirectory; break; + } + res.emplace(entry.name, type); + } + return res; +} + +StorePath SourcePath::fetchToStore( + ref store, + std::string_view name, + PathFilter * filter, + RepairFlag repair) const +{ + return + settings.readOnlyMode + ? store->computeStorePathForPath(name, path.abs(), FileIngestionMethod::Recursive, htSHA256, filter ? *filter : defaultPathFilter).first + : store->addToStore(name, path.abs(), FileIngestionMethod::Recursive, htSHA256, filter ? 
*filter : defaultPathFilter, repair); +} + +SourcePath SourcePath::resolveSymlinks() const +{ + SourcePath res(CanonPath::root); + + int linksAllowed = 1024; + + std::list todo; + for (auto & c : path) + todo.push_back(std::string(c)); + + while (!todo.empty()) { + auto c = *todo.begin(); + todo.pop_front(); + if (c == "" || c == ".") + ; + else if (c == "..") + res.path.pop(); + else { + res.path.push(c); + if (auto st = res.maybeLstat(); st && st->type == InputAccessor::tSymlink) { + if (!linksAllowed--) + throw Error("infinite symlink recursion in path '%s'", path); + auto target = res.readLink(); + res.path.pop(); + if (hasPrefix(target, "/")) + res.path = CanonPath::root; + todo.splice(todo.begin(), tokenizeString>(target, "/")); + } + } + } + + return res; +} + +} diff --git a/src/libfetchers/input-accessor.hh b/src/libfetchers/input-accessor.hh new file mode 100644 index 000000000..5a2f17f62 --- /dev/null +++ b/src/libfetchers/input-accessor.hh @@ -0,0 +1,167 @@ +#pragma once + +#include "ref.hh" +#include "types.hh" +#include "archive.hh" +#include "canon-path.hh" +#include "repair-flag.hh" + +namespace nix { + +class StorePath; +class Store; + +struct InputAccessor +{ + enum Type { + tRegular, tSymlink, tDirectory, + /** + Any other node types that may be encountered on the file system, such as device nodes, sockets, named pipe, and possibly even more exotic things. + + Responsible for `"unknown"` from `builtins.readFileType "/dev/null"`. + + Unlike `DT_UNKNOWN`, this must not be used for deferring the lookup of types. + */ + tMisc + }; + + struct Stat + { + Type type = tMisc; + //uint64_t fileSize = 0; // regular files only + bool isExecutable = false; // regular files only + }; + + typedef std::optional DirEntry; + + typedef std::map DirEntries; +}; + +/** + * An abstraction for accessing source files during + * evaluation. Currently, it's just a wrapper around `CanonPath` that + * accesses files in the regular filesystem, but in the future it will + * support fetching files in other ways. + */ +struct SourcePath +{ + CanonPath path; + + SourcePath(CanonPath path) + : path(std::move(path)) + { } + + std::string_view baseName() const; + + /** + * Construct the parent of this `SourcePath`. Aborts if `this` + * denotes the root. + */ + SourcePath parent() const; + + /** + * If this `SourcePath` denotes a regular file (not a symlink), + * return its contents; otherwise throw an error. + */ + std::string readFile() const + { return nix::readFile(path.abs()); } + + /** + * Return whether this `SourcePath` denotes a file (of any type) + * that exists + */ + bool pathExists() const + { return nix::pathExists(path.abs()); } + + /** + * Return stats about this `SourcePath`, or throw an exception if + * it doesn't exist. + */ + InputAccessor::Stat lstat() const; + + /** + * Return stats about this `SourcePath`, or std::nullopt if it + * doesn't exist. + */ + std::optional maybeLstat() const; + + /** + * If this `SourcePath` denotes a directory (not a symlink), + * return its directory entries; otherwise throw an error. + */ + InputAccessor::DirEntries readDirectory() const; + + /** + * If this `SourcePath` denotes a symlink, return its target; + * otherwise throw an error. + */ + std::string readLink() const + { return nix::readLink(path.abs()); } + + /** + * Dump this `SourcePath` to `sink` as a NAR archive. 
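For orientation, a sketch of the new `SourcePath` wrapper from the caller's side. It currently just wraps a `CanonPath` onto the regular filesystem, so the `/etc` paths below are merely illustrative and the code is a usage sketch, not part of the patch:

```cpp
#include "input-accessor.hh"

#include <iostream>

using namespace nix;

int main()
{
    SourcePath etc { CanonPath("/etc") };

    // operator+ appends a single component, adding the slash implicitly.
    SourcePath hosts = etc + "hosts";

    if (auto st = hosts.maybeLstat();
        st && st->type == InputAccessor::tRegular)
        std::cout << hosts.readFile().size() << " bytes\n";

    // resolveSymlinks() rewrites the path component by component,
    // following at most 1024 links along the way.
    std::cout << hosts.resolveSymlinks() << "\n";
}
```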
+ */ + void dumpPath( + Sink & sink, + PathFilter & filter = defaultPathFilter) const + { return nix::dumpPath(path.abs(), sink, filter); } + + /** + * Copy this `SourcePath` to the Nix store. + */ + StorePath fetchToStore( + ref store, + std::string_view name = "source", + PathFilter * filter = nullptr, + RepairFlag repair = NoRepair) const; + + /** + * Return the location of this path in the "real" filesystem, if + * it has a physical location. + */ + std::optional getPhysicalPath() const + { return path; } + + std::string to_string() const + { return path.abs(); } + + /** + * Append a `CanonPath` to this path. + */ + SourcePath operator + (const CanonPath & x) const + { return {path + x}; } + + /** + * Append a single component `c` to this path. `c` must not + * contain a slash. A slash is implicitly added between this path + * and `c`. + */ + SourcePath operator + (std::string_view c) const + { return {path + c}; } + + bool operator == (const SourcePath & x) const + { + return path == x.path; + } + + bool operator != (const SourcePath & x) const + { + return path != x.path; + } + + bool operator < (const SourcePath & x) const + { + return path < x.path; + } + + /** + * Resolve any symlinks in this `SourcePath` (including its + * parents). The result is a `SourcePath` in which no element is a + * symlink. + */ + SourcePath resolveSymlinks() const; +}; + +std::ostream & operator << (std::ostream & str, const SourcePath & path); + +} diff --git a/src/libmain/common-args.hh b/src/libmain/common-args.hh index e7ed0d934..c35406c3b 100644 --- a/src/libmain/common-args.hh +++ b/src/libmain/common-args.hh @@ -2,6 +2,7 @@ ///@file #include "args.hh" +#include "repair-flag.hh" namespace nix { @@ -49,4 +50,21 @@ struct MixJSON : virtual Args } }; +struct MixRepair : virtual Args +{ + RepairFlag repair = NoRepair; + + MixRepair() + { + addFlag({ + .longName = "repair", + .description = + "During evaluation, rewrite missing or corrupted files in the Nix store. " + "During building, rebuild missing or corrupted store paths.", + .category = miscCategory, + .handler = {&repair, Repair}, + }); + } +}; + } diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc index a4bb94b0e..df7d21e54 100644 --- a/src/libstore/build/derivation-goal.cc +++ b/src/libstore/build/derivation-goal.cc @@ -274,11 +274,13 @@ void DerivationGoal::haveDerivation() ) ) ); - else + else { + auto * cap = getDerivationCA(*drv); addWaitee(upcast_goal(worker.makePathSubstitutionGoal( status.known->path, buildMode == bmRepair ? Repair : NoRepair, - getDerivationCA(*drv)))); + cap ? 
std::optional { *cap } : std::nullopt))); + } } if (waitees.empty()) /* to prevent hang (no wake-up event) */ @@ -1020,43 +1022,33 @@ void DerivationGoal::resolvedFinished() StorePathSet outputPaths; - // `wantedOutputs` might merely indicate “all the outputs” - auto realWantedOutputs = std::visit(overloaded { - [&](const OutputsSpec::All &) { - return resolvedDrv.outputNames(); - }, - [&](const OutputsSpec::Names & names) { - return static_cast>(names); - }, - }, wantedOutputs.raw()); - - for (auto & wantedOutput : realWantedOutputs) { - auto initialOutput = get(initialOutputs, wantedOutput); - auto resolvedHash = get(resolvedHashes, wantedOutput); + for (auto & outputName : resolvedDrv.outputNames()) { + auto initialOutput = get(initialOutputs, outputName); + auto resolvedHash = get(resolvedHashes, outputName); if ((!initialOutput) || (!resolvedHash)) throw Error( "derivation '%s' doesn't have expected output '%s' (derivation-goal.cc/resolvedFinished,resolve)", - worker.store.printStorePath(drvPath), wantedOutput); + worker.store.printStorePath(drvPath), outputName); auto realisation = [&]{ - auto take1 = get(resolvedResult.builtOutputs, wantedOutput); + auto take1 = get(resolvedResult.builtOutputs, outputName); if (take1) return *take1; /* The above `get` should work. But sateful tracking of outputs in resolvedResult, this can get out of sync with the store, which is our actual source of truth. For now we just check the store directly if it fails. */ - auto take2 = worker.evalStore.queryRealisation(DrvOutput { *resolvedHash, wantedOutput }); + auto take2 = worker.evalStore.queryRealisation(DrvOutput { *resolvedHash, outputName }); if (take2) return *take2; throw Error( "derivation '%s' doesn't have expected output '%s' (derivation-goal.cc/resolvedFinished,realisation)", - worker.store.printStorePath(resolvedDrvGoal->drvPath), wantedOutput); + worker.store.printStorePath(resolvedDrvGoal->drvPath), outputName); }(); if (drv->type().isPure()) { auto newRealisation = realisation; - newRealisation.id = DrvOutput { initialOutput->outputHash, wantedOutput }; + newRealisation.id = DrvOutput { initialOutput->outputHash, outputName }; newRealisation.signatures.clear(); if (!drv->type().isFixed()) newRealisation.dependentRealisations = drvOutputReferences(worker.store, *drv, realisation.outPath); @@ -1064,7 +1056,7 @@ void DerivationGoal::resolvedFinished() worker.store.registerDrvOutput(newRealisation); } outputPaths.insert(realisation.outPath); - builtOutputs.emplace(wantedOutput, realisation); + builtOutputs.emplace(outputName, realisation); } runPostBuildHook( @@ -1160,7 +1152,7 @@ HookReply DerivationGoal::tryBuildHook() /* Tell the hook all the inputs that have to be copied to the remote system. */ - worker_proto::write(worker.store, hook->sink, inputPaths); + workerProtoWrite(worker.store, hook->sink, inputPaths); /* Tell the hooks the missing outputs that have to be copied back from the remote system. 
*/ @@ -1171,7 +1163,7 @@ HookReply DerivationGoal::tryBuildHook() if (buildMode != bmCheck && status.known && status.known->isValid()) continue; missingOutputs.insert(outputName); } - worker_proto::write(worker.store, hook->sink, missingOutputs); + workerProtoWrite(worker.store, hook->sink, missingOutputs); } hook->sink = FdSink(); @@ -1406,7 +1398,7 @@ std::pair DerivationGoal::checkPathValidity() ); } } - if (info.wanted && info.known && info.known->isValid()) + if (info.known && info.known->isValid()) validOutputs.emplace(i.first, Realisation { drvOutput, info.known->path }); } @@ -1457,8 +1449,9 @@ void DerivationGoal::done( mcRunningBuilds.reset(); if (buildResult.success()) { - assert(!builtOutputs.empty()); - buildResult.builtOutputs = std::move(builtOutputs); + auto wantedBuiltOutputs = filterDrvOutputs(wantedOutputs, std::move(builtOutputs)); + assert(!wantedBuiltOutputs.empty()); + buildResult.builtOutputs = std::move(wantedBuiltOutputs); if (status == BuildResult::Built) worker.doneBuilds++; } else { diff --git a/src/libstore/build/derivation-goal.hh b/src/libstore/build/derivation-goal.hh index 7033b7a58..ee8f06f25 100644 --- a/src/libstore/build/derivation-goal.hh +++ b/src/libstore/build/derivation-goal.hh @@ -306,15 +306,13 @@ struct DerivationGoal : public Goal * Update 'initialOutputs' to determine the current status of the * outputs of the derivation. Also returns a Boolean denoting * whether all outputs are valid and non-corrupt, and a - * 'SingleDrvOutputs' structure containing the valid and wanted - * outputs. + * 'SingleDrvOutputs' structure containing the valid outputs. */ std::pair checkPathValidity(); /** * Aborts if any output is not valid or corrupt, and otherwise - * returns a 'SingleDrvOutputs' structure containing the wanted - * outputs. + * returns a 'SingleDrvOutputs' structure containing all outputs. 
*/ SingleDrvOutputs assertPathValidity(); @@ -335,6 +333,8 @@ struct DerivationGoal : public Goal void waiteeDone(GoalPtr waitee, ExitCode result) override; StorePathSet exportReferences(const StorePathSet & storePaths); + + JobCategory jobCategory() override { return JobCategory::Build; }; }; MakeError(NotDeterministic, BuildError); diff --git a/src/libstore/build/drv-output-substitution-goal.hh b/src/libstore/build/drv-output-substitution-goal.hh index 697ddb283..5d1253a71 100644 --- a/src/libstore/build/drv-output-substitution-goal.hh +++ b/src/libstore/build/drv-output-substitution-goal.hh @@ -21,7 +21,7 @@ class Worker; class DrvOutputSubstitutionGoal : public Goal { /** - * The drv output we're trying to substitue + * The drv output we're trying to substitute */ DrvOutput id; @@ -72,6 +72,8 @@ public: void work() override; void handleEOF(int fd) override; + + JobCategory jobCategory() override { return JobCategory::Substitution; }; }; } diff --git a/src/libstore/build/entry-points.cc b/src/libstore/build/entry-points.cc index 74eae0692..edd6cb6d2 100644 --- a/src/libstore/build/entry-points.cc +++ b/src/libstore/build/entry-points.cc @@ -110,7 +110,7 @@ void Store::ensurePath(const StorePath & path) } -void LocalStore::repairPath(const StorePath & path) +void Store::repairPath(const StorePath & path) { Worker worker(*this, *this); GoalPtr goal = worker.makePathSubstitutionGoal(path, Repair); diff --git a/src/libstore/build/goal.hh b/src/libstore/build/goal.hh index c0e12a2ed..a313bf22c 100644 --- a/src/libstore/build/goal.hh +++ b/src/libstore/build/goal.hh @@ -34,6 +34,17 @@ typedef std::set> WeakGoals; */ typedef std::map WeakGoalMap; +/** + * Used as a hint to the worker on how to schedule a particular goal. For example, + * builds are typically CPU- and memory-bound, while substitutions are I/O bound. + * Using this information, the worker might decide to schedule more or fewer goals + * of each category in parallel. 
+ */ +enum struct JobCategory { + Build, + Substitution, +}; + struct Goal : public std::enable_shared_from_this { typedef enum {ecBusy, ecSuccess, ecFailed, ecNoSubstituters, ecIncompleteClosure} ExitCode; @@ -150,6 +161,8 @@ public: void amDone(ExitCode result, std::optional ex = {}); virtual void cleanup() { } + + virtual JobCategory jobCategory() = 0; }; void addToWeakGoals(WeakGoals & goals, GoalPtr p); diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc index 4b978f2a4..0b0bd3328 100644 --- a/src/libstore/build/local-derivation-goal.cc +++ b/src/libstore/build/local-derivation-goal.cc @@ -357,7 +357,7 @@ bool LocalDerivationGoal::cleanupDecideWhetherDiskFull() for (auto & [_, status] : initialOutputs) { if (!status.known) continue; if (buildMode != bmCheck && status.known->isValid()) continue; - auto p = worker.store.printStorePath(status.known->path); + auto p = worker.store.toRealPath(status.known->path); if (pathExists(chrootRootDir + p)) renameFile((chrootRootDir + p), p); } @@ -1791,6 +1791,9 @@ void LocalDerivationGoal::runChild() for (auto & path : { "/etc/resolv.conf", "/etc/services", "/etc/hosts" }) if (pathExists(path)) ss.push_back(path); + + if (settings.caFile != "") + dirsInChroot.try_emplace("/etc/ssl/certs/ca-certificates.crt", settings.caFile, true); } for (auto & i : ss) dirsInChroot.emplace(i, i); @@ -2441,37 +2444,51 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs() throw BuildError( "output path %1% without valid stats info", actualPath); - if (outputHash.method == FileIngestionMethod::Flat) { + if (outputHash.method == ContentAddressMethod { FileIngestionMethod::Flat } || + outputHash.method == ContentAddressMethod { TextIngestionMethod {} }) + { /* The output path should be a regular file without execute permission. */ if (!S_ISREG(st->st_mode) || (st->st_mode & S_IXUSR) != 0) throw BuildError( "output path '%1%' should be a non-executable regular file " - "since recursive hashing is not enabled (outputHashMode=flat)", + "since recursive hashing is not enabled (one of outputHashMode={flat,text} is true)", actualPath); } rewriteOutput(); /* FIXME optimize and deduplicate with addToStore */ std::string oldHashPart { scratchPath->hashPart() }; HashModuloSink caSink { outputHash.hashType, oldHashPart }; - switch (outputHash.method) { - case FileIngestionMethod::Recursive: - dumpPath(actualPath, caSink); - break; - case FileIngestionMethod::Flat: - readFile(actualPath, caSink); - break; - } + std::visit(overloaded { + [&](const TextIngestionMethod &) { + readFile(actualPath, caSink); + }, + [&](const FileIngestionMethod & m2) { + switch (m2) { + case FileIngestionMethod::Recursive: + dumpPath(actualPath, caSink); + break; + case FileIngestionMethod::Flat: + readFile(actualPath, caSink); + break; + } + }, + }, outputHash.method.raw); auto got = caSink.finish().first; + + auto optCA = ContentAddressWithReferences::fromPartsOpt( + outputHash.method, + std::move(got), + rewriteRefs()); + if (!optCA) { + // TODO track distinct failure modes separately (at the time of + // writing there is just one but `nullopt` is unclear) so this + // message can't get out of sync. 
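The `nullopt` case referred to in the comment above is currently exactly one situation: a self-reference combined with text ingestion. A small sketch against the new `ContentAddressWithReferences::fromPartsOpt()` (the hashed string is a dummy; this is an illustration, not code from the patch):

```cpp
#include "content-address.hh"
#include "hash.hh"

#include <cassert>

using namespace nix;

int main()
{
    auto hash = hashString(htSHA256, "dummy output contents");

    // A self-reference is representable for file ("fixed") ingestion...
    auto ok = ContentAddressWithReferences::fromPartsOpt(
        FileIngestionMethod::Recursive, hash,
        StoreReferences { .self = true });
    assert(ok.has_value());

    // ...but not for text ingestion, so fromPartsOpt() returns nullopt and
    // registerOutputs() turns that into the BuildError seen above.
    auto bad = ContentAddressWithReferences::fromPartsOpt(
        TextIngestionMethod {}, hash,
        StoreReferences { .self = true });
    assert(!bad.has_value());
}
```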
+ throw BuildError("output path '%s' has illegal content address, probably a spurious self-reference with text hashing"); + } ValidPathInfo newInfo0 { worker.store, outputPathName(drv->name, outputName), - FixedOutputInfo { - .hash = { - .method = outputHash.method, - .hash = got, - }, - .references = rewriteRefs(), - }, + *std::move(optCA), Hash::dummy, }; if (*scratchPath != newInfo0.path) { @@ -2518,13 +2535,14 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs() }, [&](const DerivationOutput::CAFixed & dof) { + auto wanted = dof.ca.getHash(); + auto newInfo0 = newInfoFromCA(DerivationOutput::CAFloating { - .method = dof.hash.method, - .hashType = dof.hash.hash.type, + .method = dof.ca.getMethod(), + .hashType = wanted.type, }); /* Check wanted hash */ - const Hash & wanted = dof.hash.hash; assert(newInfo0.ca); auto got = newInfo0.ca->getHash(); if (wanted != got) { @@ -2537,6 +2555,11 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs() wanted.to_string(SRI, true), got.to_string(SRI, true))); } + if (!newInfo0.references.empty()) + delayedException = std::make_exception_ptr( + BuildError("illegal path references in fixed-output derivation '%s'", + worker.store.printStorePath(drvPath))); + return newInfo0; }, @@ -2716,8 +2739,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs() signRealisation(thisRealisation); worker.store.registerDrvOutput(thisRealisation); } - if (wantedOutputs.contains(outputName)) - builtOutputs.emplace(outputName, thisRealisation); + builtOutputs.emplace(outputName, thisRealisation); } return builtOutputs; diff --git a/src/libstore/build/personality.cc b/src/libstore/build/personality.cc index 4ad477869..1a6201758 100644 --- a/src/libstore/build/personality.cc +++ b/src/libstore/build/personality.cc @@ -21,7 +21,8 @@ void setPersonality(std::string_view system) && (std::string_view(SYSTEM) == "x86_64-linux" || (!strcmp(utsbuf.sysname, "Linux") && !strcmp(utsbuf.machine, "x86_64")))) || system == "armv7l-linux" - || system == "armv6l-linux") + || system == "armv6l-linux" + || system == "armv5tel-linux") { if (personality(PER_LINUX32) == -1) throw SysError("cannot set 32-bit personality"); diff --git a/src/libstore/build/substitution-goal.cc b/src/libstore/build/substitution-goal.cc index 190fb455a..93867007d 100644 --- a/src/libstore/build/substitution-goal.cc +++ b/src/libstore/build/substitution-goal.cc @@ -200,11 +200,10 @@ void PathSubstitutionGoal::tryToRun() { trace("trying to run"); - /* Make sure that we are allowed to start a build. Note that even - if maxBuildJobs == 0 (no local builds allowed), we still allow - a substituter to run. This is because substitutions cannot be - distributed to another machine via the build hook. */ - if (worker.getNrLocalBuilds() >= std::max(1U, (unsigned int) settings.maxBuildJobs)) { + /* Make sure that we are allowed to start a substitution. Note that even + if maxSubstitutionJobs == 0, we still allow a substituter to run. This + prevents infinite waiting. 
*/ + if (worker.getNrSubstitutions() >= std::max(1U, (unsigned int) settings.maxSubstitutionJobs)) { worker.waitForBuildSlot(shared_from_this()); return; } diff --git a/src/libstore/build/substitution-goal.hh b/src/libstore/build/substitution-goal.hh index c2b7fc95a..9fc041920 100644 --- a/src/libstore/build/substitution-goal.hh +++ b/src/libstore/build/substitution-goal.hh @@ -115,6 +115,8 @@ public: void handleEOF(int fd) override; void cleanup() override; + + JobCategory jobCategory() override { return JobCategory::Substitution; }; }; } diff --git a/src/libstore/build/worker.cc b/src/libstore/build/worker.cc index 6ad4a0e2b..ee334d54a 100644 --- a/src/libstore/build/worker.cc +++ b/src/libstore/build/worker.cc @@ -18,6 +18,7 @@ Worker::Worker(Store & store, Store & evalStore) { /* Debugging: prevent recursive workers. */ nrLocalBuilds = 0; + nrSubstitutions = 0; lastWokenUp = steady_time_point::min(); permanentFailure = false; timedOut = false; @@ -176,6 +177,12 @@ unsigned Worker::getNrLocalBuilds() } +unsigned Worker::getNrSubstitutions() +{ + return nrSubstitutions; +} + + void Worker::childStarted(GoalPtr goal, const std::set & fds, bool inBuildSlot, bool respectTimeouts) { @@ -187,7 +194,10 @@ void Worker::childStarted(GoalPtr goal, const std::set & fds, child.inBuildSlot = inBuildSlot; child.respectTimeouts = respectTimeouts; children.emplace_back(child); - if (inBuildSlot) nrLocalBuilds++; + if (inBuildSlot) { + if (goal->jobCategory() == JobCategory::Substitution) nrSubstitutions++; + else nrLocalBuilds++; + } } @@ -198,8 +208,13 @@ void Worker::childTerminated(Goal * goal, bool wakeSleepers) if (i == children.end()) return; if (i->inBuildSlot) { - assert(nrLocalBuilds > 0); - nrLocalBuilds--; + if (goal->jobCategory() == JobCategory::Substitution) { + assert(nrSubstitutions > 0); + nrSubstitutions--; + } else { + assert(nrLocalBuilds > 0); + nrLocalBuilds--; + } } children.erase(i); @@ -220,7 +235,9 @@ void Worker::childTerminated(Goal * goal, bool wakeSleepers) void Worker::waitForBuildSlot(GoalPtr goal) { debug("wait for build slot"); - if (getNrLocalBuilds() < settings.maxBuildJobs) + bool isSubstitutionGoal = goal->jobCategory() == JobCategory::Substitution; + if ((!isSubstitutionGoal && getNrLocalBuilds() < settings.maxBuildJobs) || + (isSubstitutionGoal && getNrSubstitutions() < settings.maxSubstitutionJobs)) wakeUp(goal); /* we can do it right away */ else addToWeakGoals(wantingToBuild, goal); diff --git a/src/libstore/build/worker.hh b/src/libstore/build/worker.hh index bb51d641d..63624d910 100644 --- a/src/libstore/build/worker.hh +++ b/src/libstore/build/worker.hh @@ -88,11 +88,16 @@ private: std::list children; /** - * Number of build slots occupied. This includes local builds and - * substitutions but not remote builds via the build hook. + * Number of build slots occupied. This includes local builds but does not + * include substitutions or remote builds via the build hook. */ unsigned int nrLocalBuilds; + /** + * Number of substitution slots occupied. + */ + unsigned int nrSubstitutions; + /** * Maps used to prevent multiple instantiations of a goal for the * same derivation / path. @@ -220,12 +225,16 @@ public: void wakeUp(GoalPtr goal); /** - * Return the number of local build and substitution processes - * currently running (but not remote builds via the build - * hook). + * Return the number of local build processes currently running (but not + * remote builds via the build hook). 
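To illustrate the scheduling effect of the split counters, here is a self-contained toy. It is not the `Worker` code; the scaffolding is invented, and only the names and the `std::max(1U, …)` floor on substitutions mirror the patch:

```cpp
#include <algorithm>
#include <cstdio>

// Mirrors the new goal.hh enum.
enum struct JobCategory { Build, Substitution };

struct ToySlots
{
    unsigned nrLocalBuilds = 0, nrSubstitutions = 0;
    unsigned maxBuildJobs = 8, maxSubstitutionJobs = 16;

    // Same shape as the check after the patch: each category is gated
    // against its own limit only, instead of one shared counter.
    bool canStart(JobCategory cat) const
    {
        return cat == JobCategory::Substitution
            ? nrSubstitutions < std::max(1U, maxSubstitutionJobs)
            : nrLocalBuilds < maxBuildJobs;
    }
};

int main()
{
    ToySlots s;
    s.nrSubstitutions = 16; // substituters saturated
    std::printf("can build: %d, can substitute: %d\n",
        s.canStart(JobCategory::Build),         // 1: builds are unaffected
        s.canStart(JobCategory::Substitution)); // 0
}
```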
*/ unsigned int getNrLocalBuilds(); + /** + * Return the number of substitution processes currently running. + */ + unsigned int getNrSubstitutions(); + /** * Registers a running child process. `inBuildSlot` means that * the process counts towards the jobs limit. diff --git a/src/libstore/content-address.cc b/src/libstore/content-address.cc index 055b216db..04f7ac214 100644 --- a/src/libstore/content-address.cc +++ b/src/libstore/content-address.cc @@ -21,6 +21,27 @@ std::string makeFileIngestionPrefix(FileIngestionMethod m) } } +std::string ContentAddressMethod::renderPrefix() const +{ + return std::visit(overloaded { + [](TextIngestionMethod) -> std::string { return "text:"; }, + [](FileIngestionMethod m2) { + /* Not prefixed for back compat with things that couldn't produce text before. */ + return makeFileIngestionPrefix(m2); + }, + }, raw); +} + +ContentAddressMethod ContentAddressMethod::parsePrefix(std::string_view & m) +{ + ContentAddressMethod method = FileIngestionMethod::Flat; + if (splitPrefix(m, "r:")) + method = FileIngestionMethod::Recursive; + else if (splitPrefix(m, "text:")) + method = TextIngestionMethod {}; + return method; +} + std::string ContentAddress::render() const { return std::visit(overloaded { @@ -36,14 +57,14 @@ std::string ContentAddress::render() const }, raw); } -std::string ContentAddressMethod::render() const +std::string ContentAddressMethod::render(HashType ht) const { return std::visit(overloaded { - [](const TextHashMethod & th) { - return std::string{"text:"} + printHashType(htSHA256); + [&](const TextIngestionMethod & th) { + return std::string{"text:"} + printHashType(ht); }, - [](const FixedOutputHashMethod & fshm) { - return "fixed:" + makeFileIngestionPrefix(fshm.fileIngestionMethod) + printHashType(fshm.hashType); + [&](const FileIngestionMethod & fim) { + return "fixed:" + makeFileIngestionPrefix(fim) + printHashType(ht); } }, raw); } @@ -51,7 +72,7 @@ std::string ContentAddressMethod::render() const /** * Parses content address strings up to the hash. */ -static ContentAddressMethod parseContentAddressMethodPrefix(std::string_view & rest) +static std::pair parseContentAddressMethodPrefix(std::string_view & rest) { std::string_view wholeInput { rest }; @@ -75,46 +96,47 @@ static ContentAddressMethod parseContentAddressMethodPrefix(std::string_view & r if (prefix == "text") { // No parsing of the ingestion method, "text" only support flat. HashType hashType = parseHashType_(); - if (hashType != htSHA256) - throw Error("text content address hash should use %s, but instead uses %s", - printHashType(htSHA256), printHashType(hashType)); - return TextHashMethod {}; + return { + TextIngestionMethod {}, + std::move(hashType), + }; } else if (prefix == "fixed") { // Parse method auto method = FileIngestionMethod::Flat; if (splitPrefix(rest, "r:")) method = FileIngestionMethod::Recursive; HashType hashType = parseHashType_(); - return FixedOutputHashMethod { - .fileIngestionMethod = method, - .hashType = std::move(hashType), + return { + std::move(method), + std::move(hashType), }; } else throw UsageError("content address prefix '%s' is unrecognized. 
Recogonized prefixes are 'text' or 'fixed'", prefix); } -ContentAddress ContentAddress::parse(std::string_view rawCa) { +ContentAddress ContentAddress::parse(std::string_view rawCa) +{ auto rest = rawCa; - ContentAddressMethod caMethod = parseContentAddressMethodPrefix(rest); + auto [caMethod, hashType_] = parseContentAddressMethodPrefix(rest); + auto hashType = hashType_; // work around clang bug - return std::visit( - overloaded { - [&](TextHashMethod & thm) { - return ContentAddress(TextHash { - .hash = Hash::parseNonSRIUnprefixed(rest, htSHA256) - }); - }, - [&](FixedOutputHashMethod & fohMethod) { - return ContentAddress(FixedOutputHash { - .method = fohMethod.fileIngestionMethod, - .hash = Hash::parseNonSRIUnprefixed(rest, std::move(fohMethod.hashType)), - }); - }, - }, caMethod.raw); + return std::visit(overloaded { + [&](TextIngestionMethod &) { + return ContentAddress(TextHash { + .hash = Hash::parseNonSRIUnprefixed(rest, hashType) + }); + }, + [&](FileIngestionMethod & fim) { + return ContentAddress(FixedOutputHash { + .method = fim, + .hash = Hash::parseNonSRIUnprefixed(rest, hashType), + }); + }, + }, caMethod.raw); } -ContentAddressMethod ContentAddressMethod::parse(std::string_view caMethod) +std::pair ContentAddressMethod::parse(std::string_view caMethod) { std::string asPrefix = std::string{caMethod} + ":"; // parseContentAddressMethodPrefix takes its argument by reference @@ -134,6 +156,36 @@ std::string renderContentAddress(std::optional ca) return ca ? ca->render() : ""; } +ContentAddress ContentAddress::fromParts( + ContentAddressMethod method, Hash hash) noexcept +{ + return std::visit(overloaded { + [&](TextIngestionMethod _) -> ContentAddress { + return TextHash { + .hash = std::move(hash), + }; + }, + [&](FileIngestionMethod m2) -> ContentAddress { + return FixedOutputHash { + .method = std::move(m2), + .hash = std::move(hash), + }; + }, + }, method.raw); +} + +ContentAddressMethod ContentAddress::getMethod() const +{ + return std::visit(overloaded { + [](const TextHash & th) -> ContentAddressMethod { + return TextIngestionMethod {}; + }, + [](const FixedOutputHash & fsh) -> ContentAddressMethod { + return fsh.method; + }, + }, raw); +} + const Hash & ContentAddress::getHash() const { return std::visit(overloaded { @@ -146,6 +198,12 @@ const Hash & ContentAddress::getHash() const }, raw); } +std::string ContentAddress::printMethodAlgo() const +{ + return getMethod().renderPrefix() + + printHashType(getHash().type); +} + bool StoreReferences::empty() const { return !self && others.empty(); @@ -156,7 +214,8 @@ size_t StoreReferences::size() const return (self ? 
1 : 0) + others.size(); } -ContentAddressWithReferences ContentAddressWithReferences::withoutRefs(const ContentAddress & ca) { +ContentAddressWithReferences ContentAddressWithReferences::withoutRefs(const ContentAddress & ca) noexcept +{ return std::visit(overloaded { [&](const TextHash & h) -> ContentAddressWithReferences { return TextInfo { @@ -173,4 +232,56 @@ ContentAddressWithReferences ContentAddressWithReferences::withoutRefs(const Con }, ca.raw); } +std::optional ContentAddressWithReferences::fromPartsOpt( + ContentAddressMethod method, Hash hash, StoreReferences refs) noexcept +{ + return std::visit(overloaded { + [&](TextIngestionMethod _) -> std::optional { + if (refs.self) + return std::nullopt; + return ContentAddressWithReferences { + TextInfo { + .hash = { .hash = std::move(hash) }, + .references = std::move(refs.others), + } + }; + }, + [&](FileIngestionMethod m2) -> std::optional { + return ContentAddressWithReferences { + FixedOutputInfo { + .hash = { + .method = m2, + .hash = std::move(hash), + }, + .references = std::move(refs), + } + }; + }, + }, method.raw); +} + +ContentAddressMethod ContentAddressWithReferences::getMethod() const +{ + return std::visit(overloaded { + [](const TextInfo & th) -> ContentAddressMethod { + return TextIngestionMethod {}; + }, + [](const FixedOutputInfo & fsh) -> ContentAddressMethod { + return fsh.hash.method; + }, + }, raw); +} + +Hash ContentAddressWithReferences::getHash() const +{ + return std::visit(overloaded { + [](const TextInfo & th) { + return th.hash.hash; + }, + [](const FixedOutputInfo & fsh) { + return fsh.hash.hash; + }, + }, raw); +} + } diff --git a/src/libstore/content-address.hh b/src/libstore/content-address.hh index 2f98950fb..e1e80448b 100644 --- a/src/libstore/content-address.hh +++ b/src/libstore/content-address.hh @@ -21,8 +21,14 @@ namespace nix { * * Somewhat obscure, used by \ref Derivation derivations and * `builtins.toFile` currently. + * + * TextIngestionMethod is identical to FileIngestionMethod::Fixed except that + * the former may not have self-references and is tagged `text:${algo}:${hash}` + * rather than `fixed:${algo}:${hash}`. The contents of the store path are + * ingested and hashed identically, aside from the slightly different tag and + * restriction on self-references. */ -struct TextHashMethod : std::monostate { }; +struct TextIngestionMethod : std::monostate { }; /** * An enumeration of the main ways we can serialize file system @@ -46,13 +52,6 @@ enum struct FileIngestionMethod : uint8_t { */ std::string makeFileIngestionPrefix(FileIngestionMethod m); -struct FixedOutputHashMethod { - FileIngestionMethod fileIngestionMethod; - HashType hashType; - - GENERATE_CMP(FixedOutputHashMethod, me->fileIngestionMethod, me->hashType); -}; - /** * An enumeration of all the ways we can serialize file system objects. * @@ -64,8 +63,8 @@ struct FixedOutputHashMethod { struct ContentAddressMethod { typedef std::variant< - TextHashMethod, - FixedOutputHashMethod + TextIngestionMethod, + FileIngestionMethod > Raw; Raw raw; @@ -77,9 +76,36 @@ struct ContentAddressMethod : raw(std::forward(arg)...) { } - static ContentAddressMethod parse(std::string_view rawCaMethod); - std::string render() const; + /** + * Parse the prefix tag which indicates how the files + * were ingested, with the fixed output case not prefixed for back + * compat. + * + * @param [in] m A string that should begin with the prefix. + * @param [out] m The remainder of the string after the prefix. 
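A small sketch of the in/out parameter convention documented here: `parsePrefix()` consumes the tag from the front of the view and leaves the remainder (typically the hash algorithm) behind. It assumes only the declarations in this header:

```cpp
#include "content-address.hh"

#include <cassert>
#include <string_view>
#include <variant>

using namespace nix;

int main()
{
    std::string_view rest = "r:sha256";
    auto method = ContentAddressMethod::parsePrefix(rest);
    assert(std::get<FileIngestionMethod>(method.raw) == FileIngestionMethod::Recursive);
    assert(rest == "sha256");               // prefix consumed in place
    assert(method.renderPrefix() == "r:");  // rough inverse

    // The flat/fixed case stays unprefixed for backwards compatibility.
    std::string_view rest2 = "sha256";
    auto flat = ContentAddressMethod::parsePrefix(rest2);
    assert(std::get<FileIngestionMethod>(flat.raw) == FileIngestionMethod::Flat);
    assert(flat.renderPrefix().empty());
}
```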
+ */ + static ContentAddressMethod parsePrefix(std::string_view & m); + + /** + * Render the prefix tag which indicates how the files wre ingested. + * + * The rough inverse of `parsePrefix()`. + */ + std::string renderPrefix() const; + + /** + * Parse a content addressing method and hash type. + */ + static std::pair parse(std::string_view rawCaMethod); + + /** + * Render a content addressing method and hash type in a + * nicer way, prefixing both cases. + * + * The rough inverse of `parse()`. + */ + std::string render(HashType ht) const; }; @@ -147,8 +173,9 @@ struct ContentAddress { } /** - * Compute the content-addressability assertion (ValidPathInfo::ca) for - * paths created by Store::makeFixedOutputPath() / Store::addToStore(). + * Compute the content-addressability assertion + * (`ValidPathInfo::ca`) for paths created by + * `Store::makeFixedOutputPath()` / `Store::addToStore()`. */ std::string render() const; @@ -156,9 +183,27 @@ struct ContentAddress static std::optional parseOpt(std::string_view rawCaOpt); + /** + * Create a `ContentAddress` from 2 parts: + * + * @param method Way ingesting the file system data. + * + * @param hash Hash of ingested file system data. + */ + static ContentAddress fromParts( + ContentAddressMethod method, Hash hash) noexcept; + + ContentAddressMethod getMethod() const; + const Hash & getHash() const; + + std::string printMethodAlgo() const; }; +/** + * Render the `ContentAddress` if it exists to a string, return empty + * string otherwise. + */ std::string renderContentAddress(std::optional ca); @@ -244,10 +289,29 @@ struct ContentAddressWithReferences { } /** - * Create a ContentAddressWithReferences from a mere ContentAddress, by - * assuming no references in all cases. + * Create a `ContentAddressWithReferences` from a mere + * `ContentAddress`, by claiming no references. */ - static ContentAddressWithReferences withoutRefs(const ContentAddress &); + static ContentAddressWithReferences withoutRefs(const ContentAddress &) noexcept; + + /** + * Create a `ContentAddressWithReferences` from 3 parts: + * + * @param method Way ingesting the file system data. + * + * @param hash Hash of ingested file system data. + * + * @param refs References to other store objects or oneself. + * + * Do note that not all combinations are supported; `nullopt` is + * returns for invalid combinations. 
+ */ + static std::optional fromPartsOpt( + ContentAddressMethod method, Hash hash, StoreReferences refs) noexcept; + + ContentAddressMethod getMethod() const; + + Hash getHash() const; }; } diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc index af9a76f1e..b6dd83684 100644 --- a/src/libstore/daemon.cc +++ b/src/libstore/daemon.cc @@ -263,7 +263,7 @@ static std::vector readDerivedPaths(Store & store, unsigned int cli { std::vector reqs; if (GET_PROTOCOL_MINOR(clientVersion) >= 30) { - reqs = worker_proto::read(store, from, Phantom> {}); + reqs = WorkerProto>::read(store, from); } else { for (auto & s : readStrings(from)) reqs.push_back(parsePathWithOutputs(store, s).toDerivedPath()); @@ -287,7 +287,7 @@ static void performOp(TunnelLogger * logger, ref store, } case wopQueryValidPaths: { - auto paths = worker_proto::read(*store, from, Phantom {}); + auto paths = WorkerProto::read(*store, from); SubstituteFlag substitute = NoSubstitute; if (GET_PROTOCOL_MINOR(clientVersion) >= 27) { @@ -300,7 +300,7 @@ static void performOp(TunnelLogger * logger, ref store, } auto res = store->queryValidPaths(paths, substitute); logger->stopWork(); - worker_proto::write(*store, to, res); + workerProtoWrite(*store, to, res); break; } @@ -316,11 +316,11 @@ static void performOp(TunnelLogger * logger, ref store, } case wopQuerySubstitutablePaths: { - auto paths = worker_proto::read(*store, from, Phantom {}); + auto paths = WorkerProto::read(*store, from); logger->startWork(); auto res = store->querySubstitutablePaths(paths); logger->stopWork(); - worker_proto::write(*store, to, res); + workerProtoWrite(*store, to, res); break; } @@ -349,7 +349,7 @@ static void performOp(TunnelLogger * logger, ref store, paths = store->queryValidDerivers(path); else paths = store->queryDerivationOutputs(path); logger->stopWork(); - worker_proto::write(*store, to, paths); + workerProtoWrite(*store, to, paths); break; } @@ -367,7 +367,7 @@ static void performOp(TunnelLogger * logger, ref store, logger->startWork(); auto outputs = store->queryPartialDerivationOutputMap(path); logger->stopWork(); - worker_proto::write(*store, to, outputs); + workerProtoWrite(*store, to, outputs); break; } @@ -393,7 +393,7 @@ static void performOp(TunnelLogger * logger, ref store, if (GET_PROTOCOL_MINOR(clientVersion) >= 25) { auto name = readString(from); auto camStr = readString(from); - auto refs = worker_proto::read(*store, from, Phantom {}); + auto refs = WorkerProto::read(*store, from); bool repairBool; from >> repairBool; auto repair = RepairFlag{repairBool}; @@ -401,18 +401,22 @@ static void performOp(TunnelLogger * logger, ref store, logger->startWork(); auto pathInfo = [&]() { // NB: FramedSource must be out of scope before logger->stopWork(); - ContentAddressMethod contentAddressMethod = ContentAddressMethod::parse(camStr); + auto [contentAddressMethod, hashType_] = ContentAddressMethod::parse(camStr); + auto hashType = hashType_; // work around clang bug FramedSource source(from); // TODO this is essentially RemoteStore::addCAToStore. Move it up to Store. 
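Putting the pieces together, a sketch of the new two-part API that the daemon code below relies on: `ContentAddressMethod::parse()` now returns the method and the hash type separately, and `ContentAddress::fromParts()` pairs a method with an actual hash again. The hashed string is a dummy:

```cpp
#include "content-address.hh"
#include "hash.hh"

#include <cassert>
#include <iostream>

using namespace nix;

int main()
{
    // Method and hash type come back as separate parts now.
    auto [method, hashType] = ContentAddressMethod::parse("fixed:r:sha256");
    assert(hashType == htSHA256);

    // render() is the rough inverse of parse().
    std::cout << method.render(hashType) << "\n"; // "fixed:r:sha256"

    auto ca = ContentAddress::fromParts(
        method, hashString(hashType, "dummy contents"));
    assert(ca.getMethod() == method);
    std::cout << ca.printMethodAlgo() << "\n";    // "r:sha256"
}
```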
return std::visit(overloaded { - [&](const TextHashMethod &) { + [&](const TextIngestionMethod &) { + if (hashType != htSHA256) + throw UnimplementedError("When adding text-hashed data called '%s', only SHA-256 is supported but '%s' was given", + name, printHashType(hashType)); // We could stream this by changing Store std::string contents = source.drain(); auto path = store->addTextToStore(name, contents, refs, repair); return store->queryPathInfo(path); }, - [&](const FixedOutputHashMethod & fohm) { - auto path = store->addToStoreFromDump(source, name, fohm.fileIngestionMethod, fohm.hashType, repair, refs); + [&](const FileIngestionMethod & fim) { + auto path = store->addToStoreFromDump(source, name, fim, hashType, repair, refs); return store->queryPathInfo(path); }, }, contentAddressMethod.raw); @@ -491,7 +495,7 @@ static void performOp(TunnelLogger * logger, ref store, case wopAddTextToStore: { std::string suffix = readString(from); std::string s = readString(from); - auto refs = worker_proto::read(*store, from, Phantom {}); + auto refs = WorkerProto::read(*store, from); logger->startWork(); auto path = store->addTextToStore(suffix, s, refs, NoRepair); logger->stopWork(); @@ -563,7 +567,7 @@ static void performOp(TunnelLogger * logger, ref store, auto results = store->buildPathsWithResults(drvs, mode); logger->stopWork(); - worker_proto::write(*store, to, results); + workerProtoWrite(*store, to, results); break; } @@ -640,7 +644,7 @@ static void performOp(TunnelLogger * logger, ref store, DrvOutputs builtOutputs; for (auto & [output, realisation] : res.builtOutputs) builtOutputs.insert_or_assign(realisation.id, realisation); - worker_proto::write(*store, to, builtOutputs); + workerProtoWrite(*store, to, builtOutputs); } break; } @@ -705,7 +709,7 @@ static void performOp(TunnelLogger * logger, ref store, case wopCollectGarbage: { GCOptions options; options.action = (GCOptions::GCAction) readInt(from); - options.pathsToDelete = worker_proto::read(*store, from, Phantom {}); + options.pathsToDelete = WorkerProto::read(*store, from); from >> options.ignoreLiveness >> options.maxFreed; // obsolete fields readInt(from); @@ -775,7 +779,7 @@ static void performOp(TunnelLogger * logger, ref store, else { to << 1 << (i->second.deriver ? store->printStorePath(*i->second.deriver) : ""); - worker_proto::write(*store, to, i->second.references); + workerProtoWrite(*store, to, i->second.references); to << i->second.downloadSize << i->second.narSize; } @@ -786,11 +790,11 @@ static void performOp(TunnelLogger * logger, ref store, SubstitutablePathInfos infos; StorePathCAMap pathsMap = {}; if (GET_PROTOCOL_MINOR(clientVersion) < 22) { - auto paths = worker_proto::read(*store, from, Phantom {}); + auto paths = WorkerProto::read(*store, from); for (auto & path : paths) pathsMap.emplace(path, std::nullopt); } else - pathsMap = worker_proto::read(*store, from, Phantom {}); + pathsMap = WorkerProto::read(*store, from); logger->startWork(); store->querySubstitutablePathInfos(pathsMap, infos); logger->stopWork(); @@ -798,7 +802,7 @@ static void performOp(TunnelLogger * logger, ref store, for (auto & i : infos) { to << store->printStorePath(i.first) << (i.second.deriver ? 
store->printStorePath(*i.second.deriver) : ""); - worker_proto::write(*store, to, i.second.references); + workerProtoWrite(*store, to, i.second.references); to << i.second.downloadSize << i.second.narSize; } break; @@ -808,7 +812,7 @@ static void performOp(TunnelLogger * logger, ref store, logger->startWork(); auto paths = store->queryAllValidPaths(); logger->stopWork(); - worker_proto::write(*store, to, paths); + workerProtoWrite(*store, to, paths); break; } @@ -880,7 +884,7 @@ static void performOp(TunnelLogger * logger, ref store, ValidPathInfo info { path, narHash }; if (deriver != "") info.deriver = store->parseStorePath(deriver); - info.references = worker_proto::read(*store, from, Phantom {}); + info.references = WorkerProto::read(*store, from); from >> info.registrationTime >> info.narSize >> info.ultimate; info.sigs = readStrings(from); info.ca = ContentAddress::parseOpt(readString(from)); @@ -931,9 +935,9 @@ static void performOp(TunnelLogger * logger, ref store, uint64_t downloadSize, narSize; store->queryMissing(targets, willBuild, willSubstitute, unknown, downloadSize, narSize); logger->stopWork(); - worker_proto::write(*store, to, willBuild); - worker_proto::write(*store, to, willSubstitute); - worker_proto::write(*store, to, unknown); + workerProtoWrite(*store, to, willBuild); + workerProtoWrite(*store, to, willSubstitute); + workerProtoWrite(*store, to, unknown); to << downloadSize << narSize; break; } @@ -946,7 +950,7 @@ static void performOp(TunnelLogger * logger, ref store, store->registerDrvOutput(Realisation{ .id = outputId, .outPath = outputPath}); } else { - auto realisation = worker_proto::read(*store, from, Phantom()); + auto realisation = WorkerProto::read(*store, from); store->registerDrvOutput(realisation); } logger->stopWork(); @@ -961,11 +965,11 @@ static void performOp(TunnelLogger * logger, ref store, if (GET_PROTOCOL_MINOR(clientVersion) < 31) { std::set outPaths; if (info) outPaths.insert(info->outPath); - worker_proto::write(*store, to, outPaths); + workerProtoWrite(*store, to, outPaths); } else { std::set realisations; if (info) realisations.insert(*info); - worker_proto::write(*store, to, realisations); + workerProtoWrite(*store, to, realisations); } break; } @@ -1041,7 +1045,7 @@ void processConnection( auto temp = trusted ? store->isTrustedClient() : std::optional { NotTrusted }; - worker_proto::write(*store, to, temp); + workerProtoWrite(*store, to, temp); } /* Send startup error messages to the client. 
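For context, a rough sketch of the renamed protocol helpers used throughout the daemon code above. This is an illustration only: the `dummy://` store, the `StringSink`/`StringSource` plumbing, and the exact `WorkerProto<T>` spelling are assumptions about surrounding Nix APIs, not part of this patch.

```cpp
#include "store-api.hh"
#include "worker-protocol.hh"
#include "serialise.hh"

#include <cassert>

using namespace nix;

int main()
{
    auto store = openStore("dummy://");

    StorePathSet paths {
        StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-example" },
    };

    // Formerly worker_proto::write(*store, sink, paths).
    StringSink sink;
    workerProtoWrite(*store, sink, paths);

    // Formerly worker_proto::read(*store, source, Phantom<StorePathSet> {}).
    StringSource source { sink.s };
    auto paths2 = WorkerProto<StorePathSet>::read(*store, source);
    assert(paths2 == paths);
}
```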
*/ diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc index 15f3908ed..ccb165d68 100644 --- a/src/libstore/derivations.cc +++ b/src/libstore/derivations.cc @@ -1,7 +1,9 @@ #include "derivations.hh" +#include "downstream-placeholder.hh" #include "store-api.hh" #include "globals.hh" #include "util.hh" +#include "split.hh" #include "worker-protocol.hh" #include "fs-accessor.hh" #include @@ -35,9 +37,9 @@ std::optional DerivationOutput::path(const Store & store, std::string StorePath DerivationOutput::CAFixed::path(const Store & store, std::string_view drvName, std::string_view outputName) const { - return store.makeFixedOutputPath( + return store.makeFixedOutputPathFromCA( outputPathName(drvName, outputName), - { hash, {} }); + ContentAddressWithReferences::withoutRefs(ca)); } @@ -211,29 +213,27 @@ static StringSet parseStrings(std::istream & str, bool arePaths) static DerivationOutput parseDerivationOutput(const Store & store, - std::string_view pathS, std::string_view hashAlgo, std::string_view hash) + std::string_view pathS, std::string_view hashAlgo, std::string_view hashS) { if (hashAlgo != "") { - auto method = FileIngestionMethod::Flat; - if (hashAlgo.substr(0, 2) == "r:") { - method = FileIngestionMethod::Recursive; - hashAlgo = hashAlgo.substr(2); - } + ContentAddressMethod method = ContentAddressMethod::parsePrefix(hashAlgo); + if (method == TextIngestionMethod {}) + experimentalFeatureSettings.require(Xp::DynamicDerivations); const auto hashType = parseHashType(hashAlgo); - if (hash == "impure") { + if (hashS == "impure") { experimentalFeatureSettings.require(Xp::ImpureDerivations); assert(pathS == ""); return DerivationOutput::Impure { .method = std::move(method), .hashType = std::move(hashType), }; - } else if (hash != "") { + } else if (hashS != "") { validatePath(pathS); + auto hash = Hash::parseNonSRIUnprefixed(hashS, hashType); return DerivationOutput::CAFixed { - .hash = FixedOutputHash { - .method = std::move(method), - .hash = Hash::parseNonSRIUnprefixed(hash, hashType), - }, + .ca = ContentAddress::fromParts( + std::move(method), + std::move(hash)), }; } else { experimentalFeatureSettings.require(Xp::CaDerivations); @@ -393,12 +393,12 @@ std::string Derivation::unparse(const Store & store, bool maskOutputs, }, [&](const DerivationOutput::CAFixed & dof) { s += ','; printUnquotedString(s, maskOutputs ? 
"" : store.printStorePath(dof.path(store, name, i.first))); - s += ','; printUnquotedString(s, dof.hash.printMethodAlgo()); - s += ','; printUnquotedString(s, dof.hash.hash.to_string(Base16, false)); + s += ','; printUnquotedString(s, dof.ca.printMethodAlgo()); + s += ','; printUnquotedString(s, dof.ca.getHash().to_string(Base16, false)); }, [&](const DerivationOutput::CAFloating & dof) { s += ','; printUnquotedString(s, ""); - s += ','; printUnquotedString(s, makeFileIngestionPrefix(dof.method) + printHashType(dof.hashType)); + s += ','; printUnquotedString(s, dof.method.renderPrefix() + printHashType(dof.hashType)); s += ','; printUnquotedString(s, ""); }, [&](const DerivationOutput::Deferred &) { @@ -409,7 +409,7 @@ std::string Derivation::unparse(const Store & store, bool maskOutputs, [&](const DerivationOutputImpure & doi) { // FIXME s += ','; printUnquotedString(s, ""); - s += ','; printUnquotedString(s, makeFileIngestionPrefix(doi.method) + printHashType(doi.hashType)); + s += ','; printUnquotedString(s, doi.method.renderPrefix() + printHashType(doi.hashType)); s += ','; printUnquotedString(s, "impure"); } }, i.second.raw()); @@ -626,8 +626,8 @@ DrvHash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOut for (const auto & i : drv.outputs) { auto & dof = std::get(i.second.raw()); auto hash = hashString(htSHA256, "fixed:out:" - + dof.hash.printMethodAlgo() + ":" - + dof.hash.hash.to_string(Base16, false) + ":" + + dof.ca.printMethodAlgo() + ":" + + dof.ca.getHash().to_string(Base16, false) + ":" + store.printStorePath(dof.path(store, drv.name, i.first))); outputHashes.insert_or_assign(i.first, std::move(hash)); } @@ -749,7 +749,7 @@ Source & readDerivation(Source & in, const Store & store, BasicDerivation & drv, drv.outputs.emplace(std::move(name), std::move(output)); } - drv.inputSrcs = worker_proto::read(store, in, Phantom {}); + drv.inputSrcs = WorkerProto::read(store, in); in >> drv.platform >> drv.builder; drv.args = readStrings(in); @@ -777,12 +777,12 @@ void writeDerivation(Sink & out, const Store & store, const BasicDerivation & dr }, [&](const DerivationOutput::CAFixed & dof) { out << store.printStorePath(dof.path(store, drv.name, i.first)) - << dof.hash.printMethodAlgo() - << dof.hash.hash.to_string(Base16, false); + << dof.ca.printMethodAlgo() + << dof.ca.getHash().to_string(Base16, false); }, [&](const DerivationOutput::CAFloating & dof) { out << "" - << (makeFileIngestionPrefix(dof.method) + printHashType(dof.hashType)) + << (dof.method.renderPrefix() + printHashType(dof.hashType)) << ""; }, [&](const DerivationOutput::Deferred &) { @@ -792,12 +792,12 @@ void writeDerivation(Sink & out, const Store & store, const BasicDerivation & dr }, [&](const DerivationOutput::Impure & doi) { out << "" - << (makeFileIngestionPrefix(doi.method) + printHashType(doi.hashType)) + << (doi.method.renderPrefix() + printHashType(doi.hashType)) << "impure"; }, }, i.second.raw()); } - worker_proto::write(store, out, drv.inputSrcs); + workerProtoWrite(store, out, drv.inputSrcs); out << drv.platform << drv.builder << drv.args; out << drv.env.size(); for (auto & i : drv.env) @@ -811,13 +811,7 @@ std::string hashPlaceholder(const std::string_view outputName) return "/" + hashString(htSHA256, concatStrings("nix-output:", outputName)).to_string(Base32, false); } -std::string downstreamPlaceholder(const Store & store, const StorePath & drvPath, std::string_view outputName) -{ - auto drvNameWithExtension = drvPath.name(); - auto drvName = drvNameWithExtension.substr(0, 
drvNameWithExtension.size() - 4); - auto clearText = "nix-upstream-output:" + std::string { drvPath.hashPart() } + ":" + outputPathName(drvName, outputName); - return "/" + hashString(htSHA256, clearText).to_string(Base32, false); -} + static void rewriteDerivation(Store & store, BasicDerivation & drv, const StringMap & rewrites) @@ -881,7 +875,7 @@ std::optional Derivation::tryResolve( for (auto & outputName : inputOutputs) { if (auto actualPath = get(inputDrvOutputs, { inputDrv, outputName })) { inputRewrites.emplace( - downstreamPlaceholder(store, inputDrv, outputName), + DownstreamPlaceholder::unknownCaOutput(inputDrv, outputName).render(), store.printStorePath(*actualPath)); resolved.inputSrcs.insert(*actualPath); } else { @@ -942,7 +936,7 @@ void Derivation::checkInvariants(Store & store, const StorePath & drvPath) const envHasRightPath(doia.path, i.first); }, [&](const DerivationOutput::CAFixed & dof) { - StorePath path = store.makeFixedOutputPath(drvName, { dof.hash, {} }); + auto path = dof.path(store, drvName, i.first); envHasRightPath(path, i.first); }, [&](const DerivationOutput::CAFloating &) { @@ -971,15 +965,16 @@ nlohmann::json DerivationOutput::toJSON( }, [&](const DerivationOutput::CAFixed & dof) { res["path"] = store.printStorePath(dof.path(store, drvName, outputName)); - res["hashAlgo"] = dof.hash.printMethodAlgo(); - res["hash"] = dof.hash.hash.to_string(Base16, false); + res["hashAlgo"] = dof.ca.printMethodAlgo(); + res["hash"] = dof.ca.getHash().to_string(Base16, false); + // FIXME print refs? }, [&](const DerivationOutput::CAFloating & dof) { - res["hashAlgo"] = makeFileIngestionPrefix(dof.method) + printHashType(dof.hashType); + res["hashAlgo"] = dof.method.renderPrefix() + printHashType(dof.hashType); }, [&](const DerivationOutput::Deferred &) {}, [&](const DerivationOutput::Impure & doi) { - res["hashAlgo"] = makeFileIngestionPrefix(doi.method) + printHashType(doi.hashType); + res["hashAlgo"] = doi.method.renderPrefix() + printHashType(doi.hashType); res["impure"] = true; }, }, raw()); @@ -998,15 +993,15 @@ DerivationOutput DerivationOutput::fromJSON( for (const auto & [key, _] : json) keys.insert(key); - auto methodAlgo = [&]() -> std::pair { + auto methodAlgo = [&]() -> std::pair { std::string hashAlgo = json["hashAlgo"]; - auto method = FileIngestionMethod::Flat; - if (hashAlgo.substr(0, 2) == "r:") { - method = FileIngestionMethod::Recursive; - hashAlgo = hashAlgo.substr(2); - } - auto hashType = parseHashType(hashAlgo); - return { method, hashType }; + // remaining to parse, will be mutated by parsers + std::string_view s = hashAlgo; + ContentAddressMethod method = ContentAddressMethod::parsePrefix(s); + if (method == TextIngestionMethod {}) + xpSettings.require(Xp::DynamicDerivations); + auto hashType = parseHashType(s); + return { std::move(method), std::move(hashType) }; }; if (keys == (std::set { "path" })) { @@ -1018,10 +1013,9 @@ DerivationOutput DerivationOutput::fromJSON( else if (keys == (std::set { "path", "hashAlgo", "hash" })) { auto [method, hashType] = methodAlgo(); auto dof = DerivationOutput::CAFixed { - .hash = { - .method = method, - .hash = Hash::parseNonSRIUnprefixed((std::string) json["hash"], hashType), - }, + .ca = ContentAddress::fromParts( + std::move(method), + Hash::parseNonSRIUnprefixed((std::string) json["hash"], hashType)), }; if (dof.path(store, drvName, outputName) != store.parseStorePath((std::string) json["path"])) throw Error("Path doesn't match derivation output"); @@ -1032,8 +1026,8 @@ DerivationOutput 
DerivationOutput::fromJSON( xpSettings.require(Xp::CaDerivations); auto [method, hashType] = methodAlgo(); return DerivationOutput::CAFloating { - .method = method, - .hashType = hashType, + .method = std::move(method), + .hashType = std::move(hashType), }; } @@ -1045,7 +1039,7 @@ DerivationOutput DerivationOutput::fromJSON( xpSettings.require(Xp::ImpureDerivations); auto [method, hashType] = methodAlgo(); return DerivationOutput::Impure { - .method = method, + .method = std::move(method), .hashType = hashType, }; } diff --git a/src/libstore/derivations.hh b/src/libstore/derivations.hh index d00b23b6d..fa79f77fd 100644 --- a/src/libstore/derivations.hh +++ b/src/libstore/derivations.hh @@ -6,6 +6,7 @@ #include "hash.hh" #include "content-address.hh" #include "repair-flag.hh" +#include "derived-path.hh" #include "sync.hh" #include "comparator.hh" @@ -36,9 +37,11 @@ struct DerivationOutputInputAddressed struct DerivationOutputCAFixed { /** - * hash used for expected hash computation + * Method and hash used for expected hash computation. + * + * References are not allowed by fiat. */ - FixedOutputHash hash; + ContentAddress ca; /** * Return the \ref StorePath "store path" corresponding to this output @@ -48,7 +51,7 @@ struct DerivationOutputCAFixed */ StorePath path(const Store & store, std::string_view drvName, std::string_view outputName) const; - GENERATE_CMP(DerivationOutputCAFixed, me->hash); + GENERATE_CMP(DerivationOutputCAFixed, me->ca); }; /** @@ -61,7 +64,7 @@ struct DerivationOutputCAFloating /** * How the file system objects will be serialized for hashing */ - FileIngestionMethod method; + ContentAddressMethod method; /** * How the serialization will be hashed @@ -88,7 +91,7 @@ struct DerivationOutputImpure /** * How the file system objects will be serialized for hashing */ - FileIngestionMethod method; + ContentAddressMethod method; /** * How the serialization will be hashed @@ -343,12 +346,14 @@ struct Derivation : BasicDerivation Store & store, const std::map, StorePath> & inputDrvOutputs) const; - /* Check that the derivation is valid and does not present any - illegal states. - - This is mainly a matter of checking the outputs, where our C++ - representation supports all sorts of combinations we do not yet - allow. */ + /** + * Check that the derivation is valid and does not present any + * illegal states. + * + * This is mainly a matter of checking the outputs, where our C++ + * representation supports all sorts of combinations we do not yet + * allow. + */ void checkInvariants(Store & store, const StorePath & drvPath) const; Derivation() = default; @@ -491,17 +496,6 @@ void writeDerivation(Sink & out, const Store & store, const BasicDerivation & dr */ std::string hashPlaceholder(const std::string_view outputName); -/** - * This creates an opaque and almost certainly unique string - * deterministically from a derivation path and output name. - * - * It is used as a placeholder to allow derivations to refer to - * content-addressed paths whose content --- and thus the path - * themselves --- isn't yet known. This occurs when a derivation has a - * dependency which is a CA derivation. 
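A sketch of what the reworked `DerivationOutputCAFixed` looks like from the caller's side: the `ca` field replaces the old `hash` (a `FixedOutputHash`), and the accessor pair replaces direct member access. The hash value is a dummy and the snippet is illustrative, not taken from the patch:

```cpp
#include "derivations.hh"

#include <iostream>

using namespace nix;

int main()
{
    DerivationOutput out = DerivationOutput::CAFixed {
        .ca = ContentAddress::fromParts(
            FileIngestionMethod::Recursive,
            hashString(htSHA256, "dummy fixed-output contents")),
    };

    auto & dof = std::get<DerivationOutput::CAFixed>(out.raw());

    // These replace the former dof.hash.printMethodAlgo() and
    // dof.hash.hash accesses seen throughout the patch.
    std::cout << dof.ca.printMethodAlgo() << "\n";                   // "r:sha256"
    std::cout << dof.ca.getHash().to_string(Base16, false) << "\n";
}
```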
- */ -std::string downstreamPlaceholder(const Store & store, const StorePath & drvPath, std::string_view outputName); - extern const Hash impureOutputHash; } diff --git a/src/libstore/downstream-placeholder.cc b/src/libstore/downstream-placeholder.cc new file mode 100644 index 000000000..1752738f2 --- /dev/null +++ b/src/libstore/downstream-placeholder.cc @@ -0,0 +1,39 @@ +#include "downstream-placeholder.hh" +#include "derivations.hh" + +namespace nix { + +std::string DownstreamPlaceholder::render() const +{ + return "/" + hash.to_string(Base32, false); +} + + +DownstreamPlaceholder DownstreamPlaceholder::unknownCaOutput( + const StorePath & drvPath, + std::string_view outputName) +{ + auto drvNameWithExtension = drvPath.name(); + auto drvName = drvNameWithExtension.substr(0, drvNameWithExtension.size() - 4); + auto clearText = "nix-upstream-output:" + std::string { drvPath.hashPart() } + ":" + outputPathName(drvName, outputName); + return DownstreamPlaceholder { + hashString(htSHA256, clearText) + }; +} + +DownstreamPlaceholder DownstreamPlaceholder::unknownDerivation( + const DownstreamPlaceholder & placeholder, + std::string_view outputName, + const ExperimentalFeatureSettings & xpSettings) +{ + xpSettings.require(Xp::DynamicDerivations); + auto compressed = compressHash(placeholder.hash, 20); + auto clearText = "nix-computed-output:" + + compressed.to_string(Base32, false) + + ":" + std::string { outputName }; + return DownstreamPlaceholder { + hashString(htSHA256, clearText) + }; +} + +} diff --git a/src/libstore/downstream-placeholder.hh b/src/libstore/downstream-placeholder.hh new file mode 100644 index 000000000..f0c0dee77 --- /dev/null +++ b/src/libstore/downstream-placeholder.hh @@ -0,0 +1,75 @@ +#pragma once +///@file + +#include "hash.hh" +#include "path.hh" + +namespace nix { + +/** + * Downstream Placeholders are opaque and almost certainly unique values + * used to allow derivations to refer to store objects which are yet to + * be built and for we do not yet have store paths for. + * + * They correspond to `DerivedPaths` that are not `DerivedPath::Opaque`, + * except for the cases involving input addressing or fixed outputs + * where we do know a store path for the derivation output in advance. + * + * Unlike `DerivationPath`, however, `DownstreamPlaceholder` is + * purposefully opaque and obfuscated. This is so they are hard to + * create by accident, and so substituting them (once we know what the + * path to store object is) is unlikely to capture other stuff it + * shouldn't. + * + * We use them with `Derivation`: the `render()` method is called to + * render an opaque string which can be used in the derivation, and the + * resolving logic can substitute those strings for store paths when + * resolving `Derivation.inputDrvs` to `BasicDerivation.inputSrcs`. + */ +class DownstreamPlaceholder +{ + /** + * `DownstreamPlaceholder` is just a newtype of `Hash`. + * This its only field. + */ + Hash hash; + + /** + * Newtype constructor + */ + DownstreamPlaceholder(Hash hash) : hash(hash) { } + +public: + /** + * This creates an opaque and almost certainly unique string + * deterministically from the placeholder. + */ + std::string render() const; + + /** + * Create a placeholder for an unknown output of a content-addressed + * derivation. + * + * The derivation itself is known (we have a store path for it), but + * the output doesn't yet have a known store path. 
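The two constructors in the new `downstream-placeholder.cc` differ only in the clear-text string they hash before rendering it as `/` plus a base-32 SHA-256. A stand-alone sketch of those preimages (hashing omitted), assuming the usual output-path naming rule where the default `out` output uses the bare derivation name; the concrete hash part and name come from the unit tests added later in this patch:

```cpp
#include <iostream>
#include <string>

// Assumed naming rule for outputPathName(): "<drvName>" for the default
// "out" output, otherwise "<drvName>-<outputName>".
static std::string outputPathName(const std::string & drvName, const std::string & outputName)
{
    return outputName == "out" ? drvName : drvName + "-" + outputName;
}

int main()
{
    // Placeholder for an unknown output of a known CA derivation:
    std::string drvHashPart = "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q"; // hash part of foo.drv
    std::string drvName = "foo";                                  // "foo.drv" minus ".drv"
    std::string caPreimage =
        "nix-upstream-output:" + drvHashPart + ":" + outputPathName(drvName, "out");
    std::cout << caPreimage << "\n";

    // Placeholder for an output of a derivation that is itself only a
    // placeholder (dynamic derivations): the outer preimage embeds the
    // inner placeholder's hash, compressed to 20 bytes and base-32 encoded.
    std::string compressedInnerHash = "..."; // compressHash(innerHash, 20), base-32
    std::string dynPreimage = "nix-computed-output:" + compressedInnerHash + ":out";
    std::cout << dynPreimage << "\n";
}
```

Compressing the inner hash to 20 bytes keeps nested placeholders the same length as a store-path hash part, so the rendered string stays fixed-size no matter how deeply derivations are nested.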
+ */ + static DownstreamPlaceholder unknownCaOutput( + const StorePath & drvPath, + std::string_view outputName); + + /** + * Create a placehold for the output of an unknown derivation. + * + * The derivation is not yet known because it is a dynamic + * derivaiton --- it is itself an output of another derivation --- + * and we just have (another) placeholder for it. + * + * @param xpSettings Stop-gap to avoid globals during unit tests. + */ + static DownstreamPlaceholder unknownDerivation( + const DownstreamPlaceholder & drvPlaceholder, + std::string_view outputName, + const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); +}; + +} diff --git a/src/libstore/export-import.cc b/src/libstore/export-import.cc index 4eb838b68..5ea263a86 100644 --- a/src/libstore/export-import.cc +++ b/src/libstore/export-import.cc @@ -45,7 +45,7 @@ void Store::exportPath(const StorePath & path, Sink & sink) teeSink << exportMagic << printStorePath(path); - worker_proto::write(*this, teeSink, info->references); + workerProtoWrite(*this, teeSink, info->references); teeSink << (info->deriver ? printStorePath(*info->deriver) : "") << 0; @@ -73,7 +73,7 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs) //Activity act(*logger, lvlInfo, "importing path '%s'", info.path); - auto references = worker_proto::read(*this, source, Phantom {}); + auto references = WorkerProto::read(*this, source); auto deriver = readString(source); auto narHash = hashString(htSHA256, saved.s); diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc index 4c66d08ee..32e9a6ea9 100644 --- a/src/libstore/globals.cc +++ b/src/libstore/globals.cc @@ -183,7 +183,7 @@ bool Settings::isWSL1() Path Settings::getDefaultSSLCertFile() { for (auto & fn : {"/etc/ssl/certs/ca-certificates.crt", "/nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt"}) - if (pathExists(fn)) return fn; + if (pathAccessible(fn)) return fn; return ""; } diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index 609cf53b8..940cb48f5 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -159,6 +159,15 @@ public: )", {"build-max-jobs"}}; + Setting maxSubstitutionJobs{ + this, 16, "max-substitution-jobs", + R"( + This option defines the maximum number of substitution jobs that Nix + will try to run in parallel. The default is `16`. The minimum value + one can choose is `1` and lower values will be interpreted as `1`. + )", + {"substitution-max-jobs"}}; + Setting buildCores{ this, getDefaultCores(), @@ -887,12 +896,11 @@ public: this, {}, "hashed-mirrors", R"( A list of web servers used by `builtins.fetchurl` to obtain files by - hash. The default is `http://tarballs.nixos.org/`. Given a hash type - *ht* and a base-16 hash *h*, Nix will try to download the file from - *hashed-mirror*/*ht*/*h*. This allows files to be downloaded even if - they have disappeared from their original URI. For example, given - the default mirror `http://tarballs.nixos.org/`, when building the - derivation + hash. Given a hash type *ht* and a base-16 hash *h*, Nix will try to + download the file from *hashed-mirror*/*ht*/*h*. This allows files to + be downloaded even if they have disappeared from their original URI. + For example, given an example mirror `http://tarballs.nixos.org/`, + when building the derivation ```nix builtins.fetchurl { @@ -972,7 +980,7 @@ public: this, false, "use-xdg-base-directories", R"( If set to `true`, Nix will conform to the [XDG Base Directory Specification] for files in `$HOME`. 
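`getDefaultSSLCertFile` above now probes candidate certificate bundles with `pathAccessible`, a helper added to `util.cc` later in this patch that treats a permission error as "not there" instead of aborting. A rough stand-alone approximation using `std::filesystem`, which reports errors through an `error_code` rather than throwing (the real helper only swallows `EPERM` and rethrows everything else):

```cpp
#include <filesystem>
#include <iostream>
#include <string>
#include <system_error>
#include <vector>

// Approximation of the new lookup: a candidate is used only if it can be
// checked at all; an unreadable parent directory simply yields "not usable".
static std::string defaultSSLCertFile(const std::vector<std::string> & candidates)
{
    for (auto & fn : candidates) {
        std::error_code ec;
        if (std::filesystem::exists(fn, ec))
            return fn;
    }
    return "";
}

int main()
{
    auto file = defaultSSLCertFile({
        "/etc/ssl/certs/ca-certificates.crt",
        "/nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt",
    });
    std::cout << (file.empty() ? "no CA bundle found" : file) << "\n";
}
```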
- The environment variables used to implement this are documented in the [Environment Variables section](@docroot@/installation/env-variables.md). + The environment variables used to implement this are documented in the [Environment Variables section](@docroot@/command-ref/env-common.md). [XDG Base Directory Specification]: https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html @@ -986,6 +994,18 @@ public: | `~/.nix-profile` | `$XDG_STATE_HOME/nix/profile` | | `~/.nix-defexpr` | `$XDG_STATE_HOME/nix/defexpr` | | `~/.nix-channels` | `$XDG_STATE_HOME/nix/channels` | + + If you already have Nix installed and are using [profiles](@docroot@/package-management/profiles.md) or [channels](@docroot@/package-management/channels.md), you should migrate manually when you enable this option. + If `$XDG_STATE_HOME` is not set, use `$HOME/.local/state/nix` instead of `$XDG_STATE_HOME/nix`. + This can be achieved with the following shell commands: + + ```sh + nix_state_home=${XDG_STATE_HOME-$HOME/.local/state}/nix + mkdir -p $nix_state_home + mv $HOME/.nix-profile $nix_state_home/profile + mv $HOME/.nix-defexpr $nix_state_home/defexpr + mv $HOME/.nix-channels $nix_state_home/channels + ``` )" }; }; diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index 2012584e0..2b7bebe9d 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -146,7 +146,7 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor auto deriver = readString(conn->from); if (deriver != "") info->deriver = parseStorePath(deriver); - info->references = worker_proto::read(*this, conn->from, Phantom {}); + info->references = WorkerProto::read(*this, conn->from); readLongLong(conn->from); // download size info->narSize = readLongLong(conn->from); @@ -180,7 +180,7 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor << printStorePath(info.path) << (info.deriver ? printStorePath(*info.deriver) : "") << info.narHash.to_string(Base16, false); - worker_proto::write(*this, conn->to, info.references); + workerProtoWrite(*this, conn->to, info.references); conn->to << info.registrationTime << info.narSize @@ -209,7 +209,7 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor conn->to << exportMagic << printStorePath(info.path); - worker_proto::write(*this, conn->to, info.references); + workerProtoWrite(*this, conn->to, info.references); conn->to << (info.deriver ? printStorePath(*info.deriver) : "") << 0 @@ -294,7 +294,7 @@ public: if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 3) conn->from >> status.timesBuilt >> status.isNonDeterministic >> status.startTime >> status.stopTime; if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 6) { - auto builtOutputs = worker_proto::read(*this, conn->from, Phantom {}); + auto builtOutputs = WorkerProto::read(*this, conn->from); for (auto && [output, realisation] : builtOutputs) status.builtOutputs.insert_or_assign( std::move(output.outputName), @@ -344,6 +344,17 @@ public: virtual ref getFSAccessor() override { unsupported("getFSAccessor"); } + /** + * The default instance would schedule the work on the client side, but + * for consistency with `buildPaths` and `buildDerivation` it should happen + * on the remote side. + * + * We make this fail for now so we can add implement this properly later + * without it being a breaking change. 
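The comment above explains why `repairPath` now fails loudly on the SSH-backed stores instead of inheriting `Store`'s client-side default. A toy model of that pattern, with plain `std::string` paths and a heavily simplified base class standing in for `Store`:

```cpp
#include <iostream>
#include <stdexcept>
#include <string>

// Toy model: the base class provides a (client-side) default, while the
// remote store overrides it to fail. Failing now reserves the option of
// later doing the repair on the remote side without a silent behaviour change.
struct Store
{
    virtual ~Store() = default;
    virtual void repairPath(const std::string & path)
    {
        std::cout << "repairing " << path << " on the client (default implementation)\n";
    }
};

struct LegacySSHStore : Store
{
    void repairPath(const std::string & path) override
    {
        throw std::runtime_error("operation 'repairPath' is not supported by this store");
    }
};

int main()
{
    Store local;
    local.repairPath("/nix/store/example");

    LegacySSHStore remote;
    try {
        remote.repairPath("/nix/store/example");
    } catch (std::exception & e) {
        std::cout << "caught: " << e.what() << "\n";
    }
}
```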
+ */ + void repairPath(const StorePath & path) override + { unsupported("repairPath"); } + void computeFSClosure(const StorePathSet & paths, StorePathSet & out, bool flipDirection = false, bool includeOutputs = false, bool includeDerivers = false) override @@ -358,10 +369,10 @@ public: conn->to << cmdQueryClosure << includeOutputs; - worker_proto::write(*this, conn->to, paths); + workerProtoWrite(*this, conn->to, paths); conn->to.flush(); - for (auto & i : worker_proto::read(*this, conn->from, Phantom {})) + for (auto & i : WorkerProto::read(*this, conn->from)) out.insert(i); } @@ -374,10 +385,10 @@ public: << cmdQueryValidPaths << false // lock << maybeSubstitute; - worker_proto::write(*this, conn->to, paths); + workerProtoWrite(*this, conn->to, paths); conn->to.flush(); - return worker_proto::read(*this, conn->from, Phantom {}); + return WorkerProto::read(*this, conn->from); } void connect() override diff --git a/src/libstore/local-store.hh b/src/libstore/local-store.hh index 55add18dd..70debad38 100644 --- a/src/libstore/local-store.hh +++ b/src/libstore/local-store.hh @@ -240,8 +240,6 @@ public: void vacuumDB(); - void repairPath(const StorePath & path) override; - void addSignatures(const StorePath & storePath, const StringSet & sigs) override; /** diff --git a/src/libstore/local.mk b/src/libstore/local.mk index e5e24501e..0be0bf310 100644 --- a/src/libstore/local.mk +++ b/src/libstore/local.mk @@ -57,12 +57,6 @@ $(d)/local-store.cc: $(d)/schema.sql.gen.hh $(d)/ca-specific-schema.sql.gen.hh $(d)/build.cc: -%.gen.hh: % - @echo 'R"foo(' >> $@.tmp - $(trace-gen) cat $< >> $@.tmp - @echo ')foo"' >> $@.tmp - @mv $@.tmp $@ - clean-files += $(d)/schema.sql.gen.hh $(d)/ca-specific-schema.sql.gen.hh $(eval $(call install-file-in, $(d)/nix-store.pc, $(libdir)/pkgconfig, 0644)) diff --git a/src/libstore/misc.cc b/src/libstore/misc.cc index 89148d415..50336c779 100644 --- a/src/libstore/misc.cc +++ b/src/libstore/misc.cc @@ -83,14 +83,15 @@ void Store::computeFSClosure(const StorePath & startPath, } -std::optional getDerivationCA(const BasicDerivation & drv) +const ContentAddress * getDerivationCA(const BasicDerivation & drv) { auto out = drv.outputs.find("out"); - if (out != drv.outputs.end()) { - if (const auto * v = std::get_if(&out->second.raw())) - return v->hash; + if (out == drv.outputs.end()) + return nullptr; + if (auto dof = std::get_if(&out->second)) { + return &dof->ca; } - return std::nullopt; + return nullptr; } void Store::queryMissing(const std::vector & targets, @@ -140,7 +141,13 @@ void Store::queryMissing(const std::vector & targets, if (drvState_->lock()->done) return; SubstitutablePathInfos infos; - querySubstitutablePathInfos({{outPath, getDerivationCA(*drv)}}, infos); + auto * cap = getDerivationCA(*drv); + querySubstitutablePathInfos({ + { + outPath, + cap ? 
std::optional { *cap } : std::nullopt, + }, + }, infos); if (infos.empty()) { drvState_->lock()->done = true; diff --git a/src/libstore/path-info.cc b/src/libstore/path-info.cc index e60d7abe0..97b72faa3 100644 --- a/src/libstore/path-info.cc +++ b/src/libstore/path-info.cc @@ -1,5 +1,6 @@ #include "path-info.hh" #include "worker-protocol.hh" +#include "store-api.hh" namespace nix { @@ -131,7 +132,7 @@ ValidPathInfo ValidPathInfo::read(Source & source, const Store & store, unsigned auto narHash = Hash::parseAny(readString(source), htSHA256); ValidPathInfo info(path, narHash); if (deriver != "") info.deriver = store.parseStorePath(deriver); - info.references = worker_proto::read(store, source, Phantom {}); + info.references = WorkerProto::read(store, source); source >> info.registrationTime >> info.narSize; if (format >= 16) { source >> info.ultimate; @@ -152,7 +153,7 @@ void ValidPathInfo::write( sink << store.printStorePath(path); sink << (deriver ? store.printStorePath(*deriver) : "") << narHash.to_string(Base16, false); - worker_proto::write(store, sink, references); + workerProtoWrite(store, sink, references); sink << registrationTime << narSize; if (format >= 16) { sink << ultimate diff --git a/src/libstore/path.cc b/src/libstore/path.cc index 46be54281..552e83114 100644 --- a/src/libstore/path.cc +++ b/src/libstore/path.cc @@ -9,8 +9,8 @@ static void checkName(std::string_view path, std::string_view name) if (name.empty()) throw BadStorePath("store path '%s' has an empty name", path); if (name.size() > StorePath::MaxPathLen) - throw BadStorePath("store path '%s' has a name longer than '%d characters", - StorePath::MaxPathLen, path); + throw BadStorePath("store path '%s' has a name longer than %d characters", + path, StorePath::MaxPathLen); // See nameRegexStr for the definition for (auto c : name) if (!((c >= '0' && c <= '9') diff --git a/src/libstore/realisation.cc b/src/libstore/realisation.cc index d63ec5ea2..93ddb5b20 100644 --- a/src/libstore/realisation.cc +++ b/src/libstore/realisation.cc @@ -136,6 +136,19 @@ size_t Realisation::checkSignatures(const PublicKeys & publicKeys) const return good; } + +SingleDrvOutputs filterDrvOutputs(const OutputsSpec& wanted, SingleDrvOutputs&& outputs) +{ + SingleDrvOutputs ret = std::move(outputs); + for (auto it = ret.begin(); it != ret.end(); ) { + if (!wanted.contains(it->first)) + it = ret.erase(it); + else + ++it; + } + return ret; +} + StorePath RealisedPath::path() const { return std::visit([](auto && arg) { return arg.getPath(); }, raw); } diff --git a/src/libstore/realisation.hh b/src/libstore/realisation.hh index 3922d1267..2a093c128 100644 --- a/src/libstore/realisation.hh +++ b/src/libstore/realisation.hh @@ -12,6 +12,7 @@ namespace nix { class Store; +struct OutputsSpec; /** * A general `Realisation` key. @@ -93,6 +94,14 @@ typedef std::map SingleDrvOutputs; */ typedef std::map DrvOutputs; +/** + * Filter a SingleDrvOutputs to include only specific output names + * + * Moves the `outputs` input. 
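`filterDrvOutputs`, added in `realisation.cc`/`realisation.hh` above, trims a realisation map down to the outputs an `OutputsSpec` asks for. A minimal stand-in using plain strings for both the spec and the outputs shows the same move-then-erase loop:

```cpp
#include <iostream>
#include <map>
#include <set>
#include <string>

// Keep only the entries whose key appears in `wanted`, using the
// erase(iterator) idiom so the iterator stays valid while erasing.
static std::map<std::string, std::string> filterOutputs(
    const std::set<std::string> & wanted,
    std::map<std::string, std::string> && outputs)
{
    auto ret = std::move(outputs);
    for (auto it = ret.begin(); it != ret.end(); ) {
        if (!wanted.count(it->first))
            it = ret.erase(it);
        else
            ++it;
    }
    return ret;
}

int main()
{
    auto kept = filterOutputs(
        {"out"},
        {{"out", "/nix/store/...-foo"}, {"dev", "/nix/store/...-foo-dev"}});
    for (auto & [name, path] : kept)
        std::cout << name << " -> " << path << "\n";
}
```

Taking the map by rvalue reference lets callers hand over their `SingleDrvOutputs` wholesale, so the entries that survive the filter are never copied.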
+ */ +SingleDrvOutputs filterDrvOutputs(const OutputsSpec&, SingleDrvOutputs&&); + + struct OpaquePath { StorePath path; diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index a6e8b9577..c3dfb5979 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -18,189 +18,6 @@ namespace nix { -namespace worker_proto { - -std::string read(const Store & store, Source & from, Phantom _) -{ - return readString(from); -} - -void write(const Store & store, Sink & out, const std::string & str) -{ - out << str; -} - - -StorePath read(const Store & store, Source & from, Phantom _) -{ - return store.parseStorePath(readString(from)); -} - -void write(const Store & store, Sink & out, const StorePath & storePath) -{ - out << store.printStorePath(storePath); -} - - -std::optional read(const Store & store, Source & from, Phantom> _) -{ - auto temp = readNum(from); - switch (temp) { - case 0: - return std::nullopt; - case 1: - return { Trusted }; - case 2: - return { NotTrusted }; - default: - throw Error("Invalid trusted status from remote"); - } -} - -void write(const Store & store, Sink & out, const std::optional & optTrusted) -{ - if (!optTrusted) - out << (uint8_t)0; - else { - switch (*optTrusted) { - case Trusted: - out << (uint8_t)1; - break; - case NotTrusted: - out << (uint8_t)2; - break; - default: - assert(false); - }; - } -} - - -ContentAddress read(const Store & store, Source & from, Phantom _) -{ - return ContentAddress::parse(readString(from)); -} - -void write(const Store & store, Sink & out, const ContentAddress & ca) -{ - out << renderContentAddress(ca); -} - - -DerivedPath read(const Store & store, Source & from, Phantom _) -{ - auto s = readString(from); - return DerivedPath::parseLegacy(store, s); -} - -void write(const Store & store, Sink & out, const DerivedPath & req) -{ - out << req.to_string_legacy(store); -} - - -Realisation read(const Store & store, Source & from, Phantom _) -{ - std::string rawInput = readString(from); - return Realisation::fromJSON( - nlohmann::json::parse(rawInput), - "remote-protocol" - ); -} - -void write(const Store & store, Sink & out, const Realisation & realisation) -{ - out << realisation.toJSON().dump(); -} - - -DrvOutput read(const Store & store, Source & from, Phantom _) -{ - return DrvOutput::parse(readString(from)); -} - -void write(const Store & store, Sink & out, const DrvOutput & drvOutput) -{ - out << drvOutput.to_string(); -} - - -KeyedBuildResult read(const Store & store, Source & from, Phantom _) -{ - auto path = worker_proto::read(store, from, Phantom {}); - auto br = worker_proto::read(store, from, Phantom {}); - return KeyedBuildResult { - std::move(br), - /* .path = */ std::move(path), - }; -} - -void write(const Store & store, Sink & to, const KeyedBuildResult & res) -{ - worker_proto::write(store, to, res.path); - worker_proto::write(store, to, static_cast(res)); -} - - -BuildResult read(const Store & store, Source & from, Phantom _) -{ - BuildResult res; - res.status = (BuildResult::Status) readInt(from); - from - >> res.errorMsg - >> res.timesBuilt - >> res.isNonDeterministic - >> res.startTime - >> res.stopTime; - auto builtOutputs = worker_proto::read(store, from, Phantom {}); - for (auto && [output, realisation] : builtOutputs) - res.builtOutputs.insert_or_assign( - std::move(output.outputName), - std::move(realisation)); - return res; -} - -void write(const Store & store, Sink & to, const BuildResult & res) -{ - to - << res.status - << res.errorMsg - << res.timesBuilt - << 
res.isNonDeterministic - << res.startTime - << res.stopTime; - DrvOutputs builtOutputs; - for (auto & [output, realisation] : res.builtOutputs) - builtOutputs.insert_or_assign(realisation.id, realisation); - worker_proto::write(store, to, builtOutputs); -} - - -std::optional read(const Store & store, Source & from, Phantom> _) -{ - auto s = readString(from); - return s == "" ? std::optional {} : store.parseStorePath(s); -} - -void write(const Store & store, Sink & out, const std::optional & storePathOpt) -{ - out << (storePathOpt ? store.printStorePath(*storePathOpt) : ""); -} - - -std::optional read(const Store & store, Source & from, Phantom> _) -{ - return ContentAddress::parseOpt(readString(from)); -} - -void write(const Store & store, Sink & out, const std::optional & caOpt) -{ - out << (caOpt ? renderContentAddress(*caOpt) : ""); -} - -} - - /* TODO: Separate these store impls into different files, give them better names */ RemoteStore::RemoteStore(const Params & params) : RemoteStoreConfig(params) @@ -283,7 +100,7 @@ void RemoteStore::initConnection(Connection & conn) } if (GET_PROTOCOL_MINOR(conn.daemonVersion) >= 35) { - conn.remoteTrustsUs = worker_proto::read(*this, conn.from, Phantom> {}); + conn.remoteTrustsUs = WorkerProto>::read(*this, conn.from); } else { // We don't know the answer; protocol to old. conn.remoteTrustsUs = std::nullopt; @@ -410,12 +227,12 @@ StorePathSet RemoteStore::queryValidPaths(const StorePathSet & paths, Substitute return res; } else { conn->to << wopQueryValidPaths; - worker_proto::write(*this, conn->to, paths); + workerProtoWrite(*this, conn->to, paths); if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 27) { conn->to << (settings.buildersUseSubstitutes ? 1 : 0); } conn.processStderr(); - return worker_proto::read(*this, conn->from, Phantom {}); + return WorkerProto::read(*this, conn->from); } } @@ -425,7 +242,7 @@ StorePathSet RemoteStore::queryAllValidPaths() auto conn(getConnection()); conn->to << wopQueryAllValidPaths; conn.processStderr(); - return worker_proto::read(*this, conn->from, Phantom {}); + return WorkerProto::read(*this, conn->from); } @@ -442,9 +259,9 @@ StorePathSet RemoteStore::querySubstitutablePaths(const StorePathSet & paths) return res; } else { conn->to << wopQuerySubstitutablePaths; - worker_proto::write(*this, conn->to, paths); + workerProtoWrite(*this, conn->to, paths); conn.processStderr(); - return worker_proto::read(*this, conn->from, Phantom {}); + return WorkerProto::read(*this, conn->from); } } @@ -466,7 +283,7 @@ void RemoteStore::querySubstitutablePathInfos(const StorePathCAMap & pathsMap, S auto deriver = readString(conn->from); if (deriver != "") info.deriver = parseStorePath(deriver); - info.references = worker_proto::read(*this, conn->from, Phantom {}); + info.references = WorkerProto::read(*this, conn->from); info.downloadSize = readLongLong(conn->from); info.narSize = readLongLong(conn->from); infos.insert_or_assign(i.first, std::move(info)); @@ -479,9 +296,9 @@ void RemoteStore::querySubstitutablePathInfos(const StorePathCAMap & pathsMap, S StorePathSet paths; for (auto & path : pathsMap) paths.insert(path.first); - worker_proto::write(*this, conn->to, paths); + workerProtoWrite(*this, conn->to, paths); } else - worker_proto::write(*this, conn->to, pathsMap); + workerProtoWrite(*this, conn->to, pathsMap); conn.processStderr(); size_t count = readNum(conn->from); for (size_t n = 0; n < count; n++) { @@ -489,7 +306,7 @@ void RemoteStore::querySubstitutablePathInfos(const StorePathCAMap & pathsMap, S auto deriver = 
readString(conn->from); if (deriver != "") info.deriver = parseStorePath(deriver); - info.references = worker_proto::read(*this, conn->from, Phantom {}); + info.references = WorkerProto::read(*this, conn->from); info.downloadSize = readLongLong(conn->from); info.narSize = readLongLong(conn->from); } @@ -532,7 +349,7 @@ void RemoteStore::queryReferrers(const StorePath & path, auto conn(getConnection()); conn->to << wopQueryReferrers << printStorePath(path); conn.processStderr(); - for (auto & i : worker_proto::read(*this, conn->from, Phantom {})) + for (auto & i : WorkerProto::read(*this, conn->from)) referrers.insert(i); } @@ -542,7 +359,7 @@ StorePathSet RemoteStore::queryValidDerivers(const StorePath & path) auto conn(getConnection()); conn->to << wopQueryValidDerivers << printStorePath(path); conn.processStderr(); - return worker_proto::read(*this, conn->from, Phantom {}); + return WorkerProto::read(*this, conn->from); } @@ -554,7 +371,7 @@ StorePathSet RemoteStore::queryDerivationOutputs(const StorePath & path) auto conn(getConnection()); conn->to << wopQueryDerivationOutputs << printStorePath(path); conn.processStderr(); - return worker_proto::read(*this, conn->from, Phantom {}); + return WorkerProto::read(*this, conn->from); } @@ -564,7 +381,7 @@ std::map> RemoteStore::queryPartialDerivat auto conn(getConnection()); conn->to << wopQueryDerivationOutputMap << printStorePath(path); conn.processStderr(); - return worker_proto::read(*this, conn->from, Phantom>> {}); + return WorkerProto>>::read(*this, conn->from); } else { // Fallback for old daemon versions. // For floating-CA derivations (and their co-dependencies) this is an @@ -597,6 +414,7 @@ ref RemoteStore::addCAToStore( Source & dump, std::string_view name, ContentAddressMethod caMethod, + HashType hashType, const StorePathSet & references, RepairFlag repair) { @@ -608,8 +426,8 @@ ref RemoteStore::addCAToStore( conn->to << wopAddToStore << name - << caMethod.render(); - worker_proto::write(*this, conn->to, references); + << caMethod.render(hashType); + workerProtoWrite(*this, conn->to, references); conn->to << repair; // The dump source may invoke the store, so we need to make some room. @@ -628,26 +446,29 @@ ref RemoteStore::addCAToStore( if (repair) throw Error("repairing is not supported when building through the Nix daemon protocol < 1.25"); std::visit(overloaded { - [&](const TextHashMethod & thm) -> void { + [&](const TextIngestionMethod & thm) -> void { + if (hashType != htSHA256) + throw UnimplementedError("When adding text-hashed data called '%s', only SHA-256 is supported but '%s' was given", + name, printHashType(hashType)); std::string s = dump.drain(); conn->to << wopAddTextToStore << name << s; - worker_proto::write(*this, conn->to, references); + workerProtoWrite(*this, conn->to, references); conn.processStderr(); }, - [&](const FixedOutputHashMethod & fohm) -> void { + [&](const FileIngestionMethod & fim) -> void { conn->to << wopAddToStore << name - << ((fohm.hashType == htSHA256 && fohm.fileIngestionMethod == FileIngestionMethod::Recursive) ? 0 : 1) /* backwards compatibility hack */ - << (fohm.fileIngestionMethod == FileIngestionMethod::Recursive ? 1 : 0) - << printHashType(fohm.hashType); + << ((hashType == htSHA256 && fim == FileIngestionMethod::Recursive) ? 0 : 1) /* backwards compatibility hack */ + << (fim == FileIngestionMethod::Recursive ? 
1 : 0) + << printHashType(hashType); try { conn->to.written = 0; connections->incCapacity(); { Finally cleanup([&]() { connections->decCapacity(); }); - if (fohm.fileIngestionMethod == FileIngestionMethod::Recursive) { + if (fim == FileIngestionMethod::Recursive) { dump.drainInto(conn->to); } else { std::string contents = dump.drain(); @@ -678,7 +499,7 @@ ref RemoteStore::addCAToStore( StorePath RemoteStore::addToStoreFromDump(Source & dump, std::string_view name, FileIngestionMethod method, HashType hashType, RepairFlag repair, const StorePathSet & references) { - return addCAToStore(dump, name, FixedOutputHashMethod{ .fileIngestionMethod = method, .hashType = hashType }, references, repair)->path; + return addCAToStore(dump, name, method, hashType, references, repair)->path; } @@ -697,7 +518,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source, sink << exportMagic << printStorePath(info.path); - worker_proto::write(*this, sink, info.references); + workerProtoWrite(*this, sink, info.references); sink << (info.deriver ? printStorePath(*info.deriver) : "") << 0 // == no legacy signature @@ -707,7 +528,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source, conn.processStderr(0, source2.get()); - auto importedPaths = worker_proto::read(*this, conn->from, Phantom {}); + auto importedPaths = WorkerProto::read(*this, conn->from); assert(importedPaths.size() <= 1); } @@ -716,7 +537,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source, << printStorePath(info.path) << (info.deriver ? printStorePath(*info.deriver) : "") << info.narHash.to_string(Base16, false); - worker_proto::write(*this, conn->to, info.references); + workerProtoWrite(*this, conn->to, info.references); conn->to << info.registrationTime << info.narSize << info.ultimate << info.sigs << renderContentAddress(info.ca) << repair << !checkSigs; @@ -778,7 +599,7 @@ StorePath RemoteStore::addTextToStore( RepairFlag repair) { StringSource source(s); - return addCAToStore(source, name, TextHashMethod{}, references, repair)->path; + return addCAToStore(source, name, TextIngestionMethod {}, htSHA256, references, repair)->path; } void RemoteStore::registerDrvOutput(const Realisation & info) @@ -789,7 +610,7 @@ void RemoteStore::registerDrvOutput(const Realisation & info) conn->to << info.id.to_string(); conn->to << std::string(info.outPath.to_string()); } else { - worker_proto::write(*this, conn->to, info); + workerProtoWrite(*this, conn->to, info); } conn.processStderr(); } @@ -811,14 +632,14 @@ void RemoteStore::queryRealisationUncached(const DrvOutput & id, auto real = [&]() -> std::shared_ptr { if (GET_PROTOCOL_MINOR(conn->daemonVersion) < 31) { - auto outPaths = worker_proto::read( - *this, conn->from, Phantom> {}); + auto outPaths = WorkerProto>::read( + *this, conn->from); if (outPaths.empty()) return nullptr; return std::make_shared(Realisation { .id = id, .outPath = *outPaths.begin() }); } else { - auto realisations = worker_proto::read( - *this, conn->from, Phantom> {}); + auto realisations = WorkerProto>::read( + *this, conn->from); if (realisations.empty()) return nullptr; return std::make_shared(*realisations.begin()); @@ -832,7 +653,7 @@ void RemoteStore::queryRealisationUncached(const DrvOutput & id, static void writeDerivedPaths(RemoteStore & store, ConnectionHandle & conn, const std::vector & reqs) { if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 30) { - worker_proto::write(store, conn->to, reqs); + workerProtoWrite(store, conn->to, reqs); } else { Strings 
ss; for (auto & p : reqs) { @@ -902,7 +723,7 @@ std::vector RemoteStore::buildPathsWithResults( writeDerivedPaths(*this, conn, paths); conn->to << buildMode; conn.processStderr(); - return worker_proto::read(*this, conn->from, Phantom> {}); + return WorkerProto>::read(*this, conn->from); } else { // Avoid deadlock. conn_.reset(); @@ -985,7 +806,7 @@ BuildResult RemoteStore::buildDerivation(const StorePath & drvPath, const BasicD conn->from >> res.timesBuilt >> res.isNonDeterministic >> res.startTime >> res.stopTime; } if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 28) { - auto builtOutputs = worker_proto::read(*this, conn->from, Phantom {}); + auto builtOutputs = WorkerProto::read(*this, conn->from); for (auto && [output, realisation] : builtOutputs) res.builtOutputs.insert_or_assign( std::move(output.outputName), @@ -1044,7 +865,7 @@ void RemoteStore::collectGarbage(const GCOptions & options, GCResults & results) conn->to << wopCollectGarbage << options.action; - worker_proto::write(*this, conn->to, options.pathsToDelete); + workerProtoWrite(*this, conn->to, options.pathsToDelete); conn->to << options.ignoreLiveness << options.maxFreed /* removed options */ @@ -1103,9 +924,9 @@ void RemoteStore::queryMissing(const std::vector & targets, conn->to << wopQueryMissing; writeDerivedPaths(*this, conn, targets); conn.processStderr(); - willBuild = worker_proto::read(*this, conn->from, Phantom {}); - willSubstitute = worker_proto::read(*this, conn->from, Phantom {}); - unknown = worker_proto::read(*this, conn->from, Phantom {}); + willBuild = WorkerProto::read(*this, conn->from); + willSubstitute = WorkerProto::read(*this, conn->from); + unknown = WorkerProto::read(*this, conn->from); conn->from >> downloadSize >> narSize; return; } diff --git a/src/libstore/remote-store.hh b/src/libstore/remote-store.hh index a30466647..4f3971bfd 100644 --- a/src/libstore/remote-store.hh +++ b/src/libstore/remote-store.hh @@ -78,6 +78,7 @@ public: Source & dump, std::string_view name, ContentAddressMethod caMethod, + HashType hashType, const StorePathSet & references, RepairFlag repair); @@ -136,6 +137,17 @@ public: bool verifyStore(bool checkContents, RepairFlag repair) override; + /** + * The default instance would schedule the work on the client side, but + * for consistency with `buildPaths` and `buildDerivation` it should happen + * on the remote side. + * + * We make this fail for now so we can add implement this properly later + * without it being a breaking change. + */ + void repairPath(const StorePath & path) override + { unsupported("repairPath"); } + void addSignatures(const StorePath & storePath, const StringSet & sigs) override; void queryMissing(const std::vector & targets, diff --git a/src/libstore/ssh.cc b/src/libstore/ssh.cc index 6f6deda51..fae99d75b 100644 --- a/src/libstore/ssh.cc +++ b/src/libstore/ssh.cc @@ -41,6 +41,11 @@ void SSHMaster::addCommonSSHOpts(Strings & args) args.push_back("-oLocalCommand=echo started"); } +bool SSHMaster::isMasterRunning() { + auto res = runProgram(RunOptions {.program = "ssh", .args = {"-O", "check", host}, .mergeStderrToStdout = true}); + return res.first == 0; +} + std::unique_ptr SSHMaster::startCommand(const std::string & command) { Path socketPath = startMaster(); @@ -97,7 +102,7 @@ std::unique_ptr SSHMaster::startCommand(const std::string // Wait for the SSH connection to be established, // So that we don't overwrite the password prompt with our progress bar. 
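The `ssh.cc` changes below only wait for the `started` marker when no ControlMaster connection exists yet, which is what the new `isMasterRunning` probes via OpenSSH's `-O check` control command. A rough stand-alone version of that probe; the real code uses `runProgram` and merges stderr into stdout, whereas this sketch simply discards the output:

```cpp
#include <cstdlib>
#include <iostream>
#include <string>

// Ask OpenSSH whether a control master for `host` is alive:
// `ssh -O check` exits with status 0 only if a master connection exists.
static bool isMasterRunning(const std::string & host)
{
    std::string cmd = "ssh -O check " + host + " >/dev/null 2>&1";
    return std::system(cmd.c_str()) == 0;
}

int main()
{
    std::cout << (isMasterRunning("example.org") ? "master running" : "no master") << "\n";
}
```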
- if (!fakeSSH && !useMaster) { + if (!fakeSSH && !useMaster && !isMasterRunning()) { std::string reply; try { reply = readLine(out.readSide.get()); @@ -133,6 +138,8 @@ Path SSHMaster::startMaster() logger->pause(); Finally cleanup = [&]() { logger->resume(); }; + bool wasMasterRunning = isMasterRunning(); + state->sshMaster = startProcess([&]() { restoreProcessContext(); @@ -152,13 +159,15 @@ Path SSHMaster::startMaster() out.writeSide = -1; - std::string reply; - try { - reply = readLine(out.readSide.get()); - } catch (EndOfFile & e) { } + if (!wasMasterRunning) { + std::string reply; + try { + reply = readLine(out.readSide.get()); + } catch (EndOfFile & e) { } - if (reply != "started") - throw Error("failed to start SSH master connection to '%s'", host); + if (reply != "started") + throw Error("failed to start SSH master connection to '%s'", host); + } return state->socketPath; } diff --git a/src/libstore/ssh.hh b/src/libstore/ssh.hh index c86a8a986..94b952af9 100644 --- a/src/libstore/ssh.hh +++ b/src/libstore/ssh.hh @@ -28,6 +28,7 @@ private: Sync state_; void addCommonSSHOpts(Strings & args); + bool isMasterRunning(); public: diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index c910d1c96..2ecbe2708 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -679,8 +679,7 @@ public: * Repair the contents of the given path by redownloading it using * a substituter (if available). */ - virtual void repairPath(const StorePath & path) - { unsupported("repairPath"); } + virtual void repairPath(const StorePath & path); /** * Add signatures to the specified store path. The signatures are @@ -1022,7 +1021,7 @@ std::optional decodeValidPathInfo( */ std::pair splitUriAndParams(const std::string & uri); -std::optional getDerivationCA(const BasicDerivation & drv); +const ContentAddress * getDerivationCA(const BasicDerivation & drv); std::map drvOutputReferences( Store & store, diff --git a/src/libstore/tests/derivation.cc b/src/libstore/tests/derivation.cc index 6f94904dd..6328ad370 100644 --- a/src/libstore/tests/derivation.cc +++ b/src/libstore/tests/derivation.cc @@ -26,6 +26,14 @@ class CaDerivationTest : public DerivationTest } }; +class DynDerivationTest : public DerivationTest +{ + void SetUp() override + { + mockXpSettings.set("experimental-features", "dynamic-derivations ca-derivations"); + } +}; + class ImpureDerivationTest : public DerivationTest { void SetUp() override @@ -66,20 +74,47 @@ TEST_JSON(DerivationTest, inputAddressed, }), "drv-name", "output-name") -TEST_JSON(DerivationTest, caFixed, +TEST_JSON(DerivationTest, caFixedFlat, + R"({ + "hashAlgo": "sha256", + "hash": "894517c9163c896ec31a2adbd33c0681fd5f45b2c0ef08a64c92a03fb97f390f", + "path": "/nix/store/rhcg9h16sqvlbpsa6dqm57sbr2al6nzg-drv-name-output-name" + })", + (DerivationOutput::CAFixed { + .ca = FixedOutputHash { + .method = FileIngestionMethod::Flat, + .hash = Hash::parseAnyPrefixed("sha256-iUUXyRY8iW7DGirb0zwGgf1fRbLA7wimTJKgP7l/OQ8="), + }, + }), + "drv-name", "output-name") + +TEST_JSON(DerivationTest, caFixedNAR, R"({ "hashAlgo": "r:sha256", "hash": "894517c9163c896ec31a2adbd33c0681fd5f45b2c0ef08a64c92a03fb97f390f", "path": "/nix/store/c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-drv-name-output-name" })", (DerivationOutput::CAFixed { - .hash = { + .ca = FixedOutputHash { .method = FileIngestionMethod::Recursive, .hash = Hash::parseAnyPrefixed("sha256-iUUXyRY8iW7DGirb0zwGgf1fRbLA7wimTJKgP7l/OQ8="), }, }), "drv-name", "output-name") +TEST_JSON(DynDerivationTest, caFixedText, + R"({ + 
"hashAlgo": "text:sha256", + "hash": "894517c9163c896ec31a2adbd33c0681fd5f45b2c0ef08a64c92a03fb97f390f", + "path": "/nix/store/6s1zwabh956jvhv4w9xcdb5jiyanyxg1-drv-name-output-name" + })", + (DerivationOutput::CAFixed { + .ca = TextHash { + .hash = Hash::parseAnyPrefixed("sha256-iUUXyRY8iW7DGirb0zwGgf1fRbLA7wimTJKgP7l/OQ8="), + }, + }), + "drv-name", "output-name") + TEST_JSON(CaDerivationTest, caFloating, R"({ "hashAlgo": "r:sha256" diff --git a/src/libstore/tests/derived-path.cc b/src/libstore/tests/derived-path.cc index e6d32dbd0..160443ec1 100644 --- a/src/libstore/tests/derived-path.cc +++ b/src/libstore/tests/derived-path.cc @@ -27,11 +27,13 @@ Gen Arbitrary::arbitrary() Gen Arbitrary::arbitrary() { - switch (*gen::inRange(0, 1)) { + switch (*gen::inRange(0, std::variant_size_v)) { case 0: return gen::just(*gen::arbitrary()); - default: + case 1: return gen::just(*gen::arbitrary()); + default: + assert(false); } } diff --git a/src/libstore/tests/downstream-placeholder.cc b/src/libstore/tests/downstream-placeholder.cc new file mode 100644 index 000000000..ec3e1000f --- /dev/null +++ b/src/libstore/tests/downstream-placeholder.cc @@ -0,0 +1,33 @@ +#include + +#include "downstream-placeholder.hh" + +namespace nix { + +TEST(DownstreamPlaceholder, unknownCaOutput) { + ASSERT_EQ( + DownstreamPlaceholder::unknownCaOutput( + StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv" }, + "out").render(), + "/0c6rn30q4frawknapgwq386zq358m8r6msvywcvc89n6m5p2dgbz"); +} + +TEST(DownstreamPlaceholder, unknownDerivation) { + /** + * We set these in tests rather than the regular globals so we don't have + * to worry about race conditions if the tests run concurrently. + */ + ExperimentalFeatureSettings mockXpSettings; + mockXpSettings.set("experimental-features", "dynamic-derivations ca-derivations"); + + ASSERT_EQ( + DownstreamPlaceholder::unknownDerivation( + DownstreamPlaceholder::unknownCaOutput( + StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv.drv" }, + "out"), + "out", + mockXpSettings).render(), + "/0gn6agqxjyyalf0dpihgyf49xq5hqxgw100f0wydnj6yqrhqsb3w"); +} + +} diff --git a/src/libstore/tests/outputs-spec.cc b/src/libstore/tests/outputs-spec.cc index 984d1d963..bf8deaa9d 100644 --- a/src/libstore/tests/outputs-spec.cc +++ b/src/libstore/tests/outputs-spec.cc @@ -206,15 +206,17 @@ using namespace nix; Gen Arbitrary::arbitrary() { - switch (*gen::inRange(0, 1)) { + switch (*gen::inRange(0, std::variant_size_v)) { case 0: return gen::just((OutputsSpec) OutputsSpec::All { }); - default: + case 1: return gen::just((OutputsSpec) OutputsSpec::Names { *gen::nonEmpty(gen::container(gen::map( gen::arbitrary(), [](StorePathName n) { return n.name; }))), }); + default: + assert(false); } } diff --git a/src/libstore/worker-protocol.cc b/src/libstore/worker-protocol.cc new file mode 100644 index 000000000..51bb12026 --- /dev/null +++ b/src/libstore/worker-protocol.cc @@ -0,0 +1,192 @@ +#include "serialise.hh" +#include "util.hh" +#include "path-with-outputs.hh" +#include "store-api.hh" +#include "build-result.hh" +#include "worker-protocol.hh" +#include "archive.hh" +#include "derivations.hh" + +#include + +namespace nix { + +std::string WorkerProto::read(const Store & store, Source & from) +{ + return readString(from); +} + +void WorkerProto::write(const Store & store, Sink & out, const std::string & str) +{ + out << str; +} + + +StorePath WorkerProto::read(const Store & store, Source & from) +{ + return store.parseStorePath(readString(from)); +} + +void WorkerProto::write(const Store & store, Sink & 
out, const StorePath & storePath) +{ + out << store.printStorePath(storePath); +} + + +std::optional WorkerProto>::read(const Store & store, Source & from) +{ + auto temp = readNum(from); + switch (temp) { + case 0: + return std::nullopt; + case 1: + return { Trusted }; + case 2: + return { NotTrusted }; + default: + throw Error("Invalid trusted status from remote"); + } +} + +void WorkerProto>::write(const Store & store, Sink & out, const std::optional & optTrusted) +{ + if (!optTrusted) + out << (uint8_t)0; + else { + switch (*optTrusted) { + case Trusted: + out << (uint8_t)1; + break; + case NotTrusted: + out << (uint8_t)2; + break; + default: + assert(false); + }; + } +} + + +ContentAddress WorkerProto::read(const Store & store, Source & from) +{ + return ContentAddress::parse(readString(from)); +} + +void WorkerProto::write(const Store & store, Sink & out, const ContentAddress & ca) +{ + out << renderContentAddress(ca); +} + + +DerivedPath WorkerProto::read(const Store & store, Source & from) +{ + auto s = readString(from); + return DerivedPath::parseLegacy(store, s); +} + +void WorkerProto::write(const Store & store, Sink & out, const DerivedPath & req) +{ + out << req.to_string_legacy(store); +} + + +Realisation WorkerProto::read(const Store & store, Source & from) +{ + std::string rawInput = readString(from); + return Realisation::fromJSON( + nlohmann::json::parse(rawInput), + "remote-protocol" + ); +} + +void WorkerProto::write(const Store & store, Sink & out, const Realisation & realisation) +{ + out << realisation.toJSON().dump(); +} + + +DrvOutput WorkerProto::read(const Store & store, Source & from) +{ + return DrvOutput::parse(readString(from)); +} + +void WorkerProto::write(const Store & store, Sink & out, const DrvOutput & drvOutput) +{ + out << drvOutput.to_string(); +} + + +KeyedBuildResult WorkerProto::read(const Store & store, Source & from) +{ + auto path = WorkerProto::read(store, from); + auto br = WorkerProto::read(store, from); + return KeyedBuildResult { + std::move(br), + /* .path = */ std::move(path), + }; +} + +void WorkerProto::write(const Store & store, Sink & to, const KeyedBuildResult & res) +{ + workerProtoWrite(store, to, res.path); + workerProtoWrite(store, to, static_cast(res)); +} + + +BuildResult WorkerProto::read(const Store & store, Source & from) +{ + BuildResult res; + res.status = (BuildResult::Status) readInt(from); + from + >> res.errorMsg + >> res.timesBuilt + >> res.isNonDeterministic + >> res.startTime + >> res.stopTime; + auto builtOutputs = WorkerProto::read(store, from); + for (auto && [output, realisation] : builtOutputs) + res.builtOutputs.insert_or_assign( + std::move(output.outputName), + std::move(realisation)); + return res; +} + +void WorkerProto::write(const Store & store, Sink & to, const BuildResult & res) +{ + to + << res.status + << res.errorMsg + << res.timesBuilt + << res.isNonDeterministic + << res.startTime + << res.stopTime; + DrvOutputs builtOutputs; + for (auto & [output, realisation] : res.builtOutputs) + builtOutputs.insert_or_assign(realisation.id, realisation); + workerProtoWrite(store, to, builtOutputs); +} + + +std::optional WorkerProto>::read(const Store & store, Source & from) +{ + auto s = readString(from); + return s == "" ? std::optional {} : store.parseStorePath(s); +} + +void WorkerProto>::write(const Store & store, Sink & out, const std::optional & storePathOpt) +{ + out << (storePathOpt ? 
store.printStorePath(*storePathOpt) : ""); +} + + +std::optional WorkerProto>::read(const Store & store, Source & from) +{ + return ContentAddress::parseOpt(readString(from)); +} + +void WorkerProto>::write(const Store & store, Sink & out, const std::optional & caOpt) +{ + out << (caOpt ? renderContentAddress(*caOpt) : ""); +} + +} diff --git a/src/libstore/worker-protocol.hh b/src/libstore/worker-protocol.hh index 34b2fc17b..f06332d17 100644 --- a/src/libstore/worker-protocol.hh +++ b/src/libstore/worker-protocol.hh @@ -1,7 +1,6 @@ #pragma once ///@file -#include "store-api.hh" #include "serialise.hh" namespace nix { @@ -79,41 +78,81 @@ typedef enum { class Store; struct Source; +// items being serialized +struct DerivedPath; +struct DrvOutput; +struct Realisation; +struct BuildResult; +struct KeyedBuildResult; +enum TrustedFlag : bool; + + /** - * Used to guide overloading + * Data type for canonical pairs of serializers for the worker protocol. * * See https://en.cppreference.com/w/cpp/language/adl for the broader * concept of what is going on here. */ template -struct Phantom {}; +struct WorkerProto { + static T read(const Store & store, Source & from); + static void write(const Store & store, Sink & out, const T & t); +}; +/** + * Wrapper function around `WorkerProto::write` that allows us to + * infer the type instead of having to write it down explicitly. + */ +template +void workerProtoWrite(const Store & store, Sink & out, const T & t) +{ + WorkerProto::write(store, out, t); +} -namespace worker_proto { -/* FIXME maybe move more stuff inside here */ +/** + * Declare a canonical serializer pair for the worker protocol. + * + * We specialize the struct merely to indicate that we are implementing + * the function for the given type. + * + * Some sort of `template<...>` must be used with the caller for this to + * be legal specialization syntax. See below for what that looks like in + * practice. 
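The comment above describes the new shape of the worker-protocol serializers: a single `WorkerProto<T>` struct template whose specializations always carry a matched `read`/`write` pair, so composite serializers can recurse explicitly instead of relying on `Phantom`-guided overload resolution. A toy version of the pattern, with iostreams standing in for `Source`/`Sink` and no `Store` parameter:

```cpp
#include <cstddef>
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

// Primary template: only declared, so every serializable type must provide
// its own specialization with a matched read/write pair.
template<typename T>
struct Proto
{
    static T read(std::istream & in);
    static void write(std::ostream & out, const T & t);
};

template<>
struct Proto<std::string>
{
    static std::string read(std::istream & in)
    {
        std::string s;
        in >> s;
        return s;
    }
    static void write(std::ostream & out, const std::string & s)
    {
        out << s << ' ';
    }
};

// Composite serializer recurses through Proto<T> explicitly.
template<typename T>
struct Proto<std::vector<T>>
{
    static std::vector<T> read(std::istream & in)
    {
        std::size_t n;
        in >> n;
        std::vector<T> res;
        while (n--) res.push_back(Proto<T>::read(in));
        return res;
    }
    static void write(std::ostream & out, const std::vector<T> & v)
    {
        out << v.size() << ' ';
        for (auto & x : v) Proto<T>::write(out, x);
    }
};

int main()
{
    std::stringstream wire;
    Proto<std::vector<std::string>>::write(wire, {"foo", "bar"});
    auto back = Proto<std::vector<std::string>>::read(wire);
    std::cout << back.size() << " items, first = " << back[0] << "\n";
}
```

Because these are class template specializations rather than free-function overloads, partial specializations such as the vector one are legal, which is what the container declarations following `MAKE_WORKER_PROTO` rely on.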
+ */ +#define MAKE_WORKER_PROTO(T) \ + struct WorkerProto< T > { \ + static T read(const Store & store, Source & from); \ + static void write(const Store & store, Sink & out, const T & t); \ + }; -#define MAKE_WORKER_PROTO(TEMPLATE, T) \ - TEMPLATE T read(const Store & store, Source & from, Phantom< T > _); \ - TEMPLATE void write(const Store & store, Sink & out, const T & str) +template<> +MAKE_WORKER_PROTO(std::string); +template<> +MAKE_WORKER_PROTO(StorePath); +template<> +MAKE_WORKER_PROTO(ContentAddress); +template<> +MAKE_WORKER_PROTO(DerivedPath); +template<> +MAKE_WORKER_PROTO(Realisation); +template<> +MAKE_WORKER_PROTO(DrvOutput); +template<> +MAKE_WORKER_PROTO(BuildResult); +template<> +MAKE_WORKER_PROTO(KeyedBuildResult); +template<> +MAKE_WORKER_PROTO(std::optional); -MAKE_WORKER_PROTO(, std::string); -MAKE_WORKER_PROTO(, StorePath); -MAKE_WORKER_PROTO(, ContentAddress); -MAKE_WORKER_PROTO(, DerivedPath); -MAKE_WORKER_PROTO(, Realisation); -MAKE_WORKER_PROTO(, DrvOutput); -MAKE_WORKER_PROTO(, BuildResult); -MAKE_WORKER_PROTO(, KeyedBuildResult); -MAKE_WORKER_PROTO(, std::optional); +template +MAKE_WORKER_PROTO(std::vector); +template +MAKE_WORKER_PROTO(std::set); -MAKE_WORKER_PROTO(template, std::vector); -MAKE_WORKER_PROTO(template, std::set); - -#define X_ template -#define Y_ std::map -MAKE_WORKER_PROTO(X_, Y_); +template +#define X_ std::map +MAKE_WORKER_PROTO(X_); #undef X_ -#undef Y_ /** * These use the empty string for the null case, relying on the fact @@ -129,72 +168,72 @@ MAKE_WORKER_PROTO(X_, Y_); * worker protocol harder to implement in other languages where such * specializations may not be allowed. */ -MAKE_WORKER_PROTO(, std::optional); -MAKE_WORKER_PROTO(, std::optional); +template<> +MAKE_WORKER_PROTO(std::optional); +template<> +MAKE_WORKER_PROTO(std::optional); template -std::vector read(const Store & store, Source & from, Phantom> _) +std::vector WorkerProto>::read(const Store & store, Source & from) { std::vector resSet; auto size = readNum(from); while (size--) { - resSet.push_back(read(store, from, Phantom {})); + resSet.push_back(WorkerProto::read(store, from)); } return resSet; } template -void write(const Store & store, Sink & out, const std::vector & resSet) +void WorkerProto>::write(const Store & store, Sink & out, const std::vector & resSet) { out << resSet.size(); for (auto & key : resSet) { - write(store, out, key); + WorkerProto::write(store, out, key); } } template -std::set read(const Store & store, Source & from, Phantom> _) +std::set WorkerProto>::read(const Store & store, Source & from) { std::set resSet; auto size = readNum(from); while (size--) { - resSet.insert(read(store, from, Phantom {})); + resSet.insert(WorkerProto::read(store, from)); } return resSet; } template -void write(const Store & store, Sink & out, const std::set & resSet) +void WorkerProto>::write(const Store & store, Sink & out, const std::set & resSet) { out << resSet.size(); for (auto & key : resSet) { - write(store, out, key); + WorkerProto::write(store, out, key); } } template -std::map read(const Store & store, Source & from, Phantom> _) +std::map WorkerProto>::read(const Store & store, Source & from) { std::map resMap; auto size = readNum(from); while (size--) { - auto k = read(store, from, Phantom {}); - auto v = read(store, from, Phantom {}); + auto k = WorkerProto::read(store, from); + auto v = WorkerProto::read(store, from); resMap.insert_or_assign(std::move(k), std::move(v)); } return resMap; } template -void write(const Store & store, Sink & out, const 
std::map & resMap) +void WorkerProto>::write(const Store & store, Sink & out, const std::map & resMap) { out << resMap.size(); for (auto & i : resMap) { - write(store, out, i.first); - write(store, out, i.second); + WorkerProto::write(store, out, i.first); + WorkerProto::write(store, out, i.second); } } } - -} diff --git a/src/libutil/canon-path.cc b/src/libutil/canon-path.cc index ddf6db6d1..040464532 100644 --- a/src/libutil/canon-path.cc +++ b/src/libutil/canon-path.cc @@ -13,6 +13,11 @@ CanonPath::CanonPath(std::string_view raw, const CanonPath & root) : path(absPath((Path) raw, root.abs())) { } +CanonPath CanonPath::fromCwd(std::string_view path) +{ + return CanonPath(unchecked_t(), absPath((Path) path)); +} + std::optional CanonPath::parent() const { if (isRoot()) return std::nullopt; diff --git a/src/libutil/canon-path.hh b/src/libutil/canon-path.hh index 614883c06..eefe05ed5 100644 --- a/src/libutil/canon-path.hh +++ b/src/libutil/canon-path.hh @@ -46,6 +46,8 @@ public: : path(std::move(path)) { } + static CanonPath fromCwd(std::string_view path = "."); + static CanonPath root; /** diff --git a/src/libutil/experimental-features.cc b/src/libutil/experimental-features.cc index bd1899662..c4642d333 100644 --- a/src/libutil/experimental-features.cc +++ b/src/libutil/experimental-features.cc @@ -12,7 +12,7 @@ struct ExperimentalFeatureDetails std::string_view description; }; -constexpr std::array xpFeatureDetails = {{ +constexpr std::array xpFeatureDetails = {{ { .tag = Xp::CaDerivations, .name = "ca-derivations", @@ -50,6 +50,8 @@ constexpr std::array xpFeatureDetails = {{ or other impure derivations can rely on impure derivations. Finally, an impure derivation cannot also be [content-addressed](#xp-feature-ca-derivations). + + This is a more explicit alternative to using [`builtins.currentTime`](@docroot@/language/builtin-constants.md#builtins-currentTime). )", }, { @@ -199,6 +201,26 @@ constexpr std::array xpFeatureDetails = {{ networking. )", }, + { + .tag = Xp::DynamicDerivations, + .name = "dynamic-derivations", + .description = R"( + Allow the use of a few things related to dynamic derivations: + + - "text hashing" derivation outputs, so we can build .drv + files. + + - dependencies in derivations on the outputs of + derivations that are themselves derivations outputs. + )", + }, + { + .tag = Xp::ParseTomlTimestamps, + .name = "parse-toml-timestamps", + .description = R"( + Allow parsing of timestamps in builtins.fromTOML. 
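Several hunks in this patch guard new behaviour with `xpSettings.require(...)`, including the `text:` output parsing and `DownstreamPlaceholder::unknownDerivation` above. A toy sketch of that gate, using an invented `FeatureSettings` class whose error message only loosely mirrors the real one:

```cpp
#include <iostream>
#include <set>
#include <stdexcept>
#include <string>

// Toy feature registry: require() throws when code that depends on a
// disabled experimental feature is reached.
struct FeatureSettings
{
    std::set<std::string> enabled;

    void require(const std::string & feature) const
    {
        if (!enabled.count(feature))
            throw std::runtime_error(
                "experimental feature '" + feature + "' is disabled; enable "
                "it via extra-experimental-features to use this functionality");
    }
};

int main()
{
    FeatureSettings settings;
    settings.enabled = {"ca-derivations"};

    try {
        settings.require("dynamic-derivations"); // e.g. before accepting "text:" hashAlgo
    } catch (std::exception & e) {
        std::cout << e.what() << "\n";
    }

    settings.enabled.insert("parse-toml-timestamps");
    settings.require("parse-toml-timestamps"); // now passes
}
```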
+ )", + }, }}; static_assert( @@ -233,7 +255,7 @@ std::string_view showExperimentalFeature(const ExperimentalFeature tag) return xpFeatureDetails[(size_t)tag].name; } -nlohmann::json documentExperimentalFeatures() +nlohmann::json documentExperimentalFeatures() { StringMap res; for (auto & xpFeature : xpFeatureDetails) diff --git a/src/libutil/experimental-features.hh b/src/libutil/experimental-features.hh index 3c00bc4e5..892c6c371 100644 --- a/src/libutil/experimental-features.hh +++ b/src/libutil/experimental-features.hh @@ -29,6 +29,8 @@ enum struct ExperimentalFeature Cgroups, DiscardReferences, DaemonTrustOverride, + DynamicDerivations, + ParseTomlTimestamps, }; /** diff --git a/src/libutil/tarfile.cc b/src/libutil/tarfile.cc index 238d0a7a6..5060a8f24 100644 --- a/src/libutil/tarfile.cc +++ b/src/libutil/tarfile.cc @@ -17,7 +17,7 @@ static ssize_t callback_read(struct archive * archive, void * _self, const void *buffer = self->buffer.data(); try { - return self->source->read((char *) self->buffer.data(), 4096); + return self->source->read((char *) self->buffer.data(), self->buffer.size()); } catch (EndOfFile &) { return 0; } catch (std::exception & err) { @@ -39,7 +39,7 @@ void TarArchive::check(int err, const std::string & reason) throw Error(reason, archive_error_string(this->archive)); } -TarArchive::TarArchive(Source & source, bool raw) : buffer(4096) +TarArchive::TarArchive(Source & source, bool raw) : buffer(65536) { this->archive = archive_read_new(); this->source = &source; diff --git a/src/libutil/tarfile.hh b/src/libutil/tarfile.hh index 24afb710a..237d18c31 100644 --- a/src/libutil/tarfile.hh +++ b/src/libutil/tarfile.hh @@ -24,6 +24,7 @@ struct TarArchive { ~TarArchive(); }; + void unpackTarfile(Source & source, const Path & destDir); void unpackTarfile(const Path & tarFile, const Path & destDir); diff --git a/src/libutil/tests/tests.cc b/src/libutil/tests/tests.cc index 250e83a38..f3c1e8248 100644 --- a/src/libutil/tests/tests.cc +++ b/src/libutil/tests/tests.cc @@ -202,7 +202,7 @@ namespace nix { } TEST(pathExists, bogusPathDoesNotExist) { - ASSERT_FALSE(pathExists("/home/schnitzel/darmstadt/pommes")); + ASSERT_FALSE(pathExists("/schnitzel/darmstadt/pommes")); } /* ---------------------------------------------------------------------------- diff --git a/src/libutil/util.cc b/src/libutil/util.cc index 21d1c8dcd..aa0a154fd 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -266,6 +266,17 @@ bool pathExists(const Path & path) return false; } +bool pathAccessible(const Path & path) +{ + try { + return pathExists(path); + } catch (SysError & e) { + // swallow EPERM + if (e.errNo == EPERM) return false; + throw; + } +} + Path readLink(const Path & path) { @@ -1141,9 +1152,9 @@ std::vector stringsToCharPtrs(const Strings & ss) } std::string runProgram(Path program, bool searchPath, const Strings & args, - const std::optional & input) + const std::optional & input, bool isInteractive) { - auto res = runProgram(RunOptions {.program = program, .searchPath = searchPath, .args = args, .input = input}); + auto res = runProgram(RunOptions {.program = program, .searchPath = searchPath, .args = args, .input = input, .isInteractive = isInteractive}); if (!statusOk(res.first)) throw ExecError(res.first, "program '%1%' %2%", program, statusToString(res.first)); @@ -1193,6 +1204,16 @@ void runProgram2(const RunOptions & options) // case), so we can't use it if we alter the environment processOptions.allowVfork = !options.environment; + std::optional>> resumeLoggerDefer; + if 
(options.isInteractive) { + logger->pause(); + resumeLoggerDefer.emplace( + []() { + logger->resume(); + } + ); + } + /* Fork. */ Pid pid = startProcess([&]() { if (options.environment) diff --git a/src/libutil/util.hh b/src/libutil/util.hh index 040fed68f..00fcb9b79 100644 --- a/src/libutil/util.hh +++ b/src/libutil/util.hh @@ -120,6 +120,14 @@ struct stat lstat(const Path & path); */ bool pathExists(const Path & path); +/** + * A version of pathExists that returns false on a permission error. + * Useful for inferring default paths across directories that might not + * be readable. + * @return true iff the given path can be accessed and exists + */ +bool pathAccessible(const Path & path); + /** * Read the contents (target) of a symbolic link. The result is not * in any way canonicalised. @@ -415,7 +423,7 @@ pid_t startProcess(std::function fun, const ProcessOptions & options = P */ std::string runProgram(Path program, bool searchPath = false, const Strings & args = Strings(), - const std::optional & input = {}); + const std::optional & input = {}, bool isInteractive = false); struct RunOptions { @@ -430,6 +438,7 @@ struct RunOptions Source * standardIn = nullptr; Sink * standardOut = nullptr; bool mergeStderrToStdout = false; + bool isInteractive = false; }; std::pair runProgram(RunOptions && options); diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc index bc7e7eb18..6510df8f0 100644 --- a/src/nix-build/nix-build.cc +++ b/src/nix-build/nix-build.cc @@ -84,7 +84,6 @@ static void main_nix_build(int argc, char * * argv) auto interactive = isatty(STDIN_FILENO) && isatty(STDERR_FILENO); Strings attrPaths; Strings left; - RepairFlag repair = NoRepair; BuildMode buildMode = bmNormal; bool readStdin = false; @@ -169,11 +168,6 @@ static void main_nix_build(int argc, char * * argv) else if (*arg == "--dry-run") dryRun = true; - else if (*arg == "--repair") { - repair = Repair; - buildMode = bmRepair; - } - else if (*arg == "--run-env") // obsolete runEnv = true; @@ -249,7 +243,8 @@ static void main_nix_build(int argc, char * * argv) auto evalStore = myArgs.evalStoreUrl ? 
openStore(*myArgs.evalStoreUrl) : store; auto state = std::make_unique(myArgs.searchPath, evalStore, store); - state->repair = repair; + state->repair = myArgs.repair; + if (myArgs.repair) buildMode = bmRepair; auto autoArgs = myArgs.getAutoArgs(*state); @@ -289,7 +284,7 @@ static void main_nix_build(int argc, char * * argv) else for (auto i : left) { if (fromArgs) - exprs.push_back(state->parseExprFromString(std::move(i), absPath("."))); + exprs.push_back(state->parseExprFromString(std::move(i), state->rootPath(CanonPath::fromCwd()))); else { auto absolute = i; try { @@ -385,7 +380,9 @@ static void main_nix_build(int argc, char * * argv) if (!shell) { try { - auto expr = state->parseExprFromString("(import {}).bashInteractive", absPath(".")); + auto expr = state->parseExprFromString( + "(import {}).bashInteractive", + state->rootPath(CanonPath::fromCwd())); Value v; state->eval(expr, v); diff --git a/src/nix-collect-garbage/nix-collect-garbage.cc b/src/nix-collect-garbage/nix-collect-garbage.cc index 3cc57af4e..cb1f42e35 100644 --- a/src/nix-collect-garbage/nix-collect-garbage.cc +++ b/src/nix-collect-garbage/nix-collect-garbage.cc @@ -77,7 +77,12 @@ static int main_nix_collect_garbage(int argc, char * * argv) return true; }); - if (removeOld) removeOldGenerations(profilesDir()); + if (removeOld) { + std::set dirsToClean = { + profilesDir(), settings.nixStateDir + "/profiles", dirOf(getDefaultProfile())}; + for (auto & dir : dirsToClean) + removeOldGenerations(dir); + } // Run the actual garbage collector. if (!dryRun) { diff --git a/src/nix-env/nix-env.cc b/src/nix-env/nix-env.cc index f076ffdb0..5e94f2d14 100644 --- a/src/nix-env/nix-env.cc +++ b/src/nix-env/nix-env.cc @@ -44,7 +44,7 @@ typedef enum { struct InstallSourceInfo { InstallSourceType type; - Path nixExprPath; /* for srcNixExprDrvs, srcNixExprs */ + std::shared_ptr nixExprPath; /* for srcNixExprDrvs, srcNixExprs */ Path profile; /* for srcProfile */ std::string systemFilter; /* for srcNixExprDrvs */ Bindings * autoArgs; @@ -92,9 +92,11 @@ static bool parseInstallSourceOptions(Globals & globals, } -static bool isNixExpr(const Path & path, struct stat & st) +static bool isNixExpr(const SourcePath & path, struct InputAccessor::Stat & st) { - return S_ISREG(st.st_mode) || (S_ISDIR(st.st_mode) && pathExists(path + "/default.nix")); + return + st.type == InputAccessor::tRegular + || (st.type == InputAccessor::tDirectory && (path + "default.nix").pathExists()); } @@ -102,10 +104,10 @@ static constexpr size_t maxAttrs = 1024; static void getAllExprs(EvalState & state, - const Path & path, StringSet & seen, BindingsBuilder & attrs) + const SourcePath & path, StringSet & seen, BindingsBuilder & attrs) { StringSet namesSorted; - for (auto & i : readDirectory(path)) namesSorted.insert(i.name); + for (auto & [name, _] : path.readDirectory()) namesSorted.insert(name); for (auto & i : namesSorted) { /* Ignore the manifest.nix used by profiles. This is @@ -113,13 +115,16 @@ static void getAllExprs(EvalState & state, are implemented using profiles). 
*/ if (i == "manifest.nix") continue; - Path path2 = path + "/" + i; + SourcePath path2 = path + i; - struct stat st; - if (stat(path2.c_str(), &st) == -1) + InputAccessor::Stat st; + try { + st = path2.resolveSymlinks().lstat(); + } catch (Error &) { continue; // ignore dangling symlinks in ~/.nix-defexpr + } - if (isNixExpr(path2, st) && (!S_ISREG(st.st_mode) || hasSuffix(path2, ".nix"))) { + if (isNixExpr(path2, st) && (st.type != InputAccessor::tRegular || hasSuffix(path2.baseName(), ".nix"))) { /* Strip off the `.nix' filename suffix (if applicable), otherwise the attribute cannot be selected with the `-A' option. Useful if you want to stick a Nix @@ -129,21 +134,20 @@ static void getAllExprs(EvalState & state, attrName = std::string(attrName, 0, attrName.size() - 4); if (!seen.insert(attrName).second) { std::string suggestionMessage = ""; - if (path2.find("channels") != std::string::npos && path.find("channels") != std::string::npos) { + if (path2.path.abs().find("channels") != std::string::npos && path.path.abs().find("channels") != std::string::npos) suggestionMessage = fmt("\nsuggestion: remove '%s' from either the root channels or the user channels", attrName); - } printError("warning: name collision in input Nix expressions, skipping '%1%'" "%2%", path2, suggestionMessage); continue; } /* Load the expression on demand. */ auto vArg = state.allocValue(); - vArg->mkString(path2); + vArg->mkString(path2.path.abs()); if (seen.size() == maxAttrs) throw Error("too many Nix expressions in directory '%1%'", path); attrs.alloc(attrName).mkApp(&state.getBuiltin("import"), vArg); } - else if (S_ISDIR(st.st_mode)) + else if (st.type == InputAccessor::tDirectory) /* `path2' is a directory (with no default.nix in it); recurse into it. */ getAllExprs(state, path2, seen, attrs); @@ -152,11 +156,9 @@ static void getAllExprs(EvalState & state, -static void loadSourceExpr(EvalState & state, const Path & path, Value & v) +static void loadSourceExpr(EvalState & state, const SourcePath & path, Value & v) { - struct stat st; - if (stat(path.c_str(), &st) == -1) - throw SysError("getting information about '%1%'", path); + auto st = path.resolveSymlinks().lstat(); if (isNixExpr(path, st)) state.evalFile(path, v); @@ -167,7 +169,7 @@ static void loadSourceExpr(EvalState & state, const Path & path, Value & v) set flat, not nested, to make it easier for a user to have a ~/.nix-defexpr directory that includes some system-wide directory). */ - else if (S_ISDIR(st.st_mode)) { + else if (st.type == InputAccessor::tDirectory) { auto attrs = state.buildBindings(maxAttrs); attrs.alloc("_combineChannels").mkList(0); StringSet seen; @@ -179,7 +181,7 @@ static void loadSourceExpr(EvalState & state, const Path & path, Value & v) } -static void loadDerivations(EvalState & state, Path nixExprPath, +static void loadDerivations(EvalState & state, const SourcePath & nixExprPath, std::string systemFilter, Bindings & autoArgs, const std::string & pathPrefix, DrvInfos & elems) { @@ -390,7 +392,7 @@ static void queryInstSources(EvalState & state, /* Load the derivations from the (default or specified) Nix expression. 
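The reworked `isNixExpr`/`getAllExprs` keep the old rule: a path counts as a Nix expression if it is a regular file, or a directory providing a `default.nix`. A rough standalone equivalent using `std::filesystem` in place of `SourcePath`/`InputAccessor` (a simplification that glosses over symlink resolution):

```cpp
#include <filesystem>
#include <iostream>

namespace fs = std::filesystem;

// A path is treated as a Nix expression if it is a regular file, or a
// directory that provides a default.nix.
static bool looksLikeNixExpr(const fs::path & p)
{
    std::error_code ec;
    auto st = fs::status(p, ec);   // follows symlinks; errors count as "no"
    if (ec) return false;
    return fs::is_regular_file(st)
        || (fs::is_directory(st) && fs::exists(p / "default.nix"));
}

int main()
{
    for (auto p : {"/etc/nixos", "/etc/passwd", "/no/such/path"})
        std::cout << p << " -> " << (looksLikeNixExpr(p) ? "yes" : "no") << "\n";
}
```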
*/ DrvInfos allElems; - loadDerivations(state, instSource.nixExprPath, + loadDerivations(state, *instSource.nixExprPath, instSource.systemFilter, *instSource.autoArgs, "", allElems); elems = filterBySelector(state, allElems, args, newestOnly); @@ -407,10 +409,10 @@ static void queryInstSources(EvalState & state, case srcNixExprs: { Value vArg; - loadSourceExpr(state, instSource.nixExprPath, vArg); + loadSourceExpr(state, *instSource.nixExprPath, vArg); for (auto & i : args) { - Expr * eFun = state.parseExprFromString(i, absPath(".")); + Expr * eFun = state.parseExprFromString(i, state.rootPath(CanonPath::fromCwd())); Value vFun, vTmp; state.eval(eFun, vFun); vTmp.mkApp(&vFun, &vArg); @@ -462,7 +464,7 @@ static void queryInstSources(EvalState & state, case srcAttrPath: { Value vRoot; - loadSourceExpr(state, instSource.nixExprPath, vRoot); + loadSourceExpr(state, *instSource.nixExprPath, vRoot); for (auto & i : args) { Value & v(*findAlongAttrPath(state, i, *instSource.autoArgs, vRoot).first); getDerivations(state, v, "", *instSource.autoArgs, elems, true); @@ -960,7 +962,7 @@ static void queryJSON(Globals & globals, std::vector & elems, bool prin printError("derivation '%s' has invalid meta attribute '%s'", i.queryName(), j); metaObj[j] = nullptr; } else { - PathSet context; + NixStringContext context; metaObj[j] = printValueAsJSON(*globals.state, true, *v, noPos, context); } } @@ -1030,7 +1032,7 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) installedElems = queryInstalled(*globals.state, globals.profile); if (source == sAvailable || compareVersions) - loadDerivations(*globals.state, globals.instSource.nixExprPath, + loadDerivations(*globals.state, *globals.instSource.nixExprPath, globals.instSource.systemFilter, *globals.instSource.autoArgs, attrPath, availElems); @@ -1389,28 +1391,25 @@ static int main_nix_env(int argc, char * * argv) Operation op = 0; std::string opName; bool showHelp = false; - RepairFlag repair = NoRepair; std::string file; Globals globals; globals.instSource.type = srcUnknown; - { - Path nixExprPath = settings.useXDGBaseDirectories ? createNixStateDir() + "/defexpr" : getHome() + "/.nix-defexpr"; - globals.instSource.nixExprPath = nixExprPath; - } globals.instSource.systemFilter = "*"; - if (!pathExists(globals.instSource.nixExprPath)) { + Path nixExprPath = settings.useXDGBaseDirectories ? 
createNixStateDir() + "/defexpr" : getHome() + "/.nix-defexpr"; + + if (!pathExists(nixExprPath)) { try { - createDirs(globals.instSource.nixExprPath); + createDirs(nixExprPath); replaceSymlink( defaultChannelsDir(), - globals.instSource.nixExprPath + "/channels"); + nixExprPath + "/channels"); if (getuid() != 0) replaceSymlink( rootChannelsDir(), - globals.instSource.nixExprPath + "/channels_root"); + nixExprPath + "/channels_root"); } catch (Error &) { } } @@ -1489,8 +1488,6 @@ static int main_nix_env(int argc, char * * argv) globals.instSource.systemFilter = getArg(*arg, arg, end); else if (*arg == "--prebuilt-only" || *arg == "-b") globals.prebuiltOnly = true; - else if (*arg == "--repair") - repair = Repair; else if (*arg != "" && arg->at(0) == '-') { opFlags.push_back(*arg); /* FIXME: hacky */ @@ -1515,10 +1512,12 @@ static int main_nix_env(int argc, char * * argv) auto store = openStore(); globals.state = std::shared_ptr(new EvalState(myArgs.searchPath, store)); - globals.state->repair = repair; + globals.state->repair = myArgs.repair; - if (file != "") - globals.instSource.nixExprPath = lookupFileArg(*globals.state, file); + globals.instSource.nixExprPath = std::make_shared( + file != "" + ? lookupFileArg(*globals.state, file) + : globals.state->rootPath(CanonPath(nixExprPath))); globals.instSource.autoArgs = myArgs.getAutoArgs(*globals.state); diff --git a/src/nix-env/user-env.cc b/src/nix-env/user-env.cc index 745e9e174..9e916abc4 100644 --- a/src/nix-env/user-env.cc +++ b/src/nix-env/user-env.cc @@ -19,10 +19,10 @@ DrvInfos queryInstalled(EvalState & state, const Path & userEnv) DrvInfos elems; if (pathExists(userEnv + "/manifest.json")) throw Error("profile '%s' is incompatible with 'nix-env'; please use 'nix profile' instead", userEnv); - Path manifestFile = userEnv + "/manifest.nix"; + auto manifestFile = userEnv + "/manifest.nix"; if (pathExists(manifestFile)) { Value v; - state.evalFile(manifestFile, v); + state.evalFile(state.rootPath(CanonPath(manifestFile)), v); Bindings & bindings(*state.allocBindings(0)); getDerivations(state, v, "", bindings, elems, false); } @@ -114,14 +114,12 @@ bool createUserEnv(EvalState & state, DrvInfos & elems, Value envBuilder; state.eval(state.parseExprFromString( #include "buildenv.nix.gen.hh" - , "/"), envBuilder); + , state.rootPath(CanonPath::root)), envBuilder); /* Construct a Nix expression that calls the user environment builder with the manifest as argument. */ auto attrs = state.buildBindings(3); - attrs.alloc("manifest").mkString( - state.store->printStorePath(manifestFile), - {state.store->printStorePath(manifestFile)}); + state.mkStorePathString(manifestFile, attrs.alloc("manifest")); attrs.insert(state.symbols.create("derivations"), &manifest); Value args; args.mkAttrs(attrs); @@ -132,7 +130,7 @@ bool createUserEnv(EvalState & state, DrvInfos & elems, /* Evaluate it. 
*/ debug("evaluating user environment builder"); state.forceValue(topLevel, [&]() { return topLevel.determinePos(noPos); }); - PathSet context; + NixStringContext context; Attr & aDrvPath(*topLevel.attrs->find(state.sDrvPath)); auto topLevelDrv = state.coerceToStorePath(aDrvPath.pos, *aDrvPath.value, context, ""); Attr & aOutPath(*topLevel.attrs->find(state.sOutPath)); diff --git a/src/nix-instantiate/nix-instantiate.cc b/src/nix-instantiate/nix-instantiate.cc index 6b5ba595d..446b27e66 100644 --- a/src/nix-instantiate/nix-instantiate.cc +++ b/src/nix-instantiate/nix-instantiate.cc @@ -43,7 +43,7 @@ void processExpr(EvalState & state, const Strings & attrPaths, Value & v(*findAlongAttrPath(state, i, autoArgs, vRoot).first); state.forceValue(v, [&]() { return v.determinePos(noPos); }); - PathSet context; + NixStringContext context; if (evalOnly) { Value vRes; if (autoArgs.empty()) @@ -102,7 +102,6 @@ static int main_nix_instantiate(int argc, char * * argv) bool strict = false; Strings attrPaths; bool wantsReadWrite = false; - RepairFlag repair = NoRepair; struct MyArgs : LegacyArgs, MixEvalArgs { @@ -140,8 +139,6 @@ static int main_nix_instantiate(int argc, char * * argv) xmlOutputSourceLocation = false; else if (*arg == "--strict") strict = true; - else if (*arg == "--repair") - repair = Repair; else if (*arg == "--dry-run") settings.readOnlyMode = true; else if (*arg != "" && arg->at(0) == '-') @@ -160,7 +157,7 @@ static int main_nix_instantiate(int argc, char * * argv) auto evalStore = myArgs.evalStoreUrl ? openStore(*myArgs.evalStoreUrl) : store; auto state = std::make_unique(myArgs.searchPath, evalStore, store); - state->repair = repair; + state->repair = myArgs.repair; Bindings & autoArgs = *myArgs.getAutoArgs(*state); @@ -168,9 +165,11 @@ static int main_nix_instantiate(int argc, char * * argv) if (findFile) { for (auto & i : files) { - Path p = state->findFile(i); - if (p == "") throw Error("unable to find '%1%'", i); - std::cout << p << std::endl; + auto p = state->findFile(i); + if (auto fn = p.getPhysicalPath()) + std::cout << fn->abs() << std::endl; + else + throw Error("'%s' has no physical path", p); } return 0; } @@ -184,7 +183,7 @@ static int main_nix_instantiate(int argc, char * * argv) for (auto & i : files) { Expr * e = fromArgs - ? state->parseExprFromString(i, absPath(".")) + ? state->parseExprFromString(i, state->rootPath(CanonPath::fromCwd())) : state->parseExprFromFile(resolveExprPath(state->checkSourcePath(lookupFileArg(*state, i)))); processExpr(*state, attrPaths, parseOnly, strict, autoArgs, evalOnly, outputKind, xmlOutputSourceLocation, e); diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index 40f30eb63..61c189efb 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -849,7 +849,7 @@ static void opServe(Strings opFlags, Strings opArgs) case cmdQueryValidPaths: { bool lock = readInt(in); bool substitute = readInt(in); - auto paths = worker_proto::read(*store, in, Phantom {}); + auto paths = WorkerProto::read(*store, in); if (lock && writeAllowed) for (auto & path : paths) store->addTempRoot(path); @@ -858,19 +858,19 @@ static void opServe(Strings opFlags, Strings opArgs) store->substitutePaths(paths); } - worker_proto::write(*store, out, store->queryValidPaths(paths)); + workerProtoWrite(*store, out, store->queryValidPaths(paths)); break; } case cmdQueryPathInfos: { - auto paths = worker_proto::read(*store, in, Phantom {}); + auto paths = WorkerProto::read(*store, in); // !!! Maybe we want a queryPathInfos? 
for (auto & i : paths) { try { auto info = store->queryPathInfo(i); out << store->printStorePath(info->path) << (info->deriver ? store->printStorePath(*info->deriver) : ""); - worker_proto::write(*store, out, info->references); + workerProtoWrite(*store, out, info->references); // !!! Maybe we want compression? out << info->narSize // downloadSize << info->narSize; @@ -898,7 +898,7 @@ static void opServe(Strings opFlags, Strings opArgs) case cmdExportPaths: { readInt(in); // obsolete - store->exportPaths(worker_proto::read(*store, in, Phantom {}), out); + store->exportPaths(WorkerProto::read(*store, in), out); break; } @@ -944,7 +944,7 @@ static void opServe(Strings opFlags, Strings opArgs) DrvOutputs builtOutputs; for (auto & [output, realisation] : status.builtOutputs) builtOutputs.insert_or_assign(realisation.id, realisation); - worker_proto::write(*store, out, builtOutputs); + workerProtoWrite(*store, out, builtOutputs); } break; @@ -953,9 +953,9 @@ static void opServe(Strings opFlags, Strings opArgs) case cmdQueryClosure: { bool includeOutputs = readInt(in); StorePathSet closure; - store->computeFSClosure(worker_proto::read(*store, in, Phantom {}), + store->computeFSClosure(WorkerProto::read(*store, in), closure, false, includeOutputs); - worker_proto::write(*store, out, closure); + workerProtoWrite(*store, out, closure); break; } @@ -970,7 +970,7 @@ static void opServe(Strings opFlags, Strings opArgs) }; if (deriver != "") info.deriver = store->parseStorePath(deriver); - info.references = worker_proto::read(*store, in, Phantom {}); + info.references = WorkerProto::read(*store, in); in >> info.registrationTime >> info.narSize >> info.ultimate; info.sigs = readStrings(in); info.ca = ContentAddress::parseOpt(readString(in)); diff --git a/src/nix/app.cc b/src/nix/app.cc index fd4569bb4..e678b54f0 100644 --- a/src/nix/app.cc +++ b/src/nix/app.cc @@ -7,6 +7,7 @@ #include "names.hh" #include "command.hh" #include "derivations.hh" +#include "downstream-placeholder.hh" namespace nix { @@ -23,7 +24,7 @@ StringPairs resolveRewrites( if (auto drvDep = std::get_if(&dep.path)) for (auto & [ outputName, outputPath ] : drvDep->outputs) res.emplace( - downstreamPlaceholder(store, drvDep->drvPath, outputName), + DownstreamPlaceholder::unknownCaOutput(drvDep->drvPath, outputName).render(), store.printStorePath(outputPath) ); return res; diff --git a/src/nix/build.cc b/src/nix/build.cc index 4e133e288..ad1842a4e 100644 --- a/src/nix/build.cc +++ b/src/nix/build.cc @@ -27,8 +27,10 @@ nlohmann::json builtPathsWithResultToJSON(const std::vector std::visit([&](const auto & t) { auto j = t.toJSON(store); if (b.result) { - j["startTime"] = b.result->startTime; - j["stopTime"] = b.result->stopTime; + if (b.result->startTime) + j["startTime"] = b.result->startTime; + if (b.result->stopTime) + j["stopTime"] = b.result->stopTime; if (b.result->cpuUser) j["cpuUser"] = ((double) b.result->cpuUser->count()) / 1000000; if (b.result->cpuSystem) @@ -131,7 +133,8 @@ struct CmdBuild : InstallablesCommand, MixDryRun, MixJSON, MixProfile auto buildables = Installable::build( getEvalStore(), store, Realise::Outputs, - installables, buildMode); + installables, + repair ? 
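The `nix build --json` change emits `startTime`/`stopTime` only when they are set, so consumers no longer see zero placeholders. A minimal sketch of that pattern with `nlohmann::json`; the field names come from the diff, the surrounding struct is made up:

```cpp
#include <iostream>
#include <nlohmann/json.hpp>

struct FakeBuildResult {          // illustrative stand-in, not Nix's BuildResult
    long startTime = 0;
    long stopTime = 0;
};

int main()
{
    FakeBuildResult r{0, 0};      // a result that never actually ran
    nlohmann::json j;
    j["drvPath"] = "/nix/store/...-example.drv";
    if (r.startTime) j["startTime"] = r.startTime;   // omit when unset
    if (r.stopTime)  j["stopTime"]  = r.stopTime;
    std::cout << j.dump(2) << "\n";                  // no bogus zero timestamps
}
```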
bmRepair : buildMode); if (json) logger->cout("%s", builtPathsWithResultToJSON(buildables, store).dump()); diff --git a/src/nix/build.md b/src/nix/build.md index ee414dc86..0fbb39cc3 100644 --- a/src/nix/build.md +++ b/src/nix/build.md @@ -44,7 +44,7 @@ R""( `release.nix`: ```console - # nix build -f release.nix build.x86_64-linux + # nix build --file release.nix build.x86_64-linux ``` * Build a NixOS system configuration from a flake, and make a profile diff --git a/src/nix/bundle.cc b/src/nix/bundle.cc index 57c355f0c..bcc00d490 100644 --- a/src/nix/bundle.cc +++ b/src/nix/bundle.cc @@ -98,7 +98,7 @@ struct CmdBundle : InstallableValueCommand if (!attr1) throw Error("the bundler '%s' does not produce a derivation", bundler.what()); - PathSet context2; + NixStringContext context2; auto drvPath = evalState->coerceToStorePath(attr1->pos, *attr1->value, context2, ""); auto attr2 = vRes->attrs->get(evalState->sOutPath); diff --git a/src/nix/copy.md b/src/nix/copy.md index 25e0ddadc..199006436 100644 --- a/src/nix/copy.md +++ b/src/nix/copy.md @@ -15,7 +15,7 @@ R""( SSH: ```console - # nix copy -s --to ssh://server /run/current-system + # nix copy --substitute-on-destination --to ssh://server /run/current-system ``` The `-s` flag causes the remote machine to try to substitute missing diff --git a/src/nix/daemon.cc b/src/nix/daemon.cc index c1a91c63d..9fe9b3b1e 100644 --- a/src/nix/daemon.cc +++ b/src/nix/daemon.cc @@ -24,6 +24,7 @@ #include #include #include +#include #include #include #include diff --git a/src/nix/develop.cc b/src/nix/develop.cc index 9e2dcff61..195eeaa21 100644 --- a/src/nix/develop.cc +++ b/src/nix/develop.cc @@ -252,7 +252,7 @@ static StorePath getDerivationEnvironment(ref store, ref evalStore throw Error("get-env.sh failed to produce an environment"); } -struct Common : InstallableValueCommand, MixProfile +struct Common : InstallableCommand, MixProfile { std::set ignoreVars{ "BASHOPTS", @@ -374,7 +374,7 @@ struct Common : InstallableValueCommand, MixProfile return res; } - StorePath getShellOutPath(ref store, ref installable) + StorePath getShellOutPath(ref store, ref installable) { auto path = installable->getStorePath(); if (path && hasSuffix(path->to_string(), "-env")) @@ -393,7 +393,7 @@ struct Common : InstallableValueCommand, MixProfile } std::pair - getBuildEnvironment(ref store, ref installable) + getBuildEnvironment(ref store, ref installable) { auto shellOutPath = getShellOutPath(store, installable); @@ -481,7 +481,7 @@ struct CmdDevelop : Common, MixEnvironment ; } - void run(ref store, ref installable) override + void run(ref store, ref installable) override { auto [buildEnvironment, gcroot] = getBuildEnvironment(store, installable); @@ -538,10 +538,14 @@ struct CmdDevelop : Common, MixEnvironment nixpkgsLockFlags.inputOverrides = {}; nixpkgsLockFlags.inputUpdates = {}; + auto nixpkgs = defaultNixpkgsFlakeRef(); + if (auto * i = dynamic_cast(&*installable)) + nixpkgs = i->nixpkgsFlakeRef(); + auto bashInstallable = make_ref( this, state, - installable->nixpkgsFlakeRef(), + std::move(nixpkgs), "bashInteractive", DefaultOutputs(), Strings{}, @@ -605,7 +609,7 @@ struct CmdPrintDevEnv : Common, MixJSON Category category() override { return catUtility; } - void run(ref store, ref installable) override + void run(ref store, ref installable) override { auto buildEnvironment = getBuildEnvironment(store, installable).first; diff --git a/src/nix/develop.md b/src/nix/develop.md index c49b39669..1b5a8aeba 100644 --- a/src/nix/develop.md +++ b/src/nix/develop.md @@ -69,7 
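`nix develop` now accepts plain `Installable`s and only asks for a nixpkgs flake reference when the installable actually is a flake, falling back to the default otherwise. A compact sketch of that fallback via `dynamic_cast`, with stand-in classes rather than the real `Installable` hierarchy:

```cpp
#include <iostream>
#include <string>

// Illustrative stand-ins for Installable / InstallableFlake.
struct Installable { virtual ~Installable() = default; };
struct InstallableFlake : Installable {
    std::string nixpkgsFlakeRef() const { return "github:NixOS/nixpkgs/some-locked-rev"; }
};

static std::string pickNixpkgs(const Installable & installable)
{
    std::string nixpkgs = "flake:nixpkgs";                   // default fallback
    if (auto * i = dynamic_cast<const InstallableFlake *>(&installable))
        nixpkgs = i->nixpkgsFlakeRef();                      // reuse the flake's own nixpkgs
    return nixpkgs;
}

int main()
{
    InstallableFlake fromFlake;
    Installable fromStorePath;
    std::cout << pickNixpkgs(fromFlake) << "\n";
    std::cout << pickNixpkgs(fromStorePath) << "\n";
}
```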
+69,7 @@ R""( * Run a series of script commands: ```console - # nix develop --command bash -c "mkdir build && cmake .. && make" + # nix develop --command bash --command "mkdir build && cmake .. && make" ``` # Description diff --git a/src/nix/eval.cc b/src/nix/eval.cc index 43db5150c..d880bef0a 100644 --- a/src/nix/eval.cc +++ b/src/nix/eval.cc @@ -62,11 +62,11 @@ struct CmdEval : MixJSON, InstallableValueCommand, MixReadOnlyOption auto state = getEvalState(); auto [v, pos] = installable->toValue(*state); - PathSet context; + NixStringContext context; if (apply) { auto vApply = state->allocValue(); - state->eval(state->parseExprFromString(*apply, absPath(".")), *vApply); + state->eval(state->parseExprFromString(*apply, state->rootPath(CanonPath::fromCwd())), *vApply); auto vRes = state->allocValue(); state->callFunction(*vApply, *v, *vRes, noPos); v = vRes; diff --git a/src/nix/eval.md b/src/nix/eval.md index 3b510737a..48d5aa597 100644 --- a/src/nix/eval.md +++ b/src/nix/eval.md @@ -18,7 +18,7 @@ R""( * Evaluate a Nix expression from a file: ```console - # nix eval -f ./my-nixpkgs hello.name + # nix eval --file ./my-nixpkgs hello.name ``` * Get the current version of the `nixpkgs` flake: diff --git a/src/nix/flake-check.md b/src/nix/flake-check.md index 07031c909..c8307f8d8 100644 --- a/src/nix/flake-check.md +++ b/src/nix/flake-check.md @@ -68,6 +68,6 @@ The following flake output attributes must be In addition, the `hydraJobs` output is evaluated in the same way as Hydra's `hydra-eval-jobs` (i.e. as a arbitrarily deeply nested attribute set of derivations). Similarly, the -`legacyPackages`.*system* output is evaluated like `nix-env -qa`. +`legacyPackages`.*system* output is evaluated like `nix-env --query --available `. )"" diff --git a/src/nix/flake.cc b/src/nix/flake.cc index cd4ee5921..1eea52e15 100644 --- a/src/nix/flake.cc +++ b/src/nix/flake.cc @@ -259,6 +259,7 @@ struct CmdFlakeInfo : CmdFlakeMetadata struct CmdFlakeCheck : FlakeCommand { bool build = true; + bool checkAllSystems = false; CmdFlakeCheck() { @@ -267,6 +268,11 @@ struct CmdFlakeCheck : FlakeCommand .description = "Do not build checks.", .handler = {&build, false} }); + addFlag({ + .longName = "all-systems", + .description = "Check the outputs for all systems.", + .handler = {&checkAllSystems, true} + }); } std::string description() override @@ -292,6 +298,7 @@ struct CmdFlakeCheck : FlakeCommand lockFlags.applyNixConfig = true; auto flake = lockFlake(); + auto localSystem = std::string(settings.thisSystem.get()); bool hasErrors = false; auto reportError = [&](const Error & e) { @@ -307,6 +314,8 @@ struct CmdFlakeCheck : FlakeCommand } }; + std::set omittedSystems; + // FIXME: rewrite to use EvalCache. 
auto resolve = [&] (PosIdx p) { @@ -327,6 +336,15 @@ struct CmdFlakeCheck : FlakeCommand reportError(Error("'%s' is not a valid system type, at %s", system, resolve(pos))); }; + auto checkSystemType = [&](const std::string & system, const PosIdx pos) { + if (!checkAllSystems && system != localSystem) { + omittedSystems.insert(system); + return false; + } else { + return true; + } + }; + auto checkDerivation = [&](const std::string & attrPath, Value & v, const PosIdx pos) -> std::optional { try { auto drvInfo = getDerivation(*state, v, false); @@ -438,10 +456,10 @@ struct CmdFlakeCheck : FlakeCommand if (auto attr = v.attrs->get(state->symbols.create("path"))) { if (attr->name == state->symbols.create("path")) { - PathSet context; + NixStringContext context; auto path = state->coerceToPath(attr->pos, *attr->value, context, ""); - if (!store->isInStore(path)) - throw Error("template '%s' has a bad 'path' attribute"); + if (!path.pathExists()) + throw Error("template '%s' refers to a non-existent path '%s'", attrPath, path); // TODO: recursively check the flake in 'path'. } } else @@ -509,16 +527,18 @@ struct CmdFlakeCheck : FlakeCommand for (auto & attr : *vOutput.attrs) { const auto & attr_name = state->symbols[attr.name]; checkSystemName(attr_name, attr.pos); - state->forceAttrs(*attr.value, attr.pos, ""); - for (auto & attr2 : *attr.value->attrs) { - auto drvPath = checkDerivation( - fmt("%s.%s.%s", name, attr_name, state->symbols[attr2.name]), - *attr2.value, attr2.pos); - if (drvPath && attr_name == settings.thisSystem.get()) { - drvPaths.push_back(DerivedPath::Built { - .drvPath = *drvPath, - .outputs = OutputsSpec::All { }, - }); + if (checkSystemType(attr_name, attr.pos)) { + state->forceAttrs(*attr.value, attr.pos, ""); + for (auto & attr2 : *attr.value->attrs) { + auto drvPath = checkDerivation( + fmt("%s.%s.%s", name, attr_name, state->symbols[attr2.name]), + *attr2.value, attr2.pos); + if (drvPath && attr_name == settings.thisSystem.get()) { + drvPaths.push_back(DerivedPath::Built { + .drvPath = *drvPath, + .outputs = OutputsSpec::All { }, + }); + } } } } @@ -529,9 +549,11 @@ struct CmdFlakeCheck : FlakeCommand for (auto & attr : *vOutput.attrs) { const auto & attr_name = state->symbols[attr.name]; checkSystemName(attr_name, attr.pos); - checkApp( - fmt("%s.%s", name, attr_name), - *attr.value, attr.pos); + if (checkSystemType(attr_name, attr.pos)) { + checkApp( + fmt("%s.%s", name, attr_name), + *attr.value, attr.pos); + }; } } @@ -540,11 +562,13 @@ struct CmdFlakeCheck : FlakeCommand for (auto & attr : *vOutput.attrs) { const auto & attr_name = state->symbols[attr.name]; checkSystemName(attr_name, attr.pos); - state->forceAttrs(*attr.value, attr.pos, ""); - for (auto & attr2 : *attr.value->attrs) - checkDerivation( - fmt("%s.%s.%s", name, attr_name, state->symbols[attr2.name]), - *attr2.value, attr2.pos); + if (checkSystemType(attr_name, attr.pos)) { + state->forceAttrs(*attr.value, attr.pos, ""); + for (auto & attr2 : *attr.value->attrs) + checkDerivation( + fmt("%s.%s.%s", name, attr_name, state->symbols[attr2.name]), + *attr2.value, attr2.pos); + }; } } @@ -553,11 +577,13 @@ struct CmdFlakeCheck : FlakeCommand for (auto & attr : *vOutput.attrs) { const auto & attr_name = state->symbols[attr.name]; checkSystemName(attr_name, attr.pos); - state->forceAttrs(*attr.value, attr.pos, ""); - for (auto & attr2 : *attr.value->attrs) - checkApp( - fmt("%s.%s.%s", name, attr_name, state->symbols[attr2.name]), - *attr2.value, attr2.pos); + if (checkSystemType(attr_name, attr.pos)) { + 
state->forceAttrs(*attr.value, attr.pos, ""); + for (auto & attr2 : *attr.value->attrs) + checkApp( + fmt("%s.%s.%s", name, attr_name, state->symbols[attr2.name]), + *attr2.value, attr2.pos); + }; } } @@ -566,9 +592,11 @@ struct CmdFlakeCheck : FlakeCommand for (auto & attr : *vOutput.attrs) { const auto & attr_name = state->symbols[attr.name]; checkSystemName(attr_name, attr.pos); - checkDerivation( - fmt("%s.%s", name, attr_name), - *attr.value, attr.pos); + if (checkSystemType(attr_name, attr.pos)) { + checkDerivation( + fmt("%s.%s", name, attr_name), + *attr.value, attr.pos); + }; } } @@ -577,9 +605,11 @@ struct CmdFlakeCheck : FlakeCommand for (auto & attr : *vOutput.attrs) { const auto & attr_name = state->symbols[attr.name]; checkSystemName(attr_name, attr.pos); - checkApp( - fmt("%s.%s", name, attr_name), - *attr.value, attr.pos); + if (checkSystemType(attr_name, attr.pos) ) { + checkApp( + fmt("%s.%s", name, attr_name), + *attr.value, attr.pos); + }; } } @@ -587,6 +617,7 @@ struct CmdFlakeCheck : FlakeCommand state->forceAttrs(vOutput, pos, ""); for (auto & attr : *vOutput.attrs) { checkSystemName(state->symbols[attr.name], attr.pos); + checkSystemType(state->symbols[attr.name], attr.pos); // FIXME: do getDerivations? } } @@ -636,9 +667,11 @@ struct CmdFlakeCheck : FlakeCommand for (auto & attr : *vOutput.attrs) { const auto & attr_name = state->symbols[attr.name]; checkSystemName(attr_name, attr.pos); - checkBundler( - fmt("%s.%s", name, attr_name), - *attr.value, attr.pos); + if (checkSystemType(attr_name, attr.pos)) { + checkBundler( + fmt("%s.%s", name, attr_name), + *attr.value, attr.pos); + }; } } @@ -647,12 +680,14 @@ struct CmdFlakeCheck : FlakeCommand for (auto & attr : *vOutput.attrs) { const auto & attr_name = state->symbols[attr.name]; checkSystemName(attr_name, attr.pos); - state->forceAttrs(*attr.value, attr.pos, ""); - for (auto & attr2 : *attr.value->attrs) { - checkBundler( - fmt("%s.%s.%s", name, attr_name, state->symbols[attr2.name]), - *attr2.value, attr2.pos); - } + if (checkSystemType(attr_name, attr.pos)) { + state->forceAttrs(*attr.value, attr.pos, ""); + for (auto & attr2 : *attr.value->attrs) { + checkBundler( + fmt("%s.%s.%s", name, attr_name, state->symbols[attr2.name]), + *attr2.value, attr2.pos); + } + }; } } @@ -685,7 +720,15 @@ struct CmdFlakeCheck : FlakeCommand } if (hasErrors) throw Error("some errors were encountered during the evaluation"); - } + + if (!omittedSystems.empty()) { + warn( + "The check omitted these incompatible systems: %s\n" + "Use '--all-systems' to check all.", + concatStringsSep(", ", omittedSystems) + ); + }; + }; }; static Strings defaultTemplateAttrPathsPrefixes{"templates."}; diff --git a/src/nix/local.mk b/src/nix/local.mk index 0f2f016ec..20ea29d10 100644 --- a/src/nix/local.mk +++ b/src/nix/local.mk @@ -32,3 +32,9 @@ src/nix/develop.cc: src/nix/get-env.sh.gen.hh src/nix-channel/nix-channel.cc: src/nix-channel/unpack-channel.nix.gen.hh src/nix/main.cc: doc/manual/generate-manpage.nix.gen.hh doc/manual/utils.nix.gen.hh + +src/nix/doc/files/%.md: doc/manual/src/command-ref/files/%.md + @mkdir -p $$(dirname $@) + @cp $< $@ + +src/nix/profile.cc: src/nix/profile.md src/nix/doc/files/profiles.md.gen.hh diff --git a/src/nix/main.cc b/src/nix/main.cc index 705061d25..ce0bed2a3 100644 --- a/src/nix/main.cc +++ b/src/nix/main.cc @@ -201,14 +201,14 @@ static void showHelp(std::vector subcommand, NixArgs & toplevel) auto vGenerateManpage = state.allocValue(); state.eval(state.parseExprFromString( #include 
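The `--all-systems` logic in `nix flake check` skips outputs for systems other than the local one, remembers what it skipped, and prints a single warning at the end. A standalone sketch of that filtering (hard-coded example data, not the real `CmdFlakeCheck`):

```cpp
#include <iostream>
#include <set>
#include <string>
#include <vector>

int main()
{
    const std::string localSystem = "x86_64-linux";
    const bool checkAllSystems = false;   // i.e. --all-systems not given

    std::set<std::string> omittedSystems;
    auto checkSystemType = [&](const std::string & system) {
        if (!checkAllSystems && system != localSystem) {
            omittedSystems.insert(system);   // remember, but skip the checks
            return false;
        }
        return true;
    };

    for (auto & system : std::vector<std::string>{"x86_64-linux", "aarch64-darwin"})
        if (checkSystemType(system))
            std::cout << "checking outputs for " << system << "\n";

    if (!omittedSystems.empty()) {
        std::cerr << "warning: omitted incompatible systems:";
        for (auto & s : omittedSystems) std::cerr << " " << s;
        std::cerr << "\nUse '--all-systems' to check all.\n";
    }
}
```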
"generate-manpage.nix.gen.hh" - , "/"), *vGenerateManpage); + , CanonPath::root), *vGenerateManpage); auto vUtils = state.allocValue(); state.cacheFile( - "/utils.nix", "/utils.nix", + CanonPath("/utils.nix"), CanonPath("/utils.nix"), state.parseExprFromString( #include "utils.nix.gen.hh" - , "/"), + , CanonPath::root), *vUtils); auto vDump = state.allocValue(); diff --git a/src/nix/nar-ls.md b/src/nix/nar-ls.md index d373f9715..5a03c5d82 100644 --- a/src/nix/nar-ls.md +++ b/src/nix/nar-ls.md @@ -5,7 +5,7 @@ R""( * To list a specific file in a NAR: ```console - # nix nar ls -l ./hello.nar /bin/hello + # nix nar ls --long ./hello.nar /bin/hello -r-xr-xr-x 38184 hello ``` @@ -13,7 +13,7 @@ R""( format: ```console - # nix nar ls --json -R ./hello.nar /bin + # nix nar ls --json --recursive ./hello.nar /bin {"type":"directory","entries":{"hello":{"type":"regular","size":38184,"executable":true,"narOffset":400}}} ``` diff --git a/src/nix/nix.md b/src/nix/nix.md index 1ef6c7fcd..6d9e40dbc 100644 --- a/src/nix/nix.md +++ b/src/nix/nix.md @@ -102,6 +102,7 @@ way: available in the flake. If this is undesirable, specify `path:` explicitly; For example, if `/foo/bar` is a git repository with the following structure: + ``` . └── baz @@ -197,7 +198,7 @@ operate are determined as follows: of all outputs of the `glibc` package in the binary cache: ```console - # nix path-info -S --eval-store auto --store https://cache.nixos.org 'nixpkgs#glibc^*' + # nix path-info --closure-size --eval-store auto --store https://cache.nixos.org 'nixpkgs#glibc^*' /nix/store/g02b1lpbddhymmcjb923kf0l7s9nww58-glibc-2.33-123 33208200 /nix/store/851dp95qqiisjifi639r0zzg5l465ny4-glibc-2.33-123-bin 36142896 /nix/store/kdgs3q6r7xdff1p7a9hnjr43xw2404z7-glibc-2.33-123-debug 155787312 @@ -208,7 +209,7 @@ operate are determined as follows: and likewise, using a store path to a "drv" file to specify the derivation: ```console - # nix path-info -S '/nix/store/gzaflydcr6sb3567hap9q6srzx8ggdgg-glibc-2.33-78.drv^*' + # nix path-info --closure-size '/nix/store/gzaflydcr6sb3567hap9q6srzx8ggdgg-glibc-2.33-78.drv^*' … ``` * If you didn't specify the desired outputs, but the derivation has an diff --git a/src/nix/path-info.md b/src/nix/path-info.md index 6ad23a02e..2dda866d0 100644 --- a/src/nix/path-info.md +++ b/src/nix/path-info.md @@ -13,7 +13,7 @@ R""( closure, sorted by size: ```console - # nix path-info -rS /run/current-system | sort -nk2 + # nix path-info --recursive --closure-size /run/current-system | sort -nk2 /nix/store/hl5xwp9kdrd1zkm0idm3kkby9q66z404-empty 96 /nix/store/27324qvqhnxj3rncazmxc4mwy79kz8ha-nameservers 112 … @@ -25,7 +25,7 @@ R""( readable sizes: ```console - # nix path-info -rsSh nixpkgs#rustc + # nix path-info --recursive --size --closure-size --human-readable nixpkgs#rustc /nix/store/01rrgsg5zk3cds0xgdsq40zpk6g51dz9-ncurses-6.2-dev 386.7K 69.1M /nix/store/0q783wnvixpqz6dxjp16nw296avgczam-libpfm-4.11.0 5.9M 37.4M … @@ -34,7 +34,7 @@ R""( * Check the existence of a path in a binary cache: ```console - # nix path-info -r /nix/store/blzxgyvrk32ki6xga10phr4sby2xf25q-geeqie-1.5.1 --store https://cache.nixos.org/ + # nix path-info --recursive /nix/store/blzxgyvrk32ki6xga10phr4sby2xf25q-geeqie-1.5.1 --store https://cache.nixos.org/ path '/nix/store/blzxgyvrk32ki6xga10phr4sby2xf25q-geeqie-1.5.1' is not valid ``` @@ -57,7 +57,7 @@ R""( size: ```console - # nix path-info --json --all -S \ + # nix path-info --json --all --closure-size \ | jq 'map(select(.closureSize > 1e9)) | sort_by(.closureSize) | map([.path, .closureSize])' [ 
…, diff --git a/src/nix/prefetch.cc b/src/nix/prefetch.cc index 56e7bbb6e..3b2e225f6 100644 --- a/src/nix/prefetch.cc +++ b/src/nix/prefetch.cc @@ -27,7 +27,10 @@ std::string resolveMirrorUrl(EvalState & state, const std::string & url) Value vMirrors; // FIXME: use nixpkgs flake - state.eval(state.parseExprFromString("import ", "."), vMirrors); + state.eval(state.parseExprFromString( + "import ", + state.rootPath(CanonPath::root)), + vMirrors); state.forceAttrs(vMirrors, noPos, "while evaluating the set of all mirrors"); auto mirrorList = vMirrors.attrs->find(state.symbols.create(mirrorName)); @@ -198,9 +201,11 @@ static int main_nix_prefetch_url(int argc, char * * argv) throw UsageError("you must specify a URL"); url = args[0]; } else { - Path path = resolveExprPath(lookupFileArg(*state, args.empty() ? "." : args[0])); Value vRoot; - state->evalFile(path, vRoot); + state->evalFile( + resolveExprPath( + lookupFileArg(*state, args.empty() ? "." : args[0])), + vRoot); Value & v(*findAlongAttrPath(*state, attrPath, autoArgs, vRoot).first); state->forceAttrs(v, noPos, "while evaluating the source attribute to prefetch"); diff --git a/src/nix/profile.cc b/src/nix/profile.cc index fd63b3519..7cea616d2 100644 --- a/src/nix/profile.cc +++ b/src/nix/profile.cc @@ -31,6 +31,11 @@ struct ProfileElementSource std::tuple(originalRef.to_string(), attrPath, outputs) < std::tuple(other.originalRef.to_string(), other.attrPath, other.outputs); } + + std::string to_string() const + { + return fmt("%s#%s%s", originalRef, attrPath, outputs.to_string()); + } }; const int defaultPriority = 5; @@ -42,16 +47,30 @@ struct ProfileElement bool active = true; int priority = defaultPriority; - std::string describe() const + std::string identifier() const { if (source) - return fmt("%s#%s%s", source->originalRef, source->attrPath, source->outputs.to_string()); + return source->to_string(); StringSet names; for (auto & path : storePaths) names.insert(DrvName(path.name()).name); return concatStringsSep(", ", names); } + /** + * Return a string representing an installable corresponding to the current + * element, either a flakeref or a plain store path + */ + std::set toInstallables(Store & store) + { + if (source) + return {source->to_string()}; + StringSet rawPaths; + for (auto & path : storePaths) + rawPaths.insert(store.printStorePath(path)); + return rawPaths; + } + std::string versions() const { StringSet versions; @@ -62,7 +81,7 @@ struct ProfileElement bool operator < (const ProfileElement & other) const { - return std::tuple(describe(), storePaths) < std::tuple(other.describe(), other.storePaths); + return std::tuple(identifier(), storePaths) < std::tuple(other.identifier(), other.storePaths); } void updateStorePaths( @@ -237,13 +256,13 @@ struct ProfileManifest bool changes = false; while (i != prevElems.end() || j != curElems.end()) { - if (j != curElems.end() && (i == prevElems.end() || i->describe() > j->describe())) { - logger->cout("%s%s: ∅ -> %s", indent, j->describe(), j->versions()); + if (j != curElems.end() && (i == prevElems.end() || i->identifier() > j->identifier())) { + logger->cout("%s%s: ∅ -> %s", indent, j->identifier(), j->versions()); changes = true; ++j; } - else if (i != prevElems.end() && (j == curElems.end() || i->describe() < j->describe())) { - logger->cout("%s%s: %s -> ∅", indent, i->describe(), i->versions()); + else if (i != prevElems.end() && (j == curElems.end() || i->identifier() < j->identifier())) { + logger->cout("%s%s: %s -> ∅", indent, i->identifier(), i->versions()); changes = 
true; ++i; } @@ -251,7 +270,7 @@ struct ProfileManifest auto v1 = i->versions(); auto v2 = j->versions(); if (v1 != v2) { - logger->cout("%s%s: %s -> %s", indent, i->describe(), v1, v2); + logger->cout("%s%s: %s -> %s", indent, i->identifier(), v1, v2); changes = true; } ++i; @@ -363,10 +382,10 @@ struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile auto profileElement = *it; for (auto & storePath : profileElement.storePaths) { if (conflictError.fileA.starts_with(store->printStorePath(storePath))) { - return std::pair(conflictError.fileA, profileElement.source->originalRef); + return std::pair(conflictError.fileA, profileElement.toInstallables(*store)); } if (conflictError.fileB.starts_with(store->printStorePath(storePath))) { - return std::pair(conflictError.fileB, profileElement.source->originalRef); + return std::pair(conflictError.fileB, profileElement.toInstallables(*store)); } } } @@ -375,9 +394,9 @@ struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile // There are 2 conflicting files. We need to find out which one is from the already installed package and // which one is the package that is the new package that is being installed. // The first matching package is the one that was already installed (original). - auto [originalConflictingFilePath, originalConflictingRef] = findRefByFilePath(manifest.elements.begin(), manifest.elements.end()); + auto [originalConflictingFilePath, originalConflictingRefs] = findRefByFilePath(manifest.elements.begin(), manifest.elements.end()); // The last matching package is the one that was going to be installed (new). - auto [newConflictingFilePath, newConflictingRef] = findRefByFilePath(manifest.elements.rbegin(), manifest.elements.rend()); + auto [newConflictingFilePath, newConflictingRefs] = findRefByFilePath(manifest.elements.rbegin(), manifest.elements.rend()); throw Error( "An existing package already provides the following file:\n" @@ -403,8 +422,8 @@ struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile " nix profile install %4% --priority %7%\n", originalConflictingFilePath, newConflictingFilePath, - originalConflictingRef.to_string(), - newConflictingRef.to_string(), + concatStringsSep(" ", originalConflictingRefs), + concatStringsSep(" ", newConflictingRefs), conflictError.priority, conflictError.priority - 1, conflictError.priority + 1 @@ -491,7 +510,7 @@ struct CmdProfileRemove : virtual EvalCommand, MixDefaultProfile, MixProfileElem if (!matches(*store, element, i, matchers)) { newManifest.elements.push_back(std::move(element)); } else { - notice("removing '%s'", element.describe()); + notice("removing '%s'", element.identifier()); } } diff --git a/src/nix/profile.md b/src/nix/profile.md index bf61ef4b9..bd13f906f 100644 --- a/src/nix/profile.md +++ b/src/nix/profile.md @@ -7,100 +7,39 @@ profile is a set of packages that can be installed and upgraded independently from each other. Nix profiles are versioned, allowing them to be rolled back easily. -# Default profile - -The default profile used by `nix profile` is `$HOME/.nix-profile`, -which, if it does not exist, is created as a symlink to -`/nix/var/nix/profiles/default` if Nix is invoked by the -`root` user, or `${XDG_STATE_HOME-$HOME/.local/state}/nix/profiles/profile` otherwise. - -You can specify another profile location using `--profile` *path*. - -# Filesystem layout - -Profiles are versioned as follows. When using profile *path*, *path* -is a symlink to *path*`-`*N*, where *N* is the current *version* of -the profile. 
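`nix profile` now identifies an element by its flake reference when the provenance is known and falls back to the raw store paths otherwise, so the conflict message can always suggest something installable. A simplified sketch of that fallback; `Element` here is a stand-in, not the real `ProfileElement`:

```cpp
#include <iostream>
#include <optional>
#include <set>
#include <string>

struct Element {
    std::optional<std::string> flakeRef;   // e.g. "flake:nixpkgs#hello", if known
    std::set<std::string> storePaths;

    // Return something the user could pass back to `nix profile install`.
    std::set<std::string> toInstallables() const {
        if (flakeRef) return {*flakeRef};
        return storePaths;                 // plain store paths still work
    }
};

int main()
{
    Element fromFlake{ std::string("flake:nixpkgs#hello"), {"/nix/store/...-hello-2.12"} };
    Element fromPath { std::nullopt,                        {"/nix/store/...-foo-1.0"} };
    for (auto & e : {fromFlake, fromPath})
        for (auto & i : e.toInstallables())
            std::cout << i << "\n";
}
```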
In turn, *path*`-`*N* is a symlink to a path in the Nix -store. For example: - -```console -$ ls -l ~alice/.local/state/nix/profiles/profile* -lrwxrwxrwx 1 alice users 14 Nov 25 14:35 /home/alice/.local/state/nix/profiles/profile -> profile-7-link -lrwxrwxrwx 1 alice users 51 Oct 28 16:18 /home/alice/.local/state/nix/profiles/profile-5-link -> /nix/store/q69xad13ghpf7ir87h0b2gd28lafjj1j-profile -lrwxrwxrwx 1 alice users 51 Oct 29 13:20 /home/alice/.local/state/nix/profiles/profile-6-link -> /nix/store/6bvhpysd7vwz7k3b0pndn7ifi5xr32dg-profile -lrwxrwxrwx 1 alice users 51 Nov 25 14:35 /home/alice/.local/state/nix/profiles/profile-7-link -> /nix/store/mp0x6xnsg0b8qhswy6riqvimai4gm677-profile -``` - -Each of these symlinks is a root for the Nix garbage collector. - -The contents of the store path corresponding to each version of the -profile is a tree of symlinks to the files of the installed packages, -e.g. - -```console -$ ll -R ~eelco/.local/state/nix/profiles/profile-7-link/ -/home/eelco/.local/state/nix/profiles/profile-7-link/: -total 20 -dr-xr-xr-x 2 root root 4096 Jan 1 1970 bin --r--r--r-- 2 root root 1402 Jan 1 1970 manifest.json -dr-xr-xr-x 4 root root 4096 Jan 1 1970 share - -/home/eelco/.local/state/nix/profiles/profile-7-link/bin: -total 20 -lrwxrwxrwx 5 root root 79 Jan 1 1970 chromium -> /nix/store/ijm5k0zqisvkdwjkc77mb9qzb35xfi4m-chromium-86.0.4240.111/bin/chromium -lrwxrwxrwx 7 root root 87 Jan 1 1970 spotify -> /nix/store/w9182874m1bl56smps3m5zjj36jhp3rn-spotify-1.1.26.501.gbe11e53b-15/bin/spotify -lrwxrwxrwx 3 root root 79 Jan 1 1970 zoom-us -> /nix/store/wbhg2ga8f3h87s9h5k0slxk0m81m4cxl-zoom-us-5.3.469451.0927/bin/zoom-us - -/home/eelco/.local/state/nix/profiles/profile-7-link/share/applications: -total 12 -lrwxrwxrwx 4 root root 120 Jan 1 1970 chromium-browser.desktop -> /nix/store/4cf803y4vzfm3gyk3vzhzb2327v0kl8a-chromium-unwrapped-86.0.4240.111/share/applications/chromium-browser.desktop -lrwxrwxrwx 7 root root 110 Jan 1 1970 spotify.desktop -> /nix/store/w9182874m1bl56smps3m5zjj36jhp3rn-spotify-1.1.26.501.gbe11e53b-15/share/applications/spotify.desktop -lrwxrwxrwx 3 root root 107 Jan 1 1970 us.zoom.Zoom.desktop -> /nix/store/wbhg2ga8f3h87s9h5k0slxk0m81m4cxl-zoom-us-5.3.469451.0927/share/applications/us.zoom.Zoom.desktop - -… -``` - -The file `manifest.json` records the provenance of the packages that -are installed in this version of the profile. It looks like this: - -```json -{ - "version": 1, - "elements": [ - { - "active": true, - "attrPath": "legacyPackages.x86_64-linux.zoom-us", - "originalUrl": "flake:nixpkgs", - "storePaths": [ - "/nix/store/wbhg2ga8f3h87s9h5k0slxk0m81m4cxl-zoom-us-5.3.469451.0927" - ], - "uri": "github:NixOS/nixpkgs/13d0c311e3ae923a00f734b43fd1d35b47d8943a" - }, - … - ] -} -``` - -Each object in the array `elements` denotes an installed package and -has the following fields: - -* `originalUrl`: The [flake reference](./nix3-flake.md) specified by - the user at the time of installation (e.g. `nixpkgs`). This is also - the flake reference that will be used by `nix profile upgrade`. - -* `uri`: The locked flake reference to which `originalUrl` resolved. - -* `attrPath`: The flake output attribute that provided this - package. Note that this is not necessarily the attribute that the - user specified, but the one resulting from applying the default - attribute paths and prefixes; for instance, `hello` might resolve to - `packages.x86_64-linux.hello` and the empty string to - `packages.x86_64-linux.default`. 
- -* `storePath`: The paths in the Nix store containing the package. - -* `active`: Whether the profile contains symlinks to the files of this - package. If set to false, the package is kept in the Nix store, but - is not "visible" in the profile's symlink tree. +# Files + +)"" + +#include "doc/files/profiles.md.gen.hh" + +R""( + +### Profile compatibility + +> **Warning** +> +> Once you have used [`nix profile`] you can no longer use [`nix-env`] without first deleting `$XDG_STATE_HOME/nix/profiles/profile` + +[`nix-env`]: @docroot@/command-ref/nix-env.md +[`nix profile`]: @docroot@/command-ref/new-cli/nix3-profile.md + +Once you installed a package with [`nix profile`], you get the following error message when using [`nix-env`]: + +```console +$ nix-env -f '' -iA 'hello' +error: nix-env +profile '/home/alice/.local/state/nix/profiles/profile' is incompatible with 'nix-env'; please use 'nix profile' instead +``` + +To migrate back to `nix-env` you can delete your current profile: + +> **Warning** +> +> This will delete packages that have been installed before, so you may want to back up this information before running the command. + +```console + $ rm -rf "${XDG_STATE_HOME-$HOME/.local/state}/nix/profiles/profile" +``` )"" diff --git a/src/nix/search.md b/src/nix/search.md index 4caa90654..0c5d22549 100644 --- a/src/nix/search.md +++ b/src/nix/search.md @@ -52,12 +52,12 @@ R""( * Search for packages containing `neovim` but hide ones containing either `gui` or `python`: ```console - # nix search nixpkgs neovim -e 'python|gui' + # nix search nixpkgs neovim --exclude 'python|gui' ``` or ```console - # nix search nixpkgs neovim -e 'python' -e 'gui' + # nix search nixpkgs neovim --exclude 'python' --exclude 'gui' ``` # Description diff --git a/src/nix/shell.md b/src/nix/shell.md index 13a389103..1668104b1 100644 --- a/src/nix/shell.md +++ b/src/nix/shell.md @@ -19,26 +19,26 @@ R""( * Run GNU Hello: ```console - # nix shell nixpkgs#hello -c hello --greeting 'Hi everybody!' + # nix shell nixpkgs#hello --command hello --greeting 'Hi everybody!' Hi everybody! 
``` * Run multiple commands in a shell environment: ```console - # nix shell nixpkgs#gnumake -c sh -c "cd src && make" + # nix shell nixpkgs#gnumake --command sh --command "cd src && make" ``` * Run GNU Hello in a chroot store: ```console - # nix shell --store ~/my-nix nixpkgs#hello -c hello + # nix shell --store ~/my-nix nixpkgs#hello --command hello ``` * Start a shell providing GNU Hello in a chroot store: ```console - # nix shell --store ~/my-nix nixpkgs#hello nixpkgs#bashInteractive -c bash + # nix shell --store ~/my-nix nixpkgs#hello nixpkgs#bashInteractive --command bash ``` Note that it's necessary to specify `bash` explicitly because your diff --git a/src/nix/store-ls.md b/src/nix/store-ls.md index 836efce42..14c4627c9 100644 --- a/src/nix/store-ls.md +++ b/src/nix/store-ls.md @@ -5,7 +5,7 @@ R""( * To list the contents of a store path in a binary cache: ```console - # nix store ls --store https://cache.nixos.org/ -lR /nix/store/0i2jd68mp5g6h2sa5k9c85rb80sn8hi9-hello-2.10 + # nix store ls --store https://cache.nixos.org/ --long --recursive /nix/store/0i2jd68mp5g6h2sa5k9c85rb80sn8hi9-hello-2.10 dr-xr-xr-x 0 ./bin -r-xr-xr-x 38184 ./bin/hello dr-xr-xr-x 0 ./share @@ -15,7 +15,7 @@ R""( * To show information about a specific file in a binary cache: ```console - # nix store ls --store https://cache.nixos.org/ -l /nix/store/0i2jd68mp5g6h2sa5k9c85rb80sn8hi9-hello-2.10/bin/hello + # nix store ls --store https://cache.nixos.org/ --long /nix/store/0i2jd68mp5g6h2sa5k9c85rb80sn8hi9-hello-2.10/bin/hello -r-xr-xr-x 38184 hello ``` diff --git a/src/nix/upgrade-nix.cc b/src/nix/upgrade-nix.cc index 2295d86d0..3997c98bf 100644 --- a/src/nix/upgrade-nix.cc +++ b/src/nix/upgrade-nix.cc @@ -148,7 +148,7 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand auto state = std::make_unique(Strings(), store); auto v = state->allocValue(); - state->eval(state->parseExprFromString(res.data, "/no-such-path"), *v); + state->eval(state->parseExprFromString(res.data, state->rootPath(CanonPath("/no-such-path"))), *v); Bindings & bindings(*state->allocBindings(0)); auto v2 = findAlongAttrPath(*state, settings.thisSystem, bindings, *v).first; diff --git a/src/nix/upgrade-nix.md b/src/nix/upgrade-nix.md index 08757aebd..cce88c397 100644 --- a/src/nix/upgrade-nix.md +++ b/src/nix/upgrade-nix.md @@ -11,7 +11,7 @@ R""( * Upgrade Nix in a specific profile: ```console - # nix upgrade-nix -p ~alice/.local/state/nix/profiles/profile + # nix upgrade-nix --profile ~alice/.local/state/nix/profiles/profile ``` # Description diff --git a/src/nix/verify.md b/src/nix/verify.md index cc1122c02..e1d55eab4 100644 --- a/src/nix/verify.md +++ b/src/nix/verify.md @@ -12,7 +12,7 @@ R""( signatures: ```console - # nix store verify -r -n2 --no-contents $(type -p firefox) + # nix store verify --recursive --sigs-needed 2 --no-contents $(type -p firefox) ``` * Verify a store path in the binary cache `https://cache.nixos.org/`: diff --git a/tests/build-remote-trustless-should-fail-0.sh b/tests/build-remote-trustless-should-fail-0.sh index 5e3d5ae07..fad1def59 100644 --- a/tests/build-remote-trustless-should-fail-0.sh +++ b/tests/build-remote-trustless-should-fail-0.sh @@ -17,13 +17,13 @@ nix-build build-hook.nix -A passthru.input2 \ --store "$TEST_ROOT/local" \ --option system-features bar -# Now when we go to build that downstream derivation, Nix will fail -# because we cannot trustlessly build input-addressed derivations with -# `inputDrv` dependencies. 
+# Now when we go to build that downstream derivation, Nix will try to +# copy our already-build `input2` to the remote store. That store object +# is input-addressed, so this will fail. file=build-hook.nix prog=$(readlink -e ./nix-daemon-untrusting.sh) proto=ssh-ng expectStderr 1 source build-remote-trustless.sh \ - | grepQuiet "you are not privileged to build input-addressed derivations" + | grepQuiet "cannot add path '[^ ]*' because it lacks a signature by a trusted key" diff --git a/tests/build-remote-trustless-should-pass-2.sh b/tests/build-remote-trustless-should-pass-2.sh new file mode 100644 index 000000000..b769a88f0 --- /dev/null +++ b/tests/build-remote-trustless-should-pass-2.sh @@ -0,0 +1,13 @@ +source common.sh + +enableFeatures "daemon-trust-override" + +restartDaemon + +# Remote doesn't trust us +file=build-hook.nix +prog=$(readlink -e ./nix-daemon-untrusting.sh) +proto=ssh-ng + +source build-remote-trustless.sh +source build-remote-trustless-after.sh diff --git a/tests/build.sh b/tests/build.sh index b579fc374..697aff0f9 100644 --- a/tests/build.sh +++ b/tests/build.sh @@ -57,6 +57,30 @@ nix build -f multiple-outputs.nix --json 'e^*' --no-link | jq --exit-status ' (.outputs | keys == ["a_a", "b", "c"])) ' +# test buidling from non-drv attr path + +nix build -f multiple-outputs.nix --json 'e.a_a.outPath' --no-link | jq --exit-status ' + (.[0] | + (.drvPath | match(".*multiple-outputs-e.drv")) and + (.outputs | keys == ["a_a"])) +' + +# Illegal type of string context +expectStderr 1 nix build -f multiple-outputs.nix 'e.a_a.drvPath' \ + | grepQuiet "has a context which refers to a complete source and binary closure." + +# No string context +expectStderr 1 nix build --expr '""' --no-link \ + | grepQuiet "has 0 entries in its context. It should only have exactly one entry" + +# Too much string context +expectStderr 1 nix build --impure --expr 'with (import ./multiple-outputs.nix).e.a_a; "${drvPath}${outPath}"' --no-link \ + | grepQuiet "has 2 entries in its context. It should only have exactly one entry" + +nix build --impure --json --expr 'builtins.unsafeDiscardOutputDependency (import ./multiple-outputs.nix).e.a_a.drvPath' --no-link | jq --exit-status ' + (.[0] | .path | match(".*multiple-outputs-e.drv")) +' + # Test building from raw store path to drv not expression. 
drv=$(nix eval -f multiple-outputs.nix --raw a.drvPath) diff --git a/tests/dyn-drv/common.sh b/tests/dyn-drv/common.sh new file mode 100644 index 000000000..c786f6925 --- /dev/null +++ b/tests/dyn-drv/common.sh @@ -0,0 +1,8 @@ +source ../common.sh + +# Need backend to support text-hashing too +requireDaemonNewerThan "2.16.0pre20230419" + +enableFeatures "ca-derivations dynamic-derivations" + +restartDaemon diff --git a/tests/dyn-drv/config.nix.in b/tests/dyn-drv/config.nix.in new file mode 120000 index 000000000..af24ddb30 --- /dev/null +++ b/tests/dyn-drv/config.nix.in @@ -0,0 +1 @@ +../config.nix.in \ No newline at end of file diff --git a/tests/dyn-drv/recursive-mod-json.nix b/tests/dyn-drv/recursive-mod-json.nix new file mode 100644 index 000000000..c6a24ca4f --- /dev/null +++ b/tests/dyn-drv/recursive-mod-json.nix @@ -0,0 +1,33 @@ +with import ./config.nix; + +let innerName = "foo"; in + +mkDerivation rec { + name = "${innerName}.drv"; + SHELL = shell; + + requiredSystemFeatures = [ "recursive-nix" ]; + + drv = builtins.unsafeDiscardOutputDependency (import ./text-hashed-output.nix).hello.drvPath; + + buildCommand = '' + export NIX_CONFIG='experimental-features = nix-command ca-derivations' + + PATH=${builtins.getEnv "EXTRA_PATH"}:$PATH + + # JSON of pre-existing drv + nix derivation show $drv | jq .[] > drv0.json + + # Fix name + jq < drv0.json '.name = "${innerName}"' > drv1.json + + # Extend `buildCommand` + jq < drv1.json '.env.buildCommand += "echo \"I am alive!\" >> $out/hello\n"' > drv0.json + + # Used as our output + cp $(nix derivation add < drv0.json) $out + ''; + __contentAddressed = true; + outputHashMode = "text"; + outputHashAlgo = "sha256"; +} diff --git a/tests/dyn-drv/recursive-mod-json.sh b/tests/dyn-drv/recursive-mod-json.sh new file mode 100644 index 000000000..070c5c2cb --- /dev/null +++ b/tests/dyn-drv/recursive-mod-json.sh @@ -0,0 +1,25 @@ +source common.sh + +# FIXME +if [[ $(uname) != Linux ]]; then skipTest "Not running Linux"; fi + +enableFeatures 'recursive-nix' +restartDaemon + +clearStore + +rm -f $TEST_ROOT/result + +EXTRA_PATH=$(dirname $(type -p nix)):$(dirname $(type -p jq)) +export EXTRA_PATH + +# Will produce a drv +metaDrv=$(nix-instantiate ./recursive-mod-json.nix) + +# computed "dynamic" derivation +drv=$(nix-store -r $metaDrv) + +# build that dyn drv +res=$(nix-store -r $drv) + +grep 'I am alive!' $res/hello diff --git a/tests/dyn-drv/text-hashed-output.nix b/tests/dyn-drv/text-hashed-output.nix new file mode 100644 index 000000000..a700fd102 --- /dev/null +++ b/tests/dyn-drv/text-hashed-output.nix @@ -0,0 +1,29 @@ +with import ./config.nix; + +# A simple content-addressed derivation. 
+# The derivation can be arbitrarily modified by passing a different `seed`, +# but the output will always be the same +rec { + hello = mkDerivation { + name = "hello"; + buildCommand = '' + set -x + echo "Building a CA derivation" + mkdir -p $out + echo "Hello World" > $out/hello + ''; + __contentAddressed = true; + outputHashMode = "recursive"; + outputHashAlgo = "sha256"; + }; + producingDrv = mkDerivation { + name = "hello.drv"; + buildCommand = '' + echo "Copying the derivation" + cp ${builtins.unsafeDiscardOutputDependency hello.drvPath} $out + ''; + __contentAddressed = true; + outputHashMode = "text"; + outputHashAlgo = "sha256"; + }; +} diff --git a/tests/dyn-drv/text-hashed-output.sh b/tests/dyn-drv/text-hashed-output.sh new file mode 100644 index 000000000..f3e5aa93b --- /dev/null +++ b/tests/dyn-drv/text-hashed-output.sh @@ -0,0 +1,26 @@ +#!/usr/bin/env bash + +source common.sh + +# In the corresponding nix file, we have two derivations: the first, named root, +# is a normal recursive derivation, while the second, named dependent, has the +# new outputHashMode "text". Note that in "dependent", we don't refer to the +# build output of root, but only to the path of the drv file. For this reason, +# we only need to: +# +# - instantiate the root derivation +# - build the dependent derivation +# - check that the path of the output coincides with that of the original derivation + +drv=$(nix-instantiate ./text-hashed-output.nix -A hello) +nix show-derivation "$drv" + +drvProducingDrv=$(nix-instantiate ./text-hashed-output.nix -A producingDrv) +nix show-derivation "$drvProducingDrv" + +out1=$(nix-build ./text-hashed-output.nix -A producingDrv --no-out-link) + +nix path-info $drv --derivation --json | jq +nix path-info $out1 --derivation --json | jq + +test $out1 == $drv diff --git a/tests/eval.sh b/tests/eval.sh index ffae08a6a..b81bb1e2c 100644 --- a/tests/eval.sh +++ b/tests/eval.sh @@ -16,9 +16,10 @@ nix eval --expr 'assert 1 + 2 == 3; true' [[ $(nix eval int -f "./eval.nix") == 123 ]] [[ $(nix eval str -f "./eval.nix") == '"foo"' ]] [[ $(nix eval str --raw -f "./eval.nix") == 'foo' ]] -[[ $(nix eval attr -f "./eval.nix") == '{ foo = "bar"; }' ]] +[[ "$(nix eval attr -f "./eval.nix")" == '{ foo = "bar"; }' ]] [[ $(nix eval attr --json -f "./eval.nix") == '{"foo":"bar"}' ]] [[ $(nix eval int -f - < "./eval.nix") == 123 ]] +[[ "$(nix eval --expr '{"assert"=1;bar=2;}')" == '{ "assert" = 1; bar = 2; }' ]] # Check if toFile can be utilized during restricted eval [[ $(nix eval --restrict-eval --expr 'import (builtins.toFile "source" "42")') == 42 ]] @@ -26,10 +27,17 @@ nix eval --expr 'assert 1 + 2 == 3; true' nix-instantiate --eval -E 'assert 1 + 2 == 3; true' [[ $(nix-instantiate -A int --eval "./eval.nix") == 123 ]] [[ $(nix-instantiate -A str --eval "./eval.nix") == '"foo"' ]] -[[ $(nix-instantiate -A attr --eval "./eval.nix") == '{ foo = "bar"; }' ]] +[[ "$(nix-instantiate -A attr --eval "./eval.nix")" == '{ foo = "bar"; }' ]] [[ $(nix-instantiate -A attr --eval --json "./eval.nix") == '{"foo":"bar"}' ]] [[ $(nix-instantiate -A int --eval - < "./eval.nix") == 123 ]] +[[ "$(nix-instantiate --eval -E '{"assert"=1;bar=2;}')" == '{ "assert" = 1; bar = 2; }' ]] # Check that symlink cycles don't cause a hang. ln -sfn cycle.nix $TEST_ROOT/cycle.nix (! nix eval --file $TEST_ROOT/cycle.nix) + +# Check that relative symlinks are resolved correctly. 
+mkdir -p $TEST_ROOT/xyzzy $TEST_ROOT/foo +ln -sfn ../xyzzy $TEST_ROOT/foo/bar +printf 123 > $TEST_ROOT/xyzzy/default.nix +[[ $(nix eval --impure --expr "import $TEST_ROOT/foo/bar") = 123 ]] diff --git a/tests/flakes/build-paths.sh b/tests/flakes/build-paths.sh index b399a066e..ff012e1b3 100644 --- a/tests/flakes/build-paths.sh +++ b/tests/flakes/build-paths.sh @@ -41,10 +41,27 @@ cat > $flake1Dir/flake.nix < $flake1Dir/foo nix build --json --out-link $TEST_ROOT/result $flake1Dir#a1 @@ -63,4 +80,17 @@ nix build --json --out-link $TEST_ROOT/result $flake1Dir#a6 nix build --impure --json --out-link $TEST_ROOT/result $flake1Dir#a8 diff common.sh $TEST_ROOT/result -(! nix build --impure --json --out-link $TEST_ROOT/result $flake1Dir#a9) +expectStderr 1 nix build --impure --json --out-link $TEST_ROOT/result $flake1Dir#a9 \ + | grepQuiet "has 0 entries in its context. It should only have exactly one entry" + +nix build --json --out-link $TEST_ROOT/result $flake1Dir#a10 +[[ $(readlink -e $TEST_ROOT/result) = *simple.drv ]] + +expectStderr 1 nix build --json --out-link $TEST_ROOT/result $flake1Dir#a11 \ + | grepQuiet "has a context which refers to a complete source and binary closure" + +nix build --json --out-link $TEST_ROOT/result $flake1Dir#a12 +[[ -e $TEST_ROOT/result/hello ]] + +expectStderr 1 nix build --impure --json --out-link $TEST_ROOT/result $flake1Dir#a13 \ + | grepQuiet "has 2 entries in its context. It should only have exactly one entry" diff --git a/tests/flakes/check.sh b/tests/flakes/check.sh index 865ca61b4..34b82c61c 100644 --- a/tests/flakes/check.sh +++ b/tests/flakes/check.sh @@ -72,6 +72,8 @@ cat > $flakeDir/flake.nix <&1 && fail "nix flake check should have failed" || true) +nix flake check $flakeDir + +checkRes=$(nix flake check --all-systems --keep-going $flakeDir 2>&1 && fail "nix flake check --all-systems should have failed" || true) echo "$checkRes" | grepQuiet "packages.system-1.default" echo "$checkRes" | grepQuiet "packages.system-2.default" diff --git a/tests/gc.sh b/tests/gc.sh index 98d6cb032..95669e25c 100644 --- a/tests/gc.sh +++ b/tests/gc.sh @@ -52,9 +52,7 @@ rmdir $NIX_STORE_DIR/.links rmdir $NIX_STORE_DIR ## Test `nix-collect-garbage -d` -# `nix-env` doesn't work with CA derivations, so let's ignore that bit if we're -# using them -if [[ -z "${NIX_TESTS_CA_BY_DEFAULT:-}" ]]; then +testCollectGarbageD () { clearProfiles # Run two `nix-env` commands, should create two generations of # the profile @@ -66,4 +64,17 @@ if [[ -z "${NIX_TESTS_CA_BY_DEFAULT:-}" ]]; then # left nix-collect-garbage -d [[ $(nix-env --list-generations | wc -l) -eq 1 ]] +} +# `nix-env` doesn't work with CA derivations, so let's ignore that bit if we're +# using them +if [[ -z "${NIX_TESTS_CA_BY_DEFAULT:-}" ]]; then + testCollectGarbageD + + # Run the same test, but forcing the profiles at their legacy location under + # /nix/var/nix. 
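+ # (Re-pointing ~/.nix-profile at $NIX_STATE_DIR/profiles/per-user/me below makes
+ # nix-env and nix-collect-garbage -d operate on that legacy per-user profile
+ # directory.)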
+ # + # Regression test for #8294 + rm ~/.nix-profile + ln -s $NIX_STATE_DIR/profiles/per-user/me ~/.nix-profile + testCollectGarbageD fi diff --git a/tests/lang/eval-fail-fromTOML-timestamps.nix b/tests/lang/eval-fail-fromTOML-timestamps.nix new file mode 100644 index 000000000..74cff9470 --- /dev/null +++ b/tests/lang/eval-fail-fromTOML-timestamps.nix @@ -0,0 +1,130 @@ +builtins.fromTOML '' + key = "value" + bare_key = "value" + bare-key = "value" + 1234 = "value" + + "127.0.0.1" = "value" + "character encoding" = "value" + "ʎǝʞ" = "value" + 'key2' = "value" + 'quoted "value"' = "value" + + name = "Orange" + + physical.color = "orange" + physical.shape = "round" + site."google.com" = true + + # This is legal according to the spec, but cpptoml doesn't handle it. + #a.b.c = 1 + #a.d = 2 + + str = "I'm a string. \"You can quote me\". Name\tJos\u00E9\nLocation\tSF." + + int1 = +99 + int2 = 42 + int3 = 0 + int4 = -17 + int5 = 1_000 + int6 = 5_349_221 + int7 = 1_2_3_4_5 + + hex1 = 0xDEADBEEF + hex2 = 0xdeadbeef + hex3 = 0xdead_beef + + oct1 = 0o01234567 + oct2 = 0o755 + + bin1 = 0b11010110 + + flt1 = +1.0 + flt2 = 3.1415 + flt3 = -0.01 + flt4 = 5e+22 + flt5 = 1e6 + flt6 = -2E-2 + flt7 = 6.626e-34 + flt8 = 9_224_617.445_991_228_313 + + bool1 = true + bool2 = false + + odt1 = 1979-05-27T07:32:00Z + odt2 = 1979-05-27T00:32:00-07:00 + odt3 = 1979-05-27T00:32:00.999999-07:00 + odt4 = 1979-05-27 07:32:00Z + ldt1 = 1979-05-27T07:32:00 + ldt2 = 1979-05-27T00:32:00.999999 + ld1 = 1979-05-27 + lt1 = 07:32:00 + lt2 = 00:32:00.999999 + + arr1 = [ 1, 2, 3 ] + arr2 = [ "red", "yellow", "green" ] + arr3 = [ [ 1, 2 ], [3, 4, 5] ] + arr4 = [ "all", 'strings', """are the same""", ''''type''''] + arr5 = [ [ 1, 2 ], ["a", "b", "c"] ] + + arr7 = [ + 1, 2, 3 + ] + + arr8 = [ + 1, + 2, # this is ok + ] + + [table-1] + key1 = "some string" + key2 = 123 + + + [table-2] + key1 = "another string" + key2 = 456 + + [dog."tater.man"] + type.name = "pug" + + [a.b.c] + [ d.e.f ] + [ g . h . i ] + [ j . "ʞ" . 
'l' ] + [x.y.z.w] + + name = { first = "Tom", last = "Preston-Werner" } + point = { x = 1, y = 2 } + animal = { type.name = "pug" } + + [[products]] + name = "Hammer" + sku = 738594937 + + [[products]] + + [[products]] + name = "Nail" + sku = 284758393 + color = "gray" + + [[fruit]] + name = "apple" + + [fruit.physical] + color = "red" + shape = "round" + + [[fruit.variety]] + name = "red delicious" + + [[fruit.variety]] + name = "granny smith" + + [[fruit]] + name = "banana" + + [[fruit.variety]] + name = "plantain" +'' diff --git a/tests/lang/eval-okay-fromTOML-timestamps.exp b/tests/lang/eval-okay-fromTOML-timestamps.exp new file mode 100644 index 000000000..08b3c69a6 --- /dev/null +++ b/tests/lang/eval-okay-fromTOML-timestamps.exp @@ -0,0 +1 @@ +{ "1234" = "value"; "127.0.0.1" = "value"; a = { b = { c = { }; }; }; arr1 = [ 1 2 3 ]; arr2 = [ "red" "yellow" "green" ]; arr3 = [ [ 1 2 ] [ 3 4 5 ] ]; arr4 = [ "all" "strings" "are the same" "type" ]; arr5 = [ [ 1 2 ] [ "a" "b" "c" ] ]; arr7 = [ 1 2 3 ]; arr8 = [ 1 2 ]; bare-key = "value"; bare_key = "value"; bin1 = 214; bool1 = true; bool2 = false; "character encoding" = "value"; d = { e = { f = { }; }; }; dog = { "tater.man" = { type = { name = "pug"; }; }; }; flt1 = 1; flt2 = 3.1415; flt3 = -0.01; flt4 = 5e+22; flt5 = 1e+06; flt6 = -0.02; flt7 = 6.626e-34; flt8 = 9.22462e+06; fruit = [ { name = "apple"; physical = { color = "red"; shape = "round"; }; variety = [ { name = "red delicious"; } { name = "granny smith"; } ]; } { name = "banana"; variety = [ { name = "plantain"; } ]; } ]; g = { h = { i = { }; }; }; hex1 = 3735928559; hex2 = 3735928559; hex3 = 3735928559; int1 = 99; int2 = 42; int3 = 0; int4 = -17; int5 = 1000; int6 = 5349221; int7 = 12345; j = { "ʞ" = { l = { }; }; }; key = "value"; key2 = "value"; ld1 = { _type = "timestamp"; value = "1979-05-27"; }; ldt1 = { _type = "timestamp"; value = "1979-05-27T07:32:00"; }; ldt2 = { _type = "timestamp"; value = "1979-05-27T00:32:00.999999"; }; lt1 = { _type = "timestamp"; value = "07:32:00"; }; lt2 = { _type = "timestamp"; value = "00:32:00.999999"; }; name = "Orange"; oct1 = 342391; oct2 = 493; odt1 = { _type = "timestamp"; value = "1979-05-27T07:32:00Z"; }; odt2 = { _type = "timestamp"; value = "1979-05-27T00:32:00-07:00"; }; odt3 = { _type = "timestamp"; value = "1979-05-27T00:32:00.999999-07:00"; }; odt4 = { _type = "timestamp"; value = "1979-05-27T07:32:00Z"; }; physical = { color = "orange"; shape = "round"; }; products = [ { name = "Hammer"; sku = 738594937; } { } { color = "gray"; name = "Nail"; sku = 284758393; } ]; "quoted \"value\"" = "value"; site = { "google.com" = true; }; str = "I'm a string. \"You can quote me\". 
Name\tJosé\nLocation\tSF."; table-1 = { key1 = "some string"; key2 = 123; }; table-2 = { key1 = "another string"; key2 = 456; }; x = { y = { z = { w = { animal = { type = { name = "pug"; }; }; name = { first = "Tom"; last = "Preston-Werner"; }; point = { x = 1; y = 2; }; }; }; }; }; "ʎǝʞ" = "value"; } diff --git a/tests/lang/eval-okay-fromTOML-timestamps.flags b/tests/lang/eval-okay-fromTOML-timestamps.flags new file mode 100644 index 000000000..9ed39dc6b --- /dev/null +++ b/tests/lang/eval-okay-fromTOML-timestamps.flags @@ -0,0 +1 @@ +--extra-experimental-features parse-toml-timestamps diff --git a/tests/lang/eval-okay-fromTOML-timestamps.nix b/tests/lang/eval-okay-fromTOML-timestamps.nix new file mode 100644 index 000000000..74cff9470 --- /dev/null +++ b/tests/lang/eval-okay-fromTOML-timestamps.nix @@ -0,0 +1,130 @@ +builtins.fromTOML '' + key = "value" + bare_key = "value" + bare-key = "value" + 1234 = "value" + + "127.0.0.1" = "value" + "character encoding" = "value" + "ʎǝʞ" = "value" + 'key2' = "value" + 'quoted "value"' = "value" + + name = "Orange" + + physical.color = "orange" + physical.shape = "round" + site."google.com" = true + + # This is legal according to the spec, but cpptoml doesn't handle it. + #a.b.c = 1 + #a.d = 2 + + str = "I'm a string. \"You can quote me\". Name\tJos\u00E9\nLocation\tSF." + + int1 = +99 + int2 = 42 + int3 = 0 + int4 = -17 + int5 = 1_000 + int6 = 5_349_221 + int7 = 1_2_3_4_5 + + hex1 = 0xDEADBEEF + hex2 = 0xdeadbeef + hex3 = 0xdead_beef + + oct1 = 0o01234567 + oct2 = 0o755 + + bin1 = 0b11010110 + + flt1 = +1.0 + flt2 = 3.1415 + flt3 = -0.01 + flt4 = 5e+22 + flt5 = 1e6 + flt6 = -2E-2 + flt7 = 6.626e-34 + flt8 = 9_224_617.445_991_228_313 + + bool1 = true + bool2 = false + + odt1 = 1979-05-27T07:32:00Z + odt2 = 1979-05-27T00:32:00-07:00 + odt3 = 1979-05-27T00:32:00.999999-07:00 + odt4 = 1979-05-27 07:32:00Z + ldt1 = 1979-05-27T07:32:00 + ldt2 = 1979-05-27T00:32:00.999999 + ld1 = 1979-05-27 + lt1 = 07:32:00 + lt2 = 00:32:00.999999 + + arr1 = [ 1, 2, 3 ] + arr2 = [ "red", "yellow", "green" ] + arr3 = [ [ 1, 2 ], [3, 4, 5] ] + arr4 = [ "all", 'strings', """are the same""", ''''type''''] + arr5 = [ [ 1, 2 ], ["a", "b", "c"] ] + + arr7 = [ + 1, 2, 3 + ] + + arr8 = [ + 1, + 2, # this is ok + ] + + [table-1] + key1 = "some string" + key2 = 123 + + + [table-2] + key1 = "another string" + key2 = 456 + + [dog."tater.man"] + type.name = "pug" + + [a.b.c] + [ d.e.f ] + [ g . h . i ] + [ j . "ʞ" . 
'l' ] + [x.y.z.w] + + name = { first = "Tom", last = "Preston-Werner" } + point = { x = 1, y = 2 } + animal = { type.name = "pug" } + + [[products]] + name = "Hammer" + sku = 738594937 + + [[products]] + + [[products]] + name = "Nail" + sku = 284758393 + color = "gray" + + [[fruit]] + name = "apple" + + [fruit.physical] + color = "red" + shape = "round" + + [[fruit.variety]] + name = "red delicious" + + [[fruit.variety]] + name = "granny smith" + + [[fruit]] + name = "banana" + + [[fruit.variety]] + name = "plantain" +'' diff --git a/tests/lang/eval-okay-replacestrings.exp b/tests/lang/eval-okay-replacestrings.exp index 72e8274d8..eac67c5fe 100644 --- a/tests/lang/eval-okay-replacestrings.exp +++ b/tests/lang/eval-okay-replacestrings.exp @@ -1 +1 @@ -[ "faabar" "fbar" "fubar" "faboor" "fubar" "XaXbXcX" "X" "a_b" ] +[ "faabar" "fbar" "fubar" "faboor" "fubar" "XaXbXcX" "X" "a_b" "fubar" ] diff --git a/tests/lang/eval-okay-replacestrings.nix b/tests/lang/eval-okay-replacestrings.nix index bd8031fc0..a803e6519 100644 --- a/tests/lang/eval-okay-replacestrings.nix +++ b/tests/lang/eval-okay-replacestrings.nix @@ -8,4 +8,5 @@ with builtins; (replaceStrings [""] ["X"] "abc") (replaceStrings [""] ["X"] "") (replaceStrings ["-"] ["_"] "a-b") + (replaceStrings ["oo" "XX"] ["u" (throw "unreachable")] "foobar") ] diff --git a/tests/linux-sandbox-cert-test.nix b/tests/linux-sandbox-cert-test.nix new file mode 100644 index 000000000..2b86dad2e --- /dev/null +++ b/tests/linux-sandbox-cert-test.nix @@ -0,0 +1,29 @@ +{ fixed-output }: + +with import ./config.nix; + +mkDerivation ({ + name = "ssl-export"; + buildCommand = '' + # Add some indirection, otherwise grepping into the debug output finds the string. + report () { echo CERT_$1_IN_SANDBOX; } + + if [ -f /etc/ssl/certs/ca-certificates.crt ]; then + content=$( $TEST_ROOT/log) + cat $TEST_ROOT/log + grepQuiet "CERT_${1}_IN_SANDBOX" $TEST_ROOT/log +} + +nocert=$TEST_ROOT/no-cert-file.pem +cert=$TEST_ROOT/some-cert-file.pem +echo -n "CERT_CONTENT" > $cert + +# No cert in sandbox when not a fixed-output derivation +testCert missing normal "$cert" + +# No cert in sandbox when ssl-cert-file is empty +testCert missing fixed-output "" + +# No cert in sandbox when ssl-cert-file is a nonexistent file +testCert missing fixed-output "$nocert" + +# Cert in sandbox when ssl-cert-file is set to an existing file +testCert present fixed-output "$cert" diff --git a/tests/local.mk b/tests/local.mk index 7c3b42599..9e340e2e2 100644 --- a/tests/local.mk +++ b/tests/local.mk @@ -72,6 +72,7 @@ nix_tests = \ build-remote-content-addressed-floating.sh \ build-remote-trustless-should-pass-0.sh \ build-remote-trustless-should-pass-1.sh \ + build-remote-trustless-should-pass-2.sh \ build-remote-trustless-should-pass-3.sh \ build-remote-trustless-should-fail-0.sh \ nar-access.sh \ @@ -110,6 +111,8 @@ nix_tests = \ ca/derivation-json.sh \ import-derivation.sh \ ca/import-derivation.sh \ + dyn-drv/text-hashed-output.sh \ + dyn-drv/recursive-mod-json.sh \ nix_path.sh \ case-hack.sh \ placeholders.sh \ @@ -137,11 +140,19 @@ ifeq ($(HAVE_LIBCPUID), 1) nix_tests += compute-levels.sh endif -install-tests += $(foreach x, $(nix_tests), tests/$(x)) +install-tests += $(foreach x, $(nix_tests), $(d)/$(x)) -clean-files += $(d)/common/vars-and-functions.sh $(d)/config.nix $(d)/ca/config.nix +clean-files += \ + $(d)/common/vars-and-functions.sh \ + $(d)/config.nix \ + $(d)/ca/config.nix \ + $(d)/dyn-drv/config.nix -test-deps += tests/common/vars-and-functions.sh tests/config.nix 
tests/ca/config.nix +test-deps += \ + tests/common/vars-and-functions.sh \ + tests/config.nix \ + tests/ca/config.nix \ + tests/dyn-drv/config.nix ifeq ($(BUILD_SHARED_LIBS), 1) test-deps += tests/plugins/libplugintest.$(SO_EXT) diff --git a/tests/nix-profile.sh b/tests/nix-profile.sh index 4ef5b484a..9da3f802b 100644 --- a/tests/nix-profile.sh +++ b/tests/nix-profile.sh @@ -157,17 +157,17 @@ error: An existing package already provides the following file: To remove the existing package: - nix profile remove path:${flake1Dir} + nix profile remove path:${flake1Dir}#packages.${system}.default The new package can also be installed next to the existing one by assigning a different priority. The conflicting packages have a priority of 5. To prioritise the new package: - nix profile install path:${flake2Dir} --priority 4 + nix profile install path:${flake2Dir}#packages.${system}.default --priority 4 To prioritise the existing package: - nix profile install path:${flake2Dir} --priority 6 + nix profile install path:${flake2Dir}#packages.${system}.default --priority 6 EOF ) [[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello World" ]] @@ -177,3 +177,10 @@ nix profile install $flake2Dir --priority 0 [[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello World2" ]] # nix profile install $flake1Dir --priority 100 # [[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello World" ]] + +# Ensure that conflicts are handled properly even when the installables aren't +# flake references. +# Regression test for https://github.com/NixOS/nix/issues/8284 +clearProfiles +nix profile install $(nix build $flake1Dir --no-link --print-out-paths) +expect 1 nix profile install --impure --expr "(builtins.getFlake ''$flake2Dir'').packages.$system.default" diff --git a/tests/nix-shell.sh b/tests/nix-shell.sh index 044b96d54..edaa1249b 100644 --- a/tests/nix-shell.sh +++ b/tests/nix-shell.sh @@ -98,6 +98,18 @@ nix develop -f "$shellDotNix" shellDrv -c echo foo |& grepQuiet foo nix print-dev-env -f "$shellDotNix" shellDrv > $TEST_ROOT/dev-env.sh nix print-dev-env -f "$shellDotNix" shellDrv --json > $TEST_ROOT/dev-env.json +# Test with raw drv + +shellDrv=$(nix-instantiate "$shellDotNix" -A shellDrv.out) + +nix develop $shellDrv -c bash -c '[[ -n $stdenv ]]' + +nix print-dev-env $shellDrv > $TEST_ROOT/dev-env2.sh +nix print-dev-env $shellDrv --json > $TEST_ROOT/dev-env2.json + +diff $TEST_ROOT/dev-env{,2}.sh +diff $TEST_ROOT/dev-env{,2}.json + # Ensure `nix print-dev-env --json` contains variable assignments. 
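For illustration, a minimal sketch of the raw-.drv support exercised in the nix-shell.sh hunk above, assuming the same shell.nix/shellDrv fixture used by this test suite and the nix-command experimental feature:

# `nix develop` and `nix print-dev-env` also accept a store derivation path
# directly, instead of a -f/-A or flake installable.
drv=$(nix-instantiate shell.nix -A shellDrv.out)
nix develop "$drv" -c bash -c 'echo "$stdenv"'             # enter that .drv's build environment
nix print-dev-env "$drv" --json | jq '.variables | keys'   # same environment, machine-readable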
[[ $(jq -r .variables.arr1.value[2] $TEST_ROOT/dev-env.json) = '3 4' ]] diff --git a/tests/nixos/nix-copy.nix b/tests/nixos/nix-copy.nix index ee8b77100..16c477bf9 100644 --- a/tests/nixos/nix-copy.nix +++ b/tests/nixos/nix-copy.nix @@ -23,6 +23,12 @@ in { nix.settings.substituters = lib.mkForce [ ]; nix.settings.experimental-features = [ "nix-command" ]; services.getty.autologinUser = "root"; + programs.ssh.extraConfig = '' + Host * + ControlMaster auto + ControlPath ~/.ssh/master-%h:%r@%n:%p + ControlPersist 15m + ''; }; server = @@ -62,6 +68,10 @@ in { client.wait_for_text("done") server.succeed("nix-store --check-validity ${pkgA}") + # Check that ControlMaster is working + client.send_chars("nix copy --to ssh://server ${pkgA} >&2; echo done\n") + client.wait_for_text("done") + client.copy_from_host("key", "/root/.ssh/id_ed25519") client.succeed("chmod 600 /root/.ssh/id_ed25519") diff --git a/tests/plugins/local.mk b/tests/plugins/local.mk index 8182a6a83..40350aa96 100644 --- a/tests/plugins/local.mk +++ b/tests/plugins/local.mk @@ -8,4 +8,4 @@ libplugintest_ALLOW_UNDEFINED := 1 libplugintest_EXCLUDE_FROM_LIBRARY_LIST := 1 -libplugintest_CXXFLAGS := -I src/libutil -I src/libstore -I src/libexpr +libplugintest_CXXFLAGS := -I src/libutil -I src/libstore -I src/libexpr -I src/libfetchers diff --git a/tests/post-hook.sh b/tests/post-hook.sh index 0266eb15d..752f8220c 100644 --- a/tests/post-hook.sh +++ b/tests/post-hook.sh @@ -17,6 +17,10 @@ fi # Build the dependencies and push them to the remote store. nix-build -o $TEST_ROOT/result dependencies.nix --post-build-hook "$pushToStore" +# See if all outputs are passed to the post-build hook by only specifying one +# We're not able to test CA tests this way +export BUILD_HOOK_ONLY_OUT_PATHS=$([ ! $NIX_TESTS_CA_BY_DEFAULT ]) +nix-build -o $TEST_ROOT/result-mult multiple-outputs.nix -A a.first --post-build-hook "$pushToStore" clearStore @@ -24,3 +28,4 @@ clearStore # closure of what we've just built. 
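For illustration, the SSH connection sharing that the nix-copy NixOS test above relies on, as a standalone sketch; the host name `server` and `$storePath` are placeholders:

# With ControlMaster/ControlPersist, the second `nix copy` reuses the SSH
# connection opened by the first instead of re-authenticating.
cat >> ~/.ssh/config <<'EOF'
Host *
  ControlMaster auto
  ControlPath ~/.ssh/master-%h:%r@%n:%p
  ControlPersist 15m
EOF
nix copy --to ssh://server "$storePath"   # opens the master connection
nix copy --to ssh://server "$storePath"   # reuses the existing control socket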
nix copy --from "$REMOTE_STORE" --no-require-sigs -f dependencies.nix nix copy --from "$REMOTE_STORE" --no-require-sigs -f dependencies.nix input1_drv +nix copy --from "$REMOTE_STORE" --no-require-sigs -f multiple-outputs.nix a^second diff --git a/tests/push-to-store-old.sh b/tests/push-to-store-old.sh index b1495c9e2..4187958b2 100755 --- a/tests/push-to-store-old.sh +++ b/tests/push-to-store-old.sh @@ -7,4 +7,8 @@ set -e [ -n "$DRV_PATH" ] echo Pushing "$OUT_PATHS" to "$REMOTE_STORE" -printf "%s" "$DRV_PATH" | xargs nix copy --to "$REMOTE_STORE" --no-require-sigs +if [ -n "$BUILD_HOOK_ONLY_OUT_PATHS" ]; then + printf "%s" "$OUT_PATHS" | xargs nix copy --to "$REMOTE_STORE" --no-require-sigs +else + printf "%s" "$DRV_PATH" | xargs nix copy --to "$REMOTE_STORE" --no-require-sigs +fi diff --git a/tests/push-to-store.sh b/tests/push-to-store.sh index 0b090e1b3..9e4e475e0 100755 --- a/tests/push-to-store.sh +++ b/tests/push-to-store.sh @@ -7,4 +7,8 @@ set -e [ -n "$DRV_PATH" ] echo Pushing "$OUT_PATHS" to "$REMOTE_STORE" -printf "%s" "$DRV_PATH"^'*' | xargs nix copy --to "$REMOTE_STORE" --no-require-sigs +if [ -n "$BUILD_HOOK_ONLY_OUT_PATHS" ]; then + printf "%s" "$OUT_PATHS" | xargs nix copy --to "$REMOTE_STORE" --no-require-sigs +else + printf "%s" "$DRV_PATH"^'*' | xargs nix copy --to "$REMOTE_STORE" --no-require-sigs +fi diff --git a/tests/recursive.sh b/tests/recursive.sh index b661422ed..0bf00f8fa 100644 --- a/tests/recursive.sh +++ b/tests/recursive.sh @@ -1,6 +1,6 @@ source common.sh -sed -i 's/experimental-features .*/& recursive-nix/' "$NIX_CONF_DIR"/nix.conf +enableFeatures 'recursive-nix' restartDaemon clearStore
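To round off the post-build-hook changes above, a hypothetical minimal hook in the spirit of push-to-store.sh; DRV_PATH and OUT_PATHS are set by the daemon when it invokes the hook, and REMOTE_STORE stands in for whatever store the outputs should be pushed to:

#!/usr/bin/env bash
# my-post-build-hook.sh (hypothetical): push every output of the just-built
# derivation to a remote store, mirroring the OUT_PATHS branch added above.
set -e
echo "Pushing $OUT_PATHS (from $DRV_PATH) to $REMOTE_STORE" >&2
printf "%s" "$OUT_PATHS" | xargs nix copy --to "$REMOTE_STORE" --no-require-sigs

It would be wired up the same way as in post-hook.sh, e.g. `nix-build multiple-outputs.nix -A a.first --post-build-hook ./my-post-build-hook.sh`.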