forked from lix-project/lix

commit ab424a39a9
Merge remote-tracking branch 'nixos/master' into pr-flake-show-foreign
.github/CODEOWNERS  (vendored, new file, 15 lines)
@@ -0,0 +1,15 @@
+# Pull requests concerning the listed files will automatically invite the respective maintainers as reviewers.
+# This file is not used for denoting any kind of ownership, but is merely a tool for handling notifications.
+#
+# Merge permissions are required for maintaining an entry in this file.
+# For documentation on this mechanism, see https://help.github.com/articles/about-codeowners/
+
+# Default reviewers if nothing else matches
+* @edolstra @thufschmitt
+
+# This file
+.github/CODEOWNERS @edolstra
+
+# Public documentation
+/doc @fricklerhandwerk
+*.md @fricklerhandwerk
.github/ISSUE_TEMPLATE/bug_report.md  (vendored, 4 changed lines)
@@ -30,3 +30,7 @@ A clear and concise description of what you expected to happen.
 **Additional context**
 
 Add any other context about the problem here.
+
+**Priorities**
+
+Add :+1: to [issues you find important](https://github.com/NixOS/nix/issues?q=is%3Aissue+is%3Aopen+sort%3Areactions-%2B1-desc).
.github/ISSUE_TEMPLATE/feature_request.md  (vendored, 6 changed lines)
@@ -2,7 +2,7 @@
 name: Feature request
 about: Suggest an idea for this project
 title: ''
-labels: improvement
+labels: feature
 assignees: ''
 
 ---
@@ -18,3 +18,7 @@ A clear and concise description of any alternative solutions or features you've
 
 **Additional context**
 Add any other context or screenshots about the feature request here.
+
+**Priorities**
+
+Add :+1: to [issues you find important](https://github.com/NixOS/nix/issues?q=is%3Aissue+is%3Aopen+sort%3Areactions-%2B1-desc).
.github/ISSUE_TEMPLATE/installer.md  (vendored, new file, 36 lines)
@@ -0,0 +1,36 @@
+---
+name: Installer issue
+about: Report problems with installation
+title: ''
+labels: installer
+assignees: ''
+
+---
+
+## Platform
+
+<!-- select the platform on which you tried to install Nix -->
+
+- [ ] Linux: <!-- state your distribution, e.g. Arch Linux, Ubuntu, ... -->
+- [ ] macOS
+- [ ] WSL
+
+## Additional information
+
+<!-- state special circumstances on your system or additional steps you have taken prior to installation -->
+
+## Output
+
+<details><summary>Output</summary>
+
+```log
+
+<!-- paste console output here and remove this comment -->
+
+```
+
+</details>
+
+## Priorities
+
+Add :+1: to [issues you find important](https://github.com/NixOS/nix/issues?q=is%3Aissue+is%3Aopen+sort%3Areactions-%2B1-desc).
.github/ISSUE_TEMPLATE/missing_documentation.md  (vendored, new file, 31 lines)
@@ -0,0 +1,31 @@
+---
+name: Missing or incorrect documentation
+about: Help us improve the reference manual
+title: ''
+labels: documentation
+assignees: ''
+
+---
+
+## Problem
+
+<!-- describe your problem -->
+
+## Checklist
+
+<!-- make sure this issue is not redundant or obsolete -->
+
+- [ ] checked [latest Nix manual] \([source])
+- [ ] checked [open documentation issues and pull requests] for possible duplicates
+
+[latest Nix manual]: https://nixos.org/manual/nix/unstable/
+[source]: https://github.com/NixOS/nix/tree/master/doc/manual/src
+[open documentation issues and pull requests]: https://github.com/NixOS/nix/labels/documentation
+
+## Proposal
+
+<!-- propose a solution -->
+
+## Priorities
+
+Add :+1: to [issues you find important](https://github.com/NixOS/nix/issues?q=is%3Aissue+is%3Aopen+sort%3Areactions-%2B1-desc).
.github/PULL_REQUEST_TEMPLATE.md  (vendored, new file, 28 lines)
@@ -0,0 +1,28 @@
+# Motivation
+<!-- Briefly explain what the change is about and why it is desirable. -->
+
+# Context
+<!-- Provide context. Reference open issues if available. -->
+
+<!-- Non-trivial change: Briefly outline the implementation strategy. -->
+
+<!-- Invasive change: Discuss alternative designs or approaches you considered. -->
+
+<!-- Large change: Provide instructions to reviewers how to read the diff. -->
+
+# Checklist for maintainers
+
+<!-- Contributors: please leave this as is -->
+
+Maintainers: tick if completed or explain if not relevant
+
+- [ ] agreed on idea
+- [ ] agreed on implementation strategy
+- [ ] tests, as appropriate
+  - functional tests - `tests/**.sh`
+  - unit tests - `src/*/tests`
+  - integration tests
+- [ ] documentation in the manual
+- [ ] code and comments are self-explanatory
+- [ ] commit message explains why the change was made
+- [ ] new feature or bug fix: updated release notes
(file name not shown in this view)
@@ -5,3 +5,7 @@ Please include relevant [release notes](https://github.com/NixOS/nix/blob/master
 **Testing**
 
 If this issue is a regression or something that should block release, please consider including a test either in the [testsuite](https://github.com/NixOS/nix/tree/master/tests) or as a [hydraJob]( https://github.com/NixOS/nix/blob/master/flake.nix#L396) so that it can be part of the [automatic checks](https://hydra.nixos.org/jobset/nix/master).
+
+**Priorities**
+
+Add :+1: to [pull requests you find important](https://github.com/NixOS/nix/pulls?q=is%3Aopen+sort%3Areactions-%2B1-desc).
.github/workflows/backport.yml  (vendored, 2 changed lines)
@@ -21,7 +21,7 @@ jobs:
           fetch-depth: 0
       - name: Create backport PRs
         # should be kept in sync with `version`
-        uses: zeebe-io/backport-action@v0.0.8
+        uses: zeebe-io/backport-action@v1.1.0
         with:
           # Config README: https://github.com/zeebe-io/backport-action#backport-action
           github_token: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/ci.yml  (vendored, 23 changed lines)
@@ -19,9 +19,9 @@ jobs:
     - uses: actions/checkout@v3
       with:
         fetch-depth: 0
-    - uses: cachix/install-nix-action@v17
+    - uses: cachix/install-nix-action@v18
     - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
-    - uses: cachix/cachix-action@v10
+    - uses: cachix/cachix-action@v12
       if: needs.check_secrets.outputs.cachix == 'true'
      with:
        name: '${{ env.CACHIX_NAME }}'
@@ -58,8 +58,8 @@ jobs:
      with:
        fetch-depth: 0
    - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
-    - uses: cachix/install-nix-action@v17
+    - uses: cachix/install-nix-action@v18
-    - uses: cachix/cachix-action@v10
+    - uses: cachix/cachix-action@v12
      with:
        name: '${{ env.CACHIX_NAME }}'
        signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}'
@@ -77,11 +77,18 @@ jobs:
    steps:
    - uses: actions/checkout@v3
    - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
-    - uses: cachix/install-nix-action@v17
+    - uses: cachix/install-nix-action@v18
      with:
        install_url: '${{needs.installer.outputs.installerURL}}'
        install_options: "--tarball-url-prefix https://${{ env.CACHIX_NAME }}.cachix.org/serve"
-    - run: nix-instantiate -E 'builtins.currentTime' --eval
+    - run: sudo apt install fish zsh
+      if: matrix.os == 'ubuntu-latest'
+    - run: brew install fish
+      if: matrix.os == 'macos-latest'
+    - run: exec bash -c "nix-instantiate -E 'builtins.currentTime' --eval"
+    - run: exec sh -c "nix-instantiate -E 'builtins.currentTime' --eval"
+    - run: exec zsh -c "nix-instantiate -E 'builtins.currentTime' --eval"
+    - run: exec fish -c "nix-instantiate -E 'builtins.currentTime' --eval"
 
  docker_push_image:
    needs: [check_secrets, tests]
@@ -95,10 +102,10 @@ jobs:
    - uses: actions/checkout@v3
      with:
        fetch-depth: 0
-    - uses: cachix/install-nix-action@v17
+    - uses: cachix/install-nix-action@v18
    - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
    - run: echo NIX_VERSION="$(nix --experimental-features 'nix-command flakes' eval .\#default.version | tr -d \")" >> $GITHUB_ENV
-    - uses: cachix/cachix-action@v10
+    - uses: cachix/cachix-action@v12
      if: needs.check_secrets.outputs.cachix == 'true'
      with:
        name: '${{ env.CACHIX_NAME }}'
.gitignore  (vendored, 2 changed lines)
@@ -27,6 +27,8 @@ perl/Makefile.config
 # /scripts/
 /scripts/nix-profile.sh
 /scripts/nix-profile-daemon.sh
+/scripts/nix-profile.fish
+/scripts/nix-profile-daemon.fish
 
 # /src/libexpr/
 /src/libexpr/lexer-tab.cc
(bundled boehm-gc patch; file name not shown in this view)
@@ -1,17 +1,49 @@
+diff --git a/darwin_stop_world.c b/darwin_stop_world.c
+index 3dbaa3fb..36a1d1f7 100644
+--- a/darwin_stop_world.c
++++ b/darwin_stop_world.c
+@@ -352,6 +352,7 @@ GC_INNER void GC_push_all_stacks(void)
+   int nthreads = 0;
+   word total_size = 0;
+   mach_msg_type_number_t listcount = (mach_msg_type_number_t)THREAD_TABLE_SZ;
++  size_t stack_limit;
+   if (!EXPECT(GC_thr_initialized, TRUE))
+     GC_thr_init();
+
+@@ -407,6 +408,19 @@ GC_INNER void GC_push_all_stacks(void)
+       GC_push_all_stack_sections(lo, hi, p->traced_stack_sect);
+     }
+     if (altstack_lo) {
++      // When a thread goes into a coroutine, we lose its original sp until
++      // control flow returns to the thread.
++      // While in the coroutine, the sp points outside the thread stack,
++      // so we can detect this and push the entire thread stack instead,
++      // as an approximation.
++      // We assume that the coroutine has similarly added its entire stack.
++      // This could be made accurate by cooperating with the application
++      // via new functions and/or callbacks.
++      stack_limit = pthread_get_stacksize_np(p->id);
++      if (altstack_lo >= altstack_hi || altstack_lo < altstack_hi - stack_limit) { // sp outside stack
++        altstack_lo = altstack_hi - stack_limit;
++      }
++
+       total_size += altstack_hi - altstack_lo;
+       GC_push_all_stack(altstack_lo, altstack_hi);
+     }
 diff --git a/pthread_stop_world.c b/pthread_stop_world.c
-index 4b2c429..1fb4c52 100644
+index b5d71e62..aed7b0bf 100644
 --- a/pthread_stop_world.c
 +++ b/pthread_stop_world.c
-@@ -673,6 +673,8 @@ GC_INNER void GC_push_all_stacks(void)
-   struct GC_traced_stack_sect_s *traced_stack_sect;
-   pthread_t self = pthread_self();
-   word total_size = 0;
+@@ -768,6 +768,8 @@ STATIC void GC_restart_handler(int sig)
+   /* world is stopped. Should not fail if it isn't. */
+ GC_INNER void GC_push_all_stacks(void)
+ {
 +  size_t stack_limit;
 +  pthread_attr_t pattr;
+   GC_bool found_me = FALSE;
-   if (!EXPECT(GC_thr_initialized, TRUE))
-     GC_thr_init();
+   size_t nthreads = 0;
+   int i;
-@@ -722,6 +724,31 @@ GC_INNER void GC_push_all_stacks(void)
+@@ -851,6 +853,31 @@ GC_INNER void GC_push_all_stacks(void)
    hi = p->altstack + p->altstack_size;
    /* FIXME: Need to scan the normal stack too, but how ? */
    /* FIXME: Assume stack grows down */
configure.ac  (10 changed lines)
@@ -41,8 +41,6 @@ AC_DEFINE_UNQUOTED(SYSTEM, ["$system"], [platform identifier ('cpu-os')])
 test "$localstatedir" = '${prefix}/var' && localstatedir=/nix/var
 
 
-CFLAGS=
-CXXFLAGS=
 AC_PROG_CC
 AC_PROG_CXX
 AC_PROG_CPP
@@ -177,7 +175,7 @@ fi
 PKG_CHECK_MODULES([OPENSSL], [libcrypto], [CXXFLAGS="$OPENSSL_CFLAGS $CXXFLAGS"])
 
 
-# Checks for libarchive
+# Look for libarchive.
 PKG_CHECK_MODULES([LIBARCHIVE], [libarchive >= 3.1.2], [CXXFLAGS="$LIBARCHIVE_CFLAGS $CXXFLAGS"])
 # Workaround until https://github.com/libarchive/libarchive/issues/1446 is fixed
 if test "$shared" != yes; then
@@ -276,6 +274,12 @@ fi
 PKG_CHECK_MODULES([GTEST], [gtest_main])
 
 
+# Look for rapidcheck.
+# No pkg-config yet, https://github.com/emil-e/rapidcheck/issues/302
+AC_CHECK_HEADERS([rapidcheck/gtest.h], [], [], [#include <gtest/gtest.h>])
+AC_CHECK_LIB([rapidcheck], [])
+
+
 # Look for nlohmann/json.
 PKG_CHECK_MODULES([NLOHMANN_JSON], [nlohmann_json >= 3.9])
(manual book.toml; file name not shown in this view)
@@ -1,7 +1,21 @@
+[book]
+title = "Nix Reference Manual"
+
 [output.html]
 additional-css = ["custom.css"]
 additional-js = ["redirects.js"]
+edit-url-template = "https://github.com/NixOS/nix/tree/master/doc/manual/{path}"
+git-repository-url = "https://github.com/NixOS/nix"
 
 [preprocessor.anchors]
 renderers = ["html"]
 command = "jq --from-file doc/manual/anchors.jq"
+
+[output.linkcheck]
+# no Internet during the build (in the sandbox)
+follow-web-links = false
+
+# mdbook-linkcheck does not understand [foo]{#bar} style links, resulting in
+# excessive "Potential incomplete link" warnings. No other kind of warning was
+# produced at the time of writing.
+warning-policy = "ignore"
(manual builtins generator; file name not shown in this view)
@@ -1,16 +1,20 @@
-with builtins;
-with import ./utils.nix;
-
-builtins:
-
-concatStrings (map
-  (name:
-    let builtin = builtins.${name}; in
-    "<dt id=\"builtins-${name}\"><a href=\"#builtins-${name}\"><code>${name} "
-    + concatStringsSep " " (map (s: "<var>${s}</var>") builtin.args)
-    + "</code></a></dt>"
-    + "<dd>\n\n"
-    + builtin.doc
-    + "\n\n</dd>"
-  )
-  (attrNames builtins))
+builtinsDump:
+let
+  showBuiltin = name:
+    let
+      inherit (builtinsDump.${name}) doc args;
+    in
+    ''
+      <dt id="builtins-${name}">
+        <a href="#builtins-${name}"><code>${name} ${listArgs args}</code></a>
+      </dt>
+      <dd>
+
+        ${doc}
+
+      </dd>
+    '';
+  listArgs = args: builtins.concatStringsSep " " (map (s: "<var>${s}</var>") args);
+in
+with builtins; concatStringsSep "\n" (map showBuiltin (attrNames builtinsDump))
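For orientation, the rewritten generator above takes a single attribute set (here called builtinsDump) mapping each builtin name to its doc text and args list, and the local.mk rule further down feeds it builtins.json via builtins.fromJSON. A minimal sketch of an invocation with a hypothetical one-entry dump, assuming it is evaluated next to the generator file:

```nix
# Hypothetical one-entry dump; the real input is builtins.json dumped by the nix binary.
let
  exampleDump = {
    add = {
      args = [ "e1" "e2" ];
      doc = "Return the sum of the numbers *e1* and *e2*.";
    };
  };
in
  # Yields the <dt>/<dd> definition-list fragment for each builtin, joined with newlines.
  import ./generate-builtins.nix exampleDump
```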
(manual manpage generator; file name not shown in this view)
@@ -1,97 +1,115 @@
-{ command }:
-
-with builtins;
-with import ./utils.nix;
-
-let
-
-  showCommand =
-    { command, def, filename }:
-    ''
-      **Warning**: This program is **experimental** and its interface is subject to change.
-    ''
-    + "# Name\n\n"
-    + "`${command}` - ${def.description}\n\n"
-    + "# Synopsis\n\n"
-    + showSynopsis { inherit command; args = def.args; }
-    + (if def.commands or {} != {}
-       then
-         let
-           categories = sort (x: y: x.id < y.id) (unique (map (cmd: cmd.category) (attrValues def.commands)));
-           listCommands = cmds:
-             concatStrings (map (name:
-               "* "
-               + "[`${command} ${name}`](./${appendName filename name}.md)"
-               + " - ${cmds.${name}.description}\n")
-               (attrNames cmds));
-         in
-         "where *subcommand* is one of the following:\n\n"
-         # FIXME: group by category
-         + (if length categories > 1
-            then
-              concatStrings (map
-                (cat:
-                  "**${toString cat.description}:**\n\n"
-                  + listCommands (filterAttrs (n: v: v.category == cat) def.commands)
-                  + "\n"
-                ) categories)
-              + "\n"
-            else
-              listCommands def.commands
-              + "\n")
-       else "")
-    + (if def ? doc
-       then def.doc + "\n\n"
-       else "")
-    + (let s = showOptions def.flags; in
-       if s != ""
-       then "# Options\n\n${s}"
-       else "")
-    ;
-
-  appendName = filename: name: (if filename == "nix" then "nix3" else filename) + "-" + name;
-
-  showOptions = flags:
-    let
-      categories = sort builtins.lessThan (unique (map (cmd: cmd.category) (attrValues flags)));
-    in
-      concatStrings (map
-        (cat:
-          (if cat != ""
-           then "**${cat}:**\n\n"
-           else "")
-          + concatStrings
-            (map (longName:
-              let
-                flag = flags.${longName};
-              in
-                " - `--${longName}`"
-                + (if flag ? shortName then " / `-${flag.shortName}`" else "")
-                + (if flag ? labels then " " + (concatStringsSep " " (map (s: "*${s}*") flag.labels)) else "")
-                + " \n"
-                + " " + flag.description + "\n\n"
-            ) (attrNames (filterAttrs (n: v: v.category == cat) flags))))
-        categories);
-
-  showSynopsis =
-    { command, args }:
-    "`${command}` [*option*...] ${concatStringsSep " "
-      (map (arg: "*${arg.label}*" + (if arg ? arity then "" else "...")) args)}\n\n";
-
-  processCommand = { command, def, filename }:
-    [ { name = filename + ".md"; value = showCommand { inherit command def filename; }; inherit command; } ]
-    ++ concatMap
-      (name: processCommand {
-        filename = appendName filename name;
-        command = command + " " + name;
-        def = def.commands.${name};
-      })
-      (attrNames def.commands or {});
-
-in
-
-let
-  manpages = processCommand { filename = "nix"; command = "nix"; def = builtins.fromJSON command; };
-  summary = concatStrings (map (manpage: " - [${manpage.command}](command-ref/new-cli/${manpage.name})\n") manpages);
-in
-(listToAttrs manpages) // { "SUMMARY.md" = summary; }
+{ toplevel }:
+
+with builtins;
+with import ./utils.nix;
+
+let
+
+  showCommand = { command, details, filename, toplevel }:
+    let
+      result = ''
+        > **Warning** \
+        > This program is **experimental** and its interface is subject to change.
+
+        # Name
+
+        `${command}` - ${details.description}
+
+        # Synopsis
+
+        ${showSynopsis command details.args}
+
+        ${maybeSubcommands}
+
+        ${maybeDocumentation}
+
+        ${maybeOptions}
+      '';
+      showSynopsis = command: args:
+        let
+          showArgument = arg: "*${arg.label}*" + (if arg ? arity then "" else "...");
+          arguments = concatStringsSep " " (map showArgument args);
+        in ''
+          `${command}` [*option*...] ${arguments}
+        '';
+      maybeSubcommands = if details ? commands && details.commands != {}
+        then ''
+          where *subcommand* is one of the following:
+
+          ${subcommands}
+        ''
+        else "";
+      subcommands = if length categories > 1
+        then listCategories
+        else listSubcommands details.commands;
+      categories = sort (x: y: x.id < y.id) (unique (map (cmd: cmd.category) (attrValues details.commands)));
+      listCategories = concatStrings (map showCategory categories);
+      showCategory = cat: ''
+        **${toString cat.description}:**
+
+        ${listSubcommands (filterAttrs (n: v: v.category == cat) details.commands)}
+      '';
+      listSubcommands = cmds: concatStrings (attrValues (mapAttrs showSubcommand cmds));
+      showSubcommand = name: subcmd: ''
+        * [`${command} ${name}`](./${appendName filename name}.md) - ${subcmd.description}
+      '';
+      maybeDocumentation = if details ? doc then details.doc else "";
+      maybeOptions = if details.flags == {} then "" else ''
+        # Options
+
+        ${showOptions details.flags toplevel.flags}
+      '';
+      showOptions = options: commonOptions:
+        let
+          allOptions = options // commonOptions;
+          showCategory = cat: ''
+            ${if cat != "" then "**${cat}:**" else ""}
+
+            ${listOptions (filterAttrs (n: v: v.category == cat) allOptions)}
+          '';
+          listOptions = opts: concatStringsSep "\n" (attrValues (mapAttrs showOption opts));
+          showOption = name: option:
+            let
+              shortName = if option ? shortName then "/ `-${option.shortName}`" else "";
+              labels = if option ? labels then (concatStringsSep " " (map (s: "*${s}*") option.labels)) else "";
+            in trim ''
+              - `--${name}` ${shortName} ${labels}
+
+                ${option.description}
+            '';
+          categories = sort builtins.lessThan (unique (map (cmd: cmd.category) (attrValues allOptions)));
+        in concatStrings (map showCategory categories);
+    in squash result;
+
+  appendName = filename: name: (if filename == "nix" then "nix3" else filename) + "-" + name;
+
+  processCommand = { command, details, filename, toplevel }:
+    let
+      cmd = {
+        inherit command;
+        name = filename + ".md";
+        value = showCommand { inherit command details filename toplevel; };
+      };
+      subcommand = subCmd: processCommand {
+        command = command + " " + subCmd;
+        details = details.commands.${subCmd};
+        filename = appendName filename subCmd;
+        inherit toplevel;
+      };
+    in [ cmd ] ++ concatMap subcommand (attrNames details.commands or {});
+
+  parsedToplevel = builtins.fromJSON toplevel;
+
+  manpages = processCommand {
+    command = "nix";
+    details = parsedToplevel;
+    filename = "nix";
+    toplevel = parsedToplevel;
+  };
+
+  tableOfContents = let
+    showEntry = page:
+      " - [${page.command}](command-ref/new-cli/${page.name})";
+    in concatStringsSep "\n" (map showEntry manpages) + "\n";
+
+in (listToAttrs manpages) // { "SUMMARY.md" = tableOfContents; }
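As the local.mk rule further down shows, the rewritten generator is now passed the raw JSON text of nix.json as toplevel and returns an attribute set of Markdown pages plus a SUMMARY.md fragment, which nix eval --write-to then writes out as a directory tree. A sketch of a standalone evaluation, where the relative paths are assumptions:

```nix
# Sketch only: assumes nix.json has already been dumped next to this file.
let
  pages = import ./generate-manpage.nix {
    toplevel = builtins.readFile ./nix.json;  # raw JSON text; parsed inside via builtins.fromJSON
  };
in {
  # one entry per page, e.g. "nix.md", "nix3-build.md", ..., plus the table-of-contents fragment
  pageNames = builtins.attrNames pages;
  summary = pages."SUMMARY.md";
}
```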
(manual settings generator; file name not shown in this view)
@@ -1,29 +1,41 @@
-with builtins;
-with import ./utils.nix;
-
-options:
-
-concatStrings (map
-  (name:
-    let option = options.${name}; in
-    " - [`${name}`](#conf-${name})"
-    + "<p id=\"conf-${name}\"></p>\n\n"
-    + concatStrings (map (s: " ${s}\n") (splitLines option.description)) + "\n\n"
-    + (if option.documentDefault
-       then " **Default:** " + (
-         if option.value == "" || option.value == []
-         then "*empty*"
-         else if isBool option.value
-         then (if option.value then "`true`" else "`false`")
-         else
-           # n.b. a StringMap value type is specified as a string, but
-           # this shows the value type. The empty stringmap is "null" in
-           # JSON, but that converts to "{ }" here.
-           (if isAttrs option.value then "`\"\"`"
-            else "`" + toString option.value + "`")) + "\n\n"
-       else " **Default:** *machine-specific*\n")
-    + (if option.aliases != []
-       then " **Deprecated alias:** " + (concatStringsSep ", " (map (s: "`${s}`") option.aliases)) + "\n\n"
-       else "")
-  )
-  (attrNames options))
+let
+  inherit (builtins) attrNames concatStringsSep isAttrs isBool;
+  inherit (import ./utils.nix) concatStrings squash splitLines;
+in
+
+optionsInfo:
+let
+  showOption = name:
+    let
+      inherit (optionsInfo.${name}) description documentDefault defaultValue aliases;
+      result = squash ''
+        - <span id="conf-${name}">[`${name}`](#conf-${name})</span>
+
+          ${indent "  " body}
+      '';
+      # separate body to cleanly handle indentation
+      body = ''
+        ${description}
+
+        **Default:** ${showDefault documentDefault defaultValue}
+
+        ${showAliases aliases}
+      '';
+      showDefault = documentDefault: defaultValue:
+        if documentDefault then
+          # a StringMap value type is specified as a string, but
+          # this shows the value type. The empty stringmap is `null` in
+          # JSON, but that converts to `{ }` here.
+          if defaultValue == "" || defaultValue == [] || isAttrs defaultValue
+          then "*empty*"
+          else if isBool defaultValue then
+            if defaultValue then "`true`" else "`false`"
+          else "`${toString defaultValue}`"
+        else "*machine-specific*";
+      showAliases = aliases:
+        if aliases == [] then "" else
+          "**Deprecated alias:** ${(concatStringsSep ", " (map (s: "`${s}`") aliases))}";
+      indent = prefix: s:
+        concatStringsSep "\n" (map (x: if x == "" then x else "${prefix}${x}") (splitLines s));
+    in result;
+in concatStrings (map showOption (attrNames optionsInfo))
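The rewritten settings generator consumes the conf-file.json dump, one entry per setting with description, documentDefault, defaultValue and aliases fields, as the inherit at the top shows; local.mk pipes its output into conf-file.md. A small hedged sketch with a made-up entry:

```nix
# Hypothetical single-setting dump in the shape generate-options.nix expects.
let
  exampleSettings = {
    cores = {
      description = "The number of CPU cores to use when building.";
      documentDefault = true;
      defaultValue = 0;
      aliases = [ "build-cores" ];
    };
  };
in
  # Renders one Markdown list item per setting, including its default and deprecated aliases.
  import ./generate-options.nix exampleSettings
```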
(doc/manual make rules; file name not shown in this view)
@@ -29,19 +29,19 @@ nix-eval = $(dummy-env) $(bindir)/nix eval --experimental-features nix-command -
 $(d)/%.1: $(d)/src/command-ref/%.md
 	@printf "Title: %s\n\n" "$$(basename $@ .1)" > $^.tmp
 	@cat $^ >> $^.tmp
-	$(trace-gen) lowdown -sT man -M section=1 $^.tmp -o $@
+	$(trace-gen) lowdown -sT man --nroff-nolinks -M section=1 $^.tmp -o $@
 	@rm $^.tmp
 
 $(d)/%.8: $(d)/src/command-ref/%.md
 	@printf "Title: %s\n\n" "$$(basename $@ .8)" > $^.tmp
 	@cat $^ >> $^.tmp
-	$(trace-gen) lowdown -sT man -M section=8 $^.tmp -o $@
+	$(trace-gen) lowdown -sT man --nroff-nolinks -M section=8 $^.tmp -o $@
 	@rm $^.tmp
 
 $(d)/nix.conf.5: $(d)/src/command-ref/conf-file.md
 	@printf "Title: %s\n\n" "$$(basename $@ .5)" > $^.tmp
 	@cat $^ >> $^.tmp
-	$(trace-gen) lowdown -sT man -M section=5 $^.tmp -o $@
+	$(trace-gen) lowdown -sT man --nroff-nolinks -M section=5 $^.tmp -o $@
 	@rm $^.tmp
 
 $(d)/src/SUMMARY.md: $(d)/src/SUMMARY.md.in $(d)/src/command-ref/new-cli
@@ -50,11 +50,16 @@ $(d)/src/SUMMARY.md: $(d)/src/SUMMARY.md.in $(d)/src/command-ref/new-cli
 
 $(d)/src/command-ref/new-cli: $(d)/nix.json $(d)/generate-manpage.nix $(bindir)/nix
 	@rm -rf $@
-	$(trace-gen) $(nix-eval) --write-to $@ --expr 'import doc/manual/generate-manpage.nix { command = builtins.readFile $<; }'
+	$(trace-gen) $(nix-eval) --write-to $@.tmp --expr 'import doc/manual/generate-manpage.nix { toplevel = builtins.readFile $<; }'
+	# @docroot@: https://nixos.org/manual/nix/unstable/contributing/hacking.html#docroot-variable
+	$(trace-gen) sed -i $@.tmp/*.md -e 's^@docroot@^../..^g'
+	@mv $@.tmp $@
 
 $(d)/src/command-ref/conf-file.md: $(d)/conf-file.json $(d)/generate-options.nix $(d)/src/command-ref/conf-file-prefix.md $(bindir)/nix
 	@cat doc/manual/src/command-ref/conf-file-prefix.md > $@.tmp
-	$(trace-gen) $(nix-eval) --expr 'import doc/manual/generate-options.nix (builtins.fromJSON (builtins.readFile $<))' >> $@.tmp
+	# @docroot@: https://nixos.org/manual/nix/unstable/contributing/hacking.html#docroot-variable
+	$(trace-gen) $(nix-eval) --expr 'import doc/manual/generate-options.nix (builtins.fromJSON (builtins.readFile $<))' \
+		| sed -e 's^@docroot@^..^g'>> $@.tmp
 	@mv $@.tmp $@
 
 $(d)/nix.json: $(bindir)/nix
@@ -67,7 +72,9 @@ $(d)/conf-file.json: $(bindir)/nix
 
 $(d)/src/language/builtins.md: $(d)/builtins.json $(d)/generate-builtins.nix $(d)/src/language/builtins-prefix.md $(bindir)/nix
 	@cat doc/manual/src/language/builtins-prefix.md > $@.tmp
-	$(trace-gen) $(nix-eval) --expr 'import doc/manual/generate-builtins.nix (builtins.fromJSON (builtins.readFile $<))' >> $@.tmp
+	# @docroot@: https://nixos.org/manual/nix/unstable/contributing/hacking.html#docroot-variable
+	$(trace-gen) $(nix-eval) --expr 'import doc/manual/generate-builtins.nix (builtins.fromJSON (builtins.readFile $<))' \
+		| sed -e 's^@docroot@^..^g' >> $@.tmp
 	@cat doc/manual/src/language/builtins-suffix.md >> $@.tmp
 	@mv $@.tmp $@
 
@@ -102,6 +109,12 @@ doc/manual/generated/man1/nix3-manpages: $(d)/src/command-ref/new-cli
 	@touch $@
 
 $(docdir)/manual/index.html: $(MANUAL_SRCS) $(d)/book.toml $(d)/anchors.jq $(d)/custom.css $(d)/src/SUMMARY.md $(d)/src/command-ref/new-cli $(d)/src/command-ref/conf-file.md $(d)/src/language/builtins.md
-	$(trace-gen) RUST_LOG=warn mdbook build doc/manual -d $(DESTDIR)$(docdir)/manual
+	$(trace-gen) \
+		set -euo pipefail; \
+		RUST_LOG=warn mdbook build doc/manual -d $(DESTDIR)$(docdir)/manual.tmp 2>&1 \
+			| { grep -Fv "because fragment resolution isn't implemented" || :; }
+	@rm -rf $(DESTDIR)$(docdir)/manual
+	@mv $(DESTDIR)$(docdir)/manual.tmp/html $(DESTDIR)$(docdir)/manual
+	@rm -rf $(DESTDIR)$(docdir)/manual.tmp
 
 endif
(manual redirects.js rewrite; file name not shown and listing truncated in this view)
@@ -1,330 +1,421 @@
-// Redirects from old DocBook manual.
-var redirects = {
- "#part-advanced-topics": "advanced-topics/advanced-topics.html",
- "#chap-tuning-cores-and-jobs": "advanced-topics/cores-vs-jobs.html",
- "#chap-diff-hook": "advanced-topics/diff-hook.html",
- "#check-dirs-are-unregistered": "advanced-topics/diff-hook.html#check-dirs-are-unregistered",
- "#chap-distributed-builds": "advanced-topics/distributed-builds.html",
- "#chap-post-build-hook": "advanced-topics/post-build-hook.html",
- "#chap-post-build-hook-caveats": "advanced-topics/post-build-hook.html#implementation-caveats",
- "#part-command-ref": "command-ref/command-ref.html",
- "#conf-allow-import-from-derivation": "command-ref/conf-file.html#conf-allow-import-from-derivation",
- "#conf-allow-new-privileges": "command-ref/conf-file.html#conf-allow-new-privileges",
- "#conf-allowed-uris": "command-ref/conf-file.html#conf-allowed-uris",
- "#conf-allowed-users": "command-ref/conf-file.html#conf-allowed-users",
- "#conf-auto-optimise-store": "command-ref/conf-file.html#conf-auto-optimise-store",
- "#conf-binary-cache-public-keys": "command-ref/conf-file.html#conf-binary-cache-public-keys",
- "#conf-binary-caches": "command-ref/conf-file.html#conf-binary-caches",
- "#conf-build-compress-log": "command-ref/conf-file.html#conf-build-compress-log",
- "#conf-build-cores": "command-ref/conf-file.html#conf-build-cores",
- "#conf-build-extra-chroot-dirs": "command-ref/conf-file.html#conf-build-extra-chroot-dirs",
- "#conf-build-extra-sandbox-paths": "command-ref/conf-file.html#conf-build-extra-sandbox-paths",
- "#conf-build-fallback": "command-ref/conf-file.html#conf-build-fallback",
- "#conf-build-max-jobs": "command-ref/conf-file.html#conf-build-max-jobs",
- "#conf-build-max-log-size": "command-ref/conf-file.html#conf-build-max-log-size",
- "#conf-build-max-silent-time": "command-ref/conf-file.html#conf-build-max-silent-time",
- "#conf-build-repeat": "command-ref/conf-file.html#conf-build-repeat",
- "#conf-build-timeout": "command-ref/conf-file.html#conf-build-timeout",
- "#conf-build-use-chroot": "command-ref/conf-file.html#conf-build-use-chroot",
- "#conf-build-use-sandbox": "command-ref/conf-file.html#conf-build-use-sandbox",
- "#conf-build-use-substitutes": "command-ref/conf-file.html#conf-build-use-substitutes",
- "#conf-build-users-group": "command-ref/conf-file.html#conf-build-users-group",
- "#conf-builders": "command-ref/conf-file.html#conf-builders",
- "#conf-builders-use-substitutes": "command-ref/conf-file.html#conf-builders-use-substitutes",
- "#conf-compress-build-log": "command-ref/conf-file.html#conf-compress-build-log",
- "#conf-connect-timeout": "command-ref/conf-file.html#conf-connect-timeout",
- "#conf-cores": "command-ref/conf-file.html#conf-cores",
- "#conf-diff-hook": "command-ref/conf-file.html#conf-diff-hook",
- "#conf-enforce-determinism": "command-ref/conf-file.html#conf-enforce-determinism",
- "#conf-env-keep-derivations": "command-ref/conf-file.html#conf-env-keep-derivations",
- "#conf-extra-binary-caches": "command-ref/conf-file.html#conf-extra-binary-caches",
- "#conf-extra-platforms": "command-ref/conf-file.html#conf-extra-platforms",
- "#conf-extra-sandbox-paths": "command-ref/conf-file.html#conf-extra-sandbox-paths",
- "#conf-extra-substituters": "command-ref/conf-file.html#conf-extra-substituters",
- "#conf-fallback": "command-ref/conf-file.html#conf-fallback",
- "#conf-fsync-metadata": "command-ref/conf-file.html#conf-fsync-metadata",
- "#conf-gc-keep-derivations": "command-ref/conf-file.html#conf-gc-keep-derivations",
- "#conf-gc-keep-outputs": "command-ref/conf-file.html#conf-gc-keep-outputs",
- "#conf-hashed-mirrors": "command-ref/conf-file.html#conf-hashed-mirrors",
- "#conf-http-connections": "command-ref/conf-file.html#conf-http-connections",
- "#conf-keep-build-log": "command-ref/conf-file.html#conf-keep-build-log",
- "#conf-keep-derivations": "command-ref/conf-file.html#conf-keep-derivations",
- "#conf-keep-env-derivations": "command-ref/conf-file.html#conf-keep-env-derivations",
- "#conf-keep-outputs": "command-ref/conf-file.html#conf-keep-outputs",
- "#conf-max-build-log-size": "command-ref/conf-file.html#conf-max-build-log-size",
- "#conf-max-free": "command-ref/conf-file.html#conf-max-free",
- "#conf-max-jobs": "command-ref/conf-file.html#conf-max-jobs",
- "#conf-max-silent-time": "command-ref/conf-file.html#conf-max-silent-time",
- "#conf-min-free": "command-ref/conf-file.html#conf-min-free",
- "#conf-narinfo-cache-negative-ttl": "command-ref/conf-file.html#conf-narinfo-cache-negative-ttl",
- "#conf-narinfo-cache-positive-ttl": "command-ref/conf-file.html#conf-narinfo-cache-positive-ttl",
- "#conf-netrc-file": "command-ref/conf-file.html#conf-netrc-file",
- "#conf-plugin-files": "command-ref/conf-file.html#conf-plugin-files",
- "#conf-post-build-hook": "command-ref/conf-file.html#conf-post-build-hook",
- "#conf-pre-build-hook": "command-ref/conf-file.html#conf-pre-build-hook",
- "#conf-repeat": "command-ref/conf-file.html#conf-repeat",
- "#conf-require-sigs": "command-ref/conf-file.html#conf-require-sigs",
- "#conf-restrict-eval": "command-ref/conf-file.html#conf-restrict-eval",
- "#conf-run-diff-hook": "command-ref/conf-file.html#conf-run-diff-hook",
- "#conf-sandbox": "command-ref/conf-file.html#conf-sandbox",
- "#conf-sandbox-dev-shm-size": "command-ref/conf-file.html#conf-sandbox-dev-shm-size",
- "#conf-sandbox-paths": "command-ref/conf-file.html#conf-sandbox-paths",
- "#conf-secret-key-files": "command-ref/conf-file.html#conf-secret-key-files",
- "#conf-show-trace": "command-ref/conf-file.html#conf-show-trace",
- "#conf-stalled-download-timeout": "command-ref/conf-file.html#conf-stalled-download-timeout",
- "#conf-substitute": "command-ref/conf-file.html#conf-substitute",
- "#conf-substituters": "command-ref/conf-file.html#conf-substituters",
- "#conf-system": "command-ref/conf-file.html#conf-system",
- "#conf-system-features": "command-ref/conf-file.html#conf-system-features",
- "#conf-tarball-ttl": "command-ref/conf-file.html#conf-tarball-ttl",
- "#conf-timeout": "command-ref/conf-file.html#conf-timeout",
- "#conf-trace-function-calls": "command-ref/conf-file.html#conf-trace-function-calls",
- "#conf-trusted-binary-caches": "command-ref/conf-file.html#conf-trusted-binary-caches",
- "#conf-trusted-public-keys": "command-ref/conf-file.html#conf-trusted-public-keys",
- "#conf-trusted-substituters": "command-ref/conf-file.html#conf-trusted-substituters",
- "#conf-trusted-users": "command-ref/conf-file.html#conf-trusted-users",
- "#extra-sandbox-paths": "command-ref/conf-file.html#extra-sandbox-paths",
- "#sec-conf-file": "command-ref/conf-file.html",
- "#env-NIX_PATH": "command-ref/env-common.html#env-NIX_PATH",
- "#env-common": "command-ref/env-common.html",
- "#envar-remote": "command-ref/env-common.html#env-NIX_REMOTE",
- "#sec-common-env": "command-ref/env-common.html",
- "#ch-files": "command-ref/files.html",
- "#ch-main-commands": "command-ref/main-commands.html",
- "#opt-out-link": "command-ref/nix-build.html#opt-out-link",
- "#sec-nix-build": "command-ref/nix-build.html",
- "#sec-nix-channel": "command-ref/nix-channel.html",
- "#sec-nix-collect-garbage": "command-ref/nix-collect-garbage.html",
- "#sec-nix-copy-closure": "command-ref/nix-copy-closure.html",
- "#sec-nix-daemon": "command-ref/nix-daemon.html",
- "#refsec-nix-env-install-examples": "command-ref/nix-env.html#examples",
- "#rsec-nix-env-install": "command-ref/nix-env.html#operation---install",
- "#rsec-nix-env-set": "command-ref/nix-env.html#operation---set",
- "#rsec-nix-env-set-flag": "command-ref/nix-env.html#operation---set-flag",
- "#rsec-nix-env-upgrade": "command-ref/nix-env.html#operation---upgrade",
- "#sec-nix-env": "command-ref/nix-env.html",
- "#ssec-version-comparisons": "command-ref/nix-env.html#versions",
- "#sec-nix-hash": "command-ref/nix-hash.html",
- "#sec-nix-instantiate": "command-ref/nix-instantiate.html",
- "#sec-nix-prefetch-url": "command-ref/nix-prefetch-url.html",
- "#sec-nix-shell": "command-ref/nix-shell.html",
- "#ssec-nix-shell-shebang": "command-ref/nix-shell.html#use-as-a--interpreter",
- "#nixref-queries": "command-ref/nix-store.html#queries",
- "#opt-add-root": "command-ref/nix-store.html#opt-add-root",
- "#refsec-nix-store-dump": "command-ref/nix-store.html#operation---dump",
- "#refsec-nix-store-export": "command-ref/nix-store.html#operation---export",
- "#refsec-nix-store-import": "command-ref/nix-store.html#operation---import",
- "#refsec-nix-store-query": "command-ref/nix-store.html#operation---query",
- "#refsec-nix-store-verify": "command-ref/nix-store.html#operation---verify",
- "#rsec-nix-store-gc": "command-ref/nix-store.html#operation---gc",
- "#rsec-nix-store-generate-binary-cache-key": "command-ref/nix-store.html#operation---generate-binary-cache-key",
- "#rsec-nix-store-realise": "command-ref/nix-store.html#operation---realise",
- "#rsec-nix-store-serve": "command-ref/nix-store.html#operation---serve",
- "#sec-nix-store": "command-ref/nix-store.html",
- "#opt-I": "command-ref/opt-common.html#opt-I",
- "#opt-attr": "command-ref/opt-common.html#opt-attr",
- "#opt-common": "command-ref/opt-common.html",
- "#opt-cores": "command-ref/opt-common.html#opt-cores",
- "#opt-log-format": "command-ref/opt-common.html#opt-log-format",
- "#opt-max-jobs": "command-ref/opt-common.html#opt-max-jobs",
- "#opt-max-silent-time": "command-ref/opt-common.html#opt-max-silent-time",
- "#opt-timeout": "command-ref/opt-common.html#opt-timeout",
- "#sec-common-options": "command-ref/opt-common.html",
- "#ch-utilities": "command-ref/utilities.html",
- "#chap-hacking": "contributing/hacking.html",
- "#adv-attr-allowSubstitutes": "language/advanced-attributes.html#adv-attr-allowSubstitutes",
- "#adv-attr-allowedReferences": "language/advanced-attributes.html#adv-attr-allowedReferences",
- "#adv-attr-allowedRequisites": "language/advanced-attributes.html#adv-attr-allowedRequisites",
- "#adv-attr-disallowedReferences": "language/advanced-attributes.html#adv-attr-disallowedReferences",
- "#adv-attr-disallowedRequisites": "language/advanced-attributes.html#adv-attr-disallowedRequisites",
- "#adv-attr-exportReferencesGraph": "language/advanced-attributes.html#adv-attr-exportReferencesGraph",
- "#adv-attr-impureEnvVars": "language/advanced-attributes.html#adv-attr-impureEnvVars",
- "#adv-attr-outputHash": "language/advanced-attributes.html#adv-attr-outputHash",
- "#adv-attr-outputHashAlgo": "language/advanced-attributes.html#adv-attr-outputHashAlgo",
- "#adv-attr-outputHashMode": "language/advanced-attributes.html#adv-attr-outputHashMode",
- "#adv-attr-passAsFile": "language/advanced-attributes.html#adv-attr-passAsFile",
- "#adv-attr-preferLocalBuild": "language/advanced-attributes.html#adv-attr-preferLocalBuild",
- "#fixed-output-drvs": "language/advanced-attributes.html#adv-attr-outputHash",
- "#sec-advanced-attributes": "language/advanced-attributes.html",
- "#builtin-abort": "language/builtins.html#builtins-abort",
- "#builtin-add": "language/builtins.html#builtins-add",
- "#builtin-all": "language/builtins.html#builtins-all",
- "#builtin-any": "language/builtins.html#builtins-any",
- "#builtin-attrNames": "language/builtins.html#builtins-attrNames",
- "#builtin-attrValues": "language/builtins.html#builtins-attrValues",
- "#builtin-baseNameOf": "language/builtins.html#builtins-baseNameOf",
- "#builtin-bitAnd": "language/builtins.html#builtins-bitAnd",
- "#builtin-bitOr": "language/builtins.html#builtins-bitOr",
- "#builtin-bitXor": "language/builtins.html#builtins-bitXor",
- "#builtin-builtins": "language/builtins.html#builtins-builtins",
- "#builtin-compareVersions": "language/builtins.html#builtins-compareVersions",
- "#builtin-concatLists": "language/builtins.html#builtins-concatLists",
- "#builtin-concatStringsSep": "language/builtins.html#builtins-concatStringsSep",
- "#builtin-currentSystem": "language/builtins.html#builtins-currentSystem",
- "#builtin-deepSeq": "language/builtins.html#builtins-deepSeq",
- "#builtin-derivation": "language/builtins.html#builtins-derivation",
- "#builtin-dirOf": "language/builtins.html#builtins-dirOf",
- "#builtin-div": "language/builtins.html#builtins-div",
- "#builtin-elem": "language/builtins.html#builtins-elem",
- "#builtin-elemAt": "language/builtins.html#builtins-elemAt",
- "#builtin-fetchGit": "language/builtins.html#builtins-fetchGit",
- "#builtin-fetchTarball": "language/builtins.html#builtins-fetchTarball",
+// redirect rules for anchors ensure backwards compatibility of URLs.
+// this must be done on the client side, as web servers do not see the anchor part of the URL.
+
+// redirections are declared as follows:
+// each entry has as its key a path matching the requested URL path, relative to the mdBook document root.
+//
+// IMPORTANT: it must specify the full path with file name and suffix
+//
+// each entry is itself a set of key-value pairs, where
+// - keys are anchors on the matched path.
+// - values are redirection targets relative to the current path.
+
+const redirects = {
+ "index.html": {
+  "part-advanced-topics": "advanced-topics/advanced-topics.html",
+  "chap-tuning-cores-and-jobs": "advanced-topics/cores-vs-jobs.html",
+  "chap-diff-hook": "advanced-topics/diff-hook.html",
+  "check-dirs-are-unregistered": "advanced-topics/diff-hook.html#check-dirs-are-unregistered",
+  "chap-distributed-builds": "advanced-topics/distributed-builds.html",
+  "chap-post-build-hook": "advanced-topics/post-build-hook.html",
+  "chap-post-build-hook-caveats": "advanced-topics/post-build-hook.html#implementation-caveats",
+  "part-command-ref": "command-ref/command-ref.html",
+  "conf-allow-import-from-derivation": "command-ref/conf-file.html#conf-allow-import-from-derivation",
+  "conf-allow-new-privileges": "command-ref/conf-file.html#conf-allow-new-privileges",
+  "conf-allowed-uris": "command-ref/conf-file.html#conf-allowed-uris",
+  "conf-allowed-users": "command-ref/conf-file.html#conf-allowed-users",
+  "conf-auto-optimise-store": "command-ref/conf-file.html#conf-auto-optimise-store",
+  "conf-binary-cache-public-keys": "command-ref/conf-file.html#conf-binary-cache-public-keys",
+  "conf-binary-caches": "command-ref/conf-file.html#conf-binary-caches",
+  "conf-build-compress-log": "command-ref/conf-file.html#conf-build-compress-log",
+  "conf-build-cores": "command-ref/conf-file.html#conf-build-cores",
+  "conf-build-extra-chroot-dirs": "command-ref/conf-file.html#conf-build-extra-chroot-dirs",
+  "conf-build-extra-sandbox-paths": "command-ref/conf-file.html#conf-build-extra-sandbox-paths",
+  "conf-build-fallback": "command-ref/conf-file.html#conf-build-fallback",
+  "conf-build-max-jobs": "command-ref/conf-file.html#conf-build-max-jobs",
+  "conf-build-max-log-size": "command-ref/conf-file.html#conf-build-max-log-size",
+  "conf-build-max-silent-time": "command-ref/conf-file.html#conf-build-max-silent-time",
+  "conf-build-timeout": "command-ref/conf-file.html#conf-build-timeout",
+  "conf-build-use-chroot": "command-ref/conf-file.html#conf-build-use-chroot",
+  "conf-build-use-sandbox": "command-ref/conf-file.html#conf-build-use-sandbox",
+  "conf-build-use-substitutes": "command-ref/conf-file.html#conf-build-use-substitutes",
+  "conf-build-users-group": "command-ref/conf-file.html#conf-build-users-group",
+  "conf-builders": "command-ref/conf-file.html#conf-builders",
+  "conf-builders-use-substitutes": "command-ref/conf-file.html#conf-builders-use-substitutes",
+  "conf-compress-build-log": "command-ref/conf-file.html#conf-compress-build-log",
+  "conf-connect-timeout": "command-ref/conf-file.html#conf-connect-timeout",
+  "conf-cores": "command-ref/conf-file.html#conf-cores",
+  "conf-diff-hook": "command-ref/conf-file.html#conf-diff-hook",
+  "conf-env-keep-derivations": "command-ref/conf-file.html#conf-env-keep-derivations",
+  "conf-extra-binary-caches": "command-ref/conf-file.html#conf-extra-binary-caches",
+  "conf-extra-platforms": "command-ref/conf-file.html#conf-extra-platforms",
+  "conf-extra-sandbox-paths": "command-ref/conf-file.html#conf-extra-sandbox-paths",
+  "conf-extra-substituters": "command-ref/conf-file.html#conf-extra-substituters",
+  "conf-fallback": "command-ref/conf-file.html#conf-fallback",
+  "conf-fsync-metadata": "command-ref/conf-file.html#conf-fsync-metadata",
+  "conf-gc-keep-derivations": "command-ref/conf-file.html#conf-gc-keep-derivations",
+  "conf-gc-keep-outputs": "command-ref/conf-file.html#conf-gc-keep-outputs",
+  "conf-hashed-mirrors": "command-ref/conf-file.html#conf-hashed-mirrors",
+  "conf-http-connections": "command-ref/conf-file.html#conf-http-connections",
+  "conf-keep-build-log": "command-ref/conf-file.html#conf-keep-build-log",
+  "conf-keep-derivations": "command-ref/conf-file.html#conf-keep-derivations",
+  "conf-keep-env-derivations": "command-ref/conf-file.html#conf-keep-env-derivations",
+  "conf-keep-outputs": "command-ref/conf-file.html#conf-keep-outputs",
+  "conf-max-build-log-size": "command-ref/conf-file.html#conf-max-build-log-size",
+  "conf-max-free": "command-ref/conf-file.html#conf-max-free",
+  "conf-max-jobs": "command-ref/conf-file.html#conf-max-jobs",
+  "conf-max-silent-time": "command-ref/conf-file.html#conf-max-silent-time",
+  "conf-min-free": "command-ref/conf-file.html#conf-min-free",
+  "conf-narinfo-cache-negative-ttl": "command-ref/conf-file.html#conf-narinfo-cache-negative-ttl",
+  "conf-narinfo-cache-positive-ttl": "command-ref/conf-file.html#conf-narinfo-cache-positive-ttl",
+  "conf-netrc-file": "command-ref/conf-file.html#conf-netrc-file",
+  "conf-plugin-files": "command-ref/conf-file.html#conf-plugin-files",
+  "conf-post-build-hook": "command-ref/conf-file.html#conf-post-build-hook",
+  "conf-pre-build-hook": "command-ref/conf-file.html#conf-pre-build-hook",
+  "conf-require-sigs": "command-ref/conf-file.html#conf-require-sigs",
+  "conf-restrict-eval": "command-ref/conf-file.html#conf-restrict-eval",
+  "conf-run-diff-hook": "command-ref/conf-file.html#conf-run-diff-hook",
+  "conf-sandbox": "command-ref/conf-file.html#conf-sandbox",
+  "conf-sandbox-dev-shm-size": "command-ref/conf-file.html#conf-sandbox-dev-shm-size",
+  "conf-sandbox-paths": "command-ref/conf-file.html#conf-sandbox-paths",
+  "conf-secret-key-files": "command-ref/conf-file.html#conf-secret-key-files",
+  "conf-show-trace": "command-ref/conf-file.html#conf-show-trace",
+  "conf-stalled-download-timeout": "command-ref/conf-file.html#conf-stalled-download-timeout",
+  "conf-substitute": "command-ref/conf-file.html#conf-substitute",
+  "conf-substituters": "command-ref/conf-file.html#conf-substituters",
+  "conf-system": "command-ref/conf-file.html#conf-system",
+  "conf-system-features": "command-ref/conf-file.html#conf-system-features",
+  "conf-tarball-ttl": "command-ref/conf-file.html#conf-tarball-ttl",
+  "conf-timeout": "command-ref/conf-file.html#conf-timeout",
+  "conf-trace-function-calls": "command-ref/conf-file.html#conf-trace-function-calls",
+  "conf-trusted-binary-caches": "command-ref/conf-file.html#conf-trusted-binary-caches",
+  "conf-trusted-public-keys": "command-ref/conf-file.html#conf-trusted-public-keys",
+  "conf-trusted-substituters": "command-ref/conf-file.html#conf-trusted-substituters",
+  "conf-trusted-users": "command-ref/conf-file.html#conf-trusted-users",
+  "extra-sandbox-paths": "command-ref/conf-file.html#extra-sandbox-paths",
+  "sec-conf-file": "command-ref/conf-file.html",
+  "env-NIX_PATH": "command-ref/env-common.html#env-NIX_PATH",
+  "env-common": "command-ref/env-common.html",
+  "envar-remote": "command-ref/env-common.html#env-NIX_REMOTE",
+  "sec-common-env": "command-ref/env-common.html",
+  "ch-files": "command-ref/files.html",
+  "ch-main-commands": "command-ref/main-commands.html",
+  "opt-out-link": "command-ref/nix-build.html#opt-out-link",
+  "sec-nix-build": "command-ref/nix-build.html",
+  "sec-nix-channel": "command-ref/nix-channel.html",
+  "sec-nix-collect-garbage": "command-ref/nix-collect-garbage.html",
+  "sec-nix-copy-closure": "command-ref/nix-copy-closure.html",
+  "sec-nix-daemon": "command-ref/nix-daemon.html",
+  "refsec-nix-env-install-examples": "command-ref/nix-env.html#examples",
+  "rsec-nix-env-install": "command-ref/nix-env.html#operation---install",
+  "rsec-nix-env-set": "command-ref/nix-env.html#operation---set",
+  "rsec-nix-env-set-flag": "command-ref/nix-env.html#operation---set-flag",
+  "rsec-nix-env-upgrade": "command-ref/nix-env.html#operation---upgrade",
+  "sec-nix-env": "command-ref/nix-env.html",
+  "ssec-version-comparisons": "command-ref/nix-env.html#versions",
+  "sec-nix-hash": "command-ref/nix-hash.html",
+  "sec-nix-instantiate": "command-ref/nix-instantiate.html",
+  "sec-nix-prefetch-url": "command-ref/nix-prefetch-url.html",
+  "sec-nix-shell": "command-ref/nix-shell.html",
+  "ssec-nix-shell-shebang": "command-ref/nix-shell.html#use-as-a--interpreter",
+  "nixref-queries": "command-ref/nix-store.html#queries",
+  "opt-add-root": "command-ref/nix-store.html#opt-add-root",
+  "refsec-nix-store-dump": "command-ref/nix-store.html#operation---dump",
+  "refsec-nix-store-export": "command-ref/nix-store.html#operation---export",
+  "refsec-nix-store-import": "command-ref/nix-store.html#operation---import",
+  "refsec-nix-store-query": "command-ref/nix-store.html#operation---query",
+  "refsec-nix-store-verify": "command-ref/nix-store.html#operation---verify",
+  "rsec-nix-store-gc": "command-ref/nix-store.html#operation---gc",
+  "rsec-nix-store-generate-binary-cache-key": "command-ref/nix-store.html#operation---generate-binary-cache-key",
+  "rsec-nix-store-realise": "command-ref/nix-store.html#operation---realise",
+  "rsec-nix-store-serve": "command-ref/nix-store.html#operation---serve",
+  "sec-nix-store": "command-ref/nix-store.html",
+  "opt-I": "command-ref/opt-common.html#opt-I",
+  "opt-attr": "command-ref/opt-common.html#opt-attr",
+  "opt-common": "command-ref/opt-common.html",
+  "opt-cores": "command-ref/opt-common.html#opt-cores",
+  "opt-log-format": "command-ref/opt-common.html#opt-log-format",
+  "opt-max-jobs": "command-ref/opt-common.html#opt-max-jobs",
+  "opt-max-silent-time": "command-ref/opt-common.html#opt-max-silent-time",
+  "opt-timeout": "command-ref/opt-common.html#opt-timeout",
+  "sec-common-options": "command-ref/opt-common.html",
+  "ch-utilities": "command-ref/utilities.html",
+  "chap-hacking": "contributing/hacking.html",
+  "adv-attr-allowSubstitutes": "language/advanced-attributes.html#adv-attr-allowSubstitutes",
+  "adv-attr-allowedReferences": "language/advanced-attributes.html#adv-attr-allowedReferences",
+  "adv-attr-allowedRequisites": "language/advanced-attributes.html#adv-attr-allowedRequisites",
+  "adv-attr-disallowedReferences": "language/advanced-attributes.html#adv-attr-disallowedReferences",
+  "adv-attr-disallowedRequisites": "language/advanced-attributes.html#adv-attr-disallowedRequisites",
+  "adv-attr-exportReferencesGraph": "language/advanced-attributes.html#adv-attr-exportReferencesGraph",
+  "adv-attr-impureEnvVars": "language/advanced-attributes.html#adv-attr-impureEnvVars",
+  "adv-attr-outputHash": "language/advanced-attributes.html#adv-attr-outputHash",
+  "adv-attr-outputHashAlgo": "language/advanced-attributes.html#adv-attr-outputHashAlgo",
+  "adv-attr-outputHashMode": "language/advanced-attributes.html#adv-attr-outputHashMode",
+  "adv-attr-passAsFile": "language/advanced-attributes.html#adv-attr-passAsFile",
+  "adv-attr-preferLocalBuild": "language/advanced-attributes.html#adv-attr-preferLocalBuild",
+  "fixed-output-drvs": "language/advanced-attributes.html#adv-attr-outputHash",
+  "sec-advanced-attributes": "language/advanced-attributes.html",
+  "builtin-abort": "language/builtins.html#builtins-abort",
+  "builtin-add": "language/builtins.html#builtins-add",
+  "builtin-all": "language/builtins.html#builtins-all",
+  "builtin-any": "language/builtins.html#builtins-any",
+  "builtin-attrNames": "language/builtins.html#builtins-attrNames",
+  "builtin-attrValues": "language/builtins.html#builtins-attrValues",
+  "builtin-baseNameOf": "language/builtins.html#builtins-baseNameOf",
+  "builtin-bitAnd": "language/builtins.html#builtins-bitAnd",
+  "builtin-bitOr": "language/builtins.html#builtins-bitOr",
+  "builtin-bitXor": "language/builtins.html#builtins-bitXor",
+  "builtin-builtins": "language/builtins.html#builtins-builtins",
+  "builtin-compareVersions": "language/builtins.html#builtins-compareVersions",
+  "builtin-concatLists": "language/builtins.html#builtins-concatLists",
(remainder of the hunk truncated in this view)
|
"builtin-concatStringsSep": "language/builtins.html#builtins-concatStringsSep",
|
||||||
"#builtin-fetchurl": "language/builtins.html#builtins-fetchurl",
|
"builtin-currentSystem": "language/builtins.html#builtins-currentSystem",
|
||||||
"#builtin-filterSource": "language/builtins.html#builtins-filterSource",
|
"builtin-deepSeq": "language/builtins.html#builtins-deepSeq",
|
||||||
"#builtin-foldl-prime": "language/builtins.html#builtins-foldl-prime",
|
"builtin-derivation": "language/builtins.html#builtins-derivation",
|
||||||
"#builtin-fromJSON": "language/builtins.html#builtins-fromJSON",
|
"builtin-dirOf": "language/builtins.html#builtins-dirOf",
|
||||||
"#builtin-functionArgs": "language/builtins.html#builtins-functionArgs",
|
"builtin-div": "language/builtins.html#builtins-div",
|
||||||
"#builtin-genList": "language/builtins.html#builtins-genList",
|
"builtin-elem": "language/builtins.html#builtins-elem",
|
||||||
"#builtin-getAttr": "language/builtins.html#builtins-getAttr",
|
"builtin-elemAt": "language/builtins.html#builtins-elemAt",
|
||||||
"#builtin-getEnv": "language/builtins.html#builtins-getEnv",
|
"builtin-fetchGit": "language/builtins.html#builtins-fetchGit",
|
||||||
"#builtin-hasAttr": "language/builtins.html#builtins-hasAttr",
|
"builtin-fetchTarball": "language/builtins.html#builtins-fetchTarball",
|
||||||
"#builtin-hashFile": "language/builtins.html#builtins-hashFile",
|
"builtin-fetchurl": "language/builtins.html#builtins-fetchurl",
|
||||||
"#builtin-hashString": "language/builtins.html#builtins-hashString",
|
"builtin-filterSource": "language/builtins.html#builtins-filterSource",
|
||||||
"#builtin-head": "language/builtins.html#builtins-head",
|
"builtin-foldl-prime": "language/builtins.html#builtins-foldl-prime",
|
||||||
"#builtin-import": "language/builtins.html#builtins-import",
|
"builtin-fromJSON": "language/builtins.html#builtins-fromJSON",
|
||||||
"#builtin-intersectAttrs": "language/builtins.html#builtins-intersectAttrs",
|
"builtin-functionArgs": "language/builtins.html#builtins-functionArgs",
|
||||||
"#builtin-isAttrs": "language/builtins.html#builtins-isAttrs",
|
"builtin-genList": "language/builtins.html#builtins-genList",
|
||||||
"#builtin-isBool": "language/builtins.html#builtins-isBool",
|
"builtin-getAttr": "language/builtins.html#builtins-getAttr",
|
||||||
"#builtin-isFloat": "language/builtins.html#builtins-isFloat",
|
"builtin-getEnv": "language/builtins.html#builtins-getEnv",
|
||||||
"#builtin-isFunction": "language/builtins.html#builtins-isFunction",
|
"builtin-hasAttr": "language/builtins.html#builtins-hasAttr",
|
||||||
"#builtin-isInt": "language/builtins.html#builtins-isInt",
|
"builtin-hashFile": "language/builtins.html#builtins-hashFile",
|
||||||
"#builtin-isList": "language/builtins.html#builtins-isList",
|
"builtin-hashString": "language/builtins.html#builtins-hashString",
|
||||||
"#builtin-isNull": "language/builtins.html#builtins-isNull",
|
"builtin-head": "language/builtins.html#builtins-head",
|
||||||
"#builtin-isString": "language/builtins.html#builtins-isString",
|
"builtin-import": "language/builtins.html#builtins-import",
|
||||||
"#builtin-length": "language/builtins.html#builtins-length",
|
"builtin-intersectAttrs": "language/builtins.html#builtins-intersectAttrs",
|
||||||
"#builtin-lessThan": "language/builtins.html#builtins-lessThan",
|
"builtin-isAttrs": "language/builtins.html#builtins-isAttrs",
|
||||||
"#builtin-listToAttrs": "language/builtins.html#builtins-listToAttrs",
|
"builtin-isBool": "language/builtins.html#builtins-isBool",
|
||||||
"#builtin-map": "language/builtins.html#builtins-map",
|
"builtin-isFloat": "language/builtins.html#builtins-isFloat",
|
||||||
"#builtin-match": "language/builtins.html#builtins-match",
|
"builtin-isFunction": "language/builtins.html#builtins-isFunction",
|
||||||
"#builtin-mul": "language/builtins.html#builtins-mul",
|
"builtin-isInt": "language/builtins.html#builtins-isInt",
|
||||||
"#builtin-parseDrvName": "language/builtins.html#builtins-parseDrvName",
|
"builtin-isList": "language/builtins.html#builtins-isList",
|
||||||
"#builtin-path": "language/builtins.html#builtins-path",
|
"builtin-isNull": "language/builtins.html#builtins-isNull",
|
||||||
"#builtin-pathExists": "language/builtins.html#builtins-pathExists",
|
"builtin-isString": "language/builtins.html#builtins-isString",
|
||||||
"#builtin-placeholder": "language/builtins.html#builtins-placeholder",
|
"builtin-length": "language/builtins.html#builtins-length",
|
||||||
"#builtin-readDir": "language/builtins.html#builtins-readDir",
|
"builtin-lessThan": "language/builtins.html#builtins-lessThan",
|
||||||
"#builtin-readFile": "language/builtins.html#builtins-readFile",
|
"builtin-listToAttrs": "language/builtins.html#builtins-listToAttrs",
|
||||||
"#builtin-removeAttrs": "language/builtins.html#builtins-removeAttrs",
|
"builtin-map": "language/builtins.html#builtins-map",
|
||||||
"#builtin-replaceStrings": "language/builtins.html#builtins-replaceStrings",
|
"builtin-match": "language/builtins.html#builtins-match",
|
||||||
"#builtin-seq": "language/builtins.html#builtins-seq",
|
"builtin-mul": "language/builtins.html#builtins-mul",
|
||||||
"#builtin-sort": "language/builtins.html#builtins-sort",
|
"builtin-parseDrvName": "language/builtins.html#builtins-parseDrvName",
|
||||||
"#builtin-split": "language/builtins.html#builtins-split",
|
"builtin-path": "language/builtins.html#builtins-path",
|
||||||
"#builtin-splitVersion": "language/builtins.html#builtins-splitVersion",
|
"builtin-pathExists": "language/builtins.html#builtins-pathExists",
|
||||||
"#builtin-stringLength": "language/builtins.html#builtins-stringLength",
|
"builtin-placeholder": "language/builtins.html#builtins-placeholder",
|
||||||
"#builtin-sub": "language/builtins.html#builtins-sub",
|
"builtin-readDir": "language/builtins.html#builtins-readDir",
|
||||||
"#builtin-substring": "language/builtins.html#builtins-substring",
|
"builtin-readFile": "language/builtins.html#builtins-readFile",
|
||||||
"#builtin-tail": "language/builtins.html#builtins-tail",
|
"builtin-removeAttrs": "language/builtins.html#builtins-removeAttrs",
|
||||||
"#builtin-throw": "language/builtins.html#builtins-throw",
|
"builtin-replaceStrings": "language/builtins.html#builtins-replaceStrings",
|
||||||
"#builtin-toFile": "language/builtins.html#builtins-toFile",
|
"builtin-seq": "language/builtins.html#builtins-seq",
|
||||||
"#builtin-toJSON": "language/builtins.html#builtins-toJSON",
|
"builtin-sort": "language/builtins.html#builtins-sort",
|
||||||
"#builtin-toPath": "language/builtins.html#builtins-toPath",
|
"builtin-split": "language/builtins.html#builtins-split",
|
||||||
"#builtin-toString": "language/builtins.html#builtins-toString",
|
"builtin-splitVersion": "language/builtins.html#builtins-splitVersion",
|
||||||
"#builtin-toXML": "language/builtins.html#builtins-toXML",
|
"builtin-stringLength": "language/builtins.html#builtins-stringLength",
|
||||||
"#builtin-trace": "language/builtins.html#builtins-trace",
|
"builtin-sub": "language/builtins.html#builtins-sub",
|
||||||
"#builtin-tryEval": "language/builtins.html#builtins-tryEval",
|
"builtin-substring": "language/builtins.html#builtins-substring",
|
||||||
"#builtin-typeOf": "language/builtins.html#builtins-typeOf",
|
"builtin-tail": "language/builtins.html#builtins-tail",
|
||||||
"#ssec-builtins": "language/builtins.html",
|
"builtin-throw": "language/builtins.html#builtins-throw",
|
||||||
"#attr-system": "language/derivations.html#attr-system",
|
"builtin-toFile": "language/builtins.html#builtins-toFile",
|
||||||
"#ssec-derivation": "language/derivations.html",
|
"builtin-toJSON": "language/builtins.html#builtins-toJSON",
|
||||||
"#ch-expression-language": "language/index.html",
|
"builtin-toPath": "language/builtins.html#builtins-toPath",
|
||||||
"#sec-constructs": "language/constructs.html",
|
"builtin-toString": "language/builtins.html#builtins-toString",
|
||||||
"#sect-let-language": "language/constructs.html#let-language",
|
"builtin-toXML": "language/builtins.html#builtins-toXML",
|
||||||
"#ss-functions": "language/constructs.html#functions",
|
"builtin-trace": "language/builtins.html#builtins-trace",
|
||||||
"#sec-language-operators": "language/operators.html",
|
"builtin-tryEval": "language/builtins.html#builtins-tryEval",
|
||||||
"#table-operators": "language/operators.html",
|
"builtin-typeOf": "language/builtins.html#builtins-typeOf",
|
||||||
"#ssec-values": "language/values.html",
|
"ssec-builtins": "language/builtins.html",
|
||||||
"#gloss-closure": "glossary.html#gloss-closure",
|
"attr-system": "language/derivations.html#attr-system",
|
||||||
"#gloss-derivation": "glossary.html#gloss-derivation",
|
"ssec-derivation": "language/derivations.html",
|
||||||
"#gloss-deriver": "glossary.html#gloss-deriver",
|
"ch-expression-language": "language/index.html",
|
||||||
"#gloss-nar": "glossary.html#gloss-nar",
|
"sec-constructs": "language/constructs.html",
|
||||||
"#gloss-output-path": "glossary.html#gloss-output-path",
|
"sect-let-language": "language/constructs.html#let-language",
|
||||||
"#gloss-profile": "glossary.html#gloss-profile",
|
"ss-functions": "language/constructs.html#functions",
|
||||||
"#gloss-reachable": "glossary.html#gloss-reachable",
|
"sec-language-operators": "language/operators.html",
|
||||||
"#gloss-reference": "glossary.html#gloss-reference",
|
"table-operators": "language/operators.html",
|
||||||
"#gloss-substitute": "glossary.html#gloss-substitute",
|
"ssec-values": "language/values.html",
|
||||||
"#gloss-user-env": "glossary.html#gloss-user-env",
|
"gloss-closure": "glossary.html#gloss-closure",
|
||||||
"#gloss-validity": "glossary.html#gloss-validity",
|
"gloss-derivation": "glossary.html#gloss-derivation",
|
||||||
"#part-glossary": "glossary.html",
|
"gloss-deriver": "glossary.html#gloss-deriver",
|
||||||
"#sec-building-source": "installation/building-source.html",
|
"gloss-nar": "glossary.html#gloss-nar",
|
||||||
"#ch-env-variables": "installation/env-variables.html",
|
"gloss-output-path": "glossary.html#gloss-output-path",
|
||||||
"#sec-installer-proxy-settings": "installation/env-variables.html#proxy-environment-variables",
|
"gloss-profile": "glossary.html#gloss-profile",
|
||||||
"#sec-nix-ssl-cert-file": "installation/env-variables.html#nix_ssl_cert_file",
|
"gloss-reachable": "glossary.html#gloss-reachable",
|
||||||
"#sec-nix-ssl-cert-file-with-nix-daemon-and-macos": "installation/env-variables.html#nix_ssl_cert_file-with-macos-and-the-nix-daemon",
|
"gloss-reference": "glossary.html#gloss-reference",
|
||||||
"#chap-installation": "installation/installation.html",
|
"gloss-substitute": "glossary.html#gloss-substitute",
|
||||||
"#ch-installing-binary": "installation/installing-binary.html",
|
"gloss-user-env": "glossary.html#gloss-user-env",
|
||||||
"#sect-macos-installation": "installation/installing-binary.html#macos-installation",
|
"gloss-validity": "glossary.html#gloss-validity",
|
||||||
"#sect-macos-installation-change-store-prefix": "installation/installing-binary.html#macos-installation",
|
"part-glossary": "glossary.html",
|
||||||
"#sect-macos-installation-encrypted-volume": "installation/installing-binary.html#macos-installation",
|
"sec-building-source": "installation/building-source.html",
|
||||||
"#sect-macos-installation-recommended-notes": "installation/installing-binary.html#macos-installation",
|
"ch-env-variables": "installation/env-variables.html",
|
||||||
"#sect-macos-installation-symlink": "installation/installing-binary.html#macos-installation",
|
"sec-installer-proxy-settings": "installation/env-variables.html#proxy-environment-variables",
|
||||||
"#sect-multi-user-installation": "installation/installing-binary.html#multi-user-installation",
|
"sec-nix-ssl-cert-file": "installation/env-variables.html#nix_ssl_cert_file",
|
||||||
"#sect-nix-install-binary-tarball": "installation/installing-binary.html#installing-from-a-binary-tarball",
|
"sec-nix-ssl-cert-file-with-nix-daemon-and-macos": "installation/env-variables.html#nix_ssl_cert_file-with-macos-and-the-nix-daemon",
|
||||||
"#sect-nix-install-pinned-version-url": "installation/installing-binary.html#installing-a-pinned-nix-version-from-a-url",
|
"chap-installation": "installation/installation.html",
|
||||||
"#sect-single-user-installation": "installation/installing-binary.html#single-user-installation",
|
"ch-installing-binary": "installation/installing-binary.html",
|
||||||
"#ch-installing-source": "installation/installing-source.html",
|
"sect-macos-installation": "installation/installing-binary.html#macos-installation",
|
||||||
"#ssec-multi-user": "installation/multi-user.html",
|
"sect-macos-installation-change-store-prefix": "installation/installing-binary.html#macos-installation",
|
||||||
"#ch-nix-security": "installation/nix-security.html",
|
"sect-macos-installation-encrypted-volume": "installation/installing-binary.html#macos-installation",
|
||||||
"#sec-obtaining-source": "installation/obtaining-source.html",
|
"sect-macos-installation-recommended-notes": "installation/installing-binary.html#macos-installation",
|
||||||
"#sec-prerequisites-source": "installation/prerequisites-source.html",
|
"sect-macos-installation-symlink": "installation/installing-binary.html#macos-installation",
|
||||||
"#sec-single-user": "installation/single-user.html",
|
"sect-multi-user-installation": "installation/installing-binary.html#multi-user-installation",
|
||||||
"#ch-supported-platforms": "installation/supported-platforms.html",
|
"sect-nix-install-binary-tarball": "installation/installing-binary.html#installing-from-a-binary-tarball",
|
||||||
"#ch-upgrading-nix": "installation/upgrading.html",
|
"sect-nix-install-pinned-version-url": "installation/installing-binary.html#installing-a-pinned-nix-version-from-a-url",
|
||||||
"#ch-about-nix": "introduction.html",
|
"sect-single-user-installation": "installation/installing-binary.html#single-user-installation",
|
||||||
"#chap-introduction": "introduction.html",
|
"ch-installing-source": "installation/installing-source.html",
|
||||||
"#ch-basic-package-mgmt": "package-management/basic-package-mgmt.html",
|
"ssec-multi-user": "installation/multi-user.html",
|
||||||
"#ssec-binary-cache-substituter": "package-management/binary-cache-substituter.html",
|
"ch-nix-security": "installation/nix-security.html",
|
||||||
"#sec-channels": "package-management/channels.html",
|
"sec-obtaining-source": "installation/obtaining-source.html",
|
||||||
"#ssec-copy-closure": "package-management/copy-closure.html",
|
"sec-prerequisites-source": "installation/prerequisites-source.html",
|
||||||
"#sec-garbage-collection": "package-management/garbage-collection.html",
|
"sec-single-user": "installation/single-user.html",
|
||||||
"#ssec-gc-roots": "package-management/garbage-collector-roots.html",
|
"ch-supported-platforms": "installation/supported-platforms.html",
|
||||||
"#chap-package-management": "package-management/package-management.html",
|
"ch-upgrading-nix": "installation/upgrading.html",
|
||||||
"#sec-profiles": "package-management/profiles.html",
|
"ch-about-nix": "introduction.html",
|
||||||
"#ssec-s3-substituter": "package-management/s3-substituter.html",
|
"chap-introduction": "introduction.html",
|
||||||
"#ssec-s3-substituter-anonymous-reads": "package-management/s3-substituter.html#anonymous-reads-to-your-s3-compatible-binary-cache",
|
"ch-basic-package-mgmt": "package-management/basic-package-mgmt.html",
|
||||||
"#ssec-s3-substituter-authenticated-reads": "package-management/s3-substituter.html#authenticated-reads-to-your-s3-binary-cache",
|
"ssec-binary-cache-substituter": "package-management/binary-cache-substituter.html",
|
||||||
"#ssec-s3-substituter-authenticated-writes": "package-management/s3-substituter.html#authenticated-writes-to-your-s3-compatible-binary-cache",
|
"sec-channels": "package-management/channels.html",
|
||||||
"#sec-sharing-packages": "package-management/sharing-packages.html",
|
"ssec-copy-closure": "package-management/copy-closure.html",
|
||||||
"#ssec-ssh-substituter": "package-management/ssh-substituter.html",
|
"sec-garbage-collection": "package-management/garbage-collection.html",
|
||||||
"#chap-quick-start": "quick-start.html",
|
"ssec-gc-roots": "package-management/garbage-collector-roots.html",
|
||||||
"#sec-relnotes": "release-notes/release-notes.html",
|
"chap-package-management": "package-management/package-management.html",
|
||||||
"#ch-relnotes-0.10.1": "release-notes/rl-0.10.1.html",
|
"sec-profiles": "package-management/profiles.html",
|
||||||
"#ch-relnotes-0.10": "release-notes/rl-0.10.html",
|
"ssec-s3-substituter": "package-management/s3-substituter.html",
|
||||||
"#ssec-relnotes-0.11": "release-notes/rl-0.11.html",
|
"ssec-s3-substituter-anonymous-reads": "package-management/s3-substituter.html#anonymous-reads-to-your-s3-compatible-binary-cache",
|
||||||
"#ssec-relnotes-0.12": "release-notes/rl-0.12.html",
|
"ssec-s3-substituter-authenticated-reads": "package-management/s3-substituter.html#authenticated-reads-to-your-s3-binary-cache",
|
||||||
"#ssec-relnotes-0.13": "release-notes/rl-0.13.html",
|
"ssec-s3-substituter-authenticated-writes": "package-management/s3-substituter.html#authenticated-writes-to-your-s3-compatible-binary-cache",
|
||||||
"#ssec-relnotes-0.14": "release-notes/rl-0.14.html",
|
"sec-sharing-packages": "package-management/sharing-packages.html",
|
||||||
"#ssec-relnotes-0.15": "release-notes/rl-0.15.html",
|
"ssec-ssh-substituter": "package-management/ssh-substituter.html",
|
||||||
"#ssec-relnotes-0.16": "release-notes/rl-0.16.html",
|
"chap-quick-start": "quick-start.html",
|
||||||
"#ch-relnotes-0.5": "release-notes/rl-0.5.html",
|
"sec-relnotes": "release-notes/release-notes.html",
|
||||||
"#ch-relnotes-0.6": "release-notes/rl-0.6.html",
|
"ch-relnotes-0.10.1": "release-notes/rl-0.10.1.html",
|
||||||
"#ch-relnotes-0.7": "release-notes/rl-0.7.html",
|
"ch-relnotes-0.10": "release-notes/rl-0.10.html",
|
||||||
"#ch-relnotes-0.8.1": "release-notes/rl-0.8.1.html",
|
"ssec-relnotes-0.11": "release-notes/rl-0.11.html",
|
||||||
"#ch-relnotes-0.8": "release-notes/rl-0.8.html",
|
"ssec-relnotes-0.12": "release-notes/rl-0.12.html",
|
||||||
"#ch-relnotes-0.9.1": "release-notes/rl-0.9.1.html",
|
"ssec-relnotes-0.13": "release-notes/rl-0.13.html",
|
||||||
"#ch-relnotes-0.9.2": "release-notes/rl-0.9.2.html",
|
"ssec-relnotes-0.14": "release-notes/rl-0.14.html",
|
||||||
"#ch-relnotes-0.9": "release-notes/rl-0.9.html",
|
"ssec-relnotes-0.15": "release-notes/rl-0.15.html",
|
||||||
"#ssec-relnotes-1.0": "release-notes/rl-1.0.html",
|
"ssec-relnotes-0.16": "release-notes/rl-0.16.html",
|
||||||
"#ssec-relnotes-1.1": "release-notes/rl-1.1.html",
|
"ch-relnotes-0.5": "release-notes/rl-0.5.html",
|
||||||
"#ssec-relnotes-1.10": "release-notes/rl-1.10.html",
|
"ch-relnotes-0.6": "release-notes/rl-0.6.html",
|
||||||
"#ssec-relnotes-1.11.10": "release-notes/rl-1.11.10.html",
|
"ch-relnotes-0.7": "release-notes/rl-0.7.html",
|
||||||
"#ssec-relnotes-1.11": "release-notes/rl-1.11.html",
|
"ch-relnotes-0.8.1": "release-notes/rl-0.8.1.html",
|
||||||
"#ssec-relnotes-1.2": "release-notes/rl-1.2.html",
|
"ch-relnotes-0.8": "release-notes/rl-0.8.html",
|
||||||
"#ssec-relnotes-1.3": "release-notes/rl-1.3.html",
|
"ch-relnotes-0.9.1": "release-notes/rl-0.9.1.html",
|
||||||
"#ssec-relnotes-1.4": "release-notes/rl-1.4.html",
|
"ch-relnotes-0.9.2": "release-notes/rl-0.9.2.html",
|
||||||
"#ssec-relnotes-1.5.1": "release-notes/rl-1.5.1.html",
|
"ch-relnotes-0.9": "release-notes/rl-0.9.html",
|
||||||
"#ssec-relnotes-1.5.2": "release-notes/rl-1.5.2.html",
|
"ssec-relnotes-1.0": "release-notes/rl-1.0.html",
|
||||||
"#ssec-relnotes-1.5": "release-notes/rl-1.5.html",
|
"ssec-relnotes-1.1": "release-notes/rl-1.1.html",
|
||||||
"#ssec-relnotes-1.6.1": "release-notes/rl-1.6.1.html",
|
"ssec-relnotes-1.10": "release-notes/rl-1.10.html",
|
||||||
"#ssec-relnotes-1.6.0": "release-notes/rl-1.6.html",
|
"ssec-relnotes-1.11.10": "release-notes/rl-1.11.10.html",
|
||||||
"#ssec-relnotes-1.7": "release-notes/rl-1.7.html",
|
"ssec-relnotes-1.11": "release-notes/rl-1.11.html",
|
||||||
"#ssec-relnotes-1.8": "release-notes/rl-1.8.html",
|
"ssec-relnotes-1.2": "release-notes/rl-1.2.html",
|
||||||
"#ssec-relnotes-1.9": "release-notes/rl-1.9.html",
|
"ssec-relnotes-1.3": "release-notes/rl-1.3.html",
|
||||||
"#ssec-relnotes-2.0": "release-notes/rl-2.0.html",
|
"ssec-relnotes-1.4": "release-notes/rl-1.4.html",
|
||||||
"#ssec-relnotes-2.1": "release-notes/rl-2.1.html",
|
"ssec-relnotes-1.5.1": "release-notes/rl-1.5.1.html",
|
||||||
"#ssec-relnotes-2.2": "release-notes/rl-2.2.html",
|
"ssec-relnotes-1.5.2": "release-notes/rl-1.5.2.html",
|
||||||
"#ssec-relnotes-2.3": "release-notes/rl-2.3.html"
|
"ssec-relnotes-1.5": "release-notes/rl-1.5.html",
|
||||||
|
"ssec-relnotes-1.6.1": "release-notes/rl-1.6.1.html",
|
||||||
|
"ssec-relnotes-1.6.0": "release-notes/rl-1.6.html",
|
||||||
|
"ssec-relnotes-1.7": "release-notes/rl-1.7.html",
|
||||||
|
"ssec-relnotes-1.8": "release-notes/rl-1.8.html",
|
||||||
|
"ssec-relnotes-1.9": "release-notes/rl-1.9.html",
|
||||||
|
"ssec-relnotes-2.0": "release-notes/rl-2.0.html",
|
||||||
|
"ssec-relnotes-2.1": "release-notes/rl-2.1.html",
|
||||||
|
"ssec-relnotes-2.2": "release-notes/rl-2.2.html",
|
||||||
|
"ssec-relnotes-2.3": "release-notes/rl-2.3.html"
|
||||||
|
},
|
||||||
|
"language/values.html": {
|
||||||
|
"simple-values": "#primitives",
|
||||||
|
"lists": "#list",
|
||||||
|
"strings": "#string",
|
||||||
|
"lists": "#list",
|
||||||
|
"attribute-sets": "#attribute-set"
|
||||||
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
var isRoot = (document.location.pathname.endsWith('/') || document.location.pathname.endsWith('/index.html')) && path_to_root === '';
|
// the following code matches the current page's URL against the set of redirects.
|
||||||
if (isRoot && redirects[document.location.hash]) {
|
//
|
||||||
document.location.href = path_to_root + redirects[document.location.hash];
|
// it is written to minimize the latency between page load and redirect.
|
||||||
|
// therefore we avoid function calls, copying data, and unnecessary loops.
|
||||||
|
// IMPORTANT: we use stateful array operations and their order matters!
|
||||||
|
//
|
||||||
|
// matching URLs is more involved than it should be:
|
||||||
|
//
|
||||||
|
// 1. `document.location.pathname` can have an arbitrary prefix.
|
||||||
|
//
|
||||||
|
// 2. `path_to_root` is set by mdBook. it consists only of `../`s and
|
||||||
|
// determines the depth of `<path>` relative to the prefix:
|
||||||
|
//
|
||||||
|
// `document.location.pathname`
|
||||||
|
// |------------------------------|
|
||||||
|
// /<prefix>/<path>/[<file>[.html]][#<anchor>]
|
||||||
|
// |----|
|
||||||
|
// `path_to_root` has same number of path segments
|
||||||
|
//
|
||||||
|
// source: https://phaiax.github.io/mdBook/format/theme/index-hbs.html#data
|
||||||
|
//
|
||||||
|
// 3. the following paths are equivalent:
|
||||||
|
//
|
||||||
|
// /foo/bar/
|
||||||
|
// /foo/bar/index.html
|
||||||
|
// /foo/bar/index
|
||||||
|
//
|
||||||
|
// 4. the following paths are also equivalent:
|
||||||
|
//
|
||||||
|
// /foo/bar/baz
|
||||||
|
// /foo/bar/baz.html
|
||||||
|
//
|
||||||
|
|
||||||
|
let segments = document.location.pathname.split('/');
|
||||||
|
|
||||||
|
let file = segments.pop();
|
||||||
|
|
||||||
|
// normalize file name
|
||||||
|
if (file === '') { file = "index.html"; }
|
||||||
|
else if (!file.endsWith('.html')) { file = file + '.html'; }
|
||||||
|
|
||||||
|
segments.push(file);
|
||||||
|
|
||||||
|
// use `path_to_root` to discern prefix from path.
|
||||||
|
const depth = path_to_root.split('/').length;
|
||||||
|
|
||||||
|
// remove segments containing prefix. the following works because
|
||||||
|
// 1. the original `document.location.pathname` is absolute,
|
||||||
|
// hence first element of `segments` is always empty.
|
||||||
|
// 2. last element of splitting `path_to_root` is also always empty.
|
||||||
|
// 3. last element of `segments` is the file name.
|
||||||
|
//
|
||||||
|
// visual example:
|
||||||
|
//
|
||||||
|
// '/foo/bar/baz.html'.split('/') -> [ '', 'foo', 'bar', 'baz.html' ]
|
||||||
|
// '../'.split('/') -> [ '..', '' ]
|
||||||
|
//
|
||||||
|
// the following operations will then result in
|
||||||
|
//
|
||||||
|
// path = 'bar/baz.html'
|
||||||
|
//
|
||||||
|
segments.splice(0, segments.length - depth);
|
||||||
|
const path = segments.join('/');
|
||||||
|
|
||||||
|
// anchor starts with the hash character (`#`),
|
||||||
|
// but our redirect declarations don't, so we strip it.
|
||||||
|
// example:
|
||||||
|
// document.location.hash -> '#foo'
|
||||||
|
// document.location.hash.substring(1) -> 'foo'
|
||||||
|
const anchor = document.location.hash.substring(1);
|
||||||
|
|
||||||
|
const redirect = redirects[path];
|
||||||
|
if (redirect) {
|
||||||
|
const target = redirect[anchor];
|
||||||
|
if (target) {
|
||||||
|
document.location.href = target;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@@ -29,6 +29,7 @@
|
||||||
- [Nix Language](language/index.md)
|
- [Nix Language](language/index.md)
|
||||||
- [Data Types](language/values.md)
|
- [Data Types](language/values.md)
|
||||||
- [Language Constructs](language/constructs.md)
|
- [Language Constructs](language/constructs.md)
|
||||||
|
- [String interpolation](language/string-interpolation.md)
|
||||||
- [Operators](language/operators.md)
|
- [Operators](language/operators.md)
|
||||||
- [Derivations](language/derivations.md)
|
- [Derivations](language/derivations.md)
|
||||||
- [Advanced Attributes](language/advanced-attributes.md)
|
- [Advanced Attributes](language/advanced-attributes.md)
|
||||||
|
@@ -59,20 +60,15 @@
|
||||||
@manpages@
|
@manpages@
|
||||||
- [Files](command-ref/files.md)
|
- [Files](command-ref/files.md)
|
||||||
- [nix.conf](command-ref/conf-file.md)
|
- [nix.conf](command-ref/conf-file.md)
|
||||||
<!--
|
|
||||||
- [Architecture](architecture/architecture.md)
|
- [Architecture](architecture/architecture.md)
|
||||||
- [Store](architecture/store/store.md)
|
|
||||||
- [Closure](architecture/store/store/closure.md)
|
|
||||||
- [Build system terminology](architecture/store/store/build-system-terminology.md)
|
|
||||||
- [Store Path](architecture/store/path.md)
|
|
||||||
- [File System Object](architecture/store/fso.md)
|
|
||||||
-->
|
|
||||||
- [Glossary](glossary.md)
|
- [Glossary](glossary.md)
|
||||||
- [Contributing](contributing/contributing.md)
|
- [Contributing](contributing/contributing.md)
|
||||||
- [Hacking](contributing/hacking.md)
|
- [Hacking](contributing/hacking.md)
|
||||||
- [CLI guideline](contributing/cli-guideline.md)
|
- [CLI guideline](contributing/cli-guideline.md)
|
||||||
- [Release Notes](release-notes/release-notes.md)
|
- [Release Notes](release-notes/release-notes.md)
|
||||||
- [Release X.Y (202?-??-??)](release-notes/rl-next.md)
|
- [Release X.Y (202?-??-??)](release-notes/rl-next.md)
|
||||||
|
- [Release 2.13 (2023-01-17)](release-notes/rl-2.13.md)
|
||||||
|
- [Release 2.12 (2022-12-06)](release-notes/rl-2.12.md)
|
||||||
- [Release 2.11 (2022-08-25)](release-notes/rl-2.11.md)
|
- [Release 2.11 (2022-08-25)](release-notes/rl-2.11.md)
|
||||||
- [Release 2.10 (2022-07-11)](release-notes/rl-2.10.md)
|
- [Release 2.10 (2022-07-11)](release-notes/rl-2.10.md)
|
||||||
- [Release 2.9 (2022-05-30)](release-notes/rl-2.9.md)
|
- [Release 2.9 (2022-05-30)](release-notes/rl-2.9.md)
|
||||||
|
|
|
@@ -121,37 +121,3 @@ error:
|
||||||
are not valid, so checking is not possible
|
are not valid, so checking is not possible
|
||||||
|
|
||||||
Run the build without `--check`, and then try with `--check` again.
|
Run the build without `--check`, and then try with `--check` again.
|
||||||
|
|
||||||
# Automatic and Optionally Enforced Determinism Verification
|
|
||||||
|
|
||||||
Automatically verify every build at build time by executing the build
|
|
||||||
multiple times.
|
|
||||||
|
|
||||||
Setting `repeat` and `enforce-determinism` in your `nix.conf` permits
|
|
||||||
the automated verification of every build Nix performs.
|
|
||||||
|
|
||||||
The following configuration will run each build three times, and will
|
|
||||||
require the build to be deterministic:
|
|
||||||
|
|
||||||
enforce-determinism = true
|
|
||||||
repeat = 2
|
|
||||||
|
|
||||||
Setting `enforce-determinism` to false, as in the following
configuration, will run the build multiple times and execute the
build hook, but will allow the build to succeed even if it does not
build reproducibly:
|
|
||||||
|
|
||||||
enforce-determinism = false
|
|
||||||
repeat = 1
|
|
||||||
|
|
||||||
An example output of this configuration:
|
|
||||||
|
|
||||||
```console
|
|
||||||
$ nix-build ./test.nix -A unstable
|
|
||||||
this derivation will be built:
|
|
||||||
/nix/store/ch6llwpr2h8c3jmnf3f2ghkhx59aa97f-unstable.drv
|
|
||||||
building '/nix/store/ch6llwpr2h8c3jmnf3f2ghkhx59aa97f-unstable.drv' (round 1/2)...
|
|
||||||
building '/nix/store/ch6llwpr2h8c3jmnf3f2ghkhx59aa97f-unstable.drv' (round 2/2)...
|
|
||||||
output '/nix/store/6xg356v9gl03hpbbg8gws77n19qanh02-unstable' of '/nix/store/ch6llwpr2h8c3jmnf3f2ghkhx59aa97f-unstable.drv' differs from '/nix/store/6xg356v9gl03hpbbg8gws77n19qanh02-unstable.check' from previous round
|
|
||||||
/nix/store/6xg356v9gl03hpbbg8gws77n19qanh02-unstable
|
|
||||||
```
|
|
||||||
|
|
|
@@ -33,12 +33,17 @@ distribute the public key for verifying the authenticity of the paths.
|
||||||
example-nix-cache-1:1/cKDz3QCCOmwcztD2eV6Coggp6rqc9DGjWv7C0G+rM=
|
example-nix-cache-1:1/cKDz3QCCOmwcztD2eV6Coggp6rqc9DGjWv7C0G+rM=
|
||||||
```
|
```
|
||||||
|
|
||||||
Then, add the public key and the cache URL to your `nix.conf`'s
|
Then update [`nix.conf`](../command-ref/conf-file.md) on any machine that will access the cache.
|
||||||
`trusted-public-keys` and `substituters` options:
|
Add the cache URL to [`substituters`](../command-ref/conf-file.md#conf-substituters) and the public key to [`trusted-public-keys`](../command-ref/conf-file.md#conf-trusted-public-keys):
|
||||||
|
|
||||||
substituters = https://cache.nixos.org/ s3://example-nix-cache
|
substituters = https://cache.nixos.org/ s3://example-nix-cache
|
||||||
trusted-public-keys = cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY= example-nix-cache-1:1/cKDz3QCCOmwcztD2eV6Coggp6rqc9DGjWv7C0G+rM=
|
trusted-public-keys = cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY= example-nix-cache-1:1/cKDz3QCCOmwcztD2eV6Coggp6rqc9DGjWv7C0G+rM=
|
||||||
|
|
||||||
|
Machines that build for the cache must sign derivations using the private key.
|
||||||
|
On those machines, add the path to the key file to the [`secret-key-files`](../command-ref/conf-file.md#conf-secret-key-files) field in their [`nix.conf`](../command-ref/conf-file.md):
|
||||||
|
|
||||||
|
secret-key-files = /etc/nix/key.private
|
||||||
|
|
||||||
We will restart the Nix daemon in a later step.
|
We will restart the Nix daemon in a later step.
|
||||||
|
|
||||||
# Implementing the build hook
|
# Implementing the build hook
|
||||||
|
@@ -52,14 +57,12 @@ set -eu
|
||||||
set -f # disable globbing
|
set -f # disable globbing
|
||||||
export IFS=' '
|
export IFS=' '
|
||||||
|
|
||||||
echo "Signing paths" $OUT_PATHS
|
|
||||||
nix store sign --key-file /etc/nix/key.private $OUT_PATHS
|
|
||||||
echo "Uploading paths" $OUT_PATHS
|
echo "Uploading paths" $OUT_PATHS
|
||||||
exec nix copy --to 's3://example-nix-cache' $OUT_PATHS
|
exec nix copy --to "s3://example-nix-cache" $OUT_PATHS
|
||||||
```
|
```
|
||||||
|
|
||||||
> **Note**
|
> **Note**
|
||||||
>
|
>
|
||||||
> The `$OUT_PATHS` variable is a space-separated list of Nix store
|
> The `$OUT_PATHS` variable is a space-separated list of Nix store
|
||||||
> paths. In this case, we expect and want the shell to perform word
|
> paths. In this case, we expect and want the shell to perform word
|
||||||
> splitting to make each output path its own argument to `nix
|
> splitting to make each output path its own argument to `nix
|
||||||
|
|
|
@@ -1,79 +1,115 @@
|
||||||
# Architecture
|
# Architecture
|
||||||
|
|
||||||
*(This chapter is unstable and a work in progress. Incoming links may rot.)*
|
|
||||||
|
|
||||||
This chapter describes how Nix works.
|
This chapter describes how Nix works.
|
||||||
It should help users understand why Nix behaves as it does, and it should help developers understand how to modify Nix and how to write similar tools.
|
It should help users understand why Nix behaves as it does, and it should help developers understand how to modify Nix and how to write similar tools.
|
||||||
|
|
||||||
## Overview
|
## Overview
|
||||||
|
|
||||||
Nix consists of [hierarchical layers][layer-architecture].
|
Nix consists of [hierarchical layers].
|
||||||
|
|
||||||
|
[hierarchical layers]: https://en.m.wikipedia.org/wiki/Multitier_architecture#Layers
|
||||||
|
|
||||||
|
The following [concept map] shows its main components (rectangles), the objects they operate on (rounded rectangles), and their interactions (connecting phrases):
|
||||||
|
|
||||||
|
[concept map]: https://en.m.wikipedia.org/wiki/Concept_map
|
||||||
|
|
||||||
```
|
```
|
||||||
+-----------------------------------------------------------------+
|
|
||||||
| Nix |
|
.----------------.
|
||||||
| [ command line interface ]------, |
|
| Nix expression |----------.
|
||||||
| | | |
|
'----------------' |
|
||||||
| evaluates | |
|
| passed to
|
||||||
| | manages |
|
| |
|
||||||
| V | |
|
+----------|-------------------|--------------------------------+
|
||||||
| [ configuration language ] | |
|
| Nix | V |
|
||||||
| | | |
|
| | +-------------------------+ |
|
||||||
| +-----------------------------|-------------------V-----------+ |
|
| | | commmand line interface |------. |
|
||||||
| | store evaluates to | |
|
| | +-------------------------+ | |
|
||||||
| | | | |
|
| | | | |
|
||||||
| | referenced by V builds | |
|
| evaluated by calls manages |
|
||||||
| | [ build input ] ---> [ build plan ] ---> [ build result ] | |
|
| | | | |
|
||||||
| | | |
|
| | V | |
|
||||||
| +-------------------------------------------------------------+ |
|
| | +--------------------+ | |
|
||||||
+-----------------------------------------------------------------+
|
| '-------->| language evaluator | | |
|
||||||
|
| +--------------------+ | |
|
||||||
|
| | | |
|
||||||
|
| produces | |
|
||||||
|
| | V |
|
||||||
|
| +----------------------------|------------------------------+ |
|
||||||
|
| | store | | |
|
||||||
|
| | referenced by V builds | |
|
||||||
|
| | .-------------. .------------. .--------------. | |
|
||||||
|
| | | build input |----->| build plan |----->| build result | | |
|
||||||
|
| | '-------------' '------------' '--------------' | |
|
||||||
|
| +-------------------------------------------------|---------+ |
|
||||||
|
+---------------------------------------------------|-----------+
|
||||||
|
|
|
||||||
|
represented as
|
||||||
|
|
|
||||||
|
V
|
||||||
|
.---------------.
|
||||||
|
| file |
|
||||||
|
'---------------'
|
||||||
```
|
```
|
||||||
|
|
||||||
At the top is the [command line interface](../command-ref/command-ref.md), translating from invocations of Nix executables to interactions with the underlying layers.
|
At the top is the [command line interface](../command-ref/command-ref.md) that drives the underlying layers.
|
||||||
|
|
||||||
Below that is the [Nix expression language](../expressions/expression-language.md), a [purely functional][purely-functional-programming] configuration language.
|
The [Nix language](../language/index.md) evaluator transforms Nix expressions into self-contained *build plans*, which are used to derive *build results* from referenced *build inputs*.
|
||||||
It is used to compose expressions which ultimately evaluate to self-contained *build plans*, used to derive *build results* from referenced *build inputs*.
|
|
||||||
|
|
||||||
The command line and Nix language are what users interact with most.
|
The command line interface and Nix expressions are what users deal with most.
|
||||||
|
|
||||||
> **Note**
|
> **Note**
|
||||||
> The Nix language itself does not have a notion of *packages* or *configurations*.
|
> The Nix language itself does not have a notion of *packages* or *configurations*.
|
||||||
> As far as we are concerned here, the inputs and results of a build plan are just data.
|
> As far as we are concerned here, the inputs and results of a build plan are just data.
|
||||||
|
|
||||||
Underlying these is the [Nix store](./store/store.md), a mechanism to keep track of build plans, data, and references between them.
|
Underlying the command line interface and the Nix language evaluator is the [Nix store](../glossary.md#gloss-store), a mechanism to keep track of build plans, data, and references between them.
|
||||||
It can also execute build plans to produce new data.
|
It can also execute build plans to produce new data, which are made available to the operating system as files.
|
||||||
|
|
||||||
A build plan is a series of *build tasks*.
|
A build plan itself is a series of *build tasks*, together with their build inputs.
|
||||||
Each build task has a special build input which is used as *build instructions*.
|
|
||||||
|
> **Important**
|
||||||
|
> A build task in Nix is called [derivation](../glossary.md#gloss-derivation).
|
||||||
|
|
||||||
|
Each build task has a special build input executed as *build instructions* in order to perform the build.
|
||||||
The result of a build task can be input to another build task.
|
The result of a build task can be input to another build task.
|
||||||
|
|
||||||
|
The following [data flow diagram] shows a build plan for illustration.
|
||||||
|
Build inputs used as instructions to a build task are marked accordingly:
|
||||||
|
|
||||||
|
[data flow diagram]: https://en.m.wikipedia.org/wiki/Data-flow_diagram
|
||||||
|
|
||||||
```
|
```
|
||||||
+-----------------------------------------------------------------------------------------+
|
+--------------------------------------------------------------------+
|
||||||
| store |
|
| build plan |
|
||||||
| ................................................. |
|
| |
|
||||||
| : build plan : |
|
| .-------------. |
|
||||||
| : : |
|
| | build input |---------. |
|
||||||
| [ build input ]-----instructions-, : |
|
| '-------------' | |
|
||||||
| : | : |
|
| instructions |
|
||||||
| : v : |
|
| | |
|
||||||
| [ build input ]----------->[ build task ]--instructions-, : |
|
| v |
|
||||||
| : | : |
|
| .-------------. .----------. |
|
||||||
| : | : |
|
| | build input |-->( build task )-------. |
|
||||||
| : v : |
|
| '-------------' '----------' | |
|
||||||
| : [ build task ]----->[ build result ] |
|
| instructions |
|
||||||
| [ build input ]-----instructions-, ^ : |
|
| | |
|
||||||
| : | | : |
|
| v |
|
||||||
| : v | : |
|
| .-------------. .----------. .--------------. |
|
||||||
| [ build input ]----------->[ build task ]---------------' : |
|
| | build input |---------. ( build task )--->| build result | |
|
||||||
| : ^ : |
|
| '-------------' | '----------' '--------------' |
|
||||||
| : | : |
|
| instructions ^ |
|
||||||
| [ build input ]------------------' : |
|
| | | |
|
||||||
| : : |
|
| v | |
|
||||||
| : : |
|
| .-------------. .----------. | |
|
||||||
| :...............................................: |
|
| | build input |-->( build task )-------' |
|
||||||
| |
|
| '-------------' '----------' |
|
||||||
+-----------------------------------------------------------------------------------------+
|
| ^ |
|
||||||
|
| | |
|
||||||
|
| | |
|
||||||
|
| .-------------. | |
|
||||||
|
| | build input |---------' |
|
||||||
|
| '-------------' |
|
||||||
|
| |
|
||||||
|
+--------------------------------------------------------------------+
|
||||||
```
|
```
|
||||||
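For illustration only, the relationship between build tasks, build inputs, and build plans sketched above could be written as the following minimal Haskell sketch; the type and field names (`Reference`, `BuildTask`, `instructions`) are hypothetical and are not Nix's actual data model.

```haskell
-- Minimal sketch under the assumptions stated above; not Nix internals.
newtype Reference = Reference String
  deriving (Eq, Show)

data BuildTask = BuildTask
  { instructions :: Reference   -- the special build input used as build instructions
  , otherInputs  :: [Reference] -- further build inputs, possibly results of other tasks
  } deriving Show

-- A build plan is a series of build tasks together with their build inputs.
type BuildPlan = [BuildTask]
```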
|
|
||||||
[layer-architecture]: https://en.m.wikipedia.org/wiki/Multitier_architecture#Layers
|
|
||||||
[purely-functional-programming]: https://en.m.wikipedia.org/wiki/Purely_functional_programming
|
|
||||||
|
|
|
@@ -1,69 +0,0 @@
|
||||||
# File System Object
|
|
||||||
|
|
||||||
The Nix store uses a simple file system model for the data it holds in [store objects](store.md#store-object).
|
|
||||||
|
|
||||||
Every file system object is one of the following:
|
|
||||||
|
|
||||||
- File: an executable flag, and arbitrary data for contents
|
|
||||||
- Directory: mapping of names to child file system objects
|
|
||||||
- [Symbolic link][symlink]: may point anywhere.
|
|
||||||
|
|
||||||
We call a store object's outermost file system object the *root*.
|
|
||||||
|
|
||||||
data FileSystemObject
|
|
||||||
= File { isExecutable :: Bool, contents :: Bytes }
|
|
||||||
| Directory { entries :: Map FileName FileSystemObject }
|
|
||||||
| SymLink { target :: Path }
|
|
||||||
|
|
||||||
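For illustration, the pseudocode above can also be written as a small runnable Haskell sketch; `Bytes` and `Path` are stand-ins chosen here, not definitions taken from Nix.

```haskell
import           Data.Map (Map)
import qualified Data.Map as Map

type FileName = String
type Bytes    = String   -- stand-in for raw file contents
type Path     = String   -- stand-in for an arbitrary target path

data FileSystemObject
  = File      { isExecutable :: Bool, contents :: Bytes }
  | Directory { entries :: Map FileName FileSystemObject }
  | SymLink   { target :: Path }
  deriving Show

-- a tiny example root: a directory containing one executable file and a symlink
exampleRoot :: FileSystemObject
exampleRoot = Directory (Map.fromList
  [ ("hello", File { isExecutable = True, contents = "..." })
  , ("doc",   SymLink { target = "share/doc" })
  ])
```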
Examples:
|
|
||||||
|
|
||||||
- a directory with contents
|
|
||||||
|
|
||||||
/nix/store/<hash>-hello-2.10
|
|
||||||
├── bin
|
|
||||||
│ └── hello
|
|
||||||
└── share
|
|
||||||
├── info
|
|
||||||
│ └── hello.info
|
|
||||||
└── man
|
|
||||||
└── man1
|
|
||||||
└── hello.1.gz
|
|
||||||
|
|
||||||
- a directory with relative symlink and other contents
|
|
||||||
|
|
||||||
/nix/store/<hash>-go-1.16.9
|
|
||||||
├── bin -> share/go/bin
|
|
||||||
├── nix-support/
|
|
||||||
└── share/
|
|
||||||
|
|
||||||
- a directory with absolute symlink
|
|
||||||
|
|
||||||
/nix/store/d3k...-nodejs
|
|
||||||
└── nix_node -> /nix/store/f20...-nodejs-10.24.
|
|
||||||
|
|
||||||
A bare file or symlink can be a root file system object.
|
|
||||||
Examples:
|
|
||||||
|
|
||||||
/nix/store/<hash>-hello-2.10.tar.gz
|
|
||||||
|
|
||||||
/nix/store/4j5...-pkg-config-wrapper-0.29.2-doc -> /nix/store/i99...-pkg-config-0.29.2-doc
|
|
||||||
|
|
||||||
Symlinks pointing outside of their own root or to a store object without a matching reference are allowed, but might not function as intended.
|
|
||||||
Examples:
|
|
||||||
|
|
||||||
- an arbitrarily symlinked file may change or not exist at all
|
|
||||||
|
|
||||||
/nix/store/<hash>-foo
|
|
||||||
└── foo -> /home/foo
|
|
||||||
|
|
||||||
- if a symlink to a store path was not automatically created by Nix, it may be invalid or get invalidated when the store object is deleted
|
|
||||||
|
|
||||||
/nix/store/<hash>-bar
|
|
||||||
└── bar -> /nix/store/abc...-foo
|
|
||||||
|
|
||||||
Nix file system objects do not support [hard links][hardlink]:
|
|
||||||
each file system object which is not the root has exactly one parent and one name.
|
|
||||||
However, as store objects are immutable, an underlying file system can use hard links for optimization.
|
|
||||||
|
|
||||||
[symlink]: https://en.m.wikipedia.org/wiki/Symbolic_link
|
|
||||||
[hardlink]: https://en.m.wikipedia.org/wiki/Hard_link
|
|
|
@@ -1,105 +0,0 @@
|
||||||
# Store Path
|
|
||||||
|
|
||||||
Nix implements [references](store.md#reference) to [store objects](store.md#store-object) as *store paths*.
|
|
||||||
|
|
||||||
Store paths are pairs of
|
|
||||||
|
|
||||||
- a 20-byte [digest](#digest) for identification
|
|
||||||
- a symbolic name for people to read.
|
|
||||||
|
|
||||||
Example:
|
|
||||||
|
|
||||||
- digest: `b6gvzjyb2pg0kjfwrjmg1vfhh54ad73z`
|
|
||||||
- name: `firefox-33.1`
|
|
||||||
|
|
||||||
It is rendered to a file system path as the concatenation of
|
|
||||||
|
|
||||||
- [store directory](#store-directory)
|
|
||||||
- path-separator (`/`)
|
|
||||||
- [digest](#digest) rendered in a custom variant of [base-32](https://en.m.wikipedia.org/wiki/Base32) (20 arbitrary bytes become 32 ASCII characters)
|
|
||||||
- hyphen (`-`)
|
|
||||||
- name
|
|
||||||
|
|
||||||
Example:
|
|
||||||
|
|
||||||
/nix/store/b6gvzjyb2pg0kjfwrjmg1vfhh54ad73z-firefox-33.1
|
|
||||||
|--------| |------------------------------| |----------|
|
|
||||||
store directory digest name
|
|
||||||
|
|
||||||
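As a sketch (the helper name and its parameters are made up for illustration), the rendering rule above is plain concatenation, assuming the digest is already encoded in the base-32 variant:

```haskell
-- Hypothetical helper: render a store path from its parts.
renderStorePath :: FilePath -> String -> String -> FilePath
renderStorePath storeDir digest name = storeDir ++ "/" ++ digest ++ "-" ++ name

-- renderStorePath "/nix/store" "b6gvzjyb2pg0kjfwrjmg1vfhh54ad73z" "firefox-33.1"
--   == "/nix/store/b6gvzjyb2pg0kjfwrjmg1vfhh54ad73z-firefox-33.1"
```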
## Store Directory
|
|
||||||
|
|
||||||
Every [store](./store.md) has a store directory.
|
|
||||||
|
|
||||||
If the store has a [file system representation](./store.md#files-and-processes), this directory contains the store’s [file system objects](#file-system-object), which can be addressed by [store paths](#store-path).
|
|
||||||
|
|
||||||
This means a store path is not just derived from the referenced store object itself, but depends on the store the store object is in.
|
|
||||||
|
|
||||||
> **Note**
|
|
||||||
> The store directory defaults to `/nix/store`, but is in principle arbitrary.
|
|
||||||
|
|
||||||
It is important which store a given store object belongs to:
|
|
||||||
Files in the store object can contain store paths, and processes may read these paths.
|
|
||||||
Nix can only guarantee [referential integrity](store/closure.md) if store paths do not cross store boundaries.
|
|
||||||
|
|
||||||
Therefore one can only copy store objects to a different store if
|
|
||||||
|
|
||||||
- the source and target stores' directories match
|
|
||||||
|
|
||||||
or
|
|
||||||
|
|
||||||
- the store object in question has no references, that is, contains no store paths.
|
|
||||||
|
|
||||||
One cannot copy a store object to a store with a different store directory.
|
|
||||||
Instead, it has to be rebuilt, together with all its dependencies.
|
|
||||||
It is in general not enough to replace the store directory string in file contents, as this may render executables unusable by invalidating their internal offsets or checksums.
|
|
||||||
|
|
||||||
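The copying rule above can be condensed into a small sketch; the type and function names are illustrative only:

```haskell
type StoreDir  = FilePath
type Reference = String   -- stand-in for a store path contained in the object

-- A store object may be copied to another store only if the store
-- directories match, or if the object has no references at all.
canCopy :: StoreDir -> StoreDir -> [Reference] -> Bool
canCopy sourceDir targetDir refs = sourceDir == targetDir || null refs
```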
## Digest
|
|
||||||
|
|
||||||
In a [store path](#store-path), the [digest][digest] is the output of a [cryptographic hash function][hash] of either all *inputs* involved in building the referenced store object or its actual *contents*.
|
|
||||||
|
|
||||||
Store objects are therefore said to be either [input-addressed](#input-addressing) or [content-addressed](#content-addressing).
|
|
||||||
|
|
||||||
> **Historical Note**
|
|
||||||
> The 20-byte restriction is because originally digests were [SHA-1][sha-1] hashes.
|
|
||||||
> Nix now uses [SHA-256][sha-256], and longer hashes are still reduced to 20 bytes for compatibility.
|
|
||||||
|
|
||||||
[digest]: https://en.m.wiktionary.org/wiki/digest#Noun
|
|
||||||
[hash]: https://en.m.wikipedia.org/wiki/Cryptographic_hash_function
|
|
||||||
[sha-1]: https://en.m.wikipedia.org/wiki/SHA-1
|
|
||||||
[sha-256]: https://en.m.wikipedia.org/wiki/SHA-256
|
|
||||||
|
|
||||||
### Reference scanning
|
|
||||||
|
|
||||||
When a new store object is built, Nix scans its file contents for store paths to construct its set of references.
|
|
||||||
|
|
||||||
The special format of a store path's [digest](#digest) allows reliably detecting it among arbitrary data.
|
|
||||||
Nix uses the [closure](store.md#closure) of build inputs to derive the list of allowed store paths, to avoid false positives.
|
|
||||||
|
|
||||||
This way, scanning files captures run time dependencies without the user having to declare them explicitly.
|
|
||||||
Doing it at build time and persisting references in the store object avoids repeating this time-consuming operation.
|
|
||||||
|
|
||||||
> **Note**
|
|
||||||
> In practice, it is sometimes still necessary for users to declare certain dependencies explicitly, if they are to be preserved in the build result's closure.
|
|
||||||
> This depends on the specifics of the software to build and run.
|
|
||||||
>
|
|
||||||
> For example, Java programs are compressed after compilation, which obfuscates any store paths they may refer to and prevents Nix from automatically detecting them.
|
|
||||||
|
|
||||||
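A minimal sketch of the scanning step, assuming the set of allowed digests is already known from the closure of the build inputs (names and types are illustrative, not Nix's implementation):

```haskell
import qualified Data.ByteString as BS

-- Keep only those digests that literally occur in the new store object's
-- file contents; these become the object's references.
scanReferences :: [BS.ByteString]  -- digests of the build inputs' closure
               -> BS.ByteString    -- file contents of the new store object
               -> [BS.ByteString]
scanReferences allowedDigests fileContents =
  [ digest | digest <- allowedDigests, digest `BS.isInfixOf` fileContents ]
```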
## Input Addressing
|
|
||||||
|
|
||||||
Input addressing means that the digest derives from how the store object was produced, namely its build inputs and build plan.
|
|
||||||
|
|
||||||
To compute the hash of a store object one needs a deterministic serialisation, i.e., a binary string representation which only changes if the store object changes.
|
|
||||||
|
|
||||||
Nix has a custom serialisation format called Nix Archive (NAR).
|
|
||||||
|
|
||||||
Store object references of this sort can *not* be validated from the content of the store object.
|
|
||||||
Rather, a cryptographic signature has to be used to indicate that someone is vouching for the store object really being produced from a build plan with that digest.
|
|
||||||
|
|
||||||
## Content Addressing
|
|
||||||
|
|
||||||
Content addressing means that the digest derives from the store object's contents, namely its file system objects and references.
|
|
||||||
If one knows content addressing was used, one can recalculate the reference and thus verify the store object.
|
|
||||||
|
|
||||||
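A sketch of that verification step, parameterised over some digest function (all names here are illustrative):

```haskell
type Digest = String   -- stand-in for the rendered 20-byte digest

-- Recompute the digest from the object's serialised contents and compare
-- it with the digest carried by the store path.
verifyContentAddressed :: (c -> Digest) -> Digest -> c -> Bool
verifyContentAddressed digestOf expected contents = digestOf contents == expected
```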
Content addressing is currently only used for the special cases of source files and "fixed-output derivations", where the contents of a store object are known in advance.
|
|
||||||
Content addressing of build results is still an [experimental feature subject to some restrictions](https://github.com/tweag/rfcs/blob/cas-rfc/rfcs/0062-content-addressed-paths.md).
|
|
||||||
|
|
|
@@ -1,151 +0,0 @@
|
||||||
# Store
|
|
||||||
|
|
||||||
A Nix store is a collection of *store objects* with references between them.
|
|
||||||
It supports operations to manipulate that collection.
|
|
||||||
|
|
||||||
The following concept map is a graphical outline of this chapter.
|
|
||||||
Arrows indicate suggested reading order.
|
|
||||||
|
|
||||||
```
|
|
||||||
,--------------[ store ]----------------,
|
|
||||||
| | |
|
|
||||||
v v v
|
|
||||||
[ store object ] [ closure ]--, [ operations ]
|
|
||||||
| | | | | |
|
|
||||||
v | | v v |
|
|
||||||
[ files and processes ] | | [ garbage collection ] |
|
|
||||||
/ \ | | |
|
|
||||||
v v | v v
|
|
||||||
[ file system object ] [ store path ] | [ derivation ]--->[ building ]
|
|
||||||
| ^ | | |
|
|
||||||
v | v v |
|
|
||||||
[ digest ]----' [ reference scanning ]<------------'
|
|
||||||
/ \
|
|
||||||
v v
|
|
||||||
[ input addressing ] [ content addressing ]
|
|
||||||
```
|
|
||||||
|
|
||||||
## Store Object
|
|
||||||
|
|
||||||
A store object can hold
|
|
||||||
|
|
||||||
- arbitrary *data*
|
|
||||||
- *references* to other store objects.
|
|
||||||
|
|
||||||
Store objects can be build inputs, build results, or build tasks.
|
|
||||||
|
|
||||||
Store objects are [immutable][immutable-object]: once created, they do not change until they are deleted.
|
|
||||||
|
|
||||||
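A minimal sketch of that description, with stand-in types rather than Nix's actual representation:

```haskell
type Data      = String   -- stand-in for arbitrary data
type Reference = String   -- stand-in for an opaque identifier

-- A store object holds data and references to other store objects.
data StoreObject = StoreObject
  { objectData :: Data
  , references :: [Reference]
  } deriving Show
```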
## Reference
|
|
||||||
|
|
||||||
A store object reference is an [opaque][opaque-data-type], [unique identifier][unique-identifier]:
|
|
||||||
The only way to obtain references is by adding or building store objects.
|
|
||||||
A reference will always point to exactly one store object.
|
|
||||||
|
|
||||||
## Operations
|
|
||||||
|
|
||||||
A Nix store can *add*, *retrieve*, and *delete* store objects.
|
|
||||||
|
|
||||||
[ data ]
|
|
||||||
|
|
|
||||||
V
|
|
||||||
[ store ] ---> add ----> [ store' ]
|
|
||||||
|
|
|
||||||
V
|
|
||||||
[ reference ]
|
|
||||||
|
|
||||||
<!-- -->
|
|
||||||
|
|
||||||
[ reference ]
|
|
||||||
|
|
|
||||||
V
|
|
||||||
[ store ] ---> get
|
|
||||||
|
|
|
||||||
V
|
|
||||||
[ store object ]
|
|
||||||
|
|
||||||
<!-- -->
|
|
||||||
|
|
||||||
[ reference ]
|
|
||||||
|
|
|
||||||
V
|
|
||||||
[ store ] --> delete --> [ store' ]
|
|
||||||
|
|
||||||
|
|
||||||
It can *perform builds*, that is, create new store objects by transforming build inputs into build outputs, using instructions from the build tasks.
|
|
||||||
|
|
||||||
|
|
||||||
[ reference ]
|
|
||||||
|
|
|
||||||
V
|
|
||||||
[ store ] --> build --(maybe)--> [ store' ]
|
|
||||||
|
|
|
||||||
V
|
|
||||||
[ reference ]
|
|
||||||
|
|
||||||
|
|
||||||
As it keeps track of references, it can [garbage-collect][garbage-collection] unused store objects.
|
|
||||||
|
|
||||||
|
|
||||||
[ store ] --> collect garbage --> [ store' ]
|
|
||||||
|
|
||||||
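Taken together, the operations above can be summarised as an interface sketch; the class and method names are hypothetical and chosen only to mirror the diagrams:

```haskell
type Data      = String   -- stand-in for arbitrary data to be added
type Reference = String   -- stand-in for an opaque identifier

data StoreObject = StoreObject Data [Reference]

-- Each operation returns the (possibly changed) store, mirroring the
-- [ store ] ---> operation ---> [ store' ] diagrams above.
class Store s where
  add            :: s -> Data -> (s, Reference)
  get            :: s -> Reference -> StoreObject
  delete         :: s -> Reference -> s
  build          :: s -> Reference -> Maybe (s, Reference)  -- building may fail
  collectGarbage :: s -> s
```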
## Files and Processes
|
|
||||||
|
|
||||||
Nix maps between its store model and the [Unix paradigm][unix-paradigm] of [files and processes][file-descriptor], by encoding immutable store objects and opaque identifiers as file system primitives: files and directories, and paths.
|
|
||||||
That allows processes to resolve references contained in files and thus access the contents of store objects.
|
|
||||||
|
|
||||||
Store objects are therefore implemented as the pair of
|
|
||||||
|
|
||||||
- a [file system object](fso.md) for data
|
|
||||||
- a set of [store paths](path.md) for references.
|
|
||||||
|
|
||||||
[unix-paradigm]: https://en.m.wikipedia.org/wiki/Everything_is_a_file
|
|
||||||
[file-descriptor]: https://en.m.wikipedia.org/wiki/File_descriptor
|
|
||||||
|
|
||||||
The following diagram shows a radical simplification of how Nix interacts with the operating system:
|
|
||||||
It uses files as build inputs, and build outputs are files again.
|
|
||||||
On the operating system, files can be run as processes, which in turn operate on files.
|
|
||||||
A build function also amounts to an operating system process (not depicted).
|
|
||||||
|
|
||||||
```
|
|
||||||
+-----------------------------------------------------------------+
|
|
||||||
| Nix |
|
|
||||||
| [ commmand line interface ]------, |
|
|
||||||
| | | |
|
|
||||||
| evaluates | |
|
|
||||||
| | manages |
|
|
||||||
| V | |
|
|
||||||
| [ configuration language ] | |
|
|
||||||
| | | |
|
|
||||||
| +-----------------------------|-------------------V-----------+ |
|
|
||||||
| | store evaluates to | |
|
|
||||||
| | | | |
|
|
||||||
| | referenced by V builds | |
|
|
||||||
| | [ build input ] ---> [ build plan ] ---> [ build result ] | |
|
|
||||||
| | ^ | | |
|
|
||||||
| +---------|----------------------------------------|----------+ |
|
|
||||||
+-----------|----------------------------------------|------------+
|
|
||||||
| |
|
|
||||||
file system object store path
|
|
||||||
| |
|
|
||||||
+-----------|----------------------------------------|------------+
|
|
||||||
| operating system +------------+ | |
|
|
||||||
| '------------ | | <-----------' |
|
|
||||||
| | file | |
|
|
||||||
| ,-- | | <-, |
|
|
||||||
| | +------------+ | |
|
|
||||||
| execute as | | read, write, execute |
|
|
||||||
| | +------------+ | |
|
|
||||||
| '-> | process | --' |
|
|
||||||
| +------------+ |
|
|
||||||
+-----------------------------------------------------------------+
|
|
||||||
```
|
|
||||||
|
|
||||||
There exist different types of stores, which all follow this model.
|
|
||||||
Examples:
|
|
||||||
- store on the local file system
|
|
||||||
- remote store accessible via SSH
|
|
||||||
- binary cache store accessible via HTTP
|
|
||||||
|
|
||||||
To make store objects accessible to processes, stores ultimately have to expose store objects through the file system.
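
For example, which of these store types a command operates on can be selected explicitly (a sketch; the host and cache names are placeholders, and `nix copy` requires the `nix-command` experimental feature):

```console
# local store on this machine (the default)
$ nix-store --query --requisites /nix/store/<hash>-hello

# remote store reached via SSH
$ nix-copy-closure --to alice@builder.example.org /nix/store/<hash>-hello

# binary cache reached via HTTP
$ nix copy --to https://cache.example.org /nix/store/<hash>-hello
```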
@ -1,32 +0,0 @@

# A [Rosetta stone][rosetta-stone] for build system terminology

The Nix store's design is comparable to other build systems.
Usage of terms is, for historic reasons, not entirely consistent within the Nix ecosystem, and still subject to slow change.

The following translation table points out similarities and equivalent terms, to help clarify their meaning and inform consistent use in the future.

| generic build system             | Nix              | [Bazel][bazel]                                                        | [Build Systems à la Carte][bsalc] | programming language     |
| -------------------------------- | ---------------- | --------------------------------------------------------------------- | --------------------------------- | ------------------------ |
| data (build input, build result) | store object     | [artifact][bazel-artifact]                                            | value                             | value                    |
| build instructions               | builder          | ([depends on action type][bazel-actions])                             | function                          | function                 |
| build task                       | derivation       | [action][bazel-action]                                                | `Task`                            | [thunk][thunk]           |
| build plan                       | derivation graph | [action graph][bazel-action-graph], [build graph][bazel-build-graph]  | `Tasks`                           | [call graph][call-graph] |
| build                            | build            | build                                                                 | application of `Build`            | evaluation               |
| persistence layer                | store            | [action cache][bazel-action-cache]                                    | `Store`                           | heap                     |

All of these systems share features of [declarative programming][declarative-programming] languages, a key insight first put forward by Eelco Dolstra et al. in [Imposing a Memory Management Discipline on Software Deployment][immdsd] (2004), elaborated in his PhD thesis [The Purely Functional Software Deployment Model][phd-thesis] (2006), and further refined by Andrey Mokhov et al. in [Build Systems à la Carte][bsalc] (2018).

[rosetta-stone]: https://en.m.wikipedia.org/wiki/Rosetta_Stone
[bazel]: https://bazel.build/start/bazel-intro
[bazel-artifact]: https://bazel.build/reference/glossary#artifact
[bazel-actions]: https://docs.bazel.build/versions/main/skylark/lib/actions.html
[bazel-action]: https://bazel.build/reference/glossary#action
[bazel-action-graph]: https://bazel.build/reference/glossary#action-graph
[bazel-build-graph]: https://bazel.build/reference/glossary#build-graph
[bazel-action-cache]: https://bazel.build/reference/glossary#action-cache
[thunk]: https://en.m.wikipedia.org/wiki/Thunk
[call-graph]: https://en.m.wikipedia.org/wiki/Call_graph
[declarative-programming]: https://en.m.wikipedia.org/wiki/Declarative_programming
[immdsd]: https://edolstra.github.io/pubs/immdsd-icse2004-final.pdf
[phd-thesis]: https://edolstra.github.io/pubs/phd-thesis.pdf
[bsalc]: https://www.microsoft.com/en-us/research/uploads/prod/2018/03/build-systems.pdf
@ -1,29 +0,0 @@

# Closure

Nix stores ensure [referential integrity][referential-integrity]: for each store object in the store, all the store objects it references must also be in the store.

The set of all store objects reachable by following references from a given initial set of store objects is called a *closure*.

Adding, building, copying and deleting store objects must be done in a way that preserves referential integrity:

- A newly added store object cannot have references, unless it is a build task.

- Build results must only refer to store objects in the closure of the build inputs.

  Building a store object will add appropriate references, according to the build task.

- Store objects being copied must refer to objects already in the destination store.

  Recursive copying must either proceed in dependency order or be atomic.

- We can only safely delete store objects which are not reachable from any reference still in use.
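
For example, the closure of a store path can be listed with the stable `nix-store` command (a minimal sketch; the path is a placeholder):

```console
# print every store path reachable from the given path, i.e. its closure
$ nix-store --query --requisites /nix/store/<hash>-hello-2.12
```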

<!-- more details in section on garbage collection, link to it once it exists -->

[referential-integrity]: https://en.m.wikipedia.org/wiki/Referential_integrity
[garbage-collection]: https://en.m.wikipedia.org/wiki/Garbage_collection_(computer_science)
[immutable-object]: https://en.m.wikipedia.org/wiki/Immutable_object
[opaque-data-type]: https://en.m.wikipedia.org/wiki/Opaque_data_type
[unique-identifier]: https://en.m.wikipedia.org/wiki/Unique_identifier

@ -7,42 +7,11 @@ Most Nix commands interpret the following environment variables:
|
||||||
`nix-shell`. It can have the values `pure` or `impure`.
|
`nix-shell`. It can have the values `pure` or `impure`.
|
||||||
|
|
||||||
- [`NIX_PATH`]{#env-NIX_PATH}\
|
- [`NIX_PATH`]{#env-NIX_PATH}\
|
||||||
A colon-separated list of directories used to look up Nix
|
A colon-separated list of directories used to look up the location of Nix
|
||||||
expressions enclosed in angle brackets (i.e., `<path>`). For
|
expressions using [paths](../language/values.md#type-path)
|
||||||
instance, the value
|
enclosed in angle brackets (i.e., `<path>`),
|
||||||
|
e.g. `/home/eelco/Dev:/etc/nixos`. It can be extended using the
|
||||||
/home/eelco/Dev:/etc/nixos
|
[`-I` option](./opt-common.md#opt-I).
|
||||||
|
|
||||||
will cause Nix to look for paths relative to `/home/eelco/Dev` and
|
|
||||||
`/etc/nixos`, in this order. It is also possible to match paths
|
|
||||||
against a prefix. For example, the value
|
|
||||||
|
|
||||||
nixpkgs=/home/eelco/Dev/nixpkgs-branch:/etc/nixos
|
|
||||||
|
|
||||||
will cause Nix to search for `<nixpkgs/path>` in
|
|
||||||
`/home/eelco/Dev/nixpkgs-branch/path` and `/etc/nixos/nixpkgs/path`.
|
|
||||||
|
|
||||||
If a path in the Nix search path starts with `http://` or
|
|
||||||
`https://`, it is interpreted as the URL of a tarball that will be
|
|
||||||
downloaded and unpacked to a temporary location. The tarball must
|
|
||||||
consist of a single top-level directory. For example, setting
|
|
||||||
`NIX_PATH` to
|
|
||||||
|
|
||||||
nixpkgs=https://github.com/NixOS/nixpkgs/archive/master.tar.gz
|
|
||||||
|
|
||||||
tells Nix to download and use the current contents of the
|
|
||||||
`master` branch in the `nixpkgs` repository.
|
|
||||||
|
|
||||||
The URLs of the tarballs from the official nixos.org channels (see
|
|
||||||
[the manual for `nix-channel`](nix-channel.md)) can be abbreviated
|
|
||||||
as `channel:<channel-name>`. For instance, the following two
|
|
||||||
values of `NIX_PATH` are equivalent:
|
|
||||||
|
|
||||||
nixpkgs=channel:nixos-21.05
|
|
||||||
nixpkgs=https://nixos.org/channels/nixos-21.05/nixexprs.tar.xz
|
|
||||||
|
|
||||||
The Nix search path can also be extended using the `-I` option to
|
|
||||||
many Nix commands, which takes precedence over `NIX_PATH`.
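
For instance, how a search path entry resolves can be checked with `nix-instantiate --find-file` (a sketch; the directory is a placeholder):

```console
$ NIX_PATH=nixpkgs=/home/alice/Dev/nixpkgs nix-instantiate --find-file nixpkgs
/home/alice/Dev/nixpkgs
```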
|
|
||||||
|
|
||||||
- [`NIX_IGNORE_SYMLINK_STORE`]{#env-NIX_IGNORE_SYMLINK_STORE}\
|
- [`NIX_IGNORE_SYMLINK_STORE`]{#env-NIX_IGNORE_SYMLINK_STORE}\
|
||||||
Normally, the Nix store directory (typically `/nix/store`) is not
|
Normally, the Nix store directory (typically `/nix/store`) is not
|
||||||
|
|
|
@ -37,10 +37,12 @@ directory containing at least a file named `default.nix`.
|
||||||
|
|
||||||
`nix-build` is essentially a wrapper around
|
`nix-build` is essentially a wrapper around
|
||||||
[`nix-instantiate`](nix-instantiate.md) (to translate a high-level Nix
|
[`nix-instantiate`](nix-instantiate.md) (to translate a high-level Nix
|
||||||
expression to a low-level store derivation) and [`nix-store
|
expression to a low-level [store derivation]) and [`nix-store
|
||||||
--realise`](nix-store.md#operation---realise) (to build the store
|
--realise`](nix-store.md#operation---realise) (to build the store
|
||||||
derivation).
|
derivation).
|
||||||
|
|
||||||
|
[store derivation]: ../glossary.md#gloss-store-derivation
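
In other words, a plain `nix-build` is roughly equivalent to the following two-step invocation (a sketch assuming a `default.nix` in the current directory):

```console
$ nix-store --realise $(nix-instantiate ./default.nix)
```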
|
||||||
|
|
||||||
> **Warning**
|
> **Warning**
|
||||||
>
|
>
|
||||||
> The result of the build is automatically registered as a root of the
|
> The result of the build is automatically registered as a root of the
|
||||||
|
@ -53,16 +55,18 @@ All options not listed here are passed to `nix-store
|
||||||
--realise`, except for `--arg` and `--attr` / `-A` which are passed to
|
--realise`, except for `--arg` and `--attr` / `-A` which are passed to
|
||||||
`nix-instantiate`.
|
`nix-instantiate`.
|
||||||
|
|
||||||
- [`--no-out-link`]{#opt-no-out-link}\
|
- <span id="opt-no-out-link">[`--no-out-link`](#opt-no-out-link)<span>
|
||||||
|
|
||||||
Do not create a symlink to the output path. Note that as a result
|
Do not create a symlink to the output path. Note that as a result
|
||||||
the output does not become a root of the garbage collector, and so
|
the output does not become a root of the garbage collector, and so
|
||||||
might be deleted by `nix-store
|
might be deleted by `nix-store --gc`.
|
||||||
--gc`.
|
|
||||||
|
- <span id="opt-dry-run">[`--dry-run`](#opt-dry-run)</span>
|
||||||
|
|
||||||
- [`--dry-run`]{#opt-dry-run}\
|
|
||||||
Show what store paths would be built or downloaded.
|
Show what store paths would be built or downloaded.
|
||||||
|
|
||||||
- [`--out-link`]{#opt-out-link} / `-o` *outlink*\
|
- <span id="opt-out-link">[`--out-link`](#opt-out-link)</span> / `-o` *outlink*
|
||||||
|
|
||||||
Change the name of the symlink to the output path created from
|
Change the name of the symlink to the output path created from
|
||||||
`result` to *outlink*.
|
`result` to *outlink*.
|
||||||
|
|
||||||
|
|
|
@ -30,8 +30,8 @@ Since `nix-copy-closure` calls `ssh`, you may be asked to type in the
|
||||||
appropriate password or passphrase. In fact, you may be asked _twice_
|
appropriate password or passphrase. In fact, you may be asked _twice_
|
||||||
because `nix-copy-closure` currently connects twice to the remote
|
because `nix-copy-closure` currently connects twice to the remote
|
||||||
machine, first to get the set of paths missing on the target machine,
|
machine, first to get the set of paths missing on the target machine,
|
||||||
and second to send the dump of those paths. If this bothers you, use
|
and second to send the dump of those paths. When using public key
|
||||||
`ssh-agent`.
|
authentication, you can avoid typing the passphrase with `ssh-agent`.
|
||||||
|
|
||||||
# Options
|
# Options
|
||||||
|
|
||||||
|
@ -47,7 +47,9 @@ and second to send the dump of those paths. If this bothers you, use
|
||||||
Enable compression of the SSH connection.
|
Enable compression of the SSH connection.
|
||||||
|
|
||||||
- `--include-outputs`\
|
- `--include-outputs`\
|
||||||
Also copy the outputs of store derivations included in the closure.
|
Also copy the outputs of [store derivation]s included in the closure.
|
||||||
|
|
||||||
|
[store derivation]: ../glossary.md#gloss-store-derivation
|
||||||
|
|
||||||
- `--use-substitutes` / `-s`\
|
- `--use-substitutes` / `-s`\
|
||||||
Attempt to download missing paths on the target machine using Nix’s
|
Attempt to download missing paths on the target machine using Nix’s
|
||||||
|
|
|
@ -8,6 +8,6 @@
|
||||||
|
|
||||||
# Description
|
# Description
|
||||||
|
|
||||||
The Nix daemon is necessary in multi-user Nix installations. It performs
|
The Nix daemon is necessary in multi-user Nix installations. It runs
|
||||||
build actions and other operations on the Nix store on behalf of
|
build tasks and other operations on the Nix store on behalf of
|
||||||
unprivileged users.
|
unprivileged users.
|
||||||
|
|
|
@ -205,10 +205,12 @@ a number of possible ways:
|
||||||
unambiguous way, which is necessary if there are multiple
|
unambiguous way, which is necessary if there are multiple
|
||||||
derivations with the same name.
|
derivations with the same name.
|
||||||
|
|
||||||
- If *args* are store derivations, then these are
|
- If *args* are [store derivation]s, then these are
|
||||||
[realised](nix-store.md#operation---realise), and the resulting output paths
|
[realised](nix-store.md#operation---realise), and the resulting output paths
|
||||||
are installed.
|
are installed.
|
||||||
|
|
||||||
|
[store derivation]: ../glossary.md#gloss-store-derivation
|
||||||
|
|
||||||
- If *args* are store paths that are not store derivations, then these
|
- If *args* are store paths that are not store derivations, then these
|
||||||
are [realised](nix-store.md#operation---realise) and installed.
|
are [realised](nix-store.md#operation---realise) and installed.
|
||||||
|
|
||||||
|
@ -280,7 +282,7 @@ To copy the store path with symbolic name `gcc` from another profile:
|
||||||
$ nix-env -i --from-profile /nix/var/nix/profiles/foo gcc
|
$ nix-env -i --from-profile /nix/var/nix/profiles/foo gcc
|
||||||
```
|
```
|
||||||
|
|
||||||
To install a specific store derivation (typically created by
|
To install a specific [store derivation] (typically created by
|
||||||
`nix-instantiate`):
|
`nix-instantiate`):
|
||||||
|
|
||||||
```console
|
```console
|
||||||
|
@ -665,7 +667,7 @@ derivation is shown unless `--no-name` is specified.
|
||||||
Print the `system` attribute of the derivation.
|
Print the `system` attribute of the derivation.
|
||||||
|
|
||||||
- `--drv-path`\
|
- `--drv-path`\
|
||||||
Print the path of the store derivation.
|
Print the path of the [store derivation].
|
||||||
|
|
||||||
- `--out-path`\
|
- `--out-path`\
|
||||||
Print the output path of the derivation.
|
Print the output path of the derivation.
|
||||||
|
|
|
@ -17,13 +17,14 @@
|
||||||
|
|
||||||
# Description
|
# Description
|
||||||
|
|
||||||
The command `nix-instantiate` generates [store
|
The command `nix-instantiate` produces [store derivation]s from (high-level) Nix expressions.
|
||||||
derivations](../glossary.md) from (high-level) Nix expressions. It
|
It evaluates the Nix expressions in each of *files* (which defaults to
|
||||||
evaluates the Nix expressions in each of *files* (which defaults to
|
|
||||||
*./default.nix*). Each top-level expression should evaluate to a
|
*./default.nix*). Each top-level expression should evaluate to a
|
||||||
derivation, a list of derivations, or a set of derivations. The paths
|
derivation, a list of derivations, or a set of derivations. The paths
|
||||||
of the resulting store derivations are printed on standard output.
|
of the resulting store derivations are printed on standard output.
|
||||||
|
|
||||||
|
[store derivation]: ../glossary.md#gloss-store-derivation
|
||||||
|
|
||||||
If *files* is the character `-`, then a Nix expression will be read from
|
If *files* is the character `-`, then a Nix expression will be read from
|
||||||
standard input.
|
standard input.
|
||||||
|
|
||||||
|
@ -79,8 +80,7 @@ standard input.
|
||||||
|
|
||||||
# Examples
|
# Examples
|
||||||
|
|
||||||
Instantiating store derivations from a Nix expression, and building them
|
Instantiate [store derivation]s from a Nix expression, and build them using `nix-store`:
|
||||||
using `nix-store`:
|
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-instantiate test.nix (instantiate)
|
$ nix-instantiate test.nix (instantiate)
|
||||||
|
|
|
@ -22,7 +22,8 @@ This section lists the options that are common to all operations. These
|
||||||
options are allowed for every subcommand, though they may not always
|
options are allowed for every subcommand, though they may not always
|
||||||
have an effect.
|
have an effect.
|
||||||
|
|
||||||
- [`--add-root`]{#opt-add-root} *path*\
|
- <span id="opt-add-root">[`--add-root`](#opt-add-root)</span> *path*
|
||||||
|
|
||||||
Causes the result of a realisation (`--realise` and
|
Causes the result of a realisation (`--realise` and
|
||||||
`--force-realise`) to be registered as a root of the garbage
|
`--force-realise`) to be registered as a root of the garbage
|
||||||
collector. *path* will be created as a symlink to the resulting
|
collector. *path* will be created as a symlink to the resulting
|
||||||
|
@ -65,13 +66,13 @@ The operation `--realise` essentially “builds” the specified store
|
||||||
paths. Realisation is a somewhat overloaded term:
|
paths. Realisation is a somewhat overloaded term:
|
||||||
|
|
||||||
- If the store path is a *derivation*, realisation ensures that the
|
- If the store path is a *derivation*, realisation ensures that the
|
||||||
output paths of the derivation are [valid](../glossary.md) (i.e.,
|
output paths of the derivation are [valid] (i.e.,
|
||||||
the output path and its closure exist in the file system). This
|
the output path and its closure exist in the file system). This
|
||||||
can be done in several ways. First, it is possible that the
|
can be done in several ways. First, it is possible that the
|
||||||
outputs are already valid, in which case we are done
|
outputs are already valid, in which case we are done
|
||||||
immediately. Otherwise, there may be [substitutes](../glossary.md)
|
immediately. Otherwise, there may be [substitutes]
|
||||||
that produce the outputs (e.g., by downloading them). Finally, the
|
that produce the outputs (e.g., by downloading them). Finally, the
|
||||||
outputs can be produced by performing the build action described
|
outputs can be produced by running the build task described
|
||||||
by the derivation.
|
by the derivation.
|
||||||
|
|
||||||
- If the store path is not a derivation, realisation ensures that the
|
- If the store path is not a derivation, realisation ensures that the
|
||||||
|
@ -81,6 +82,9 @@ paths. Realisation is a somewhat overloaded term:
|
||||||
produced through substitutes. If there are no (successful)
|
produced through substitutes. If there are no (successful)
|
||||||
substitutes, realisation fails.
|
substitutes, realisation fails.
|
||||||
|
|
||||||
|
[valid]: ../glossary.md#gloss-validity
|
||||||
|
[substitutes]: ../glossary.md#gloss-substitute
|
||||||
|
|
||||||
The output path of each derivation is printed on standard output. (For
|
The output path of each derivation is printed on standard output. (For
|
||||||
non-derivations argument, the argument itself is printed.)
|
non-derivations argument, the argument itself is printed.)
|
||||||
|
|
||||||
|
@ -104,10 +108,6 @@ The following flags are available:
|
||||||
previous build, the new output path is left in
|
previous build, the new output path is left in
|
||||||
`/nix/store/name.check.`
|
`/nix/store/name.check.`
|
||||||
|
|
||||||
See also the `build-repeat` configuration option, which repeats a
|
|
||||||
derivation a number of times and prevents its outputs from being
|
|
||||||
registered as “valid” in the Nix store unless they are identical.
|
|
||||||
|
|
||||||
Special exit codes:
|
Special exit codes:
|
||||||
|
|
||||||
- `100`\
|
- `100`\
|
||||||
|
@ -140,8 +140,10 @@ or.
|
||||||
|
|
||||||
## Examples
|
## Examples
|
||||||
|
|
||||||
This operation is typically used to build store derivations produced by
|
This operation is typically used to build [store derivation]s produced by
|
||||||
[`nix-instantiate`](nix-instantiate.md):
|
[`nix-instantiate`](./nix-instantiate.md):
|
||||||
|
|
||||||
|
[store derivation]: ../glossary.md#gloss-store-derivation
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-store -r $(nix-instantiate ./test.nix)
|
$ nix-store -r $(nix-instantiate ./test.nix)
|
||||||
|
@ -156,6 +158,12 @@ To test whether a previously-built derivation is deterministic:
|
||||||
$ nix-build '<nixpkgs>' -A hello --check -K
|
$ nix-build '<nixpkgs>' -A hello --check -K
|
||||||
```
|
```
|
||||||
|
|
||||||
|
Use [`--read-log`](#operation---read-log) to show the stderr and stdout of a build:
|
||||||
|
|
||||||
|
```console
|
||||||
|
$ nix-store --read-log $(nix-instantiate ./test.nix)
|
||||||
|
```
|
||||||
|
|
||||||
# Operation `--serve`
|
# Operation `--serve`
|
||||||
|
|
||||||
## Synopsis
|
## Synopsis
|
||||||
|
@ -290,8 +298,8 @@ error: cannot delete path `/nix/store/zq0h41l75vlb4z45kzgjjmsjxvcv1qk7-mesa-6.4'
|
||||||
|
|
||||||
## Description
|
## Description
|
||||||
|
|
||||||
The operation `--query` displays various bits of information about the
|
The operation `--query` displays information about [store path]s.
|
||||||
store paths . The queries are described below. At most one query can be
|
The queries are described below. At most one query can be
|
||||||
specified. The default query is `--outputs`.
|
specified. The default query is `--outputs`.
|
||||||
|
|
||||||
The paths *paths* may also be symlinks from outside of the Nix store, to
|
The paths *paths* may also be symlinks from outside of the Nix store, to
|
||||||
|
@ -301,7 +309,7 @@ symlink.
|
||||||
## Common query options
|
## Common query options
|
||||||
|
|
||||||
- `--use-output`; `-u`\
|
- `--use-output`; `-u`\
|
||||||
For each argument to the query that is a store derivation, apply the
|
For each argument to the query that is a [store derivation], apply the
|
||||||
query to the output path of the derivation instead.
|
query to the output path of the derivation instead.
|
||||||
|
|
||||||
- `--force-realise`; `-f`\
|
- `--force-realise`; `-f`\
|
||||||
|
@ -311,17 +319,17 @@ symlink.
|
||||||
## Queries
|
## Queries
|
||||||
|
|
||||||
- `--outputs`\
|
- `--outputs`\
|
||||||
Prints out the [output paths](../glossary.md) of the store
|
Prints out the [output path]s of the store
|
||||||
derivations *paths*. These are the paths that will be produced when
|
derivations *paths*. These are the paths that will be produced when
|
||||||
the derivation is built.
|
the derivation is built.
|
||||||
|
|
||||||
- `--requisites`; `-R`\
|
- `--requisites`; `-R`\
|
||||||
Prints out the [closure](../glossary.md) of the store path *paths*.
|
Prints out the [closure] of the given *paths*.
|
||||||
|
|
||||||
This query has one option:
|
This query has one option:
|
||||||
|
|
||||||
- `--include-outputs`
|
- `--include-outputs`
|
||||||
Also include the existing output paths of store derivations,
|
Also include the existing output paths of [store derivation]s,
|
||||||
and their closures.
|
and their closures.
|
||||||
|
|
||||||
This query can be used to implement various kinds of deployment. A
|
This query can be used to implement various kinds of deployment. A
|
||||||
|
@ -333,10 +341,12 @@ symlink.
|
||||||
derivation and specifying the option `--include-outputs`.
|
derivation and specifying the option `--include-outputs`.
|
||||||
|
|
||||||
- `--references`\
|
- `--references`\
|
||||||
Prints the set of [references](../glossary.md) of the store paths
|
Prints the set of [reference]s of the store paths
|
||||||
*paths*, that is, their immediate dependencies. (For *all*
|
*paths*, that is, their immediate dependencies. (For *all*
|
||||||
dependencies, use `--requisites`.)
|
dependencies, use `--requisites`.)
|
||||||
|
|
||||||
|
[reference]: ../glossary.md#gloss-reference
|
||||||
|
|
||||||
- `--referrers`\
|
- `--referrers`\
|
||||||
Prints the set of *referrers* of the store paths *paths*, that is,
|
Prints the set of *referrers* of the store paths *paths*, that is,
|
||||||
the store paths currently existing in the Nix store that refer to
|
the store paths currently existing in the Nix store that refer to
|
||||||
|
@ -351,11 +361,13 @@ symlink.
|
||||||
in the Nix store that are dependent on *paths*.
|
in the Nix store that are dependent on *paths*.
|
||||||
|
|
||||||
- `--deriver`; `-d`\
|
- `--deriver`; `-d`\
|
||||||
Prints the [deriver](../glossary.md) of the store paths *paths*. If
|
Prints the [deriver] of the store paths *paths*. If
|
||||||
the path has no deriver (e.g., if it is a source file), or if the
|
the path has no deriver (e.g., if it is a source file), or if the
|
||||||
deriver is not known (e.g., in the case of a binary-only
|
deriver is not known (e.g., in the case of a binary-only
|
||||||
deployment), the string `unknown-deriver` is printed.
|
deployment), the string `unknown-deriver` is printed.
|
||||||
|
|
||||||
|
[deriver]: ../glossary.md#gloss-deriver
|
||||||
|
|
||||||
- `--graph`\
|
- `--graph`\
|
||||||
Prints the references graph of the store paths *paths* in the format
|
Prints the references graph of the store paths *paths* in the format
|
||||||
of the `dot` tool of AT\&T's [Graphviz
|
of the `dot` tool of AT\&T's [Graphviz
|
||||||
|
@ -375,12 +387,12 @@ symlink.
|
||||||
Prints the references graph of the store paths *paths* in the
|
Prints the references graph of the store paths *paths* in the
|
||||||
[GraphML](http://graphml.graphdrawing.org/) file format. This can be
|
[GraphML](http://graphml.graphdrawing.org/) file format. This can be
|
||||||
used to visualise dependency graphs. To obtain a build-time
|
used to visualise dependency graphs. To obtain a build-time
|
||||||
dependency graph, apply this to a store derivation. To obtain a
|
dependency graph, apply this to a [store derivation]. To obtain a
|
||||||
runtime dependency graph, apply it to an output path.
|
runtime dependency graph, apply it to an output path.
|
||||||
|
|
||||||
- `--binding` *name*; `-b` *name*\
|
- `--binding` *name*; `-b` *name*\
|
||||||
Prints the value of the attribute *name* (i.e., environment
|
Prints the value of the attribute *name* (i.e., environment
|
||||||
variable) of the store derivations *paths*. It is an error for a
|
variable) of the [store derivation]s *paths*. It is an error for a
|
||||||
derivation to not have the specified attribute.
|
derivation to not have the specified attribute.
|
||||||
|
|
||||||
- `--hash`\
|
- `--hash`\
|
||||||
|
|
|
@ -42,7 +42,7 @@ $ nix develop
|
||||||
```
|
```
|
||||||
|
|
||||||
To get a shell with a different compilation environment (e.g. stdenv,
|
To get a shell with a different compilation environment (e.g. stdenv,
|
||||||
gccStdenv, clangStdenv, clang11Stdenv):
|
gccStdenv, clangStdenv, clang11Stdenv, ccacheStdenv):
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-shell -A devShells.x86_64-linux.clang11StdenvPackages
|
$ nix-shell -A devShells.x86_64-linux.clang11StdenvPackages
|
||||||
|
@ -54,6 +54,9 @@ or if you have a flake-enabled nix:
|
||||||
$ nix develop .#clang11StdenvPackages
|
$ nix develop .#clang11StdenvPackages
|
||||||
```
|
```
|
||||||
|
|
||||||
|
Note: you can use `ccacheStdenv` to drastically improve rebuild
|
||||||
|
time. By default, ccache keeps artifacts in `~/.cache/ccache/`.
|
||||||
|
|
||||||
To build Nix itself in this shell:
|
To build Nix itself in this shell:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
|
@ -83,23 +86,93 @@ by:
|
||||||
$ nix develop
|
$ nix develop
|
||||||
```
|
```
|
||||||
|
|
||||||
## Testing
|
## Running tests
|
||||||
|
|
||||||
Nix comes with three different flavors of tests: unit, functional and integration.
|
|
||||||
|
|
||||||
### Unit-tests
|
### Unit-tests
|
||||||
|
|
||||||
The unit-tests for each Nix library (`libexpr`, `libstore`, etc..) are defined
|
The unit-tests for each Nix library (`libexpr`, `libstore`, etc..) are defined
|
||||||
under `src/{library_name}/tests` using the
|
under `src/{library_name}/tests` using the
|
||||||
[googletest](https://google.github.io/googletest/) framework.
|
[googletest](https://google.github.io/googletest/) and
|
||||||
|
[rapidcheck](https://github.com/emil-e/rapidcheck) frameworks.
|
||||||
|
|
||||||
You can run the whole testsuite with `make check`, or the tests for a specific component with `make libfoo-tests_RUN`. Finer-grained filtering is also possible using the [--gtest_filter](https://google.github.io/googletest/advanced.html#running-a-subset-of-the-tests) command-line option.
|
You can run the whole testsuite with `make check`, or the tests for a specific component with `make libfoo-tests_RUN`. Finer-grained filtering is also possible using the [--gtest_filter](https://google.github.io/googletest/advanced.html#running-a-subset-of-the-tests) command-line option.
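
For example (a sketch; it assumes the filter can be passed via googletest's `GTEST_FILTER` environment variable, which the test binaries honor):

```console
$ GTEST_FILTER='*Trivial*' make libexpr-tests_RUN
```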
|
||||||
|
|
||||||
### Functional tests
|
### Functional tests
|
||||||
|
|
||||||
The functional tests reside under the `tests` directory and are listed in `tests/local.mk`.
|
The functional tests reside under the `tests` directory and are listed in `tests/local.mk`.
|
||||||
The whole testsuite can be run with `make install && make installcheck`.
|
Each test is a bash script.
|
||||||
Individual tests can be run with `make tests/{testName}.sh.test`.
|
|
||||||
|
The whole test suite can be run with:
|
||||||
|
|
||||||
|
```shell-session
|
||||||
|
$ make install && make installcheck
|
||||||
|
ran test tests/foo.sh... [PASS]
|
||||||
|
ran test tests/bar.sh... [PASS]
|
||||||
|
...
|
||||||
|
```
|
||||||
|
|
||||||
|
Individual tests can be run with `make`:
|
||||||
|
|
||||||
|
```shell-session
|
||||||
|
$ make tests/${testName}.sh.test
|
||||||
|
ran test tests/${testName}.sh... [PASS]
|
||||||
|
```
|
||||||
|
|
||||||
|
or without `make`:
|
||||||
|
|
||||||
|
```shell-session
|
||||||
|
$ ./mk/run-test.sh tests/${testName}.sh
|
||||||
|
ran test tests/${testName}.sh... [PASS]
|
||||||
|
```
|
||||||
|
|
||||||
|
To see the complete output, one can also run:
|
||||||
|
|
||||||
|
```shell-session
|
||||||
|
$ ./mk/debug-test.sh tests/${testName}.sh
|
||||||
|
+ foo
|
||||||
|
output from foo
|
||||||
|
+ bar
|
||||||
|
output from bar
|
||||||
|
...
|
||||||
|
```
|
||||||
|
|
||||||
|
The test script will then be traced with `set -x` and the output displayed as it happens, regardless of whether the test succeeds or fails.
|
||||||
|
|
||||||
|
#### Debugging failing functional tests
|
||||||
|
|
||||||
|
When a functional test fails, it usually does so somewhere in the middle of the script.
|
||||||
|
|
||||||
|
To figure out what's wrong, it is convenient to run the test regularly up to the failing `nix` command, and then run that command with a debugger like GDB.
|
||||||
|
|
||||||
|
For example, if the script looks like:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
foo
|
||||||
|
nix blah blub
|
||||||
|
bar
|
||||||
|
```
|
||||||
|
edit it like so:
|
||||||
|
|
||||||
|
```diff
|
||||||
|
foo
|
||||||
|
-nix blah blub
|
||||||
|
+gdb --args nix blah blub
|
||||||
|
bar
|
||||||
|
```
|
||||||
|
|
||||||
|
Then, running the test with `./mk/debug-test.sh` will drop you into GDB once the script reaches that point:
|
||||||
|
|
||||||
|
```shell-session
|
||||||
|
$ ./mk/debug-test.sh tests/${testName}.sh
|
||||||
|
...
|
||||||
|
+ gdb blash blub
|
||||||
|
GNU gdb (GDB) 12.1
|
||||||
|
...
|
||||||
|
(gdb)
|
||||||
|
```
|
||||||
|
|
||||||
|
One can debug the Nix invocation in all the usual ways.
|
||||||
|
For example, enter `run` to start the Nix invocation.
|
||||||
|
|
||||||
### Integration tests
|
### Integration tests
|
||||||
|
|
||||||
|
@ -108,3 +181,105 @@ These tests include everything that needs to interact with external services or
|
||||||
Because these tests are expensive and require more than what the standard github-actions setup provides, they only run on the master branch (on <https://hydra.nixos.org/jobset/nix/master>).
|
Because these tests are expensive and require more than what the standard github-actions setup provides, they only run on the master branch (on <https://hydra.nixos.org/jobset/nix/master>).
|
||||||
|
|
||||||
You can run them manually with `nix build .#hydraJobs.tests.{testName}` or `nix-build -A hydraJobs.tests.{testName}`
|
You can run them manually with `nix build .#hydraJobs.tests.{testName}` or `nix-build -A hydraJobs.tests.{testName}`
|
||||||
|
|
||||||
|
### Installer tests
|
||||||
|
|
||||||
|
After a one-time setup, the Nix repository's GitHub Actions continuous integration (CI) workflow can test the installer each time you push to a branch.
|
||||||
|
|
||||||
|
Creating a Cachix cache for your installer tests and adding its authorization token to GitHub enables [two installer-specific jobs in the CI workflow](https://github.com/NixOS/nix/blob/88a45d6149c0e304f6eb2efcc2d7a4d0d569f8af/.github/workflows/ci.yml#L50-L91):
|
||||||
|
|
||||||
|
- The `installer` job generates installers for the platforms below and uploads them to your Cachix cache:
|
||||||
|
- `x86_64-linux`
|
||||||
|
- `armv6l-linux`
|
||||||
|
- `armv7l-linux`
|
||||||
|
- `x86_64-darwin`
|
||||||
|
|
||||||
|
- The `installer_test` job (which runs on `ubuntu-latest` and `macos-latest`) will try to install Nix with the cached installer and run a trivial Nix command.
|
||||||
|
|
||||||
|
#### One-time setup
|
||||||
|
|
||||||
|
1. Have a GitHub account with a fork of the [Nix repository](https://github.com/NixOS/nix).
|
||||||
|
2. At cachix.org:
|
||||||
|
- Create or log in to an account.
|
||||||
|
- Create a Cachix cache using the format `<github-username>-nix-install-tests`.
|
||||||
|
- Navigate to the new cache > Settings > Auth Tokens.
|
||||||
|
- Generate a new Cachix auth token and copy the generated value.
|
||||||
|
3. At github.com:
|
||||||
|
- Navigate to your Nix fork > Settings > Secrets > Actions > New repository secret.
|
||||||
|
- Name the secret `CACHIX_AUTH_TOKEN`.
|
||||||
|
- Paste the copied value of the Cachix cache auth token.
|
||||||
|
|
||||||
|
#### Using the CI-generated installer for manual testing
|
||||||
|
|
||||||
|
After the CI run completes, you can check the output to extract the installer URL:
|
||||||
|
1. Click into the detailed view of the CI run.
|
||||||
|
2. Click into any `installer_test` run (the URL you're here to extract will be the same in all of them).
|
||||||
|
3. Click into the `Run cachix/install-nix-action@v...` step and click the detail triangle next to the first log line (it will also be `Run cachix/install-nix-action@v...`)
|
||||||
|
4. Copy the value of `install_url`
|
||||||
|
5. To generate an install command, plug this `install_url` and your GitHub username into this template:
|
||||||
|
|
||||||
|
```console
|
||||||
|
sh <(curl -L <install_url>) --tarball-url-prefix https://<github-username>-nix-install-tests.cachix.org/serve
|
||||||
|
```
|
||||||
|
|
||||||
|
<!-- #### Manually generating test installers
|
||||||
|
|
||||||
|
There's obviously a manual way to do this, and it's still the only way for
|
||||||
|
platforms that lack GA runners.
|
||||||
|
|
||||||
|
I did do this back in Fall 2020 (before the GA approach encouraged here). I'll
|
||||||
|
sketch what I recall in case it encourages someone to fill in detail, but: I
|
||||||
|
didn't know what I was doing at the time and had to fumble/ask around a lot--
|
||||||
|
so I don't want to uphold any of it as "right". It may have been dumb or
|
||||||
|
the _hard_ way from the getgo. Fundamentals may have changed since.
|
||||||
|
|
||||||
|
Here's the build command I used to do this on and for x86_64-darwin:
|
||||||
|
nix build --out-link /tmp/foo ".#checks.x86_64-darwin.binaryTarball"
|
||||||
|
|
||||||
|
I used the stable out-link to make it easier to script the next steps:
|
||||||
|
link=$(readlink /tmp/foo)
|
||||||
|
cp $link/*-darwin.tar.xz ~/somewheres
|
||||||
|
|
||||||
|
I've lost the last steps and am just going from memory:
|
||||||
|
|
||||||
|
From here, I think I had to extract and modify the `install` script to point
|
||||||
|
it at this tarball (which I scped to my own site, but it might make more sense
|
||||||
|
to just share them locally). I extracted this script once and then just
|
||||||
|
search/replaced in it for each new build.
|
||||||
|
|
||||||
|
The installer now supports a `--tarball-url-prefix` flag which _may_ have
|
||||||
|
solved this need?
|
||||||
|
-->
|
||||||
|
|
||||||
|
### Checking links in the manual
|
||||||
|
|
||||||
|
The build checks for broken internal links.
|
||||||
|
This happens late in the process, so `nix build` is not suitable for iterating.
|
||||||
|
To build the manual incrementally, run:
|
||||||
|
|
||||||
|
```console
|
||||||
|
make html -j $NIX_BUILD_CORES
|
||||||
|
```
|
||||||
|
|
||||||
|
In order to reflect changes to the [Makefile], clear all generated files before re-building:
|
||||||
|
|
||||||
|
[Makefile]: https://github.com/NixOS/nix/blob/master/doc/manual/local.mk
|
||||||
|
|
||||||
|
```console
|
||||||
|
rm $(git ls-files doc/manual/ -o | grep -F '.md') && rmdir doc/manual/src/command-ref/new-cli && make html -j $NIX_BUILD_CORES
|
||||||
|
```
|
||||||
|
|
||||||
|
[`mdbook-linkcheck`] does not implement checking [URI fragments] yet.
|
||||||
|
|
||||||
|
[`mdbook-linkcheck`]: https://github.com/Michael-F-Bryan/mdbook-linkcheck
|
||||||
|
[URI fragments]: https://en.m.wikipedia.org/wiki/URI_fragment
|
||||||
|
|
||||||
|
#### `@docroot@` variable
|
||||||
|
|
||||||
|
`@docroot@` provides a base path for links that occur in reusable snippets or other documentation that doesn't have a base path of its own.
|
||||||
|
|
||||||
|
If a broken link occurs in a snippet that was inserted into multiple generated files in different directories, use `@docroot@` to reference the `doc/manual/src` directory.
|
||||||
|
|
||||||
|
If the `@docroot@` literal appears in an error message from the `mdbook-linkcheck` tool, the `@docroot@` replacement needs to be applied to the generated source file that mentions it.
|
||||||
|
See existing `@docroot@` logic in the [Makefile].
|
||||||
|
Regular markdown files used for the manual have a base path of their own and they can use relative paths instead of `@docroot@`.
|
||||||
|
|
|
@ -1,26 +1,104 @@
|
||||||
# Glossary
|
# Glossary
|
||||||
|
|
||||||
- [derivation]{#gloss-derivation}\
|
- [derivation]{#gloss-derivation}\
|
||||||
A description of a build action. The result of a derivation is a
|
A description of a build task. The result of a derivation is a
|
||||||
store object. Derivations are typically specified in Nix expressions
|
store object. Derivations are typically specified in Nix expressions
|
||||||
using the [`derivation` primitive](language/derivations.md). These are
|
using the [`derivation` primitive](./language/derivations.md). These are
|
||||||
translated into low-level *store derivations* (implicitly by
|
translated into low-level *store derivations* (implicitly by
|
||||||
`nix-env` and `nix-build`, or explicitly by `nix-instantiate`).
|
`nix-env` and `nix-build`, or explicitly by `nix-instantiate`).
|
||||||
|
|
||||||
|
[derivation]: #gloss-derivation
|
||||||
|
|
||||||
|
- [store derivation]{#gloss-store-derivation}\
|
||||||
|
A [derivation] represented as a `.drv` file in the [store].
|
||||||
|
It has a [store path], like any [store object].
|
||||||
|
|
||||||
|
Example: `/nix/store/g946hcz4c8mdvq2g8vxx42z51qb71rvp-git-2.38.1.drv`
|
||||||
|
|
||||||
|
See [`nix show-derivation`](./command-ref/new-cli/nix3-show-derivation.md) (experimental) for displaying the contents of store derivations.
|
||||||
|
|
||||||
|
[store derivation]: #gloss-store-derivation
|
||||||
|
|
||||||
|
- [realise]{#gloss-realise}, realisation\
|
||||||
|
Ensure a [store path] is [valid][validity].
|
||||||
|
|
||||||
|
This means either running the `builder` executable as specified in the corresponding [derivation] or fetching a pre-built [store object] from a [substituter].
|
||||||
|
|
||||||
|
See [`nix-build`](./command-ref/nix-build.md) and [`nix-store --realise`](./command-ref/nix-store.md#operation---realise).
|
||||||
|
|
||||||
|
See [`nix build`](./command-ref/new-cli/nix3-build.md) (experimental).
|
||||||
|
|
||||||
|
[realise]: #gloss-realise
|
||||||
|
|
||||||
|
- [content-addressed derivation]{#gloss-content-addressed-derivation}\
|
||||||
|
A derivation which has the
|
||||||
|
[`__contentAddressed`](./language/advanced-attributes.md#adv-attr-__contentAddressed)
|
||||||
|
attribute set to `true`.
|
||||||
|
|
||||||
|
- [fixed-output derivation]{#gloss-fixed-output-derivation}\
|
||||||
|
A derivation which includes the
|
||||||
|
[`outputHash`](./language/advanced-attributes.md#adv-attr-outputHash) attribute.
|
||||||
|
|
||||||
- [store]{#gloss-store}\
|
- [store]{#gloss-store}\
|
||||||
The location in the file system where store objects live. Typically
|
The location in the file system where store objects live. Typically
|
||||||
`/nix/store`.
|
`/nix/store`.
|
||||||
|
|
||||||
|
From the perspective of the location where Nix is
|
||||||
|
invoked, the Nix store can be referred to
|
||||||
|
as a "_local_" or a "_remote_" one:
|
||||||
|
|
||||||
|
+ A *local store* exists on the filesystem of
|
||||||
|
the machine where Nix is invoked. You can use other
|
||||||
|
local stores by passing the `--store` flag to the
|
||||||
|
`nix` command. Local stores can be used for building derivations.
|
||||||
|
|
||||||
|
+ A *remote store* exists anywhere other than the
|
||||||
|
local filesystem. One example is the `/nix/store`
|
||||||
|
directory on another machine, accessed via `ssh` or
|
||||||
|
served by the `nix-serve` Perl script.
|
||||||
|
|
||||||
|
[store]: #gloss-store
|
||||||
|
|
||||||
|
- [chroot store]{#gloss-chroot-store}\
|
||||||
|
A local store whose canonical path is anything other than `/nix/store`.
|
||||||
|
|
||||||
|
- [binary cache]{#gloss-binary-cache}\
|
||||||
|
A *binary cache* is a Nix store which uses a different format: its
|
||||||
|
metadata and signatures are kept in `.narinfo` files rather than in a
|
||||||
|
Nix database. This different format simplifies serving store objects
|
||||||
|
over the network, but cannot host builds. Examples of binary caches
|
||||||
|
include S3 buckets and the [NixOS binary
|
||||||
|
cache](https://cache.nixos.org).
|
||||||
|
|
||||||
- [store path]{#gloss-store-path}\
|
- [store path]{#gloss-store-path}\
|
||||||
The location in the file system of a store object, i.e., an
|
The location of a [store object] in the file system, i.e., an
|
||||||
immediate child of the Nix store directory.
|
immediate child of the Nix store directory.
|
||||||
|
|
||||||
|
Example: `/nix/store/a040m110amc4h71lds2jmr8qrkj2jhxd-git-2.38.1`
|
||||||
|
|
||||||
|
[store path]: #gloss-store-path
|
||||||
|
|
||||||
- [store object]{#gloss-store-object}\
|
- [store object]{#gloss-store-object}\
|
||||||
A file that is an immediate child of the Nix store directory. These
|
A file that is an immediate child of the Nix store directory. These
|
||||||
can be regular files, but also entire directory trees. Store objects
|
can be regular files, but also entire directory trees. Store objects
|
||||||
can be sources (objects copied from outside of the store),
|
can be sources (objects copied from outside of the store),
|
||||||
derivation outputs (objects produced by running a build action), or
|
derivation outputs (objects produced by running a build task), or
|
||||||
derivations (files describing a build action).
|
derivations (files describing a build task).
|
||||||
|
|
||||||
|
[store object]: #gloss-store-object
|
||||||
|
|
||||||
|
- [input-addressed store object]{#gloss-input-addressed-store-object}\
|
||||||
|
A store object produced by building a
|
||||||
|
non-[content-addressed](#gloss-content-addressed-derivation),
|
||||||
|
non-[fixed-output](#gloss-fixed-output-derivation)
|
||||||
|
derivation.
|
||||||
|
|
||||||
|
- [output-addressed store object]{#gloss-output-addressed-store-object}\
|
||||||
|
A store object whose store path hashes its content. This
|
||||||
|
includes derivations, the outputs of
|
||||||
|
[content-addressed derivations](#gloss-content-addressed-derivation),
|
||||||
|
and the outputs of
|
||||||
|
[fixed-output derivations](#gloss-fixed-output-derivation).
|
||||||
|
|
||||||
- [substitute]{#gloss-substitute}\
|
- [substitute]{#gloss-substitute}\
|
||||||
A substitute is a command invocation stored in the Nix database that
|
A substitute is a command invocation stored in the Nix database that
|
||||||
|
@ -29,6 +107,13 @@
|
||||||
store object by downloading a pre-built version of the store object
|
store object by downloading a pre-built version of the store object
|
||||||
from some server.
|
from some server.
|
||||||
|
|
||||||
|
- [substituter]{#gloss-substituter}\
|
||||||
|
A *substituter* is an additional store from which Nix will
|
||||||
|
copy store objects it doesn't have. For details, see the
|
||||||
|
[`substituters` option](./command-ref/conf-file.md#conf-substituters).
|
||||||
|
|
||||||
|
[substituter]: #gloss-substituter
|
||||||
|
|
||||||
- [purity]{#gloss-purity}\
|
- [purity]{#gloss-purity}\
|
||||||
The assumption that equal Nix derivations when run always produce
|
The assumption that equal Nix derivations when run always produce
|
||||||
the same output. This cannot be guaranteed in general (e.g., a
|
the same output. This cannot be guaranteed in general (e.g., a
|
||||||
|
@ -71,23 +156,31 @@
|
||||||
to path `Q`, then `Q` is in the closure of `P`. Further, if `Q`
|
to path `Q`, then `Q` is in the closure of `P`. Further, if `Q`
|
||||||
references `R` then `R` is also in the closure of `P`.
|
references `R` then `R` is also in the closure of `P`.
|
||||||
|
|
||||||
|
[closure]: #gloss-closure
|
||||||
|
|
||||||
- [output path]{#gloss-output-path}\
|
- [output path]{#gloss-output-path}\
|
||||||
A store path produced by a derivation.
|
A [store path] produced by a [derivation].
|
||||||
|
|
||||||
|
[output path]: #gloss-output-path
|
||||||
|
|
||||||
- [deriver]{#gloss-deriver}\
|
- [deriver]{#gloss-deriver}\
|
||||||
The deriver of an *output path* is the store
|
The [store derivation] that produced an [output path].
|
||||||
derivation that built it.
|
|
||||||
|
|
||||||
- [validity]{#gloss-validity}\
|
- [validity]{#gloss-validity}\
|
||||||
A store path is considered *valid* if it exists in the file system,
|
A store path is valid if all [store object]s in its [closure] can be read from the [store].
|
||||||
is listed in the Nix database as being valid, and if all paths in
|
|
||||||
its closure are also valid.
|
For a local store, this means:
|
||||||
|
- The store path leads to an existing [store object] in that [store].
|
||||||
|
- The store path is listed in the Nix database as being valid.
|
||||||
|
- All paths in the store path's [closure] are valid.
|
||||||
|
|
||||||
|
[validity]: #gloss-validity
|
||||||
|
|
||||||
- [user environment]{#gloss-user-env}\
|
- [user environment]{#gloss-user-env}\
|
||||||
An automatically generated store object that consists of a set of
|
An automatically generated store object that consists of a set of
|
||||||
symlinks to “active” applications, i.e., other store paths. These
|
symlinks to “active” applications, i.e., other store paths. These
|
||||||
are generated automatically by
|
are generated automatically by
|
||||||
[`nix-env`](command-ref/nix-env.md). See *profiles*.
|
[`nix-env`](./command-ref/nix-env.md). See *profiles*.
|
||||||
|
|
||||||
- [profile]{#gloss-profile}\
|
- [profile]{#gloss-profile}\
|
||||||
A symlink to the current *user environment* of a user, e.g.,
|
A symlink to the current *user environment* of a user, e.g.,
|
||||||
|
@ -98,7 +191,18 @@
|
||||||
store. It can contain regular files, directories and symbolic
|
store. It can contain regular files, directories and symbolic
|
||||||
links. NARs are generated and unpacked using `nix-store --dump`
|
links. NARs are generated and unpacked using `nix-store --dump`
|
||||||
and `nix-store --restore`.
|
and `nix-store --restore`.
|
||||||
|
|
||||||
- [`∅`]{#gloss-emtpy-set}\
|
- [`∅`]{#gloss-emtpy-set}\
|
||||||
The empty set symbol. In the context of profile history, this denotes a package is not present in a particular version of the profile.
|
The empty set symbol. In the context of profile history, this denotes a package is not present in a particular version of the profile.
|
||||||
|
|
||||||
- [`ε`]{#gloss-epsilon}\
|
- [`ε`]{#gloss-epsilon}\
|
||||||
The epsilon symbol. In the context of a package, this means the version is empty. More precisely, the derivation does not have a version attribute.
|
The epsilon symbol. In the context of a package, this means the version is empty. More precisely, the derivation does not have a version attribute.
|
||||||
|
|
||||||
|
- [string interpolation]{#gloss-string-interpolation}\
|
||||||
|
Expanding expressions enclosed in `${ }` within a [string], [path], or [attribute name].
|
||||||
|
|
||||||
|
See [String interpolation](./language/string-interpolation.md) for details.
|
||||||
|
|
||||||
|
[string]: ./language/values.md#type-string
|
||||||
|
[path]: ./language/values.md#type-path
|
||||||
|
[attribute name]: ./language/values.md#attribute-set
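
For instance, a minimal sketch of string interpolation in a Nix expression:

```nix
let name = "Nix"; in "hello ${name}"   # evaluates to "hello Nix"
```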
|
||||||
|
|
|
@ -88,19 +88,51 @@ extension. The installer will also create `/etc/profile.d/nix.sh`.
|
||||||
|
|
||||||
### Linux
|
### Linux
|
||||||
|
|
||||||
```console
|
If you are on Linux with systemd:
|
||||||
sudo rm -rf /etc/profile/nix.sh /etc/nix /nix ~root/.nix-profile ~root/.nix-defexpr ~root/.nix-channels ~/.nix-profile ~/.nix-defexpr ~/.nix-channels
|
|
||||||
|
|
||||||
# If you are on Linux with systemd, you will need to run:
|
1. Remove the Nix daemon service:
|
||||||
sudo systemctl stop nix-daemon.socket
|
|
||||||
sudo systemctl stop nix-daemon.service
|
```console
|
||||||
sudo systemctl disable nix-daemon.socket
|
sudo systemctl stop nix-daemon.service
|
||||||
sudo systemctl disable nix-daemon.service
|
sudo systemctl disable nix-daemon.socket nix-daemon.service
|
||||||
sudo systemctl daemon-reload
|
sudo systemctl daemon-reload
|
||||||
|
```
|
||||||
|
|
||||||
|
1. Remove systemd service files:
|
||||||
|
|
||||||
|
```console
|
||||||
|
sudo rm /etc/systemd/system/nix-daemon.service /etc/systemd/system/nix-daemon.socket
|
||||||
|
```
|
||||||
|
|
||||||
|
1. The installer script uses systemd-tmpfiles to create the socket directory.
|
||||||
|
You may also want to remove the configuration for that:
|
||||||
|
|
||||||
|
```console
|
||||||
|
sudo rm /etc/tmpfiles.d/nix-daemon.conf
|
||||||
|
```
|
||||||
|
|
||||||
|
Remove files created by Nix:
|
||||||
|
|
||||||
|
```console
|
||||||
|
sudo rm -rf /nix /etc/nix /etc/profile/nix.sh ~root/.nix-profile ~root/.nix-defexpr ~root/.nix-channels ~/.nix-profile ~/.nix-defexpr ~/.nix-channels
|
||||||
```
|
```
|
||||||
|
|
||||||
There may also be references to Nix in `/etc/profile`, `/etc/bashrc`,
|
Remove build users and their group:
|
||||||
and `/etc/zshrc` which you may remove.
|
|
||||||
|
```console
|
||||||
|
for i in $(seq 1 32); do
|
||||||
|
sudo userdel nixbld$i
|
||||||
|
done
|
||||||
|
sudo groupdel nixbld
|
||||||
|
```
|
||||||
|
|
||||||
|
There may also be references to Nix in
|
||||||
|
|
||||||
|
- `/etc/profile`
|
||||||
|
- `/etc/bashrc`
|
||||||
|
- `/etc/zshrc`
|
||||||
|
|
||||||
|
which you may remove.
|
||||||
|
|
||||||
### macOS
|
### macOS
|
||||||
|
|
||||||
|
|
|
@ -104,7 +104,7 @@ a currently running program.
|
||||||
|
|
||||||
Packages are built from _Nix expressions_, which is a simple
|
Packages are built from _Nix expressions_, which is a simple
|
||||||
functional language. A Nix expression describes everything that goes
|
functional language. A Nix expression describes everything that goes
|
||||||
into a package build action (a “derivation”): other packages, sources,
|
into a package build task (a “derivation”): other packages, sources,
|
||||||
the build script, environment variables for the build script, etc.
|
the build script, environment variables for the build script, etc.
|
||||||
Nix tries very hard to ensure that Nix expressions are
|
Nix tries very hard to ensure that Nix expressions are
|
||||||
_deterministic_: building a Nix expression twice should yield the same
|
_deterministic_: building a Nix expression twice should yield the same
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
# Derivations
|
# Derivations
|
||||||
|
|
||||||
The most important built-in function is `derivation`, which is used to
|
The most important built-in function is `derivation`, which is used to
|
||||||
describe a single derivation (a build action). It takes as input a set,
|
describe a single derivation (a build task). It takes as input a set,
|
||||||
the attributes of which specify the inputs of the build.
|
the attributes of which specify the inputs of the build.
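
A minimal sketch of such a call (the builder and its arguments here are only illustrative):

```nix
derivation {
  name = "hello-example";
  system = builtins.currentSystem;
  builder = "/bin/sh";              # assumes a POSIX shell at this path
  args = [ "-c" "echo hello > $out" ];
}
```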
|
||||||
|
|
||||||
- There must be an attribute named [`system`]{#attr-system} whose value must be a
|
- There must be an attribute named [`system`]{#attr-system} whose value must be a
|
||||||
|
|
|
@ -31,3 +31,551 @@ The Nix language is

Type errors are only detected when expressions are evaluated.

# Overview

This is an incomplete overview of language features, by example.

| Example | Description |
|---------|-------------|
| *Basic values* | |
| `"hello world"` | A string |
| <code>''<br>&nbsp;&nbsp;multi<br>&nbsp;&nbsp;line<br>&nbsp;&nbsp;string<br>''</code> | A multi-line string. Strips common prefixed whitespace. Evaluates to `"multi\n line\n string"`. |
| `"hello ${ { a = "world"; }.a }"` <br> `"1 2 ${toString 3}"` <br> `"${pkgs.bash}/bin/sh"` | String interpolation (expands to `"hello world"`, `"1 2 3"`, `"/nix/store/<hash>-bash-<version>/bin/sh"`) |
| `true`, `false` | Booleans |
| `null` | Null value |
| `123` | An integer |
| `3.141` | A floating point number |
| `/etc` | An absolute path |
| `./foo.png` | A path relative to the file containing this Nix expression |
| `~/.config` | A home path. Evaluates to `"<user's home directory>/.config"`. |
| `<nixpkgs>` | Search path for Nix files. Value determined by [`$NIX_PATH` environment variable](../command-ref/env-common.md#env-NIX_PATH). |
| *Compound values* | |
| `{ x = 1; y = 2; }` | A set with attributes named `x` and `y` |
| `{ foo.bar = 1; }` | A nested set, equivalent to `{ foo = { bar = 1; }; }` |
| `rec { x = "foo"; y = x + "bar"; }` | A recursive set, equivalent to `{ x = "foo"; y = "foobar"; }` |
| `[ "foo" "bar" "baz" ]` <br> `[ 1 2 3 ]` <br> `[ (f 1) { a = 1; b = 2; } [ "c" ] ]` | Lists with three elements. |
| *Operators* | |
| `"foo" + "bar"` | String concatenation |
| `1 + 2` | Integer addition |
| `"foo" == "f" + "oo"` | Equality test (evaluates to `true`) |
| `"foo" != "bar"` | Inequality test (evaluates to `true`) |
| `!true` | Boolean negation |
| `{ x = 1; y = 2; }.x` | Attribute selection (evaluates to `1`) |
| `{ x = 1; y = 2; }.z or 3` | Attribute selection with default (evaluates to `3`) |
| `{ x = 1; y = 2; } // { z = 3; }` | Merge two sets (attributes in the right-hand set taking precedence) |
| *Control structures* | |
| `if 1 + 1 == 2 then "yes!" else "no!"` | Conditional expression |
| `assert 1 + 1 == 2; "yes!"` | Assertion check (evaluates to `"yes!"`). |
| `let x = "foo"; y = "bar"; in x + y` | Variable definition |
| `with builtins; head [ 1 2 3 ]` | Add all attributes from the given set to the scope (evaluates to `1`) |
| *Functions (lambdas)* | |
| `x: x + 1` | A function that expects an integer and returns it increased by 1 |
| `x: y: x + y` | Curried function, equivalent to `x: (y: x + y)`. Can be used like a function that takes two arguments and returns their sum. |
| `(x: x + 1) 100` | A function call (evaluates to 101) |
| `let inc = x: x + 1; in inc (inc (inc 100))` | A function bound to a variable and subsequently called by name (evaluates to 103) |
| `{ x, y }: x + y` | A function that expects a set with required attributes `x` and `y` and concatenates them |
| `{ x, y ? "bar" }: x + y` | A function that expects a set with required attribute `x` and optional `y`, using `"bar"` as default value for `y` |
| `{ x, y, ... }: x + y` | A function that expects a set with required attributes `x` and `y` and ignores any other attributes |
| `{ x, y } @ args: x + y` <br> `args @ { x, y }: x + y` | A function that expects a set with required attributes `x` and `y`, and binds the whole set to `args` |
| *Built-in functions* | |
| `import ./foo.nix` | Load and return Nix expression in given file |
| `map (x: x + x) [ 1 2 3 ]` | Apply a function to every element of a list (evaluates to `[ 2 4 6 ]`) |
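The following short expression is a sketch added for orientation only (it is not part of the table above); it combines several of these constructs — a `let` binding, a function with a default argument, string interpolation, and `map`:

```nix
let
  # a function taking a set with a required `name` and an optional `greeting`
  greet = { name, greeting ? "hello" }: "${greeting}, ${name}!";
in map (n: greet { name = n; }) [ "world" "nix" ]
# evaluates to [ "hello, world!" "hello, nix!" ]
```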
@ -1,28 +1,167 @@
# Operators

-The table below lists the operators in the Nix language, in
-order of precedence (from strongest to weakest binding).
-[…the old combined operator table (Name, Syntax, Associativity, Description, Precedence) is removed here; the table and per-operator sections below supersede it…]

| Name                                    | Syntax                                     | Associativity | Precedence |
|-----------------------------------------|--------------------------------------------|---------------|------------|
| [Attribute selection]                   | *attrset* `.` *attrpath* \[ `or` *expr* \] | none          | 1          |
| Function application                    | *func* *expr*                              | left          | 2          |
| [Arithmetic negation][arithmetic]       | `-` *number*                               | none          | 3          |
| [Has attribute]                         | *attrset* `?` *attrpath*                   | none          | 4          |
| List concatenation                      | *list* `++` *list*                         | right         | 5          |
| [Multiplication][arithmetic]            | *number* `*` *number*                      | left          | 6          |
| [Division][arithmetic]                  | *number* `/` *number*                      | left          | 6          |
| [Subtraction][arithmetic]               | *number* `-` *number*                      | left          | 7          |
| [Addition][arithmetic]                  | *number* `+` *number*                      | left          | 7          |
| [String concatenation]                  | *string* `+` *string*                      | left          | 7          |
| [Path concatenation]                    | *path* `+` *path*                          | left          | 7          |
| [Path and string concatenation]         | *path* `+` *string*                        | left          | 7          |
| [String and path concatenation]         | *string* `+` *path*                        | left          | 7          |
| Logical negation (`NOT`)                | `!` *bool*                                 | none          | 8          |
| [Update]                                | *attrset* `//` *attrset*                   | right         | 9          |
| [Less than][Comparison]                 | *expr* `<` *expr*                          | none          | 10         |
| [Less than or equal to][Comparison]     | *expr* `<=` *expr*                         | none          | 10         |
| [Greater than][Comparison]              | *expr* `>` *expr*                          | none          | 10         |
| [Greater than or equal to][Comparison]  | *expr* `>=` *expr*                         | none          | 10         |
| [Equality]                              | *expr* `==` *expr*                         | none          | 11         |
| Inequality                              | *expr* `!=` *expr*                         | none          | 11         |
| Logical conjunction (`AND`)             | *bool* `&&` *bool*                         | left          | 12         |
| Logical disjunction (`OR`)              | *bool* `\|\|` *bool*                       | left          | 13         |
| [Logical implication]                   | *bool* `->` *bool*                         | none          | 14         |

[string]: ./values.md#type-string
[path]: ./values.md#type-path
[number]: ./values.md#type-number
[list]: ./values.md#list
[attribute set]: ./values.md#attribute-set

## Attribute selection

Select the attribute denoted by attribute path *attrpath* from [attribute set] *attrset*.
If the attribute doesn’t exist, return *expr* if provided, otherwise abort evaluation.

<!-- FIXME: the following should go into its own language syntax section, but that needs more work to fit in well -->

An attribute path is a dot-separated list of attribute names.
An attribute name can be an identifier or a string.

> *attrpath* = *name* [ `.` *name* ]...
> *name* = *identifier* | *string*
> *identifier* ~ `[a-zA-Z_][a-zA-Z0-9_'-]*`

[Attribute selection]: #attribute-selection

## Has attribute

> *attrset* `?` *attrpath*

Test whether [attribute set] *attrset* contains the attribute denoted by *attrpath*.
The result is a [Boolean] value.

[Boolean]: ./values.md#type-boolean

[Has attribute]: #has-attribute

## Arithmetic

Numbers are type-compatible:
Pure integer operations will always return integers, whereas any operation involving at least one floating point number returns a floating point number.

See also [Comparison] and [Equality].

The `+` operator is overloaded to also work on strings and paths.

[arithmetic]: #arithmetic

## String concatenation

> *string* `+` *string*

Concatenate two [string]s and merge their string contexts.

[String concatenation]: #string-concatenation

## Path concatenation

> *path* `+` *path*

Concatenate two [path]s.
The result is a path.

[Path concatenation]: #path-concatenation

## Path and string concatenation

> *path* + *string*

Concatenate *[path]* with *[string]*.
The result is a path.

> **Note**
>
> The string must not have a string context that refers to a [store path].

[Path and string concatenation]: #path-and-string-concatenation

## String and path concatenation

> *string* + *path*

Concatenate *[string]* with *[path]*.
The result is a string.

> **Important**
>
> The file or directory at *path* must exist and is copied to the [store].
> The path appears in the result as the corresponding [store path].

[store path]: ../glossary.md#gloss-store-path
[store]: ../glossary.md#gloss-store

[String and path concatenation]: #string-and-path-concatenation

## Update

> *attrset1* // *attrset2*

Update [attribute set] *attrset1* with names and values from *attrset2*.

The returned attribute set will contain all of the attributes in *attrset1* and *attrset2*.
If an attribute name is present in both, the value from *attrset2* is taken.

[Update]: #update

## Comparison

Comparison is

- [arithmetic] for [number]s
- lexicographic for [string]s and [path]s
- item-wise lexicographic for [list]s:
  elements at the same index in both lists are compared according to their type and skipped if they are equal.

All comparison operators are implemented in terms of `<`, and the following equivalencies hold:

| comparison   | implementation        |
|--------------|-----------------------|
| *a* `<=` *b* | `! (` *b* `<` *a* `)` |
| *a* `>` *b*  | *b* `<` *a*           |
| *a* `>=` *b* | `! (` *a* `<` *b* `)` |

[Comparison]: #comparison

## Equality

- [Attribute sets][attribute set] and [list]s are compared recursively, and therefore are fully evaluated.
- Comparison of [function]s always returns `false`.
- Numbers are type-compatible, see [arithmetic] operators.
- Floating point numbers only differ up to a limited precision.

[function]: ./constructs.md#functions

[Equality]: #equality

## Logical implication

Equivalent to `!`*b1* `||` *b2*.

[Logical implication]: #logical-implication
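To make the behaviour of a few of these operators concrete, here is a small self-contained expression one could paste into `nix repl`; it is an illustrative sketch, not part of the manual diff, and all names in it are made up:

```nix
let
  base     = { x = 1; y = 2; };
  override = { y = 20; z = 30; };
  merged   = base // override;               # update: { x = 1; y = 20; z = 30; }
in {
  hasZ    = merged ? z;                      # has attribute: true
  missing = merged.w or "fallback";          # selection with default: "fallback"
  implies = (merged ? z) -> merged.z > 0;    # logical implication: true
  listCmp = [ 1 2 ] < [ 1 3 ];               # item-wise lexicographic comparison: true
}
```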
82  doc/manual/src/language/string-interpolation.md  Normal file

@ -0,0 +1,82 @@
# String interpolation

String interpolation is a language feature where a [string], [path], or [attribute name] can contain expressions enclosed in `${ }` (dollar-sign with curly brackets).

Such a string is an *interpolated string*, and an expression inside is an *interpolated expression*.

Interpolated expressions must evaluate to one of the following:

- a [string]
- a [path]
- a [derivation]

[string]: ./values.md#type-string
[path]: ./values.md#type-path
[attribute name]: ./values.md#attribute-set
[derivation]: ../glossary.md#gloss-derivation

## Examples

### String

Rather than writing

```nix
"--with-freetype2-library=" + freetype + "/lib"
```

(where `freetype` is a [derivation]), you can instead write

```nix
"--with-freetype2-library=${freetype}/lib"
```

The latter is automatically translated to the former.

A more complicated example (from the Nix expression for [Qt](http://www.trolltech.com/products/qt)):

```nix
configureFlags = "
  -system-zlib -system-libpng -system-libjpeg
  ${if openglSupport then "-dlopen-opengl
    -L${mesa}/lib -I${mesa}/include
    -L${libXmu}/lib -I${libXmu}/include" else ""}
  ${if threadSupport then "-thread" else "-no-thread"}
";
```

Note that Nix expressions and strings can be arbitrarily nested;
in this case the outer string contains various interpolated expressions that themselves contain strings (e.g., `"-thread"`), some of which in turn contain interpolated expressions (e.g., `${mesa}`).

### Path

Rather than writing

```nix
./. + "/" + foo + "-" + bar + ".nix"
```

or

```nix
./. + "/${foo}-${bar}.nix"
```

you can instead write

```nix
./${foo}-${bar}.nix
```

### Attribute name

Attribute names can be created dynamically with string interpolation:

```nix
let name = "foo"; in
{
  ${name} = "bar";
}
```

This evaluates to `{ foo = "bar"; }`.
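As a further sketch (not part of the new file): interpolated expressions only accept strings, paths, and derivations, so other values have to be converted explicitly, for example with `toString`:

```nix
let
  host = "avalon";
  port = 8080;
in "http://${host}:${toString port}/"
# evaluates to "http://avalon:8080/"
```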
@ -13,41 +13,9 @@
returns and tabs can be written as `\n`, `\r` and `\t`,
respectively.

-You can include the result of an expression into a string by
-enclosing it in `${...}`, a feature known as *antiquotation*. The
-enclosed expression must evaluate to something that can be coerced
-into a string (meaning that it must be a string, a path, or a
-derivation). For instance, rather than writing
-[…the freetype and Qt interpolation examples formerly shown here are removed; they now live in string-interpolation.md…]
+You can include the results of other expressions into a string by enclosing them in `${ }`, a feature known as [string interpolation].
+
+[string interpolation]: ./string-interpolation.md

The second way to write string literals is as an *indented string*,
which is enclosed between pairs of *double single-quotes*, like so:

@ -75,7 +43,7 @@
Note that the whitespace and newline following the opening `''` is
ignored if there is no non-whitespace text on the initial line.

-Antiquotation (`${expr}`) is supported in indented strings.
+Indented strings support [string interpolation].

Since `${` and `''` have special meaning in indented strings, you
need a way to quote them. `$` can be escaped by prefixing it with

@ -117,9 +85,10 @@
Numbers, which can be *integers* (like `123`) or *floating point*
(like `123.43` or `.27e13`).

-Numbers are type-compatible: pure integer operations will always
-return integers, whereas any operation involving at least one
-floating point number will have a floating point number as a result.
+See [arithmetic] and [comparison] operators for semantics.
+
+[arithmetic]: ./operators.md#arithmetic
+[comparison]: ./operators.md#comparison

- <a id="type-path" href="#type-path">Path</a>

@ -143,12 +112,23 @@
environment variable `NIX_PATH` will be searched for the given file
or directory name.

-Antiquotation is supported in any paths except those in angle brackets.
-`./${foo}-${bar}.nix` is a more convenient way of writing
-`./. + "/" + foo + "-" + bar + ".nix"` or `./. + "/${foo}-${bar}.nix"`. At
-least one slash must appear *before* any antiquotations for this to be
-recognized as a path. `a.${foo}/b.${bar}` is a syntactically valid division
-operation. `./a.${foo}/b.${bar}` is a path.
+When an [interpolated string][string interpolation] evaluates to a path, the path is first copied into the Nix store and the resulting string is the [store path] of the newly created [store object].
+
+[store path]: ../glossary.md#gloss-store-path
+[store object]: ../glossary.md#gloss-store-object
+
+For instance, evaluating `"${./foo.txt}"` will cause `foo.txt` in the current directory to be copied into the Nix store and result in the string `"/nix/store/<hash>-foo.txt"`.
+
+Note that the Nix language assumes that all input files will remain _unchanged_ while evaluating a Nix expression.
+For example, assume you used a file path in an interpolated string during a `nix repl` session.
+Later in the same session, after having changed the file contents, evaluating the interpolated string with the file path again might not return a new store path, since Nix might not re-read the file contents.
+
+Paths themselves, except those in angle brackets (`< >`), support [string interpolation].
+
+At least one slash (`/`) must appear *before* any interpolated expression for the result to be recognized as a path.
+
+`a.${foo}/b.${bar}` is a syntactically valid division operation.
+`./a.${foo}/b.${bar}` is a path.

- <a id="type-boolean" href="#type-boolean">Boolean</a>

@ -221,23 +201,33 @@ will evaluate to `"Xyzzy"` because there is no `c` attribute in the set.
You can use arbitrary double-quoted strings as attribute names:

```nix
-{ "foo ${bar}" = 123; "nix-1.0" = 456; }."foo ${bar}"
+{ "$!@#?" = 123; }."$!@#?"
```

-This will evaluate to `123` (Assuming `bar` is antiquotable). In the
-case where an attribute name is just a single antiquotation, the quotes
-can be dropped:
-
-```nix
-{ foo = 123; }.${bar} or 456
-```
-
-This will evaluate to `123` if `bar` evaluates to `"foo"` when coerced
-to a string and `456` otherwise (again assuming `bar` is antiquotable).
+```nix
+let bar = "bar"; in
+{ "foo ${bar}" = 123; }."foo ${bar}"
+```
+
+Both will evaluate to `123`.
+
+Attribute names support [string interpolation]:
+
+```nix
+let bar = "foo"; in
+{ foo = 123; }.${bar}
+```
+
+```nix
+let bar = "foo"; in
+{ ${bar} = 123; }.foo
+```
+
+Both will evaluate to `123`.

In the special case where an attribute name inside of a set declaration
-evaluates to `null` (which is normally an error, as `null` is not
-antiquotable), that attribute is simply not added to the set:
+evaluates to `null` (which is normally an error, as `null` cannot be coerced to
+a string), that attribute is simply not added to the set:

```nix
{ ${if foo then "bar" else null} = true; }
```
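A complete, self-contained version of the `null` attribute name case above (an illustrative sketch, not part of the diff):

```nix
let foo = false; in
{ ${if foo then "bar" else null} = true; }
# evaluates to { } — the attribute is simply not added
```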
@ -32,13 +32,13 @@ which should print something like:
Priority: 30

On the client side, you can tell Nix to use your binary cache using
-`--option extra-binary-caches`, e.g.:
+`--substituters`, e.g.:

```console
-$ nix-env -iA nixpkgs.firefox --option extra-binary-caches http://avalon:8080/
+$ nix-env -iA nixpkgs.firefox --substituters http://avalon:8080/
```

-The option `extra-binary-caches` tells Nix to use this binary cache in
+The option `substituters` tells Nix to use this binary cache in
addition to your default caches, such as <https://cache.nixos.org>.
Thus, for any path in the closure of Firefox, Nix will first check if
the path is available on the server `avalon` or another binary cache.

@ -47,4 +47,4 @@ If not, it will fall back to building from source.
You can also tell Nix to always use your binary cache by adding a line
to the `nix.conf` configuration file like this:

-    binary-caches = http://avalon:8080/ https://cache.nixos.org/
+    substituters = http://avalon:8080/ https://cache.nixos.org/
43  doc/manual/src/release-notes/rl-2.12.md  Normal file

@ -0,0 +1,43 @@
# Release 2.12 (2022-12-06)

* On Linux, Nix can now run builds in a user namespace where they run
  as root (UID 0) and have 65,536 UIDs available.
  <!-- FIXME: move this to its own section about system features -->
  This is primarily useful for running containers such as `systemd-nspawn`
  inside a Nix build. For an example, see [`tests/systemd-nspawn/nix`][nspawn].

  [nspawn]: https://github.com/NixOS/nix/blob/67bcb99700a0da1395fa063d7c6586740b304598/tests/systemd-nspawn.nix

  A build can enable this by setting the derivation attribute:

  ```
  requiredSystemFeatures = [ "uid-range" ];
  ```

  The `uid-range` [system feature] requires the [`auto-allocate-uids`]
  setting to be enabled.

  [system feature]: ../command-ref/conf-file.md#conf-system-features

* Nix can now automatically pick UIDs for builds, removing the need to
  create `nixbld*` user accounts. See [`auto-allocate-uids`].

  [`auto-allocate-uids`]: ../command-ref/conf-file.md#conf-auto-allocate-uids

* On Linux, Nix has experimental support for running builds inside a
  cgroup. See
  [`use-cgroups`](../command-ref/conf-file.md#conf-use-cgroups).

* `<nix/fetchurl.nix>` now accepts an additional argument `impure` which
  defaults to `false`. If it is set to `true`, the `hash` and `sha256`
  arguments will be ignored and the resulting derivation will have
  `__impure` set to `true`, making it an impure derivation.

* If `builtins.readFile` is called on a file with context, then only
  the parts of the context that appear in the content of the file are
  retained. This avoids a lot of spurious errors where strings end up
  having a context just because they are read from a store path
  ([#7260](https://github.com/NixOS/nix/pull/7260)).

* `nix build --json` now prints some statistics about top-level
  derivations, such as CPU statistics when cgroups are enabled.
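A hedged sketch of what using the new `impure` argument might look like; the URL is a placeholder and not taken from the release notes:

```nix
# no hash/sha256 given: with impure = true they would be ignored anyway,
# and the resulting derivation carries __impure = true
import <nix/fetchurl.nix> {
  url = "https://example.org/latest/data.tar.gz";  # placeholder URL
  impure = true;
}
```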
44  doc/manual/src/release-notes/rl-2.13.md  Normal file

@ -0,0 +1,44 @@
# Release 2.13 (2023-01-17)

* The `repeat` and `enforce-determinism` options have been removed
  since they had been broken under many circumstances for a long time.

* You can now use [flake references] in the [old command line interface], e.g.

  [flake references]: ../command-ref/new-cli/nix3-flake.md#flake-references
  [old command line interface]: ../command-ref/main-commands.md

  ```shell-session
  # nix-build flake:nixpkgs -A hello
  # nix-build -I nixpkgs=flake:github:NixOS/nixpkgs/nixos-22.05 \
      '<nixpkgs>' -A hello
  # NIX_PATH=nixpkgs=flake:nixpkgs nix-build '<nixpkgs>' -A hello
  ```

* Instead of "antiquotation", the more common term [string interpolation](../language/string-interpolation.md) is now used consistently.
  Historical release notes were not changed.

* Error traces have been reworked to provide detailed explanations and more
  accurate error locations. A short excerpt of the trace is now shown by
  default when an error occurs.

* Allow explicitly selecting outputs in a store derivation installable, just like we can do with other sorts of installables.
  For example,
  ```shell-session
  # nix build /nix/store/gzaflydcr6sb3567hap9q6srzx8ggdgg-glibc-2.33-78.drv^dev
  ```
  now works just as
  ```shell-session
  # nix build nixpkgs#glibc^dev
  ```
  does already.

* On Linux, `nix develop` now sets the
  [*personality*](https://man7.org/linux/man-pages/man2/personality.2.html)
  for the development shell in the same way as the actual build of the
  derivation. This makes shells for `i686-linux` derivations work
  correctly on `x86_64-linux`.

* You can now disable the global flake registry by setting the `flake-registry`
  configuration option to an empty string. The same can be achieved at runtime with
  `--flake-registry ""`.
@ -1,2 +1,10 @@
# Release X.Y (202?-??-??)

* A new function `builtins.readFileType` is available. It is similar to
  `builtins.readDir` but acts on a single file or directory.

* The `builtins.readDir` function has been optimized when encountering not-yet-known
  file types from POSIX's `readdir`. In such cases the type of each file used to be
  discovered by making multiple syscalls. This change makes these lookups lazy, so
  they are only performed if the type attribute is actually used.
  This optimization affects a minority of filesystems and operating systems.
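A small usage sketch of the new builtin (assuming the paths exist; the return values mirror those of `builtins.readDir`):

```nix
# in a directory containing default.nix and a subdirectory src/
{
  file = builtins.readFileType ./default.nix;  # "regular"
  dir  = builtins.readFileType ./src;          # "directory"
}
```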
@ -5,6 +5,32 @@ rec {

concatStrings = concatStringsSep "";

replaceStringsRec = from: to: string:
  # recursively replace occurrences of `from` with `to` within `string`
  # example:
  #     replaceStringsRec "--" "-" "hello-----world"
  #     => "hello-world"
  let
    replaced = replaceStrings [ from ] [ to ] string;
  in
    if replaced == string then string else replaceStringsRec from to replaced;

squash = replaceStringsRec "\n\n\n" "\n\n";

trim = string:
  # trim trailing spaces and squash non-leading spaces
  let
    trimLine = line:
      let
        # separate leading spaces from the rest
        parts = split "(^ *)" line;
        spaces = head (elemAt parts 1);
        rest = elemAt parts 2;
        # drop trailing spaces
        body = head (split " *$" rest);
      in spaces + replaceStringsRec "  " " " body;
  in concatStringsSep "\n" (map trimLine (splitLines string));

# FIXME: O(n^2)
unique = foldl' (acc: e: if elem e acc then acc else acc ++ [ e ]) [];
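Assuming this file evaluates to an attribute set containing the helpers above, a brief usage sketch (the results in the comments follow from the definitions, not from the diff):

```nix
let u = import ./utils.nix; in {
  squashed = u.squash "a\n\n\n\nb";         # "a\n\nb" — runs of blank lines are collapsed
  trimmed  = u.trim "  hello   world   ";   # "  hello world" — leading spaces kept, inner spaces squashed
}
```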
14  docker.nix

@ -33,9 +33,20 @@ let
  root = {
    uid = 0;
-   shell = "/bin/bash";
+   shell = "${pkgs.bashInteractive}/bin/bash";
    home = "/root";
    gid = 0;
+   groups = [ "root" ];
+   description = "System administrator";
+ };
+
+ nobody = {
+   uid = 65534;
+   shell = "${pkgs.shadow}/bin/nologin";
+   home = "/var/empty";
+   gid = 65534;
+   groups = [ "nobody" ];
+   description = "Unprivileged account (don't use!)";
  };

} // lib.listToAttrs (

@ -57,6 +68,7 @@ let
  groups = {
    root.gid = 0;
    nixbld.gid = 30000;
+   nobody.gid = 65534;
  };

  userToPasswd = (

@ -18,16 +18,16 @@
    },
    "nixpkgs": {
      "locked": {
-       "lastModified": 1657693803,
+       "lastModified": 1670461440,
-       "narHash": "sha256-G++2CJ9u0E7NNTAi9n5G8TdDmGJXcIjkJ3NF8cetQB8=",
+       "narHash": "sha256-jy1LB8HOMKGJEGXgzFRLDU1CBGL0/LlkolgnqIsF0D8=",
        "owner": "NixOS",
        "repo": "nixpkgs",
-       "rev": "365e1b3a859281cf11b94f87231adeabbdd878a2",
+       "rev": "04a75b2eecc0acf6239acf9dd04485ff8d14f425",
        "type": "github"
      },
      "original": {
        "owner": "NixOS",
-       "ref": "nixos-22.05-small",
+       "ref": "nixos-22.11-small",
        "repo": "nixpkgs",
        "type": "github"
      }
    }
55  flake.nix

@ -1,7 +1,7 @@
{
  description = "The purely functional package manager";

- inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-22.05-small";
+ inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-22.11-small";
  inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2";
  inputs.lowdown-src = { url = "github:kristapsdz/lowdown"; flake = false; };

@ -9,21 +9,21 @@
  let

-   version = builtins.readFile ./.version + versionSuffix;
+   officialRelease = false;
+
+   version = nixpkgs.lib.fileContents ./.version + versionSuffix;
    versionSuffix =
      if officialRelease
      then ""
      else "pre${builtins.substring 0 8 (self.lastModifiedDate or self.lastModified or "19700101")}_${self.shortRev or "dirty"}";

-   officialRelease = false;

    linux64BitSystems = [ "x86_64-linux" "aarch64-linux" ];
    linuxSystems = linux64BitSystems ++ [ "i686-linux" ];
    systems = linuxSystems ++ [ "x86_64-darwin" "aarch64-darwin" ];

    crossSystems = [ "armv6l-linux" "armv7l-linux" ];

-   stdenvs = [ "gccStdenv" "clangStdenv" "clang11Stdenv" "stdenv" "libcxxStdenv" ];
+   stdenvs = [ "gccStdenv" "clangStdenv" "clang11Stdenv" "stdenv" "libcxxStdenv" "ccacheStdenv" ];

    forAllSystems = f: nixpkgs.lib.genAttrs systems (system: f system);
    forAllSystemsAndStdenvs = f: forAllSystems (system:

@ -82,7 +82,9 @@
    });

    configureFlags =
-     lib.optionals stdenv.isLinux [
+     [
+       "CXXFLAGS=-I${lib.getDev rapidcheck}/extras/gtest/include"
+     ] ++ lib.optionals stdenv.isLinux [
        "--with-boost=${boost}/lib"
        "--with-sandbox-shell=${sh}/bin/busybox"
      ]

@ -96,6 +98,7 @@
    buildPackages.flex
    (lib.getBin buildPackages.lowdown-nix)
    buildPackages.mdbook
+   buildPackages.mdbook-linkcheck
    buildPackages.autoconf-archive
    buildPackages.autoreconfHook
    buildPackages.pkg-config

@ -108,13 +111,14 @@
    ++ lib.optionals stdenv.hostPlatform.isLinux [(buildPackages.util-linuxMinimal or buildPackages.utillinuxMinimal)];

    buildDeps =
-     [ (curl.override { patchNetrcRegression = true; })
+     [ curl
        bzip2 xz brotli editline
        openssl sqlite
        libarchive
        boost
        lowdown-nix
        gtest
+       rapidcheck
      ]
      ++ lib.optionals stdenv.isLinux [libseccomp]
      ++ lib.optional (stdenv.isLinux || stdenv.isDarwin) libsodium

@ -133,7 +137,8 @@
      patches = (o.patches or []) ++ [
        ./boehmgc-coroutine-sp-fallback.diff
      ];
-   }))
+   })
+   )
    nlohmann_json
    ];
  };

@ -260,6 +265,7 @@
    echo "file binary-dist $fn" >> $out/nix-support/hydra-build-products
    tar cvfJ $fn \
      --owner=0 --group=0 --mode=u+rw,uga+r \
+     --mtime='1970-01-01' \
      --absolute-names \
      --hard-dereference \
      --transform "s,$TMPDIR/install,$dir/install," \

@ -363,7 +369,7 @@
    buildInputs =
      [ nix
-       (curl.override { patchNetrcRegression = true; })
+       curl
        bzip2
        xz
        pkgs.perl

@ -419,6 +425,8 @@
    buildCross = nixpkgs.lib.genAttrs crossSystems (crossSystem:
      nixpkgs.lib.genAttrs ["x86_64-linux"] (system: self.packages.${system}."nix-${crossSystem}"));

+   buildNoGc = nixpkgs.lib.genAttrs systems (system: self.packages.${system}.nix.overrideAttrs (a: { configureFlags = (a.configureFlags or []) ++ ["--enable-gc=no"];}));

    # Perl bindings for various platforms.
    perlBindings = nixpkgs.lib.genAttrs systems (system: self.packages.${system}.nix.perl-bindings);

@ -457,6 +465,10 @@
    src = self;

+   configureFlags = [
+     "CXXFLAGS=-I${lib.getDev pkgs.rapidcheck}/extras/gtest/include"
+   ];

    enableParallelBuilding = true;

    nativeBuildInputs = nativeBuildDeps;

@ -505,6 +517,12 @@
    overlay = self.overlays.default;
    });

+   tests.containers = (import ./tests/containers.nix rec {
+     system = "x86_64-linux";
+     inherit nixpkgs;
+     overlay = self.overlays.default;
+   });

    tests.setuid = nixpkgs.lib.genAttrs
      ["i686-linux" "x86_64-linux"]
      (system:

@ -526,6 +544,12 @@
    mkdir $out
    '';

+   tests.nixpkgsLibTests =
+     nixpkgs.lib.genAttrs systems (system:
+       import (nixpkgs + "/lib/tests/release.nix")
+         { pkgs = nixpkgsFor.${system}; }
+     );

    metrics.nixpkgs = import "${nixpkgs-regression}/pkgs/top-level/metrics.nix" {
      pkgs = nixpkgsFor.x86_64-linux;
      nixpkgs = nixpkgs-regression;

@ -545,12 +569,18 @@
    # againstLatestStable = testNixVersions pkgs pkgs.nix pkgs.nixStable;
    } "touch $out");

+   installerTests = import ./tests/installer {
+     binaryTarballs = self.hydraJobs.binaryTarball;
+     inherit nixpkgsFor;
+   };

  };

  checks = forAllSystems (system: {
    binaryTarball = self.hydraJobs.binaryTarball.${system};
    perlBindings = self.hydraJobs.perlBindings.${system};
    installTests = self.hydraJobs.installTests.${system};
+   nixpkgsLibTests = self.hydraJobs.tests.nixpkgsLibTests.${system};
  } // (nixpkgs.lib.optionalAttrs (builtins.elem system linux64BitSystems)) {
    dockerImage = self.hydraJobs.dockerImage.${system};
  });

@ -632,6 +662,7 @@
    inherit system crossSystem;
    overlays = [ self.overlays.default ];
    };
+   inherit (nixpkgsCross) lib;
    in with commonDeps { pkgs = nixpkgsCross; }; nixpkgsCross.stdenv.mkDerivation {
    name = "nix-${version}";

@ -644,7 +675,11 @@
    nativeBuildInputs = nativeBuildDeps;
    buildInputs = buildDeps ++ propagatedDeps;

-   configureFlags = [ "--sysconfdir=/etc" "--disable-doc-gen" ];
+   configureFlags = [
+     "CXXFLAGS=-I${lib.getDev nixpkgsCross.rapidcheck}/extras/gtest/include"
+     "--sysconfdir=/etc"
+     "--disable-doc-gen"
+   ];

    enableParallelBuilding = true;
107  maintainers/README.md  Normal file

@ -0,0 +1,107 @@
# Nix maintainers team

## Motivation

The goal of the team is to help other people to contribute to Nix.

## Members

- Eelco Dolstra (@edolstra) – Team lead
- Théophane Hufschmitt (@thufschmitt)
- Valentin Gagarin (@fricklerhandwerk)
- Thomas Bereknyei (@tomberek)
- Robert Hensing (@roberth)

## Meeting protocol

The team meets twice a week:

- Discussion meeting: [Fridays 13:00-14:00 CET](https://calendar.google.com/calendar/event?eid=MHNtOGVuNWtrZXNpZHR2bW1sM3QyN2ZjaGNfMjAyMjExMjVUMTIwMDAwWiBiOW81MmZvYnFqYWs4b3E4bGZraGczdDBxZ0Bn)

  1. Triage issues and pull requests from the _No Status_ column (30 min)
  2. Discuss issues and pull requests from the _To discuss_ column (30 min)

- Work meeting: [Mondays 13:00-15:00 CET](https://calendar.google.com/calendar/event?eid=NTM1MG1wNGJnOGpmOTZhYms3bTB1bnY5cWxfMjAyMjExMjFUMTIwMDAwWiBiOW81MmZvYnFqYWs4b3E4bGZraGczdDBxZ0Bn)

  1. Code review on pull requests from _In review_.
  2. Other chores and tasks.

Meeting notes are collected on a [collaborative scratchpad](https://pad.lassul.us/Cv7FpYx-Ri-4VjUykQOLAw), and published on Discourse under the [Nix category](https://discourse.nixos.org/c/dev/nix/50).

## Project board protocol

The team uses a [GitHub project board](https://github.com/orgs/NixOS/projects/19/views/1) for tracking its work.

Issues on the board progress through the following states:

- No Status

  During the discussion meeting, the team triages new items.
  To be considered, issues and pull requests must have a high-level description to provide the whole team with the necessary context at a glance.

  On every meeting, at least one item from each of the following categories is inspected:

  1. [critical](https://github.com/NixOS/nix/labels/critical)
  2. [security](https://github.com/NixOS/nix/labels/security)
  3. [regression](https://github.com/NixOS/nix/labels/regression)
  4. [bug](https://github.com/NixOS/nix/issues?q=is%3Aopen+label%3Abug+sort%3Areactions-%2B1-desc)

  - [oldest pull requests](https://github.com/NixOS/nix/pulls?q=is%3Apr+is%3Aopen+sort%3Acreated-asc)
  - [most popular pull requests](https://github.com/NixOS/nix/pulls?q=is%3Apr+is%3Aopen+sort%3Areactions-%2B1-desc)
  - [oldest issues](https://github.com/NixOS/nix/issues?q=is%3Aissue+is%3Aopen+sort%3Acreated-asc)
  - [most popular issues](https://github.com/NixOS/nix/issues?q=is%3Aissue+is%3Aopen+sort%3Areactions-%2B1-desc)

  Team members can also add pull requests or issues they would like the whole team to consider.

  If there is disagreement on the general idea behind an issue or pull request, it is moved to _To discuss_, otherwise to _In review_.

- To discuss

  Pull requests and issues that are deemed important and controversial are discussed by the team during discussion meetings.

  This may be where the merit of the change itself or the implementation strategy is contested by a team member.

  As a general guideline, the order of items is determined as follows:

  - Prioritise pull requests over issues

    Contributors who took the time to implement concrete change proposals should not wait indefinitely.

  - Prioritise fixing bugs over documentation, improvements or new features

    The team values stability and accessibility higher than raw functionality.

  - Interleave issues and PRs

    This way issues without attempts at a solution get a chance to get addressed.

- In review

  Pull requests in this column are reviewed together during work meetings.
  This is both for spreading implementation knowledge and for establishing common values in code reviews.

  When the overall direction is agreed upon, even when further changes are required, the pull request is assigned to one team member.

- Assigned for merging

  One team member is assigned to each of these pull requests.
  They will communicate with the authors, and make the final approval once all remaining issues are addressed.

  If more substantive issues arise, the assignee can move the pull request back to _To discuss_ to involve the team again.

The process is illustrated in the following diagram:

```mermaid
flowchart TD
    discuss[To discuss]

    review[To review]

    New --> |Disagreement on idea| discuss
    New & discuss --> |Consensus on idea| review

    review --> |Consensus on implementation| Assigned

    Assigned --> |Implementation issues arise| review
    Assigned --> |Remaining issues fixed| Merged
```
@ -115,10 +115,6 @@ sub downloadFile {

  write_file("$tmpFile.sha256", $sha256_actual);

- if (! -e "$tmpFile.asc") {
-     system("gpg2 --detach-sign --armor $tmpFile") == 0 or die "unable to sign $tmpFile\n";
- }

  return $sha256_expected;
}

@ -194,7 +190,7 @@ for my $fn (glob "$tmpDir/*") {
  my $configuration = ();
  $configuration->{content_type} = "application/octet-stream";

- if ($fn =~ /.sha256|.asc|install/) {
+ if ($fn =~ /.sha256|install/) {
      # Text files
      $configuration->{content_type} = "text/plain";
  }
@ -28,7 +28,7 @@
  <key>SoftResourceLimits</key>
  <dict>
    <key>NumberOfFiles</key>
-   <integer>4096</integer>
+   <integer>1048576</integer>
  </dict>
  </dict>
</plist>

@ -9,7 +9,7 @@ ConditionPathIsReadWrite=@localstatedir@/nix/daemon-socket
[Service]
ExecStart=@@bindir@/nix-daemon nix-daemon --daemon
KillMode=process
-LimitNOFILE=4096
+LimitNOFILE=1048576

[Install]
WantedBy=multi-user.target
@ -10,14 +10,15 @@ function _nix() {
    local -a suggestions
    declare -a suggestions
    for suggestion in ${res:1}; do
-       # FIXME: This doesn't work properly if the suggestion word contains a `:`
-       # itself
-       suggestions+="${suggestion/ /:}"
+       suggestions+=("${suggestion%% *}")
    done

+   local -a args
    if [[ "$tpe" == filenames ]]; then
-       compadd -f
+       args+=('-f')
+   elif [[ "$tpe" == attrs ]]; then
+       args+=('-S' '')
    fi
-   _describe 'nix' suggestions
+   compadd -J nix "${args[@]}" -a suggestions
}

_nix "$@"
11
mk/common-test.sh
Normal file
11
mk/common-test.sh
Normal file
|
@ -0,0 +1,11 @@
|
||||||
|
TESTS_ENVIRONMENT=("TEST_NAME=${test%.*}" 'NIX_REMOTE=')
|
||||||
|
|
||||||
|
: ${BASH:=/usr/bin/env bash}
|
||||||
|
|
||||||
|
init_test () {
|
||||||
|
cd tests && env "${TESTS_ENVIRONMENT[@]}" $BASH -e init.sh 2>/dev/null > /dev/null
|
||||||
|
}
|
||||||
|
|
||||||
|
run_test_proper () {
|
||||||
|
cd $(dirname $test) && env "${TESTS_ENVIRONMENT[@]}" $BASH -e $(basename $test)
|
||||||
|
}
|
11
mk/debug-test.sh
Executable file
11
mk/debug-test.sh
Executable file
|
@ -0,0 +1,11 @@
|
||||||
|
#!/usr/bin/env bash
|
||||||
|
|
||||||
|
set -eu
|
||||||
|
|
||||||
|
test=$1
|
||||||
|
|
||||||
|
dir="$(dirname "${BASH_SOURCE[0]}")"
|
||||||
|
source "$dir/common-test.sh"
|
||||||
|
|
||||||
|
(init_test)
|
||||||
|
run_test_proper
|
|
@ -1,4 +1,4 @@
|
||||||
#!/bin/sh
|
#!/usr/bin/env bash
|
||||||
|
|
||||||
set -u
|
set -u
|
||||||
|
|
||||||
|
@ -7,7 +7,12 @@ green=""
|
||||||
yellow=""
|
yellow=""
|
||||||
normal=""
|
normal=""
|
||||||
|
|
||||||
post_run_msg="ran test $1..."
|
test=$1
|
||||||
|
|
||||||
|
dir="$(dirname "${BASH_SOURCE[0]}")"
|
||||||
|
source "$dir/common-test.sh"
|
||||||
|
|
||||||
|
post_run_msg="ran test $test..."
|
||||||
if [ -t 1 ]; then
|
if [ -t 1 ]; then
|
||||||
red="[31;1m"
|
red="[31;1m"
|
||||||
green="[32;1m"
|
green="[32;1m"
|
||||||
|
@ -16,12 +21,12 @@ if [ -t 1 ]; then
|
||||||
fi
|
fi
|
||||||
|
|
||||||
run_test () {
|
run_test () {
|
||||||
(cd tests && env ${TESTS_ENVIRONMENT} init.sh 2>/dev/null > /dev/null)
|
(init_test 2>/dev/null > /dev/null)
|
||||||
log="$(cd $(dirname $1) && env ${TESTS_ENVIRONMENT} $(basename $1) 2>&1)"
|
log="$(run_test_proper 2>&1)"
|
||||||
status=$?
|
status=$?
|
||||||
}
|
}
|
||||||
|
|
||||||
run_test "$1"
|
run_test
|
||||||
|
|
||||||
# Hack: Retry the test if it fails with “unexpected EOF reading a line” as these
|
# Hack: Retry the test if it fails with “unexpected EOF reading a line” as these
|
||||||
# appear randomly without anyone knowing why.
|
# appear randomly without anyone knowing why.
|
||||||
|
@ -32,7 +37,7 @@ if [[ $status -ne 0 && $status -ne 99 && \
|
||||||
]]; then
|
]]; then
|
||||||
echo "$post_run_msg [${yellow}FAIL$normal] (possibly flaky, so will be retried)"
|
echo "$post_run_msg [${yellow}FAIL$normal] (possibly flaky, so will be retried)"
|
||||||
echo "$log" | sed 's/^/ /'
|
echo "$log" | sed 's/^/ /'
|
||||||
run_test "$1"
|
run_test
|
||||||
fi
|
fi
|
||||||
|
|
||||||
if [ $status -eq 0 ]; then
|
if [ $status -eq 0 ]; then
|
|
@ -8,7 +8,11 @@ define run-install-test
|
||||||
|
|
||||||
.PHONY: $1.test
|
.PHONY: $1.test
|
||||||
$1.test: $1 $(test-deps)
|
$1.test: $1 $(test-deps)
|
||||||
@env TEST_NAME=$(basename $1) TESTS_ENVIRONMENT="$(tests-environment)" mk/run_test.sh $1 < /dev/null
|
@env BASH=$(bash) $(bash) mk/run-test.sh $1 < /dev/null
|
||||||
|
|
||||||
|
.PHONY: $1.test-debug
|
||||||
|
$1.test-debug: $1 $(test-deps)
|
||||||
|
@env BASH=$(bash) $(bash) mk/debug-test.sh $1 < /dev/null
|
||||||
|
|
||||||
endef
|
endef
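With the shared helpers factored into `mk/common-test.sh`, the same test can now be run either through the normal wrapper or through the new `debug-test.sh`, which skips output capture and the flaky-retry logic. A hedged sketch of both invocations (the test name is a placeholder):

```bash
# Normal harness: output is captured and "unexpected EOF" flakes are retried.
make tests/some-test.sh.test

# Debug harness added above: stdout/stderr go straight to the terminal,
# so failures are visible immediately and a debugger can be attached.
make tests/some-test.sh.test-debug

# Equivalent direct invocation, mirroring what the makefile rule does:
env BASH="$(command -v bash)" "$(command -v bash)" mk/debug-test.sh tests/some-test.sh
```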
|
||||||
|
|
||||||
|
|
|
@ -37,6 +37,19 @@ readonly PROFILE_TARGETS=("/etc/bashrc" "/etc/profile.d/nix.sh" "/etc/zshrc" "/e
|
||||||
readonly PROFILE_BACKUP_SUFFIX=".backup-before-nix"
|
readonly PROFILE_BACKUP_SUFFIX=".backup-before-nix"
|
||||||
readonly PROFILE_NIX_FILE="$NIX_ROOT/var/nix/profiles/default/etc/profile.d/nix-daemon.sh"
|
readonly PROFILE_NIX_FILE="$NIX_ROOT/var/nix/profiles/default/etc/profile.d/nix-daemon.sh"
|
||||||
|
|
||||||
|
# Fish has different syntax than zsh/bash, so treat it separately
|
||||||
|
readonly PROFILE_FISH_SUFFIX="conf.d/nix.fish"
|
||||||
|
readonly PROFILE_FISH_PREFIXES=(
|
||||||
|
# each of these is a common value of $__fish_sysconf_dir,
|
||||||
|
# under which Fish will look for a file named
|
||||||
|
# $PROFILE_FISH_SUFFIX.
|
||||||
|
"/etc/fish" # standard
|
||||||
|
"/usr/local/etc/fish" # their installer .pkg for macOS
|
||||||
|
"/opt/homebrew/etc/fish" # homebrew
|
||||||
|
"/opt/local/etc/fish" # macports
|
||||||
|
)
|
||||||
|
readonly PROFILE_NIX_FILE_FISH="$NIX_ROOT/var/nix/profiles/default/etc/profile.d/nix-daemon.fish"
|
||||||
|
|
||||||
readonly NIX_INSTALLED_NIX="@nix@"
|
readonly NIX_INSTALLED_NIX="@nix@"
|
||||||
readonly NIX_INSTALLED_CACERT="@cacert@"
|
readonly NIX_INSTALLED_CACERT="@cacert@"
|
||||||
#readonly NIX_INSTALLED_NIX="/nix/store/j8dbv5w6jl34caywh2ygdy88knx1mdf7-nix-2.3.6"
|
#readonly NIX_INSTALLED_NIX="/nix/store/j8dbv5w6jl34caywh2ygdy88knx1mdf7-nix-2.3.6"
|
||||||
|
@ -45,7 +58,7 @@ readonly EXTRACTED_NIX_PATH="$(dirname "$0")"
|
||||||
|
|
||||||
readonly ROOT_HOME=~root
|
readonly ROOT_HOME=~root
|
||||||
|
|
||||||
if [ -t 0 ]; then
|
if [ -t 0 ] && [ -z "${NIX_INSTALLER_YES:-}" ]; then
|
||||||
readonly IS_HEADLESS='no'
|
readonly IS_HEADLESS='no'
|
||||||
else
|
else
|
||||||
readonly IS_HEADLESS='yes'
|
readonly IS_HEADLESS='yes'
|
||||||
|
@ -84,13 +97,10 @@ is_os_darwin() {
|
||||||
}
|
}
|
||||||
|
|
||||||
contact_us() {
|
contact_us() {
|
||||||
echo "You can open an issue at https://github.com/nixos/nix/issues"
|
echo "You can open an issue at"
|
||||||
|
echo "https://github.com/NixOS/nix/issues/new?labels=installer&template=installer.md"
|
||||||
echo ""
|
echo ""
|
||||||
echo "Or feel free to contact the team:"
|
echo "Or get in touch with the community: https://nixos.org/community"
|
||||||
echo " - Matrix: #nix:nixos.org"
|
|
||||||
echo " - IRC: in #nixos on irc.libera.chat"
|
|
||||||
echo " - twitter: @nixos_org"
|
|
||||||
echo " - forum: https://discourse.nixos.org"
|
|
||||||
}
|
}
|
||||||
get_help() {
|
get_help() {
|
||||||
echo "We'd love to help if you need it."
|
echo "We'd love to help if you need it."
|
||||||
|
@ -362,7 +372,7 @@ finish_fail() {
|
||||||
finish_cleanup
|
finish_cleanup
|
||||||
|
|
||||||
failure <<EOF
|
failure <<EOF
|
||||||
Jeeze, something went wrong. If you can take all the output and open
|
Oh no, something went wrong. If you can take all the output and open
|
||||||
an issue, we'd love to fix the problem so nobody else has this issue.
|
an issue, we'd love to fix the problem so nobody else has this issue.
|
||||||
|
|
||||||
:(
|
:(
|
||||||
|
@ -565,7 +575,7 @@ EOF
|
||||||
# to extract _just_ the user's note, instead it is prefixed with
|
# to extract _just_ the user's note, instead it is prefixed with
|
||||||
# some plist junk. This was causing the user note to always be set,
|
# some plist junk. This was causing the user note to always be set,
|
||||||
# even if there was no reason for it.
|
# even if there was no reason for it.
|
||||||
if ! poly_user_note_get "$username" | grep -q "Nix build user $coreid"; then
|
if poly_user_note_get "$username" | grep -q "Nix build user $coreid"; then
|
||||||
row " Note" "Nix build user $coreid"
|
row " Note" "Nix build user $coreid"
|
||||||
else
|
else
|
||||||
poly_user_note_set "$username" "Nix build user $coreid"
|
poly_user_note_set "$username" "Nix build user $coreid"
|
||||||
|
@ -810,7 +820,7 @@ EOF
|
||||||
fi
|
fi
|
||||||
|
|
||||||
_sudo "to load data for the first time in to the Nix Database" \
|
_sudo "to load data for the first time in to the Nix Database" \
|
||||||
"$NIX_INSTALLED_NIX/bin/nix-store" --load-db < ./.reginfo
|
HOME="$ROOT_HOME" "$NIX_INSTALLED_NIX/bin/nix-store" --load-db < ./.reginfo
|
||||||
|
|
||||||
echo " Just finished getting the nix database ready."
|
echo " Just finished getting the nix database ready."
|
||||||
)
|
)
|
||||||
|
@ -828,6 +838,19 @@ fi
|
||||||
EOF
|
EOF
|
||||||
}
|
}
|
||||||
|
|
||||||
|
# Fish has differing syntax
|
||||||
|
fish_source_lines() {
|
||||||
|
cat <<EOF
|
||||||
|
|
||||||
|
# Nix
|
||||||
|
if test -e '$PROFILE_NIX_FILE_FISH'
|
||||||
|
. '$PROFILE_NIX_FILE_FISH'
|
||||||
|
end
|
||||||
|
# End Nix
|
||||||
|
|
||||||
|
EOF
|
||||||
|
}
|
||||||
|
|
||||||
configure_shell_profile() {
|
configure_shell_profile() {
|
||||||
task "Setting up shell profiles: ${PROFILE_TARGETS[*]}"
|
task "Setting up shell profiles: ${PROFILE_TARGETS[*]}"
|
||||||
for profile_target in "${PROFILE_TARGETS[@]}"; do
|
for profile_target in "${PROFILE_TARGETS[@]}"; do
|
||||||
|
@ -849,6 +872,27 @@ configure_shell_profile() {
|
||||||
tee -a "$profile_target"
|
tee -a "$profile_target"
|
||||||
fi
|
fi
|
||||||
done
|
done
|
||||||
|
|
||||||
|
task "Setting up shell profiles for Fish with with ${PROFILE_FISH_SUFFIX} inside ${PROFILE_FISH_PREFIXES[*]}"
|
||||||
|
for fish_prefix in "${PROFILE_FISH_PREFIXES[@]}"; do
|
||||||
|
if [ ! -d "$fish_prefix" ]; then
|
||||||
|
# this specific prefix (ie: /etc/fish) is very likely to exist
|
||||||
|
# if Fish is installed with this sysconfdir.
|
||||||
|
continue
|
||||||
|
fi
|
||||||
|
|
||||||
|
profile_target="${fish_prefix}/${PROFILE_FISH_SUFFIX}"
|
||||||
|
conf_dir=$(dirname "$profile_target")
|
||||||
|
if [ ! -d "$conf_dir" ]; then
|
||||||
|
_sudo "create $conf_dir for our Fish hook" \
|
||||||
|
mkdir "$conf_dir"
|
||||||
|
fi
|
||||||
|
|
||||||
|
fish_source_lines \
|
||||||
|
| _sudo "write nix-daemon settings to $profile_target" \
|
||||||
|
tee "$profile_target"
|
||||||
|
done
|
||||||
|
|
||||||
# TODO: should we suggest '. $PROFILE_NIX_FILE'? It would get them on
|
# TODO: should we suggest '. $PROFILE_NIX_FILE'? It would get them on
|
||||||
# their way less disruptively, but a counter-argument is that they won't
|
# their way less disruptively, but a counter-argument is that they won't
|
||||||
# immediately notice if something didn't get set up right?
|
# immediately notice if something didn't get set up right?
|
||||||
|
|
|
@ -71,6 +71,8 @@ while [ $# -gt 0 ]; do
|
||||||
# # intentional tail space
|
# # intentional tail space
|
||||||
# ACTIONS="${ACTIONS}uninstall "
|
# ACTIONS="${ACTIONS}uninstall "
|
||||||
# ;;
|
# ;;
|
||||||
|
--yes)
|
||||||
|
export NIX_INSTALLER_YES=1;;
|
||||||
--no-channel-add)
|
--no-channel-add)
|
||||||
export NIX_INSTALLER_NO_CHANNEL_ADD=1;;
|
export NIX_INSTALLER_NO_CHANNEL_ADD=1;;
|
||||||
--daemon-user-count)
|
--daemon-user-count)
|
||||||
|
@ -90,7 +92,7 @@ while [ $# -gt 0 ]; do
|
||||||
shift;;
|
shift;;
|
||||||
*)
|
*)
|
||||||
{
|
{
|
||||||
echo "Nix Installer [--daemon|--no-daemon] [--daemon-user-count INT] [--no-channel-add] [--no-modify-profile] [--nix-extra-conf-file FILE]"
|
echo "Nix Installer [--daemon|--no-daemon] [--daemon-user-count INT] [--yes] [--no-channel-add] [--no-modify-profile] [--nix-extra-conf-file FILE]"
|
||||||
|
|
||||||
echo "Choose installation method."
|
echo "Choose installation method."
|
||||||
echo ""
|
echo ""
|
||||||
|
@ -104,6 +106,8 @@ while [ $# -gt 0 ]; do
|
||||||
echo " trivial to uninstall."
|
echo " trivial to uninstall."
|
||||||
echo " (default)"
|
echo " (default)"
|
||||||
echo ""
|
echo ""
|
||||||
|
echo " --yes: Run the script non-interactively, accepting all prompts."
|
||||||
|
echo ""
|
||||||
echo " --no-channel-add: Don't add any channels. nixpkgs-unstable is installed by default."
|
echo " --no-channel-add: Don't add any channels. nixpkgs-unstable is installed by default."
|
||||||
echo ""
|
echo ""
|
||||||
echo " --no-modify-profile: Don't modify the user profile to automatically load nix."
|
echo " --no-modify-profile: Don't modify the user profile to automatically load nix."
|
||||||
|
@ -209,31 +213,50 @@ if [ -z "$NIX_INSTALLER_NO_CHANNEL_ADD" ]; then
|
||||||
fi
|
fi
|
||||||
|
|
||||||
added=
|
added=
|
||||||
p=$HOME/.nix-profile/etc/profile.d/nix.sh
|
p=
|
||||||
|
p_sh=$HOME/.nix-profile/etc/profile.d/nix.sh
|
||||||
|
p_fish=$HOME/.nix-profile/etc/profile.d/nix.fish
|
||||||
if [ -z "$NIX_INSTALLER_NO_MODIFY_PROFILE" ]; then
|
if [ -z "$NIX_INSTALLER_NO_MODIFY_PROFILE" ]; then
|
||||||
# Make the shell source nix.sh during login.
|
# Make the shell source nix.sh during login.
|
||||||
for i in .bash_profile .bash_login .profile; do
|
for i in .bash_profile .bash_login .profile; do
|
||||||
fn="$HOME/$i"
|
fn="$HOME/$i"
|
||||||
if [ -w "$fn" ]; then
|
if [ -w "$fn" ]; then
|
||||||
if ! grep -q "$p" "$fn"; then
|
if ! grep -q "$p_sh" "$fn"; then
|
||||||
echo "modifying $fn..." >&2
|
echo "modifying $fn..." >&2
|
||||||
printf '\nif [ -e %s ]; then . %s; fi # added by Nix installer\n' "$p" "$p" >> "$fn"
|
printf '\nif [ -e %s ]; then . %s; fi # added by Nix installer\n' "$p_sh" "$p_sh" >> "$fn"
|
||||||
fi
|
fi
|
||||||
added=1
|
added=1
|
||||||
|
p=${p_sh}
|
||||||
break
|
break
|
||||||
fi
|
fi
|
||||||
done
|
done
|
||||||
for i in .zshenv .zshrc; do
|
for i in .zshenv .zshrc; do
|
||||||
fn="$HOME/$i"
|
fn="$HOME/$i"
|
||||||
if [ -w "$fn" ]; then
|
if [ -w "$fn" ]; then
|
||||||
if ! grep -q "$p" "$fn"; then
|
if ! grep -q "$p_sh" "$fn"; then
|
||||||
echo "modifying $fn..." >&2
|
echo "modifying $fn..." >&2
|
||||||
printf '\nif [ -e %s ]; then . %s; fi # added by Nix installer\n' "$p" "$p" >> "$fn"
|
printf '\nif [ -e %s ]; then . %s; fi # added by Nix installer\n' "$p_sh" "$p_sh" >> "$fn"
|
||||||
fi
|
fi
|
||||||
added=1
|
added=1
|
||||||
|
p=${p_sh}
|
||||||
break
|
break
|
||||||
fi
|
fi
|
||||||
done
|
done
|
||||||
|
|
||||||
|
if [ -d "$HOME/.config/fish" ]; then
|
||||||
|
fishdir=$HOME/.config/fish/conf.d
|
||||||
|
if [ ! -d "$fishdir" ]; then
|
||||||
|
mkdir -p "$fishdir"
|
||||||
|
fi
|
||||||
|
|
||||||
|
fn="$fishdir/nix.fish"
|
||||||
|
echo "placing $fn..." >&2
|
||||||
|
printf '\nif test -e %s; . %s; end # added by Nix installer\n' "$p_fish" "$p_fish" > "$fn"
|
||||||
|
added=1
|
||||||
|
p=${p_fish}
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
p=${p_sh}
|
||||||
fi
|
fi
|
||||||
|
|
||||||
if [ -z "$added" ]; then
|
if [ -z "$added" ]; then
|
||||||
|
|
|
@ -24,12 +24,17 @@ $1
|
||||||
EOF
|
EOF
|
||||||
}
|
}
|
||||||
|
|
||||||
|
escape_systemd_env() {
|
||||||
|
temp_var="${1//\'/\\\'}"
|
||||||
|
echo "${temp_var//\%/%%}"
|
||||||
|
}
|
||||||
|
|
||||||
# Gather all non-empty proxy environment variables into a string
|
# Gather all non-empty proxy environment variables into a string
|
||||||
create_systemd_proxy_env() {
|
create_systemd_proxy_env() {
|
||||||
vars="http_proxy https_proxy ftp_proxy no_proxy HTTP_PROXY HTTPS_PROXY FTP_PROXY NO_PROXY"
|
vars="http_proxy https_proxy ftp_proxy no_proxy HTTP_PROXY HTTPS_PROXY FTP_PROXY NO_PROXY"
|
||||||
for v in $vars; do
|
for v in $vars; do
|
||||||
if [ "x${!v:-}" != "x" ]; then
|
if [ "x${!v:-}" != "x" ]; then
|
||||||
echo "Environment=${v}=${!v}"
|
echo "Environment=${v}=$(escape_systemd_env ${!v})"
|
||||||
fi
|
fi
|
||||||
done
|
done
|
||||||
}
|
}
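The new `escape_systemd_env` helper exists because `%` is a specifier character in systemd unit files and stray single quotes would otherwise break the generated `Environment=` lines. A small standalone sketch of the same two substitutions (the proxy value is made up):

```bash
# Reproduce the substitutions from escape_systemd_env on a sample value.
http_proxy='http://user:p%40ss@proxy.example:3128'
escaped="${http_proxy//\'/\\\'}"   # escape single quotes
escaped="${escaped//\%/%%}"        # double '%' so systemd does not expand it
echo "Environment=http_proxy=${escaped}"
# prints: Environment=http_proxy=http://user:p%%40ss@proxy.example:3128
```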
|
||||||
|
|
|
@ -40,12 +40,12 @@ case "$(uname -s).$(uname -m)" in
|
||||||
path=@tarballPath_aarch64-linux@
|
path=@tarballPath_aarch64-linux@
|
||||||
system=aarch64-linux
|
system=aarch64-linux
|
||||||
;;
|
;;
|
||||||
Linux.armv6l_linux)
|
Linux.armv6l)
|
||||||
hash=@tarballHash_armv6l-linux@
|
hash=@tarballHash_armv6l-linux@
|
||||||
path=@tarballPath_armv6l-linux@
|
path=@tarballPath_armv6l-linux@
|
||||||
system=armv6l-linux
|
system=armv6l-linux
|
||||||
;;
|
;;
|
||||||
Linux.armv7l_linux)
|
Linux.armv7l)
|
||||||
hash=@tarballHash_armv7l-linux@
|
hash=@tarballHash_armv7l-linux@
|
||||||
path=@tarballPath_armv7l-linux@
|
path=@tarballPath_armv7l-linux@
|
||||||
system=armv7l-linux
|
system=armv7l-linux
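The corrected case labels match the raw output of `uname`, which carries no `_linux` suffix; the string actually being dispatched on looks like this (the value naturally depends on the machine):

```bash
# On a 32-bit ARM board this prints e.g. "Linux.armv6l" or "Linux.armv7l",
# never "Linux.armv6l_linux", hence the fix to the case patterns above.
echo "$(uname -s).$(uname -m)"
```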
|
||||||
|
|
|
@ -6,6 +6,8 @@ noinst-scripts += $(nix_noinst_scripts)
|
||||||
profiledir = $(sysconfdir)/profile.d
|
profiledir = $(sysconfdir)/profile.d
|
||||||
|
|
||||||
$(eval $(call install-file-as, $(d)/nix-profile.sh, $(profiledir)/nix.sh, 0644))
|
$(eval $(call install-file-as, $(d)/nix-profile.sh, $(profiledir)/nix.sh, 0644))
|
||||||
|
$(eval $(call install-file-as, $(d)/nix-profile.fish, $(profiledir)/nix.fish, 0644))
|
||||||
$(eval $(call install-file-as, $(d)/nix-profile-daemon.sh, $(profiledir)/nix-daemon.sh, 0644))
|
$(eval $(call install-file-as, $(d)/nix-profile-daemon.sh, $(profiledir)/nix-daemon.sh, 0644))
|
||||||
|
$(eval $(call install-file-as, $(d)/nix-profile-daemon.fish, $(profiledir)/nix-daemon.fish, 0644))
|
||||||
|
|
||||||
clean-files += $(nix_noinst_scripts)
|
clean-files += $(nix_noinst_scripts)
|
||||||
|
|
49
scripts/nix-profile-daemon.fish.in
Normal file
49
scripts/nix-profile-daemon.fish.in
Normal file
|
@ -0,0 +1,49 @@
|
||||||
|
function add_path --argument-names new_path
|
||||||
|
if type -q fish_add_path
|
||||||
|
# fish 3.2.0 or newer
|
||||||
|
fish_add_path --prepend --global $new_path
|
||||||
|
else
|
||||||
|
# older versions of fish
|
||||||
|
if not contains $new_path $fish_user_paths
|
||||||
|
set --global fish_user_paths $new_path $fish_user_paths
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# Only execute this file once per shell.
|
||||||
|
if test -n "$__ETC_PROFILE_NIX_SOURCED"
|
||||||
|
exit
|
||||||
|
end
|
||||||
|
|
||||||
|
set __ETC_PROFILE_NIX_SOURCED 1
|
||||||
|
|
||||||
|
set --export NIX_PROFILES "@localstatedir@/nix/profiles/default $HOME/.nix-profile"
|
||||||
|
|
||||||
|
# Set $NIX_SSL_CERT_FILE so that Nixpkgs applications like curl work.
|
||||||
|
if test -n "$NIX_SSH_CERT_FILE"
|
||||||
|
: # Allow users to override the NIX_SSL_CERT_FILE
|
||||||
|
else if test -e /etc/ssl/certs/ca-certificates.crt # NixOS, Ubuntu, Debian, Gentoo, Arch
|
||||||
|
set --export NIX_SSL_CERT_FILE /etc/ssl/certs/ca-certificates.crt
|
||||||
|
else if test -e /etc/ssl/ca-bundle.pem # openSUSE Tumbleweed
|
||||||
|
set --export NIX_SSL_CERT_FILE /etc/ssl/ca-bundle.pem
|
||||||
|
else if test -e /etc/ssl/certs/ca-bundle.crt # Old NixOS
|
||||||
|
set --export NIX_SSL_CERT_FILE /etc/ssl/certs/ca-bundle.crt
|
||||||
|
else if test -e /etc/pki/tls/certs/ca-bundle.crt # Fedora, CentOS
|
||||||
|
set --export NIX_SSL_CERT_FILE /etc/pki/tls/certs/ca-bundle.crt
|
||||||
|
else if test -e "$NIX_LINK/etc/ssl/certs/ca-bundle.crt" # fall back to cacert in Nix profile
|
||||||
|
set --export NIX_SSL_CERT_FILE "$NIX_LINK/etc/ssl/certs/ca-bundle.crt"
|
||||||
|
else if test -e "$NIX_LINK/etc/ca-bundle.crt" # old cacert in Nix profile
|
||||||
|
set --export NIX_SSL_CERT_FILE "$NIX_LINK/etc/ca-bundle.crt"
|
||||||
|
else
|
||||||
|
# Fall back to what is in the nix profiles, favouring whatever is defined last.
|
||||||
|
for i in $NIX_PROFILES
|
||||||
|
if test -e "$i/etc/ssl/certs/ca-bundle.crt"
|
||||||
|
set --export NIX_SSL_CERT_FILE "$i/etc/ssl/certs/ca-bundle.crt"
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
add_path "@localstatedir@/nix/profiles/default/bin"
|
||||||
|
add_path "$HOME/.nix-profile/bin"
|
||||||
|
|
||||||
|
functions -e add_path
|
51
scripts/nix-profile.fish.in
Normal file
51
scripts/nix-profile.fish.in
Normal file
|
@ -0,0 +1,51 @@
|
||||||
|
function add_path --argument-names new_path
|
||||||
|
if type -q fish_add_path
|
||||||
|
# fish 3.2.0 or newer
|
||||||
|
fish_add_path --prepend --global $new_path
|
||||||
|
else
|
||||||
|
# older versions of fish
|
||||||
|
if not contains $new_path $fish_user_paths
|
||||||
|
set --global fish_user_paths $new_path $fish_user_paths
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
if test -n "$HOME" && test -n "$USER"
|
||||||
|
|
||||||
|
# Set up the per-user profile.
|
||||||
|
|
||||||
|
set NIX_LINK $HOME/.nix-profile
|
||||||
|
|
||||||
|
# Set up environment.
|
||||||
|
# This part should be kept in sync with nixpkgs:nixos/modules/programs/environment.nix
|
||||||
|
set --export NIX_PROFILES "@localstatedir@/nix/profiles/default $HOME/.nix-profile"
|
||||||
|
|
||||||
|
# Set $NIX_SSL_CERT_FILE so that Nixpkgs applications like curl work.
|
||||||
|
if test -n "$NIX_SSH_CERT_FILE"
|
||||||
|
: # Allow users to override the NIX_SSL_CERT_FILE
|
||||||
|
else if test -e /etc/ssl/certs/ca-certificates.crt # NixOS, Ubuntu, Debian, Gentoo, Arch
|
||||||
|
set --export NIX_SSL_CERT_FILE /etc/ssl/certs/ca-certificates.crt
|
||||||
|
else if test -e /etc/ssl/ca-bundle.pem # openSUSE Tumbleweed
|
||||||
|
set --export NIX_SSL_CERT_FILE /etc/ssl/ca-bundle.pem
|
||||||
|
else if test -e /etc/ssl/certs/ca-bundle.crt # Old NixOS
|
||||||
|
set --export NIX_SSL_CERT_FILE /etc/ssl/certs/ca-bundle.crt
|
||||||
|
else if test -e /etc/pki/tls/certs/ca-bundle.crt # Fedora, CentOS
|
||||||
|
set --export NIX_SSL_CERT_FILE /etc/pki/tls/certs/ca-bundle.crt
|
||||||
|
else if test -e "$NIX_LINK/etc/ssl/certs/ca-bundle.crt" # fall back to cacert in Nix profile
|
||||||
|
set --export NIX_SSL_CERT_FILE "$NIX_LINK/etc/ssl/certs/ca-bundle.crt"
|
||||||
|
else if test -e "$NIX_LINK/etc/ca-bundle.crt" # old cacert in Nix profile
|
||||||
|
set --export NIX_SSL_CERT_FILE "$NIX_LINK/etc/ca-bundle.crt"
|
||||||
|
end
|
||||||
|
|
||||||
|
# Only use MANPATH if it is already set. In general `man` will simply
|
||||||
|
# pick up `.nix-profile/share/man` because it is close to `.nix-profile/bin`
|
||||||
|
# which is in the $PATH. For more info, run `manpath -d`.
|
||||||
|
if set --query MANPATH
|
||||||
|
set --export --prepend --path MANPATH "$NIX_LINK/share/man"
|
||||||
|
end
|
||||||
|
|
||||||
|
add_path "$NIX_LINK/bin"
|
||||||
|
set --erase NIX_LINK
|
||||||
|
end
|
||||||
|
|
||||||
|
functions -e add_path
|
|
@ -1,7 +1,6 @@
|
||||||
if [ -n "$HOME" ] && [ -n "$USER" ]; then
|
if [ -n "$HOME" ] && [ -n "$USER" ]; then
|
||||||
|
|
||||||
# Set up the per-user profile.
|
# Set up the per-user profile.
|
||||||
# This part should be kept in sync with nixpkgs:nixos/modules/programs/shell.nix
|
|
||||||
|
|
||||||
NIX_LINK=$HOME/.nix-profile
|
NIX_LINK=$HOME/.nix-profile
|
||||||
|
|
||||||
|
|
|
@ -186,12 +186,12 @@ static int main_build_remote(int argc, char * * argv)
|
||||||
// build the hint template.
|
// build the hint template.
|
||||||
std::string errorText =
|
std::string errorText =
|
||||||
"Failed to find a machine for remote build!\n"
|
"Failed to find a machine for remote build!\n"
|
||||||
"derivation: %s\nrequired (system, features): (%s, %s)";
|
"derivation: %s\nrequired (system, features): (%s, [%s])";
|
||||||
errorText += "\n%s available machines:";
|
errorText += "\n%s available machines:";
|
||||||
errorText += "\n(systems, maxjobs, supportedFeatures, mandatoryFeatures)";
|
errorText += "\n(systems, maxjobs, supportedFeatures, mandatoryFeatures)";
|
||||||
|
|
||||||
for (unsigned int i = 0; i < machines.size(); ++i)
|
for (unsigned int i = 0; i < machines.size(); ++i)
|
||||||
errorText += "\n(%s, %s, %s, %s)";
|
errorText += "\n([%s], %s, [%s], [%s])";
|
||||||
|
|
||||||
// add the template values.
|
// add the template values.
|
||||||
std::string drvstr;
|
std::string drvstr;
|
||||||
|
|
|
@ -88,7 +88,8 @@ EvalCommand::EvalCommand()
|
||||||
{
|
{
|
||||||
addFlag({
|
addFlag({
|
||||||
.longName = "debugger",
|
.longName = "debugger",
|
||||||
.description = "start an interactive environment if evaluation fails",
|
.description = "Start an interactive environment if evaluation fails.",
|
||||||
|
.category = MixEvalArgs::category,
|
||||||
.handler = {&startReplOnEvalErrors, true},
|
.handler = {&startReplOnEvalErrors, true},
|
||||||
});
|
});
|
||||||
}
|
}
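A quick way to see the `--debugger` flag described above in action might be the following (the failing expression is just an example):

```bash
# Evaluation fails, so instead of exiting, Nix drops into the interactive
# debugger/REPL at the point of failure.
nix eval --debugger --expr 'builtins.throw "boom"'
```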
|
||||||
|
@ -225,7 +226,7 @@ MixProfile::MixProfile()
|
||||||
{
|
{
|
||||||
addFlag({
|
addFlag({
|
||||||
.longName = "profile",
|
.longName = "profile",
|
||||||
.description = "The profile to update.",
|
.description = "The profile to operate on.",
|
||||||
.labels = {"path"},
|
.labels = {"path"},
|
||||||
.handler = {&profile},
|
.handler = {&profile},
|
||||||
.completer = completePath
|
.completer = completePath
|
||||||
|
|
|
@ -13,8 +13,6 @@ namespace nix {
|
||||||
|
|
||||||
MixEvalArgs::MixEvalArgs()
|
MixEvalArgs::MixEvalArgs()
|
||||||
{
|
{
|
||||||
auto category = "Common evaluation options";
|
|
||||||
|
|
||||||
addFlag({
|
addFlag({
|
||||||
.longName = "arg",
|
.longName = "arg",
|
||||||
.description = "Pass the value *expr* as the argument *name* to Nix functions.",
|
.description = "Pass the value *expr* as the argument *name* to Nix functions.",
|
||||||
|
@ -34,7 +32,77 @@ MixEvalArgs::MixEvalArgs()
|
||||||
addFlag({
|
addFlag({
|
||||||
.longName = "include",
|
.longName = "include",
|
||||||
.shortName = 'I',
|
.shortName = 'I',
|
||||||
.description = "Add *path* to the list of locations used to look up `<...>` file names.",
|
.description = R"(
|
||||||
|
Add *path* to the Nix search path. The Nix search path is
|
||||||
|
initialized from the colon-separated [`NIX_PATH`](@docroot@/command-ref/env-common.md#env-NIX_PATH) environment
|
||||||
|
variable, and is used to look up the location of Nix expressions using [paths](@docroot@/language/values.md#type-path) enclosed in angle
|
||||||
|
brackets (i.e., `<nixpkgs>`).
|
||||||
|
|
||||||
|
For instance, passing
|
||||||
|
|
||||||
|
```
|
||||||
|
-I /home/eelco/Dev
|
||||||
|
-I /etc/nixos
|
||||||
|
```
|
||||||
|
|
||||||
|
will cause Nix to look for paths relative to `/home/eelco/Dev` and
|
||||||
|
`/etc/nixos`, in that order. This is equivalent to setting the
|
||||||
|
`NIX_PATH` environment variable to
|
||||||
|
|
||||||
|
```
|
||||||
|
/home/eelco/Dev:/etc/nixos
|
||||||
|
```
|
||||||
|
|
||||||
|
It is also possible to match paths against a prefix. For example,
|
||||||
|
passing
|
||||||
|
|
||||||
|
```
|
||||||
|
-I nixpkgs=/home/eelco/Dev/nixpkgs-branch
|
||||||
|
-I /etc/nixos
|
||||||
|
```
|
||||||
|
|
||||||
|
will cause Nix to search for `<nixpkgs/path>` in
|
||||||
|
`/home/eelco/Dev/nixpkgs-branch/path` and `/etc/nixos/nixpkgs/path`.
|
||||||
|
|
||||||
|
If a path in the Nix search path starts with `http://` or `https://`,
|
||||||
|
it is interpreted as the URL of a tarball that will be downloaded and
|
||||||
|
unpacked to a temporary location. The tarball must consist of a single
|
||||||
|
top-level directory. For example, passing
|
||||||
|
|
||||||
|
```
|
||||||
|
-I nixpkgs=https://github.com/NixOS/nixpkgs/archive/master.tar.gz
|
||||||
|
```
|
||||||
|
|
||||||
|
tells Nix to download and use the current contents of the `master`
|
||||||
|
branch in the `nixpkgs` repository.
|
||||||
|
|
||||||
|
The URLs of the tarballs from the official `nixos.org` channels
|
||||||
|
(see [the manual page for `nix-channel`](../nix-channel.md)) can be
|
||||||
|
abbreviated as `channel:<channel-name>`. For instance, the
|
||||||
|
following two flags are equivalent:
|
||||||
|
|
||||||
|
```
|
||||||
|
-I nixpkgs=channel:nixos-21.05
|
||||||
|
-I nixpkgs=https://nixos.org/channels/nixos-21.05/nixexprs.tar.xz
|
||||||
|
```
|
||||||
|
|
||||||
|
You can also fetch source trees using [flake URLs](./nix3-flake.md#url-like-syntax) and add them to the
|
||||||
|
search path. For instance,
|
||||||
|
|
||||||
|
```
|
||||||
|
-I nixpkgs=flake:nixpkgs
|
||||||
|
```
|
||||||
|
|
||||||
|
specifies that the prefix `nixpkgs` shall refer to the source tree
|
||||||
|
downloaded from the `nixpkgs` entry in the flake registry. Similarly,
|
||||||
|
|
||||||
|
```
|
||||||
|
-I nixpkgs=flake:github:NixOS/nixpkgs/nixos-22.05
|
||||||
|
```
|
||||||
|
|
||||||
|
makes `<nixpkgs>` refer to a particular branch of the
|
||||||
|
`NixOS/nixpkgs` repository on GitHub.
|
||||||
|
)",
|
||||||
.category = category,
|
.category = category,
|
||||||
.labels = {"path"},
|
.labels = {"path"},
|
||||||
.handler = {[&](std::string s) { searchPath.push_back(s); }}
|
.handler = {[&](std::string s) { searchPath.push_back(s); }}
|
||||||
|
@ -91,14 +159,25 @@ Bindings * MixEvalArgs::getAutoArgs(EvalState & state)
|
||||||
|
|
||||||
Path lookupFileArg(EvalState & state, std::string_view s)
|
Path lookupFileArg(EvalState & state, std::string_view s)
|
||||||
{
|
{
|
||||||
if (isUri(s)) {
|
if (EvalSettings::isPseudoUrl(s)) {
|
||||||
return state.store->toRealPath(
|
auto storePath = fetchers::downloadTarball(
|
||||||
fetchers::downloadTarball(
|
state.store, EvalSettings::resolvePseudoUrl(s), "source", false).first.storePath;
|
||||||
state.store, resolveUri(s), "source", false).first.storePath);
|
return state.store->toRealPath(storePath);
|
||||||
} else if (s.size() > 2 && s.at(0) == '<' && s.at(s.size() - 1) == '>') {
|
}
|
||||||
|
|
||||||
|
else if (hasPrefix(s, "flake:")) {
|
||||||
|
settings.requireExperimentalFeature(Xp::Flakes);
|
||||||
|
auto flakeRef = parseFlakeRef(std::string(s.substr(6)), {}, true, false);
|
||||||
|
auto storePath = flakeRef.resolve(state.store).fetchTree(state.store).first.storePath;
|
||||||
|
return state.store->toRealPath(storePath);
|
||||||
|
}
|
||||||
|
|
||||||
|
else if (s.size() > 2 && s.at(0) == '<' && s.at(s.size() - 1) == '>') {
|
||||||
Path p(s.substr(1, s.size() - 2));
|
Path p(s.substr(1, s.size() - 2));
|
||||||
return state.findFile(p);
|
return state.findFile(p);
|
||||||
} else
|
}
|
||||||
|
|
||||||
|
else
|
||||||
return absPath(std::string(s));
|
return absPath(std::string(s));
|
||||||
}
|
}
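`lookupFileArg` now accepts two additional spellings for `--file` arguments: pseudo-URLs, which are downloaded and unpacked as tarballs, and `flake:` references, which are gated behind the flakes feature. Roughly (URL, attribute, and feature flags are illustrative):

```bash
# --file with a tarball URL: fetched, unpacked, then evaluated.
nix build --file https://github.com/NixOS/nixpkgs/archive/master.tar.gz hello

# --file with a flake reference, handled by the new `flake:` branch.
nix build --file flake:nixpkgs hello \
    --extra-experimental-features 'nix-command flakes'
```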
|
||||||
|
|
||||||
|
|
|
@ -10,6 +10,8 @@ class Bindings;
|
||||||
|
|
||||||
struct MixEvalArgs : virtual Args
|
struct MixEvalArgs : virtual Args
|
||||||
{
|
{
|
||||||
|
static constexpr auto category = "Common evaluation options";
|
||||||
|
|
||||||
MixEvalArgs();
|
MixEvalArgs();
|
||||||
|
|
||||||
Bindings * getAutoArgs(EvalState & state);
|
Bindings * getAutoArgs(EvalState & state);
|
||||||
|
|
|
@ -1,5 +1,6 @@
|
||||||
#include "globals.hh"
|
#include "globals.hh"
|
||||||
#include "installables.hh"
|
#include "installables.hh"
|
||||||
|
#include "outputs-spec.hh"
|
||||||
#include "util.hh"
|
#include "util.hh"
|
||||||
#include "command.hh"
|
#include "command.hh"
|
||||||
#include "attr-path.hh"
|
#include "attr-path.hh"
|
||||||
|
@ -168,7 +169,7 @@ SourceExprCommand::SourceExprCommand(bool supportReadOnlyMode)
|
||||||
|
|
||||||
addFlag({
|
addFlag({
|
||||||
.longName = "derivation",
|
.longName = "derivation",
|
||||||
.description = "Operate on the store derivation rather than its outputs.",
|
.description = "Operate on the [store derivation](../../glossary.md#gloss-store-derivation) rather than its outputs.",
|
||||||
.category = installablesCategory,
|
.category = installablesCategory,
|
||||||
.handler = {&operateOn, OperateOn::Derivation},
|
.handler = {&operateOn, OperateOn::Derivation},
|
||||||
});
|
});
|
||||||
|
@ -207,55 +208,59 @@ Strings SourceExprCommand::getDefaultFlakeAttrPathPrefixes()
|
||||||
|
|
||||||
void SourceExprCommand::completeInstallable(std::string_view prefix)
|
void SourceExprCommand::completeInstallable(std::string_view prefix)
|
||||||
{
|
{
|
||||||
if (file) {
|
try {
|
||||||
completionType = ctAttrs;
|
if (file) {
|
||||||
|
completionType = ctAttrs;
|
||||||
|
|
||||||
evalSettings.pureEval = false;
|
evalSettings.pureEval = false;
|
||||||
auto state = getEvalState();
|
auto state = getEvalState();
|
||||||
Expr *e = state->parseExprFromFile(
|
Expr *e = state->parseExprFromFile(
|
||||||
resolveExprPath(state->checkSourcePath(lookupFileArg(*state, *file)))
|
resolveExprPath(state->checkSourcePath(lookupFileArg(*state, *file)))
|
||||||
);
|
);
|
||||||
|
|
||||||
Value root;
|
Value root;
|
||||||
state->eval(e, root);
|
state->eval(e, root);
|
||||||
|
|
||||||
auto autoArgs = getAutoArgs(*state);
|
auto autoArgs = getAutoArgs(*state);
|
||||||
|
|
||||||
std::string prefix_ = std::string(prefix);
|
std::string prefix_ = std::string(prefix);
|
||||||
auto sep = prefix_.rfind('.');
|
auto sep = prefix_.rfind('.');
|
||||||
std::string searchWord;
|
std::string searchWord;
|
||||||
if (sep != std::string::npos) {
|
if (sep != std::string::npos) {
|
||||||
searchWord = prefix_.substr(sep + 1, std::string::npos);
|
searchWord = prefix_.substr(sep + 1, std::string::npos);
|
||||||
prefix_ = prefix_.substr(0, sep);
|
prefix_ = prefix_.substr(0, sep);
|
||||||
} else {
|
} else {
|
||||||
searchWord = prefix_;
|
searchWord = prefix_;
|
||||||
prefix_ = "";
|
prefix_ = "";
|
||||||
}
|
}
|
||||||
|
|
||||||
auto [v, pos] = findAlongAttrPath(*state, prefix_, *autoArgs, root);
|
auto [v, pos] = findAlongAttrPath(*state, prefix_, *autoArgs, root);
|
||||||
Value &v1(*v);
|
Value &v1(*v);
|
||||||
state->forceValue(v1, pos);
|
state->forceValue(v1, pos);
|
||||||
Value v2;
|
Value v2;
|
||||||
state->autoCallFunction(*autoArgs, v1, v2);
|
state->autoCallFunction(*autoArgs, v1, v2);
|
||||||
|
|
||||||
if (v2.type() == nAttrs) {
|
if (v2.type() == nAttrs) {
|
||||||
for (auto & i : *v2.attrs) {
|
for (auto & i : *v2.attrs) {
|
||||||
std::string name = state->symbols[i.name];
|
std::string name = state->symbols[i.name];
|
||||||
if (name.find(searchWord) == 0) {
|
if (name.find(searchWord) == 0) {
|
||||||
if (prefix_ == "")
|
if (prefix_ == "")
|
||||||
completions->add(name);
|
completions->add(name);
|
||||||
else
|
else
|
||||||
completions->add(prefix_ + "." + name);
|
completions->add(prefix_ + "." + name);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
} else {
|
||||||
|
completeFlakeRefWithFragment(
|
||||||
|
getEvalState(),
|
||||||
|
lockFlags,
|
||||||
|
getDefaultFlakeAttrPathPrefixes(),
|
||||||
|
getDefaultFlakeAttrPaths(),
|
||||||
|
prefix);
|
||||||
}
|
}
|
||||||
} else {
|
} catch (EvalError&) {
|
||||||
completeFlakeRefWithFragment(
|
// Don't want eval errors to mess up the completion engine, so let's just swallow them
|
||||||
getEvalState(),
|
|
||||||
lockFlags,
|
|
||||||
getDefaultFlakeAttrPathPrefixes(),
|
|
||||||
getDefaultFlakeAttrPaths(),
|
|
||||||
prefix);
|
|
||||||
}
|
}
|
||||||
}
|
}
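Because attribute completion now runs inside a `try`/`catch (EvalError&)`, a `--file` expression that fails to evaluate degrades to an empty suggestion list rather than breaking the shell's completion. A rough way to observe this, mimicking what the completion scripts do (the word index passed via `NIX_GET_COMPLETIONS` is illustrative):

```bash
# A file that throws during evaluation no longer aborts the completion request.
echo 'throw "not today"' > ./broken.nix
NIX_GET_COMPLETIONS=5 nix build -f ./broken.nix ''
```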
|
||||||
|
|
||||||
|
@ -354,7 +359,7 @@ void completeFlakeRef(ref<Store> store, std::string_view prefix)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
DerivedPath Installable::toDerivedPath()
|
DerivedPathWithInfo Installable::toDerivedPath()
|
||||||
{
|
{
|
||||||
auto buildables = toDerivedPaths();
|
auto buildables = toDerivedPaths();
|
||||||
if (buildables.size() != 1)
|
if (buildables.size() != 1)
|
||||||
|
@ -395,93 +400,53 @@ static StorePath getDeriver(
|
||||||
struct InstallableStorePath : Installable
|
struct InstallableStorePath : Installable
|
||||||
{
|
{
|
||||||
ref<Store> store;
|
ref<Store> store;
|
||||||
StorePath storePath;
|
DerivedPath req;
|
||||||
|
|
||||||
InstallableStorePath(ref<Store> store, StorePath && storePath)
|
InstallableStorePath(ref<Store> store, DerivedPath && req)
|
||||||
: store(store), storePath(std::move(storePath)) { }
|
: store(store), req(std::move(req))
|
||||||
|
{ }
|
||||||
|
|
||||||
std::string what() const override { return store->printStorePath(storePath); }
|
std::string what() const override
|
||||||
|
|
||||||
DerivedPaths toDerivedPaths() override
|
|
||||||
{
|
{
|
||||||
if (storePath.isDerivation()) {
|
return req.to_string(*store);
|
||||||
auto drv = store->readDerivation(storePath);
|
|
||||||
return {
|
|
||||||
DerivedPath::Built {
|
|
||||||
.drvPath = storePath,
|
|
||||||
.outputs = drv.outputNames(),
|
|
||||||
}
|
|
||||||
};
|
|
||||||
} else {
|
|
||||||
return {
|
|
||||||
DerivedPath::Opaque {
|
|
||||||
.path = storePath,
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
StorePathSet toDrvPaths(ref<Store> store) override
|
DerivedPathsWithInfo toDerivedPaths() override
|
||||||
{
|
{
|
||||||
if (storePath.isDerivation()) {
|
return {{.path = req, .info = {} }};
|
||||||
return {storePath};
|
|
||||||
} else {
|
|
||||||
return {getDeriver(store, *this, storePath)};
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
std::optional<StorePath> getStorePath() override
|
std::optional<StorePath> getStorePath() override
|
||||||
{
|
{
|
||||||
return storePath;
|
return std::visit(overloaded {
|
||||||
|
[&](const DerivedPath::Built & bfd) {
|
||||||
|
return bfd.drvPath;
|
||||||
|
},
|
||||||
|
[&](const DerivedPath::Opaque & bo) {
|
||||||
|
return bo.path;
|
||||||
|
},
|
||||||
|
}, req.raw());
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
DerivedPaths InstallableValue::toDerivedPaths()
|
|
||||||
{
|
|
||||||
DerivedPaths res;
|
|
||||||
|
|
||||||
std::map<StorePath, std::set<std::string>> drvsToOutputs;
|
|
||||||
RealisedPath::Set drvsToCopy;
|
|
||||||
|
|
||||||
// Group by derivation, helps with .all in particular
|
|
||||||
for (auto & drv : toDerivations()) {
|
|
||||||
for (auto & outputName : drv.outputsToInstall)
|
|
||||||
drvsToOutputs[drv.drvPath].insert(outputName);
|
|
||||||
drvsToCopy.insert(drv.drvPath);
|
|
||||||
}
|
|
||||||
|
|
||||||
for (auto & i : drvsToOutputs)
|
|
||||||
res.push_back(DerivedPath::Built { i.first, i.second });
|
|
||||||
|
|
||||||
return res;
|
|
||||||
}
|
|
||||||
|
|
||||||
StorePathSet InstallableValue::toDrvPaths(ref<Store> store)
|
|
||||||
{
|
|
||||||
StorePathSet res;
|
|
||||||
for (auto & drv : toDerivations())
|
|
||||||
res.insert(drv.drvPath);
|
|
||||||
return res;
|
|
||||||
}
|
|
||||||
|
|
||||||
struct InstallableAttrPath : InstallableValue
|
struct InstallableAttrPath : InstallableValue
|
||||||
{
|
{
|
||||||
SourceExprCommand & cmd;
|
SourceExprCommand & cmd;
|
||||||
RootValue v;
|
RootValue v;
|
||||||
std::string attrPath;
|
std::string attrPath;
|
||||||
OutputsSpec outputsSpec;
|
ExtendedOutputsSpec extendedOutputsSpec;
|
||||||
|
|
||||||
InstallableAttrPath(
|
InstallableAttrPath(
|
||||||
ref<EvalState> state,
|
ref<EvalState> state,
|
||||||
SourceExprCommand & cmd,
|
SourceExprCommand & cmd,
|
||||||
Value * v,
|
Value * v,
|
||||||
const std::string & attrPath,
|
const std::string & attrPath,
|
||||||
OutputsSpec outputsSpec)
|
ExtendedOutputsSpec extendedOutputsSpec)
|
||||||
: InstallableValue(state)
|
: InstallableValue(state)
|
||||||
, cmd(cmd)
|
, cmd(cmd)
|
||||||
, v(allocRootValue(v))
|
, v(allocRootValue(v))
|
||||||
, attrPath(attrPath)
|
, attrPath(attrPath)
|
||||||
, outputsSpec(std::move(outputsSpec))
|
, extendedOutputsSpec(std::move(extendedOutputsSpec))
|
||||||
{ }
|
{ }
|
||||||
|
|
||||||
std::string what() const override { return attrPath; }
|
std::string what() const override { return attrPath; }
|
||||||
|
@ -493,40 +458,54 @@ struct InstallableAttrPath : InstallableValue
|
||||||
return {vRes, pos};
|
return {vRes, pos};
|
||||||
}
|
}
|
||||||
|
|
||||||
virtual std::vector<InstallableValue::DerivationInfo> toDerivations() override;
|
DerivedPathsWithInfo toDerivedPaths() override
|
||||||
};
|
{
|
||||||
|
auto v = toValue(*state).first;
|
||||||
|
|
||||||
std::vector<InstallableValue::DerivationInfo> InstallableAttrPath::toDerivations()
|
Bindings & autoArgs = *cmd.getAutoArgs(*state);
|
||||||
{
|
|
||||||
auto v = toValue(*state).first;
|
|
||||||
|
|
||||||
Bindings & autoArgs = *cmd.getAutoArgs(*state);
|
DrvInfos drvInfos;
|
||||||
|
getDerivations(*state, *v, "", autoArgs, drvInfos, false);
|
||||||
|
|
||||||
DrvInfos drvInfos;
|
// Backward compatibility hack: group results by drvPath. This
|
||||||
getDerivations(*state, *v, "", autoArgs, drvInfos, false);
|
// helps keep .all output together.
|
||||||
|
std::map<StorePath, OutputsSpec> byDrvPath;
|
||||||
|
|
||||||
std::vector<DerivationInfo> res;
|
for (auto & drvInfo : drvInfos) {
|
||||||
for (auto & drvInfo : drvInfos) {
|
auto drvPath = drvInfo.queryDrvPath();
|
||||||
auto drvPath = drvInfo.queryDrvPath();
|
if (!drvPath)
|
||||||
if (!drvPath)
|
throw Error("'%s' is not a derivation", what());
|
||||||
throw Error("'%s' is not a derivation", what());
|
|
||||||
|
|
||||||
std::set<std::string> outputsToInstall;
|
auto newOutputs = std::visit(overloaded {
|
||||||
|
[&](const ExtendedOutputsSpec::Default & d) -> OutputsSpec {
|
||||||
|
std::set<std::string> outputsToInstall;
|
||||||
|
for (auto & output : drvInfo.queryOutputs(false, true))
|
||||||
|
outputsToInstall.insert(output.first);
|
||||||
|
return OutputsSpec::Names { std::move(outputsToInstall) };
|
||||||
|
},
|
||||||
|
[&](const ExtendedOutputsSpec::Explicit & e) -> OutputsSpec {
|
||||||
|
return e;
|
||||||
|
},
|
||||||
|
}, extendedOutputsSpec.raw());
|
||||||
|
|
||||||
if (auto outputNames = std::get_if<OutputNames>(&outputsSpec))
|
auto [iter, didInsert] = byDrvPath.emplace(*drvPath, newOutputs);
|
||||||
outputsToInstall = *outputNames;
|
|
||||||
else
|
|
||||||
for (auto & output : drvInfo.queryOutputs(false, std::get_if<DefaultOutputs>(&outputsSpec)))
|
|
||||||
outputsToInstall.insert(output.first);
|
|
||||||
|
|
||||||
res.push_back(DerivationInfo {
|
if (!didInsert)
|
||||||
.drvPath = *drvPath,
|
iter->second = iter->second.union_(newOutputs);
|
||||||
.outputsToInstall = std::move(outputsToInstall)
|
}
|
||||||
});
|
|
||||||
|
DerivedPathsWithInfo res;
|
||||||
|
for (auto & [drvPath, outputs] : byDrvPath)
|
||||||
|
res.push_back({
|
||||||
|
.path = DerivedPath::Built {
|
||||||
|
.drvPath = drvPath,
|
||||||
|
.outputs = outputs,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
return res;
|
||||||
}
|
}
|
||||||
|
};
|
||||||
return res;
|
|
||||||
}
|
|
||||||
|
|
||||||
std::vector<std::string> InstallableFlake::getActualAttrPaths()
|
std::vector<std::string> InstallableFlake::getActualAttrPaths()
|
||||||
{
|
{
|
||||||
|
@ -575,7 +554,7 @@ ref<eval_cache::EvalCache> openEvalCache(
|
||||||
auto vFlake = state.allocValue();
|
auto vFlake = state.allocValue();
|
||||||
flake::callFlake(state, *lockedFlake, *vFlake);
|
flake::callFlake(state, *lockedFlake, *vFlake);
|
||||||
|
|
||||||
state.forceAttrs(*vFlake, noPos);
|
state.forceAttrs(*vFlake, noPos, "while parsing cached flake data");
|
||||||
|
|
||||||
auto aOutputs = vFlake->attrs->get(state.symbols.create("outputs"));
|
auto aOutputs = vFlake->attrs->get(state.symbols.create("outputs"));
|
||||||
assert(aOutputs);
|
assert(aOutputs);
|
||||||
|
@ -599,7 +578,7 @@ InstallableFlake::InstallableFlake(
|
||||||
ref<EvalState> state,
|
ref<EvalState> state,
|
||||||
FlakeRef && flakeRef,
|
FlakeRef && flakeRef,
|
||||||
std::string_view fragment,
|
std::string_view fragment,
|
||||||
OutputsSpec outputsSpec,
|
ExtendedOutputsSpec extendedOutputsSpec,
|
||||||
Strings attrPaths,
|
Strings attrPaths,
|
||||||
Strings prefixes,
|
Strings prefixes,
|
||||||
const flake::LockFlags & lockFlags)
|
const flake::LockFlags & lockFlags)
|
||||||
|
@ -607,14 +586,14 @@ InstallableFlake::InstallableFlake(
|
||||||
flakeRef(flakeRef),
|
flakeRef(flakeRef),
|
||||||
attrPaths(fragment == "" ? attrPaths : Strings{(std::string) fragment}),
|
attrPaths(fragment == "" ? attrPaths : Strings{(std::string) fragment}),
|
||||||
prefixes(fragment == "" ? Strings{} : prefixes),
|
prefixes(fragment == "" ? Strings{} : prefixes),
|
||||||
outputsSpec(std::move(outputsSpec)),
|
extendedOutputsSpec(std::move(extendedOutputsSpec)),
|
||||||
lockFlags(lockFlags)
|
lockFlags(lockFlags)
|
||||||
{
|
{
|
||||||
if (cmd && cmd->getAutoArgs(*state)->size())
|
if (cmd && cmd->getAutoArgs(*state)->size())
|
||||||
throw UsageError("'--arg' and '--argstr' are incompatible with flakes");
|
throw UsageError("'--arg' and '--argstr' are incompatible with flakes");
|
||||||
}
|
}
|
||||||
|
|
||||||
std::tuple<std::string, FlakeRef, InstallableValue::DerivationInfo> InstallableFlake::toDerivation()
|
DerivedPathsWithInfo InstallableFlake::toDerivedPaths()
|
||||||
{
|
{
|
||||||
Activity act(*logger, lvlTalkative, actUnknown, fmt("evaluating derivation '%s'", what()));
|
Activity act(*logger, lvlTalkative, actUnknown, fmt("evaluating derivation '%s'", what()));
|
||||||
|
|
||||||
|
@ -622,56 +601,84 @@ std::tuple<std::string, FlakeRef, InstallableValue::DerivationInfo> InstallableF
|
||||||
|
|
||||||
auto attrPath = attr->getAttrPathStr();
|
auto attrPath = attr->getAttrPathStr();
|
||||||
|
|
||||||
if (!attr->isDerivation())
|
if (!attr->isDerivation()) {
|
||||||
throw Error("flake output attribute '%s' is not a derivation", attrPath);
|
|
||||||
|
// FIXME: use eval cache?
|
||||||
|
auto v = attr->forceValue();
|
||||||
|
|
||||||
|
if (v.type() == nPath) {
|
||||||
|
PathSet context;
|
||||||
|
auto storePath = state->copyPathToStore(context, Path(v.path));
|
||||||
|
return {{
|
||||||
|
.path = DerivedPath::Opaque {
|
||||||
|
.path = std::move(storePath),
|
||||||
|
}
|
||||||
|
}};
|
||||||
|
}
|
||||||
|
|
||||||
|
else if (v.type() == nString) {
|
||||||
|
PathSet context;
|
||||||
|
auto s = state->forceString(v, context, noPos, fmt("while evaluating the flake output attribute '%s'", attrPath));
|
||||||
|
auto storePath = state->store->maybeParseStorePath(s);
|
||||||
|
if (storePath && context.count(std::string(s))) {
|
||||||
|
return {{
|
||||||
|
.path = DerivedPath::Opaque {
|
||||||
|
.path = std::move(*storePath),
|
||||||
|
}
|
||||||
|
}};
|
||||||
|
} else
|
||||||
|
throw Error("flake output attribute '%s' evaluates to the string '%s' which is not a store path", attrPath, s);
|
||||||
|
}
|
||||||
|
|
||||||
|
else
|
||||||
|
throw Error("flake output attribute '%s' is not a derivation or path", attrPath);
|
||||||
|
}
|
||||||
|
|
||||||
auto drvPath = attr->forceDerivation();
|
auto drvPath = attr->forceDerivation();
|
||||||
|
|
||||||
std::set<std::string> outputsToInstall;
|
|
||||||
std::optional<NixInt> priority;
|
std::optional<NixInt> priority;
|
||||||
|
|
||||||
if (auto aOutputSpecified = attr->maybeGetAttr(state->sOutputSpecified)) {
|
if (attr->maybeGetAttr(state->sOutputSpecified)) {
|
||||||
if (aOutputSpecified->getBool()) {
|
} else if (auto aMeta = attr->maybeGetAttr(state->sMeta)) {
|
||||||
if (auto aOutputName = attr->maybeGetAttr("outputName"))
|
|
||||||
outputsToInstall = { aOutputName->getString() };
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
else if (auto aMeta = attr->maybeGetAttr(state->sMeta)) {
|
|
||||||
if (auto aOutputsToInstall = aMeta->maybeGetAttr("outputsToInstall"))
|
|
||||||
for (auto & s : aOutputsToInstall->getListOfStrings())
|
|
||||||
outputsToInstall.insert(s);
|
|
||||||
if (auto aPriority = aMeta->maybeGetAttr("priority"))
|
if (auto aPriority = aMeta->maybeGetAttr("priority"))
|
||||||
priority = aPriority->getInt();
|
priority = aPriority->getInt();
|
||||||
}
|
}
|
||||||
|
|
||||||
if (outputsToInstall.empty() || std::get_if<AllOutputs>(&outputsSpec)) {
|
return {{
|
||||||
outputsToInstall.clear();
|
.path = DerivedPath::Built {
|
||||||
if (auto aOutputs = attr->maybeGetAttr(state->sOutputs))
|
.drvPath = std::move(drvPath),
|
||||||
for (auto & s : aOutputs->getListOfStrings())
|
.outputs = std::visit(overloaded {
|
||||||
outputsToInstall.insert(s);
|
[&](const ExtendedOutputsSpec::Default & d) -> OutputsSpec {
|
||||||
}
|
std::set<std::string> outputsToInstall;
|
||||||
|
if (auto aOutputSpecified = attr->maybeGetAttr(state->sOutputSpecified)) {
|
||||||
|
if (aOutputSpecified->getBool()) {
|
||||||
|
if (auto aOutputName = attr->maybeGetAttr("outputName"))
|
||||||
|
outputsToInstall = { aOutputName->getString() };
|
||||||
|
}
|
||||||
|
} else if (auto aMeta = attr->maybeGetAttr(state->sMeta)) {
|
||||||
|
if (auto aOutputsToInstall = aMeta->maybeGetAttr("outputsToInstall"))
|
||||||
|
for (auto & s : aOutputsToInstall->getListOfStrings())
|
||||||
|
outputsToInstall.insert(s);
|
||||||
|
}
|
||||||
|
|
||||||
if (outputsToInstall.empty())
|
if (outputsToInstall.empty())
|
||||||
outputsToInstall.insert("out");
|
outputsToInstall.insert("out");
|
||||||
|
|
||||||
if (auto outputNames = std::get_if<OutputNames>(&outputsSpec))
|
return OutputsSpec::Names { std::move(outputsToInstall) };
|
||||||
outputsToInstall = *outputNames;
|
},
|
||||||
|
[&](const ExtendedOutputsSpec::Explicit & e) -> OutputsSpec {
|
||||||
auto drvInfo = DerivationInfo {
|
return e;
|
||||||
.drvPath = std::move(drvPath),
|
},
|
||||||
.outputsToInstall = std::move(outputsToInstall),
|
}, extendedOutputsSpec.raw()),
|
||||||
.priority = priority,
|
},
|
||||||
};
|
.info = {
|
||||||
|
.priority = priority,
|
||||||
return {attrPath, getLockedFlake()->flake.lockedRef, std::move(drvInfo)};
|
.originalRef = flakeRef,
|
||||||
}
|
.resolvedRef = getLockedFlake()->flake.lockedRef,
|
||||||
|
.attrPath = attrPath,
|
||||||
std::vector<InstallableValue::DerivationInfo> InstallableFlake::toDerivations()
|
.extendedOutputsSpec = extendedOutputsSpec,
|
||||||
{
|
}
|
||||||
std::vector<DerivationInfo> res;
|
}};
|
||||||
res.push_back(std::get<2>(toDerivation()));
|
|
||||||
return res;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
std::pair<Value *, PosIdx> InstallableFlake::toValue(EvalState & state)
|
std::pair<Value *, PosIdx> InstallableFlake::toValue(EvalState & state)
|
||||||
|
@ -777,7 +784,8 @@ std::vector<std::shared_ptr<Installable>> SourceExprCommand::parseInstallables(
|
||||||
if (file == "-") {
|
if (file == "-") {
|
||||||
auto e = state->parseStdin();
|
auto e = state->parseStdin();
|
||||||
state->eval(e, *vFile);
|
state->eval(e, *vFile);
|
||||||
} else if (file)
|
}
|
||||||
|
else if (file)
|
||||||
state->evalFile(lookupFileArg(*state, *file), *vFile);
|
state->evalFile(lookupFileArg(*state, *file), *vFile);
|
||||||
else {
|
else {
|
||||||
auto e = state->parseExprFromString(*expr, absPath("."));
|
auto e = state->parseExprFromString(*expr, absPath("."));
|
||||||
|
@ -785,12 +793,12 @@ std::vector<std::shared_ptr<Installable>> SourceExprCommand::parseInstallables(
|
||||||
}
|
}
|
||||||
|
|
||||||
for (auto & s : ss) {
|
for (auto & s : ss) {
|
||||||
auto [prefix, outputsSpec] = parseOutputsSpec(s);
|
auto [prefix, extendedOutputsSpec] = ExtendedOutputsSpec::parse(s);
|
||||||
result.push_back(
|
result.push_back(
|
||||||
std::make_shared<InstallableAttrPath>(
|
std::make_shared<InstallableAttrPath>(
|
||||||
state, *this, vFile,
|
state, *this, vFile,
|
||||||
prefix == "." ? "" : prefix,
|
prefix == "." ? "" : std::string { prefix },
|
||||||
outputsSpec));
|
extendedOutputsSpec));
|
||||||
}
|
}
|
||||||
|
|
||||||
} else {
|
} else {
|
||||||
|
@ -798,9 +806,46 @@ std::vector<std::shared_ptr<Installable>> SourceExprCommand::parseInstallables(
|
||||||
for (auto & s : ss) {
|
for (auto & s : ss) {
|
||||||
std::exception_ptr ex;
|
std::exception_ptr ex;
|
||||||
|
|
||||||
if (s.find('/') != std::string::npos) {
|
auto [prefix_, extendedOutputsSpec_] = ExtendedOutputsSpec::parse(s);
|
||||||
|
// To avoid clang's pedantry
|
||||||
|
auto prefix = std::move(prefix_);
|
||||||
|
auto extendedOutputsSpec = std::move(extendedOutputsSpec_);
|
||||||
|
|
||||||
|
auto found = prefix.find('/');
|
||||||
|
if (found != std::string::npos) {
|
||||||
try {
|
try {
|
||||||
result.push_back(std::make_shared<InstallableStorePath>(store, store->followLinksToStorePath(s)));
|
auto derivedPath = std::visit(overloaded {
|
||||||
|
// If the user did not use ^, we treat the output more liberally.
|
||||||
|
[&](const ExtendedOutputsSpec::Default &) -> DerivedPath {
|
||||||
|
// First, we accept a symlink chain or an actual store path.
|
||||||
|
auto storePath = store->followLinksToStorePath(prefix);
|
||||||
|
// Second, we see if the store path ends in `.drv` to decide what sort
|
||||||
|
// of derived path they want.
|
||||||
|
//
|
||||||
|
// This handling predates the `^` syntax. The `^*` in
|
||||||
|
// `/nix/store/hash-foo.drv^*` unambiguously means "do the
|
||||||
|
// `DerivedPath::Built` case", so plain `/nix/store/hash-foo.drv` could
|
||||||
|
// also unambiguously mean "do the DerivedPath::Opaque` case".
|
||||||
|
//
|
||||||
|
// Issue #7261 tracks reconsidering this `.drv` dispatching.
|
||||||
|
return storePath.isDerivation()
|
||||||
|
? (DerivedPath) DerivedPath::Built {
|
||||||
|
.drvPath = std::move(storePath),
|
||||||
|
.outputs = OutputsSpec::All {},
|
||||||
|
}
|
||||||
|
: (DerivedPath) DerivedPath::Opaque {
|
||||||
|
.path = std::move(storePath),
|
||||||
|
};
|
||||||
|
},
|
||||||
|
// If the user did use ^, we just do exactly what is written.
|
||||||
|
[&](const ExtendedOutputsSpec::Explicit & outputSpec) -> DerivedPath {
|
||||||
|
return DerivedPath::Built {
|
||||||
|
.drvPath = store->parseStorePath(prefix),
|
||||||
|
.outputs = outputSpec,
|
||||||
|
};
|
||||||
|
},
|
||||||
|
}, extendedOutputsSpec.raw());
|
||||||
|
result.push_back(std::make_shared<InstallableStorePath>(store, std::move(derivedPath)));
|
||||||
continue;
|
continue;
|
||||||
} catch (BadStorePath &) {
|
} catch (BadStorePath &) {
|
||||||
} catch (...) {
|
} catch (...) {
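The comments above spell out how a raw store-path installable is interpreted now that the `^` output syntax exists. A few hedged sketches of the resulting command-line behaviour (hashes and package names are placeholders):

```bash
# Plain .drv path, no ^ : handled as DerivedPath::Built (build its outputs).
nix build '/nix/store/<hash>-hello-2.12.drv'

# Explicit ^ spec: exactly what is written, via the Explicit branch above.
nix build '/nix/store/<hash>-hello-2.12.drv^*'
nix build '/nix/store/<hash>-hello-2.12.drv^out'

# Non-.drv store path: DerivedPath::Opaque, used as-is.
nix build '/nix/store/<hash>-hello-2.12'
```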
|
||||||
|
@ -810,13 +855,13 @@ std::vector<std::shared_ptr<Installable>> SourceExprCommand::parseInstallables(
|
||||||
}
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
auto [flakeRef, fragment, outputsSpec] = parseFlakeRefWithFragmentAndOutputsSpec(s, absPath("."));
|
auto [flakeRef, fragment] = parseFlakeRefWithFragment(std::string { prefix }, absPath("."));
|
||||||
result.push_back(std::make_shared<InstallableFlake>(
|
result.push_back(std::make_shared<InstallableFlake>(
|
||||||
this,
|
this,
|
||||||
getEvalState(),
|
getEvalState(),
|
||||||
std::move(flakeRef),
|
std::move(flakeRef),
|
||||||
fragment,
|
fragment,
|
||||||
outputsSpec,
|
extendedOutputsSpec,
|
||||||
getDefaultFlakeAttrPaths(),
|
getDefaultFlakeAttrPaths(),
|
||||||
getDefaultFlakeAttrPathPrefixes(),
|
getDefaultFlakeAttrPathPrefixes(),
|
||||||
lockFlags));
|
lockFlags));
|
||||||
|
@ -840,20 +885,20 @@ std::shared_ptr<Installable> SourceExprCommand::parseInstallable(
|
||||||
return installables.front();
|
return installables.front();
|
||||||
}
|
}
|
||||||
|
|
||||||
BuiltPaths Installable::build(
|
std::vector<BuiltPathWithResult> Installable::build(
|
||||||
ref<Store> evalStore,
|
ref<Store> evalStore,
|
||||||
ref<Store> store,
|
ref<Store> store,
|
||||||
Realise mode,
|
Realise mode,
|
||||||
const std::vector<std::shared_ptr<Installable>> & installables,
|
const std::vector<std::shared_ptr<Installable>> & installables,
|
||||||
BuildMode bMode)
|
BuildMode bMode)
|
||||||
{
|
{
|
||||||
BuiltPaths res;
|
std::vector<BuiltPathWithResult> res;
|
||||||
for (auto & [_, builtPath] : build2(evalStore, store, mode, installables, bMode))
|
for (auto & [_, builtPathWithResult] : build2(evalStore, store, mode, installables, bMode))
|
||||||
res.push_back(builtPath);
|
res.push_back(builtPathWithResult);
|
||||||
return res;
|
return res;
|
||||||
}
|
}
|
||||||
|
|
||||||
std::vector<std::pair<std::shared_ptr<Installable>, BuiltPath>> Installable::build2(
|
std::vector<std::pair<std::shared_ptr<Installable>, BuiltPathWithResult>> Installable::build2(
|
||||||
ref<Store> evalStore,
|
ref<Store> evalStore,
|
||||||
ref<Store> store,
|
ref<Store> store,
|
||||||
Realise mode,
|
Realise mode,
|
||||||
|
@ -863,17 +908,23 @@ std::vector<std::pair<std::shared_ptr<Installable>, BuiltPath>> Installable::bui
|
||||||
if (mode == Realise::Nothing)
|
if (mode == Realise::Nothing)
|
||||||
settings.readOnlyMode = true;
|
settings.readOnlyMode = true;
|
||||||
|
|
||||||
|
struct Aux
|
||||||
|
{
|
||||||
|
ExtraPathInfo info;
|
||||||
|
std::shared_ptr<Installable> installable;
|
||||||
|
};
|
||||||
|
|
||||||
std::vector<DerivedPath> pathsToBuild;
|
std::vector<DerivedPath> pathsToBuild;
|
||||||
std::map<DerivedPath, std::vector<std::shared_ptr<Installable>>> backmap;
|
std::map<DerivedPath, std::vector<Aux>> backmap;
|
||||||
|
|
||||||
for (auto & i : installables) {
|
for (auto & i : installables) {
|
||||||
for (auto b : i->toDerivedPaths()) {
|
for (auto b : i->toDerivedPaths()) {
|
||||||
pathsToBuild.push_back(b);
|
pathsToBuild.push_back(b.path);
|
||||||
backmap[b].push_back(i);
|
backmap[b.path].push_back({.info = b.info, .installable = i});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
std::vector<std::pair<std::shared_ptr<Installable>, BuiltPath>> res;
|
std::vector<std::pair<std::shared_ptr<Installable>, BuiltPathWithResult>> res;
|
||||||
|
|
||||||
switch (mode) {
|
switch (mode) {
|
||||||
|
|
||||||
|
@@ -882,42 +933,18 @@ std::vector<std::pair<std::shared_ptr<Installable>, BuiltPath>> Installable::build2(
         printMissing(store, pathsToBuild, lvlError);

         for (auto & path : pathsToBuild) {
-            for (auto & installable : backmap[path]) {
+            for (auto & aux : backmap[path]) {
                 std::visit(overloaded {
                     [&](const DerivedPath::Built & bfd) {
-                        OutputPathMap outputs;
-                        auto drv = evalStore->readDerivation(bfd.drvPath);
-                        auto outputHashes = staticOutputHashes(*evalStore, drv); // FIXME: expensive
-                        auto drvOutputs = drv.outputsAndOptPaths(*store);
-                        for (auto & output : bfd.outputs) {
-                            auto outputHash = get(outputHashes, output);
-                            if (!outputHash)
-                                throw Error(
-                                    "the derivation '%s' doesn't have an output named '%s'",
-                                    store->printStorePath(bfd.drvPath), output);
-                            if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations)) {
-                                DrvOutput outputId { *outputHash, output };
-                                auto realisation = store->queryRealisation(outputId);
-                                if (!realisation)
-                                    throw Error(
-                                        "cannot operate on an output of the "
-                                        "unbuilt derivation '%s'",
-                                        outputId.to_string());
-                                outputs.insert_or_assign(output, realisation->outPath);
-                            } else {
-                                // If ca-derivations isn't enabled, assume that
-                                // the output path is statically known.
-                                auto drvOutput = get(drvOutputs, output);
-                                assert(drvOutput);
-                                assert(drvOutput->second);
-                                outputs.insert_or_assign(
-                                    output, *drvOutput->second);
-                            }
-                        }
-                        res.push_back({installable, BuiltPath::Built { bfd.drvPath, outputs }});
+                        auto outputs = resolveDerivedPath(*store, bfd, &*evalStore);
+                        res.push_back({aux.installable, {
+                            .path = BuiltPath::Built { bfd.drvPath, outputs },
+                            .info = aux.info}});
                     },
                     [&](const DerivedPath::Opaque & bo) {
-                        res.push_back({installable, BuiltPath::Opaque { bo.path }});
+                        res.push_back({aux.installable, {
+                            .path = BuiltPath::Opaque { bo.path },
+                            .info = aux.info}});
                     },
                 }, path.raw());
             }
         }

@@ -927,22 +954,28 @@ std::vector<std::pair<std::shared_ptr<Installable>, BuiltPath>> Installable::build2(

     case Realise::Outputs: {
         if (settings.printMissing)
             printMissing(store, pathsToBuild, lvlInfo);

         for (auto & buildResult : store->buildPathsWithResults(pathsToBuild, bMode, evalStore)) {
             if (!buildResult.success())
                 buildResult.rethrow();

-            for (auto & installable : backmap[buildResult.path]) {
+            for (auto & aux : backmap[buildResult.path]) {
                 std::visit(overloaded {
                     [&](const DerivedPath::Built & bfd) {
                         std::map<std::string, StorePath> outputs;
                         for (auto & path : buildResult.builtOutputs)
                             outputs.emplace(path.first.outputName, path.second.outPath);
-                        res.push_back({installable, BuiltPath::Built { bfd.drvPath, outputs }});
+                        res.push_back({aux.installable, {
+                            .path = BuiltPath::Built { bfd.drvPath, outputs },
+                            .info = aux.info,
+                            .result = buildResult}});
                     },
                     [&](const DerivedPath::Opaque & bo) {
-                        res.push_back({installable, BuiltPath::Opaque { bo.path }});
+                        res.push_back({aux.installable, {
+                            .path = BuiltPath::Opaque { bo.path },
+                            .info = aux.info,
+                            .result = buildResult}});
                     },
                 }, buildResult.path.raw());
             }
         }
@@ -965,9 +998,12 @@ BuiltPaths Installable::toBuiltPaths(
     OperateOn operateOn,
     const std::vector<std::shared_ptr<Installable>> & installables)
 {
-    if (operateOn == OperateOn::Output)
-        return Installable::build(evalStore, store, mode, installables);
-    else {
+    if (operateOn == OperateOn::Output) {
+        BuiltPaths res;
+        for (auto & p : Installable::build(evalStore, store, mode, installables))
+            res.push_back(p.path);
+        return res;
+    } else {
         if (mode == Realise::Nothing)
             settings.readOnlyMode = true;
@@ -1024,7 +1060,7 @@ StorePathSet Installable::toDerivations(
             [&](const DerivedPath::Built & bfd) {
                 drvPaths.insert(bfd.drvPath);
             },
-        }, b.raw());
+        }, b.path.raw());

     return drvPaths;
 }
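With `build` now returning `BuiltPathWithResult`, callers can look at the per-path `BuildResult` when one was produced. A minimal sketch of a consumer under that assumption; the `report` helper and its messages are illustrative, not part of this change:

```cpp
#include "installables.hh"   // BuiltPathWithResult (added by this change)
#include "logging.hh"

// Hypothetical helper: summarise what Installable::build() returned.
static void report(const std::vector<nix::BuiltPathWithResult> & built)
{
    for (auto & b : built) {
        // b.path is the BuiltPath, b.info the ExtraPathInfo from toDerivedPaths().
        if (b.result && b.result->success())
            nix::logger->cout("realised (Realise::Outputs)");
        else
            nix::logger->cout("derived only, no build result attached");
    }
}
```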
@@ -2,11 +2,12 @@

 #include "util.hh"
 #include "path.hh"
-#include "path-with-outputs.hh"
+#include "outputs-spec.hh"
 #include "derived-path.hh"
 #include "eval.hh"
 #include "store-api.hh"
 #include "flake/flake.hh"
+#include "build-result.hh"

 #include <optional>

@@ -19,7 +20,7 @@ namespace eval_cache { class EvalCache; class AttrCursor; }

 struct App
 {
-    std::vector<StorePathWithOutputs> context;
+    std::vector<DerivedPath> context;
     Path program;
     // FIXME: add args, sandbox settings, metadata, ...
 };
@@ -51,20 +52,42 @@ enum class OperateOn {
     Derivation
 };

+struct ExtraPathInfo
+{
+    std::optional<NixInt> priority;
+    std::optional<FlakeRef> originalRef;
+    std::optional<FlakeRef> resolvedRef;
+    std::optional<std::string> attrPath;
+    // FIXME: merge with DerivedPath's 'outputs' field?
+    std::optional<ExtendedOutputsSpec> extendedOutputsSpec;
+};
+
+/* A derived path with any additional info that commands might
+   need from the derivation. */
+struct DerivedPathWithInfo
+{
+    DerivedPath path;
+    ExtraPathInfo info;
+};
+
+struct BuiltPathWithResult
+{
+    BuiltPath path;
+    ExtraPathInfo info;
+    std::optional<BuildResult> result;
+};
+
+typedef std::vector<DerivedPathWithInfo> DerivedPathsWithInfo;
+
 struct Installable
 {
     virtual ~Installable() { }

     virtual std::string what() const = 0;

-    virtual DerivedPaths toDerivedPaths() = 0;
+    virtual DerivedPathsWithInfo toDerivedPaths() = 0;

-    virtual StorePathSet toDrvPaths(ref<Store> store)
-    {
-        throw Error("'%s' cannot be converted to a derivation path", what());
-    }
-
-    DerivedPath toDerivedPath();
+    DerivedPathWithInfo toDerivedPath();

     UnresolvedApp toApp(EvalState & state);
@@ -91,14 +114,14 @@ struct Installable
         return FlakeRef::fromAttrs({{"type","indirect"}, {"id", "nixpkgs"}});
     }

-    static BuiltPaths build(
+    static std::vector<BuiltPathWithResult> build(
         ref<Store> evalStore,
         ref<Store> store,
         Realise mode,
         const std::vector<std::shared_ptr<Installable>> & installables,
         BuildMode bMode = bmNormal);

-    static std::vector<std::pair<std::shared_ptr<Installable>, BuiltPath>> build2(
+    static std::vector<std::pair<std::shared_ptr<Installable>, BuiltPathWithResult>> build2(
         ref<Store> evalStore,
         ref<Store> store,
         Realise mode,

@@ -139,19 +162,6 @@ struct InstallableValue : Installable
     ref<EvalState> state;

     InstallableValue(ref<EvalState> state) : state(state) {}

-    struct DerivationInfo
-    {
-        StorePath drvPath;
-        std::set<std::string> outputsToInstall;
-        std::optional<NixInt> priority;
-    };
-
-    virtual std::vector<DerivationInfo> toDerivations() = 0;
-
-    DerivedPaths toDerivedPaths() override;
-
-    StorePathSet toDrvPaths(ref<Store> store) override;
 };

 struct InstallableFlake : InstallableValue

@@ -159,7 +169,7 @@ struct InstallableFlake : InstallableValue
     FlakeRef flakeRef;
     Strings attrPaths;
     Strings prefixes;
-    OutputsSpec outputsSpec;
+    ExtendedOutputsSpec extendedOutputsSpec;
     const flake::LockFlags & lockFlags;
     mutable std::shared_ptr<flake::LockedFlake> _lockedFlake;

@@ -168,7 +178,7 @@ struct InstallableFlake : InstallableValue
         ref<EvalState> state,
         FlakeRef && flakeRef,
         std::string_view fragment,
-        OutputsSpec outputsSpec,
+        ExtendedOutputsSpec extendedOutputsSpec,
         Strings attrPaths,
         Strings prefixes,
         const flake::LockFlags & lockFlags);

@@ -179,9 +189,7 @@ struct InstallableFlake : InstallableValue

     Value * getFlakeOutputs(EvalState & state, const flake::LockedFlake & lockedFlake);

-    std::tuple<std::string, FlakeRef, DerivationInfo> toDerivation();
-
-    std::vector<DerivationInfo> toDerivations() override;
+    DerivedPathsWithInfo toDerivedPaths() override;

     std::pair<Value *, PosIdx> toValue(EvalState & state) override;
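Under the new header, an `Installable` subclass describes its targets as `DerivedPathWithInfo` values. A sketch of what a minimal override could look like; `InstallableExample` is an illustrative class, not one that exists in the tree:

```cpp
#include "installables.hh"

using namespace nix;

// Illustrative only: a single opaque store path with no extra metadata.
struct InstallableExample : Installable
{
    StorePath storePath;

    InstallableExample(StorePath p) : storePath(std::move(p)) { }

    std::string what() const override { return "example"; }

    DerivedPathsWithInfo toDerivedPaths() override
    {
        return {{
            .path = DerivedPath::Opaque { .path = storePath },
            .info = {},   // no priority, flake refs or outputs spec attached
        }};
    }
};
```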
@@ -8,7 +8,7 @@ libcmd_SOURCES := $(wildcard $(d)/*.cc)

 libcmd_CXXFLAGS += -I src/libutil -I src/libstore -I src/libexpr -I src/libmain -I src/libfetchers -I src/nix

-libcmd_LDFLAGS = $(EDITLINE_LIBS) -llowdown -pthread
+libcmd_LDFLAGS = $(EDITLINE_LIBS) $(LOWDOWN_LIBS) -pthread

 libcmd_LIBS = libstore libutil libexpr libmain libfetchers
@@ -215,17 +215,15 @@ static std::ostream & showDebugTrace(std::ostream & out, const PosTable & positions, const DebugTrace & dt)
     out << dt.hint.str() << "\n";

     // prefer direct pos, but if noPos then try the expr.
-    auto pos = *dt.pos
-        ? *dt.pos
-        : positions[dt.expr.getPos() ? dt.expr.getPos() : noPos];
+    auto pos = dt.pos
+        ? dt.pos
+        : static_cast<std::shared_ptr<AbstractPos>>(positions[dt.expr.getPos() ? dt.expr.getPos() : noPos]);

     if (pos) {
-        printAtPos(pos, out);
-
-        auto loc = getCodeLines(pos);
-        if (loc.has_value()) {
+        out << pos;
+        if (auto loc = pos->getCodeLines()) {
             out << "\n";
-            printCodeLines(out, "", pos, *loc);
+            printCodeLines(out, "", *pos, *loc);
             out << "\n";
         }
     }

@@ -242,7 +240,11 @@ void NixRepl::mainLoop()

     // Allow nix-repl specific settings in .inputrc
     rl_readline_name = "nix-repl";
-    createDirs(dirOf(historyFile));
+    try {
+        createDirs(dirOf(historyFile));
+    } catch (SysError & e) {
+        logWarning(e.info());
+    }
 #ifndef READLINE
     el_hist_size = 1000;
 #endif

@@ -266,6 +268,7 @@ void NixRepl::mainLoop()
             // ctrl-D should exit the debugger.
             state->debugStop = false;
             state->debugQuit = true;
+            logger->cout("");
             break;
         }
         try {

@@ -380,6 +383,10 @@ StringSet NixRepl::completePrefix(const std::string & prefix)
             i++;
         }
     } else {
+        /* Temporarily disable the debugger, to avoid re-entering readline. */
+        auto debug_repl = state->debugRepl;
+        state->debugRepl = nullptr;
+        Finally restoreDebug([&]() { state->debugRepl = debug_repl; });
         try {
             /* This is an expression that should evaluate to an
                attribute set.  Evaluate it to get the names of the

@@ -390,7 +397,7 @@ StringSet NixRepl::completePrefix(const std::string & prefix)
             Expr * e = parseString(expr);
             Value v;
             e->eval(*state, *env, v);
-            state->forceAttrs(v, noPos);
+            state->forceAttrs(v, noPos, "while evaluating an attrset for the purpose of completion (this error should not be displayed; file an issue?)");

             for (auto & i : *v.attrs) {
                 std::string_view name = state->symbols[i.name];
|
||||||
Value v;
|
Value v;
|
||||||
evalString(arg, v);
|
evalString(arg, v);
|
||||||
|
|
||||||
const auto [file, line] = [&] () -> std::pair<std::string, uint32_t> {
|
const auto [path, line] = [&] () -> std::pair<Path, uint32_t> {
|
||||||
if (v.type() == nPath || v.type() == nString) {
|
if (v.type() == nPath || v.type() == nString) {
|
||||||
PathSet context;
|
PathSet context;
|
||||||
auto filename = state->coerceToString(noPos, v, context).toOwned();
|
auto path = state->coerceToPath(noPos, v, context, "while evaluating the filename to edit");
|
||||||
state->symbols.create(filename);
|
return {path, 0};
|
||||||
return {filename, 0};
|
|
||||||
} else if (v.isLambda()) {
|
} else if (v.isLambda()) {
|
||||||
auto pos = state->positions[v.lambda.fun->pos];
|
auto pos = state->positions[v.lambda.fun->pos];
|
||||||
return {pos.file, pos.line};
|
if (auto path = std::get_if<Path>(&pos.origin))
|
||||||
|
return {*path, pos.line};
|
||||||
|
else
|
||||||
|
throw Error("'%s' cannot be shown in an editor", pos);
|
||||||
} else {
|
} else {
|
||||||
// assume it's a derivation
|
// assume it's a derivation
|
||||||
return findPackageFilename(*state, v, arg);
|
return findPackageFilename(*state, v, arg);
|
||||||
|
@ -596,7 +605,7 @@ bool NixRepl::processLine(std::string line)
|
||||||
}();
|
}();
|
||||||
|
|
||||||
// Open in EDITOR
|
// Open in EDITOR
|
||||||
auto args = editorFor(file, line);
|
auto args = editorFor(path, line);
|
||||||
auto editor = args.front();
|
auto editor = args.front();
|
||||||
args.pop_front();
|
args.pop_front();
|
||||||
|
|
||||||
|
@ -632,7 +641,12 @@ bool NixRepl::processLine(std::string line)
|
||||||
Path drvPathRaw = state->store->printStorePath(drvPath);
|
Path drvPathRaw = state->store->printStorePath(drvPath);
|
||||||
|
|
||||||
if (command == ":b" || command == ":bl") {
|
if (command == ":b" || command == ":bl") {
|
||||||
state->store->buildPaths({DerivedPath::Built{drvPath}});
|
state->store->buildPaths({
|
||||||
|
DerivedPath::Built {
|
||||||
|
.drvPath = drvPath,
|
||||||
|
.outputs = OutputsSpec::All { },
|
||||||
|
},
|
||||||
|
});
|
||||||
auto drv = state->store->readDerivation(drvPath);
|
auto drv = state->store->readDerivation(drvPath);
|
||||||
logger->cout("\nThis derivation produced the following outputs:");
|
logger->cout("\nThis derivation produced the following outputs:");
|
||||||
for (auto & [outputName, outputPath] : state->store->queryDerivationOutputMap(drvPath)) {
|
for (auto & [outputName, outputPath] : state->store->queryDerivationOutputMap(drvPath)) {
|
||||||
|
@ -778,7 +792,7 @@ void NixRepl::loadFlake(const std::string & flakeRefS)
|
||||||
flake::LockFlags {
|
flake::LockFlags {
|
||||||
.updateLockFile = false,
|
.updateLockFile = false,
|
||||||
.useRegistries = !evalSettings.pureEval,
|
.useRegistries = !evalSettings.pureEval,
|
||||||
.allowMutable = !evalSettings.pureEval,
|
.allowUnlocked = !evalSettings.pureEval,
|
||||||
}),
|
}),
|
||||||
v);
|
v);
|
||||||
addAttrsToScope(v);
|
addAttrsToScope(v);
|
||||||
|
@ -825,7 +839,7 @@ void NixRepl::loadFiles()
|
||||||
|
|
||||||
void NixRepl::addAttrsToScope(Value & attrs)
|
void NixRepl::addAttrsToScope(Value & attrs)
|
||||||
{
|
{
|
||||||
state->forceAttrs(attrs, [&]() { return attrs.determinePos(noPos); });
|
state->forceAttrs(attrs, [&]() { return attrs.determinePos(noPos); }, "while evaluating an attribute set to be merged in the global scope");
|
||||||
if (displ + attrs.attrs->size() >= envSize)
|
if (displ + attrs.attrs->size() >= envSize)
|
||||||
throw Error("environment full; cannot add more variables");
|
throw Error("environment full; cannot add more variables");
|
||||||
|
|
||||||
|
@ -930,7 +944,7 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int m
|
||||||
Bindings::iterator i = v.attrs->find(state->sDrvPath);
|
Bindings::iterator i = v.attrs->find(state->sDrvPath);
|
||||||
PathSet context;
|
PathSet context;
|
||||||
if (i != v.attrs->end())
|
if (i != v.attrs->end())
|
||||||
str << state->store->printStorePath(state->coerceToStorePath(i->pos, *i->value, context));
|
str << state->store->printStorePath(state->coerceToStorePath(i->pos, *i->value, context, "while evaluating the drvPath of a derivation"));
|
||||||
else
|
else
|
||||||
str << "???";
|
str << "???";
|
||||||
str << "»";
|
str << "»";
|
||||||
|
@ -1046,7 +1060,7 @@ struct CmdRepl : InstallablesCommand
|
||||||
evalSettings.pureEval = false;
|
evalSettings.pureEval = false;
|
||||||
}
|
}
|
||||||
|
|
||||||
void prepare()
|
void prepare() override
|
||||||
{
|
{
|
||||||
if (!settings.isExperimentalFeatureEnabled(Xp::ReplFlake) && !(file) && this->_installables.size() >= 1) {
|
if (!settings.isExperimentalFeatureEnabled(Xp::ReplFlake) && !(file) && this->_installables.size() >= 1) {
|
||||||
warn("future versions of Nix will require using `--file` to load a file");
|
warn("future versions of Nix will require using `--file` to load a file");
|
||||||
|
|
|
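The `:b` hunk above shows the new shape of a build request: `DerivedPath::Built` now pairs a derivation path with an explicit `OutputsSpec`. A small sketch of the two common spellings, assuming the in-tree `derived-path.hh`/`outputs-spec.hh` headers and that the variant also has a named-outputs alternative (`OutputsSpec::Names`):

```cpp
#include "derived-path.hh"
#include "outputs-spec.hh"

using namespace nix;

// Ask for every output of a derivation (what :b / :bl request above).
DerivedPath wantAllOutputs(StorePath drvPath)
{
    return DerivedPath::Built {
        .drvPath = std::move(drvPath),
        .outputs = OutputsSpec::All { },
    };
}

// Ask for selected outputs only; "out" is just an example output name.
DerivedPath wantNamedOutput(StorePath drvPath)
{
    return DerivedPath::Built {
        .drvPath = std::move(drvPath),
        .outputs = OutputsSpec::Names { "out" },
    };
}
```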
@@ -118,7 +118,7 @@ std::pair<std::string, uint32_t> findPackageFilename(EvalState & state, Value & v, const std::string & what)

     // FIXME: is it possible to extract the Pos object instead of doing this
     //        toString + parsing?
-    auto pos = state.forceString(*v2);
+    auto pos = state.forceString(*v2, noPos, "while evaluating the 'meta.position' attribute of a derivation");

     auto colon = pos.rfind(':');
     if (colon == std::string::npos)
@@ -300,7 +300,7 @@ struct AttrDb
             NixStringContext context;
             if (!queryAttribute.isNull(3))
                 for (auto & s : tokenizeString<std::vector<std::string>>(queryAttribute.getStr(3), ";"))
-                    context.push_back(decodeContext(cfg, s));
+                    context.push_back(NixStringContextElem::parse(cfg, s));
             return {{rowId, string_t{queryAttribute.getStr(2), context}}};
         }
         case AttrType::Bool:

@@ -385,7 +385,7 @@ Value & AttrCursor::getValue()
     if (!_value) {
         if (parent) {
             auto & vParent = parent->first->getValue();
-            root->state.forceAttrs(vParent, noPos);
+            root->state.forceAttrs(vParent, noPos, "while searching for an attribute");
             auto attr = vParent.attrs->get(parent->second);
             if (!attr)
                 throw Error("attribute '%s' is unexpectedly missing", getAttrPathStr());

@@ -571,14 +571,14 @@ std::string AttrCursor::getString()
             debug("using cached string attribute '%s'", getAttrPathStr());
             return s->first;
         } else
-            root->state.debugThrowLastTrace(TypeError("'%s' is not a string", getAttrPathStr()));
+            root->state.error("'%s' is not a string", getAttrPathStr()).debugThrow<TypeError>();
         }
     }

     auto & v = forceValue();

     if (v.type() != nString && v.type() != nPath)
-        root->state.debugThrowLastTrace(TypeError("'%s' is not a string but %s", getAttrPathStr(), showType(v.type())));
+        root->state.error("'%s' is not a string but %s", getAttrPathStr()).debugThrow<TypeError>();

     return v.type() == nString ? v.string.s : v.path;
 }
@@ -592,7 +592,18 @@ string_t AttrCursor::getStringWithContext()
         if (auto s = std::get_if<string_t>(&cachedValue->second)) {
             bool valid = true;
             for (auto & c : s->second) {
-                if (!root->state.store->isValidPath(c.first)) {
+                const StorePath & path = std::visit(overloaded {
+                    [&](const NixStringContextElem::DrvDeep & d) -> const StorePath & {
+                        return d.drvPath;
+                    },
+                    [&](const NixStringContextElem::Built & b) -> const StorePath & {
+                        return b.drvPath;
+                    },
+                    [&](const NixStringContextElem::Opaque & o) -> const StorePath & {
+                        return o.path;
+                    },
+                }, c.raw());
+                if (!root->state.store->isValidPath(path)) {
                     valid = false;
                     break;
                 }

@@ -602,7 +613,7 @@ string_t AttrCursor::getStringWithContext()
                 return *s;
             }
         } else
-            root->state.debugThrowLastTrace(TypeError("'%s' is not a string", getAttrPathStr()));
+            root->state.error("'%s' is not a string", getAttrPathStr()).debugThrow<TypeError>();
         }
     }

@@ -613,7 +624,7 @@ string_t AttrCursor::getStringWithContext()
     else if (v.type() == nPath)
         return {v.path, {}};
     else
-        root->state.debugThrowLastTrace(TypeError("'%s' is not a string but %s", getAttrPathStr(), showType(v.type())));
+        root->state.error("'%s' is not a string but %s", getAttrPathStr()).debugThrow<TypeError>();
 }

 bool AttrCursor::getBool()

@@ -626,14 +637,14 @@ bool AttrCursor::getBool()
             debug("using cached Boolean attribute '%s'", getAttrPathStr());
             return *b;
         } else
-            root->state.debugThrowLastTrace(TypeError("'%s' is not a Boolean", getAttrPathStr()));
+            root->state.error("'%s' is not a Boolean", getAttrPathStr()).debugThrow<TypeError>();
         }
     }

     auto & v = forceValue();

     if (v.type() != nBool)
-        root->state.debugThrowLastTrace(TypeError("'%s' is not a Boolean", getAttrPathStr()));
+        root->state.error("'%s' is not a Boolean", getAttrPathStr()).debugThrow<TypeError>();

     return v.boolean;
 }
@@ -645,17 +656,17 @@ NixInt AttrCursor::getInt()
         cachedValue = root->db->getAttr(getKey());
         if (cachedValue && !std::get_if<placeholder_t>(&cachedValue->second)) {
             if (auto i = std::get_if<int_t>(&cachedValue->second)) {
-                debug("using cached Integer attribute '%s'", getAttrPathStr());
+                debug("using cached integer attribute '%s'", getAttrPathStr());
                 return i->x;
             } else
-                throw TypeError("'%s' is not an Integer", getAttrPathStr());
+                throw TypeError("'%s' is not an integer", getAttrPathStr());
         }
     }

     auto & v = forceValue();

     if (v.type() != nInt)
-        throw TypeError("'%s' is not an Integer", getAttrPathStr());
+        throw TypeError("'%s' is not an integer", getAttrPathStr());

     return v.integer;
 }

@@ -685,7 +696,7 @@ std::vector<std::string> AttrCursor::getListOfStrings()
     std::vector<std::string> res;

     for (auto & elem : v.listItems())
-        res.push_back(std::string(root->state.forceStringNoCtx(*elem)));
+        res.push_back(std::string(root->state.forceStringNoCtx(*elem, noPos, "while evaluating an attribute for caching")));

     if (root->db)
         cachedValue = {root->db->setListOfStrings(getKey(), res), res};

@@ -703,14 +714,14 @@ std::vector<Symbol> AttrCursor::getAttrs()
             debug("using cached attrset attribute '%s'", getAttrPathStr());
             return *attrs;
         } else
-            root->state.debugThrowLastTrace(TypeError("'%s' is not an attribute set", getAttrPathStr()));
+            root->state.error("'%s' is not an attribute set", getAttrPathStr()).debugThrow<TypeError>();
         }
     }

     auto & v = forceValue();

     if (v.type() != nAttrs)
-        root->state.debugThrowLastTrace(TypeError("'%s' is not an attribute set", getAttrPathStr()));
+        root->state.error("'%s' is not an attribute set", getAttrPathStr()).debugThrow<TypeError>();

     std::vector<Symbol> attrs;
     for (auto & attr : *getValue().attrs)
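The cache now matches on the parsed string-context element instead of indexing into a pair. The same `std::visit`/`overloaded` idiom from `getStringWithContext()` above, pulled into a free helper for illustration (`contextElemPath` is an illustrative name; the includes are assumed to be the in-tree libexpr/libutil headers that declare these types):

```cpp
#include <variant>

// Sketch: return the store path underlying any NixStringContextElem,
// mirroring the visitor added in AttrCursor::getStringWithContext().
static const nix::StorePath & contextElemPath(const nix::NixStringContextElem & c)
{
    return std::visit(nix::overloaded {
        [](const nix::NixStringContextElem::DrvDeep & d) -> const nix::StorePath & { return d.drvPath; },
        [](const nix::NixStringContextElem::Built & b) -> const nix::StorePath & { return b.drvPath; },
        [](const nix::NixStringContextElem::Opaque & o) -> const nix::StorePath & { return o.path; },
    }, c.raw());
}
```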
@@ -103,33 +103,36 @@ void EvalState::forceValue(Value & v, Callable getPos)
     else if (v.isApp())
         callFunction(*v.app.left, *v.app.right, v, noPos);
     else if (v.isBlackhole())
-        throwEvalError(getPos(), "infinite recursion encountered");
+        error("infinite recursion encountered").atPos(getPos()).template debugThrow<EvalError>();
 }


 [[gnu::always_inline]]
-inline void EvalState::forceAttrs(Value & v, const PosIdx pos)
+inline void EvalState::forceAttrs(Value & v, const PosIdx pos, std::string_view errorCtx)
 {
-    forceAttrs(v, [&]() { return pos; });
+    forceAttrs(v, [&]() { return pos; }, errorCtx);
 }


 template <typename Callable>
 [[gnu::always_inline]]
-inline void EvalState::forceAttrs(Value & v, Callable getPos)
+inline void EvalState::forceAttrs(Value & v, Callable getPos, std::string_view errorCtx)
 {
-    forceValue(v, getPos);
-    if (v.type() != nAttrs)
-        throwTypeError(getPos(), "value is %1% while a set was expected", v);
+    forceValue(v, noPos);
+    if (v.type() != nAttrs) {
+        PosIdx pos = getPos();
+        error("value is %1% while a set was expected", showType(v)).withTrace(pos, errorCtx).debugThrow<TypeError>();
+    }
 }


 [[gnu::always_inline]]
-inline void EvalState::forceList(Value & v, const PosIdx pos)
+inline void EvalState::forceList(Value & v, const PosIdx pos, std::string_view errorCtx)
 {
-    forceValue(v, pos);
-    if (!v.isList())
-        throwTypeError(pos, "value is %1% while a list was expected", v);
+    forceValue(v, noPos);
+    if (!v.isList()) {
+        error("value is %1% while a list was expected", showType(v)).withTrace(pos, errorCtx).debugThrow<TypeError>();
+    }
 }
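Every `force*` call now threads an error-context string through to the error machinery, where it becomes an extra trace frame on failure. A sketch of a caller under the new signatures; `state`, `v` and `pos` are assumed to come from a primop-style context:

```cpp
// Sketch only: what call sites look like after this change.
void forceExample(nix::EvalState & state, nix::Value & v, nix::PosIdx pos)
{
    // On a type mismatch this now adds "while evaluating the example
    // attribute set" as a trace frame instead of throwing a bare TypeError.
    state.forceAttrs(v, pos, "while evaluating the example attribute set");

    for (auto & attr : *v.attrs)
        state.forceList(*attr.value, pos, "while evaluating an example list attribute");
}
```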
File diff suppressed because it is too large
@@ -60,7 +60,6 @@ void copyContext(const Value & v, PathSet & context);
 typedef std::map<Path, StorePath> SrcToStore;

-std::ostream & printValue(const EvalState & state, std::ostream & str, const Value & v);
 std::string printValue(const EvalState & state, const Value & v);
 std::ostream & operator << (std::ostream & os, const ValueType t);

@@ -78,7 +77,7 @@ struct RegexCache;
 std::shared_ptr<RegexCache> makeRegexCache();

 struct DebugTrace {
-    std::optional<ErrPos> pos;
+    std::shared_ptr<AbstractPos> pos;
     const Expr & expr;
     const Env & env;
     hintformat hint;

@@ -87,6 +86,43 @@ struct DebugTrace {

 void debugError(Error * e, Env & env, Expr & expr);

+class ErrorBuilder
+{
+private:
+    EvalState & state;
+    ErrorInfo info;
+
+    ErrorBuilder(EvalState & s, ErrorInfo && i): state(s), info(i) { }
+
+public:
+    template<typename... Args>
+    [[nodiscard, gnu::noinline]]
+    static ErrorBuilder * create(EvalState & s, const Args & ... args)
+    {
+        return new ErrorBuilder(s, ErrorInfo { .msg = hintfmt(args...) });
+    }
+
+    [[nodiscard, gnu::noinline]]
+    ErrorBuilder & atPos(PosIdx pos);
+
+    [[nodiscard, gnu::noinline]]
+    ErrorBuilder & withTrace(PosIdx pos, const std::string_view text);
+
+    [[nodiscard, gnu::noinline]]
+    ErrorBuilder & withFrameTrace(PosIdx pos, const std::string_view text);
+
+    [[nodiscard, gnu::noinline]]
+    ErrorBuilder & withSuggestions(Suggestions & s);
+
+    [[nodiscard, gnu::noinline]]
+    ErrorBuilder & withFrame(const Env & e, const Expr & ex);
+
+    template<class ErrorType>
+    [[gnu::noinline, gnu::noreturn]]
+    void debugThrow();
+};
+
 class EvalState : public std::enable_shared_from_this<EvalState>
 {
 public:
@@ -146,29 +182,38 @@ public:

     template<class E>
     [[gnu::noinline, gnu::noreturn]]
-    void debugThrow(E && error, const Env & env, const Expr & expr)
+    void debugThrowLastTrace(E && error)
     {
-        if (debugRepl)
-            runDebugRepl(&error, env, expr);
-
-        throw std::move(error);
+        debugThrow(error, nullptr, nullptr);
     }

     template<class E>
     [[gnu::noinline, gnu::noreturn]]
-    void debugThrowLastTrace(E && e)
+    void debugThrow(E && error, const Env * env, const Expr * expr)
     {
-        // Call this in the situation where Expr and Env are inaccessible.
-        // The debugger will start in the last context that's in the
-        // DebugTrace stack.
-        if (debugRepl && !debugTraces.empty()) {
-            const DebugTrace & last = debugTraces.front();
-            runDebugRepl(&e, last.env, last.expr);
+        if (debugRepl && ((env && expr) || !debugTraces.empty())) {
+            if (!env || !expr) {
+                const DebugTrace & last = debugTraces.front();
+                env = &last.env;
+                expr = &last.expr;
+            }
+            runDebugRepl(&error, *env, *expr);
         }

-        throw std::move(e);
+        throw std::move(error);
     }

+    // This is dangerous, but gets in line with the idea that error creation and
+    // throwing should not allocate on the stack of hot functions.
+    // as long as errors are immediately thrown, it works.
+    ErrorBuilder * errorBuilder;
+
+    template<typename... Args>
+    [[nodiscard, gnu::noinline]]
+    ErrorBuilder & error(const Args & ... args) {
+        errorBuilder = ErrorBuilder::create(*this, args...);
+        return *errorBuilder;
+    }
+
 private:
     SrcToStore srcToStore;

@@ -283,8 +328,8 @@ public:
     /* Evaluation the expression, then verify that it has the expected
        type. */
     inline bool evalBool(Env & env, Expr * e);
-    inline bool evalBool(Env & env, Expr * e, const PosIdx pos);
-    inline void evalAttrs(Env & env, Expr * e, Value & v);
+    inline bool evalBool(Env & env, Expr * e, const PosIdx pos, std::string_view errorCtx);
+    inline void evalAttrs(Env & env, Expr * e, Value & v, const PosIdx pos, std::string_view errorCtx);

     /* If `v' is a thunk, enter it and overwrite `v' with the result
        of the evaluation of the thunk.  If `v' is a delayed function
@@ -300,89 +345,25 @@ public:
     void forceValueDeep(Value & v);

     /* Force `v', and then verify that it has the expected type. */
-    NixInt forceInt(Value & v, const PosIdx pos);
-    NixFloat forceFloat(Value & v, const PosIdx pos);
-    bool forceBool(Value & v, const PosIdx pos);
+    NixInt forceInt(Value & v, const PosIdx pos, std::string_view errorCtx);
+    NixFloat forceFloat(Value & v, const PosIdx pos, std::string_view errorCtx);
+    bool forceBool(Value & v, const PosIdx pos, std::string_view errorCtx);

-    void forceAttrs(Value & v, const PosIdx pos);
+    void forceAttrs(Value & v, const PosIdx pos, std::string_view errorCtx);

     template <typename Callable>
-    inline void forceAttrs(Value & v, Callable getPos);
+    inline void forceAttrs(Value & v, Callable getPos, std::string_view errorCtx);

-    inline void forceList(Value & v, const PosIdx pos);
-    void forceFunction(Value & v, const PosIdx pos); // either lambda or primop
-    std::string_view forceString(Value & v, const PosIdx pos = noPos);
-    std::string_view forceString(Value & v, PathSet & context, const PosIdx pos = noPos);
-    std::string_view forceStringNoCtx(Value & v, const PosIdx pos = noPos);
-
-    [[gnu::noinline, gnu::noreturn]]
-    void throwEvalError(const PosIdx pos, const char * s);
-    [[gnu::noinline, gnu::noreturn]]
-    void throwEvalError(const PosIdx pos, const char * s,
-        Env & env, Expr & expr);
-    [[gnu::noinline, gnu::noreturn]]
-    void throwEvalError(const char * s, const std::string & s2);
-    [[gnu::noinline, gnu::noreturn]]
-    void throwEvalError(const PosIdx pos, const char * s, const std::string & s2);
-    [[gnu::noinline, gnu::noreturn]]
-    void throwEvalError(const char * s, const std::string & s2,
-        Env & env, Expr & expr);
-    [[gnu::noinline, gnu::noreturn]]
-    void throwEvalError(const PosIdx pos, const char * s, const std::string & s2,
-        Env & env, Expr & expr);
-    [[gnu::noinline, gnu::noreturn]]
-    void throwEvalError(const char * s, const std::string & s2, const std::string & s3,
-        Env & env, Expr & expr);
-    [[gnu::noinline, gnu::noreturn]]
-    void throwEvalError(const PosIdx pos, const char * s, const std::string & s2, const std::string & s3,
-        Env & env, Expr & expr);
-    [[gnu::noinline, gnu::noreturn]]
-    void throwEvalError(const PosIdx pos, const char * s, const std::string & s2, const std::string & s3);
-    [[gnu::noinline, gnu::noreturn]]
-    void throwEvalError(const char * s, const std::string & s2, const std::string & s3);
-    [[gnu::noinline, gnu::noreturn]]
-    void throwEvalError(const PosIdx pos, const Suggestions & suggestions, const char * s, const std::string & s2,
-        Env & env, Expr & expr);
-    [[gnu::noinline, gnu::noreturn]]
-    void throwEvalError(const PosIdx p1, const char * s, const Symbol sym, const PosIdx p2,
-        Env & env, Expr & expr);
-
-    [[gnu::noinline, gnu::noreturn]]
-    void throwTypeError(const PosIdx pos, const char * s, const Value & v);
-    [[gnu::noinline, gnu::noreturn]]
-    void throwTypeError(const PosIdx pos, const char * s, const Value & v,
-        Env & env, Expr & expr);
-    [[gnu::noinline, gnu::noreturn]]
-    void throwTypeError(const PosIdx pos, const char * s);
-    [[gnu::noinline, gnu::noreturn]]
-    void throwTypeError(const PosIdx pos, const char * s,
-        Env & env, Expr & expr);
-    [[gnu::noinline, gnu::noreturn]]
-    void throwTypeError(const PosIdx pos, const char * s, const ExprLambda & fun, const Symbol s2,
-        Env & env, Expr & expr);
-    [[gnu::noinline, gnu::noreturn]]
-    void throwTypeError(const PosIdx pos, const Suggestions & suggestions, const char * s, const ExprLambda & fun, const Symbol s2,
-        Env & env, Expr & expr);
-    [[gnu::noinline, gnu::noreturn]]
-    void throwTypeError(const char * s, const Value & v,
-        Env & env, Expr & expr);
-
-    [[gnu::noinline, gnu::noreturn]]
-    void throwAssertionError(const PosIdx pos, const char * s, const std::string & s1,
-        Env & env, Expr & expr);
-
-    [[gnu::noinline, gnu::noreturn]]
-    void throwUndefinedVarError(const PosIdx pos, const char * s, const std::string & s1,
-        Env & env, Expr & expr);
-
-    [[gnu::noinline, gnu::noreturn]]
-    void throwMissingArgumentError(const PosIdx pos, const char * s, const std::string & s1,
-        Env & env, Expr & expr);
+    inline void forceList(Value & v, const PosIdx pos, std::string_view errorCtx);
+    void forceFunction(Value & v, const PosIdx pos, std::string_view errorCtx); // either lambda or primop
+    std::string_view forceString(Value & v, const PosIdx pos, std::string_view errorCtx);
+    std::string_view forceString(Value & v, PathSet & context, const PosIdx pos, std::string_view errorCtx);
+    std::string_view forceStringNoCtx(Value & v, const PosIdx pos, std::string_view errorCtx);

     [[gnu::noinline]]
     void addErrorTrace(Error & e, const char * s, const std::string & s2) const;
     [[gnu::noinline]]
-    void addErrorTrace(Error & e, const PosIdx pos, const char * s, const std::string & s2) const;
+    void addErrorTrace(Error & e, const PosIdx pos, const char * s, const std::string & s2, bool frame = false) const;

 public:
     /* Return true iff the value `v' denotes a derivation (i.e. a
@@ -397,18 +378,19 @@ public:
        booleans and lists to a string.  If `copyToStore' is set,
        referenced paths are copied to the Nix store as a side effect. */
     BackedStringView coerceToString(const PosIdx pos, Value & v, PathSet & context,
+        std::string_view errorCtx,
         bool coerceMore = false, bool copyToStore = true,
         bool canonicalizePath = true);

-    std::string copyPathToStore(PathSet & context, const Path & path);
+    StorePath copyPathToStore(PathSet & context, const Path & path);

     /* Path coercion.  Converts strings, paths and derivations to a
        path.  The result is guaranteed to be a canonicalised, absolute
        path.  Nothing is copied to the store. */
-    Path coerceToPath(const PosIdx pos, Value & v, PathSet & context);
+    Path coerceToPath(const PosIdx pos, Value & v, PathSet & context, std::string_view errorCtx);

     /* Like coerceToPath, but the result must be a store path. */
-    StorePath coerceToStorePath(const PosIdx pos, Value & v, PathSet & context);
+    StorePath coerceToStorePath(const PosIdx pos, Value & v, PathSet & context, std::string_view errorCtx);

 public:

@@ -457,14 +439,18 @@ private:
     friend struct ExprAttrs;
     friend struct ExprLet;

-    Expr * parse(char * text, size_t length, FileOrigin origin, const PathView path,
-        const PathView basePath, std::shared_ptr<StaticEnv> & staticEnv);
+    Expr * parse(
+        char * text,
+        size_t length,
+        Pos::Origin origin,
+        Path basePath,
+        std::shared_ptr<StaticEnv> & staticEnv);

 public:

     /* Do a deep equality test between two values.  That is, list
        elements and attributes are compared recursively. */
-    bool eqValues(Value & v1, Value & v2);
+    bool eqValues(Value & v1, Value & v2, const PosIdx pos, std::string_view errorCtx);

     bool isFunctor(Value & fun);

@@ -499,7 +485,7 @@ public:
     void mkThunk_(Value & v, Expr * expr);
     void mkPos(Value & v, PosIdx pos);

-    void concatLists(Value & v, size_t nrLists, Value * * lists, const PosIdx pos);
+    void concatLists(Value & v, size_t nrLists, Value * * lists, const PosIdx pos, std::string_view errorCtx);

     /* Print statistics. */
     void printStats();
@@ -568,10 +554,6 @@ struct DebugTraceStacker {
 std::string_view showType(ValueType type);
 std::string showType(const Value & v);

-/* Decode a context string ‘!<name>!<path>’ into a pair <path,
-   name>. */
-NixStringContextElem decodeContext(const Store & store, std::string_view s);
-
 /* If `path' refers to a directory, then append "/default.nix". */
 Path resolveExprPath(Path path);

@@ -590,6 +572,10 @@ struct EvalSettings : Config

     static Strings getDefaultNixPath();

+    static bool isPseudoUrl(std::string_view s);
+
+    static std::string resolvePseudoUrl(std::string_view url);
+
     Setting<bool> enableNativeCode{this, false, "allow-unsafe-native-code-during-evaluation",
         "Whether builtin functions that allow executing native code should be enabled."};

@@ -662,6 +648,13 @@ extern EvalSettings evalSettings;

 static const std::string corepkgsPrefix{"/__corepkgs__/"};

+template<class ErrorType>
+void ErrorBuilder::debugThrow()
+{
+    // NOTE: We always use the -LastTrace version as we push the new trace in withFrame()
+    state.debugThrowLastTrace(ErrorType(info));
+}
+
 }

 #include "eval-inline.hh"
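The `ErrorBuilder` added above replaces the long family of `throw*Error` overloads with one fluent chain: `error(fmt, args...)` builds the message, the optional `atPos`/`withTrace`/`withFrame` calls attach context, and `debugThrow<E>()` throws (dropping into the debugger first when one is attached). A sketch of a call site under the new API; the message and context strings are illustrative:

```cpp
#include "eval.hh"

using namespace nix;

// Sketch: the post-refactor equivalent of a removed
// throwTypeError(pos, "value is %1% while a set was expected", v, env, expr).
[[noreturn]] static void notASet(EvalState & state, Value & v, PosIdx pos)
{
    state.error("value is %1% while a set was expected", showType(v))
        .atPos(pos)
        .withTrace(pos, "while evaluating an example attribute")
        .debugThrow<TypeError>();
}
```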
@@ -12,13 +12,13 @@
 , executable ? false
 , unpack ? false
 , name ? baseNameOf (toString url)
+, impure ? false
 }:

-derivation {
+derivation ({
   builder = "builtin:fetchurl";

   # New-style output content requirements.
-  inherit outputHashAlgo outputHash;
   outputHashMode = if unpack || executable then "recursive" else "flat";

   inherit name url executable unpack;

@@ -38,4 +38,6 @@ derivation {

   # To make "nix-prefetch-url" work.
   urls = [ url ];
-}
+} // (if impure
+  then { __impure = true; }
+  else { inherit outputHashAlgo outputHash; }))
@@ -43,7 +43,7 @@ let

       outputs = flake.outputs (inputs // { self = result; });

-      result = outputs // sourceInfo // { inherit inputs; inherit outputs; inherit sourceInfo; };
+      result = outputs // sourceInfo // { inherit inputs; inherit outputs; inherit sourceInfo; _type = "flake"; };
     in
       if node.flake or true then
         assert builtins.isFunction flake.outputs;
@@ -56,7 +56,7 @@ void ConfigFile::apply()
         auto tlname = get(trustedList, name);
         if (auto saved = tlname ? get(*tlname, valueS) : nullptr) {
             trusted = *saved;
-            warn("Using saved setting for '%s = %s' from ~/.local/share/nix/trusted-settings.json.", name,valueS);
+            printInfo("Using saved setting for '%s = %s' from ~/.local/share/nix/trusted-settings.json.", name, valueS);
         } else {
             // FIXME: filter ANSI escapes, newlines, \r, etc.
             if (std::tolower(logger->ask(fmt("do you want to allow configuration setting '%s' to be set to '" ANSI_RED "%s" ANSI_NORMAL "' (y/N)?", name, valueS)).value_or('n')) == 'y') {

@@ -68,7 +68,7 @@ void ConfigFile::apply()
             }
         }
         if (!trusted) {
-            warn("ignoring untrusted flake configuration setting '%s'", name);
+            warn("ignoring untrusted flake configuration setting '%s'.\nPass '%s' to trust it", name, "--accept-flake-config");
             continue;
         }
     }
@@ -143,7 +143,7 @@ static FlakeInput parseFlakeInput(EvalState & state,
         } catch (Error & e) {
             e.addTrace(
                 state.positions[attr.pos],
-                hintfmt("in flake attribute '%s'", state.symbols[attr.name]));
+                hintfmt("while evaluating flake attribute '%s'", state.symbols[attr.name]));
             throw;
         }
     }

@@ -152,7 +152,7 @@ static FlakeInput parseFlakeInput(EvalState & state,
         try {
             input.ref = FlakeRef::fromAttrs(attrs);
         } catch (Error & e) {
-            e.addTrace(state.positions[pos], hintfmt("in flake input"));
+            e.addTrace(state.positions[pos], hintfmt("while evaluating flake input"));
             throw;
         }
     else {

@@ -220,7 +220,7 @@ static Flake getFlake(
     Value vInfo;
     state.evalFile(flakeFile, vInfo, true); // FIXME: symlink attack

-    expectType(state, nAttrs, vInfo, state.positions.add({flakeFile, foFile}, 0, 0));
+    expectType(state, nAttrs, vInfo, state.positions.add({flakeFile}, 1, 1));

     if (auto description = vInfo.attrs->get(state.sDescription)) {
         expectType(state, nString, *description->value, description->pos);
@@ -259,28 +259,28 @@ static Flake getFlake(
                 if (setting.value->type() == nString)
                     flake.config.settings.emplace(
                         state.symbols[setting.name],
-                        std::string(state.forceStringNoCtx(*setting.value, setting.pos)));
+                        std::string(state.forceStringNoCtx(*setting.value, setting.pos, "")));
                 else if (setting.value->type() == nPath) {
                     PathSet emptyContext = {};
                     flake.config.settings.emplace(
                         state.symbols[setting.name],
-                        state.coerceToString(setting.pos, *setting.value, emptyContext, false, true, true) .toOwned());
+                        state.coerceToString(setting.pos, *setting.value, emptyContext, "", false, true, true) .toOwned());
                 }
                 else if (setting.value->type() == nInt)
                     flake.config.settings.emplace(
                         state.symbols[setting.name],
-                        state.forceInt(*setting.value, setting.pos));
+                        state.forceInt(*setting.value, setting.pos, ""));
                 else if (setting.value->type() == nBool)
                     flake.config.settings.emplace(
                         state.symbols[setting.name],
-                        Explicit<bool> { state.forceBool(*setting.value, setting.pos) });
+                        Explicit<bool> { state.forceBool(*setting.value, setting.pos, "") });
                 else if (setting.value->type() == nList) {
                     std::vector<std::string> ss;
                     for (auto elem : setting.value->listItems()) {
                         if (elem->type() != nString)
                             throw TypeError("list element in flake configuration setting '%s' is %s while a string is expected",
                                 state.symbols[setting.name], showType(*setting.value));
-                        ss.emplace_back(state.forceStringNoCtx(*elem, setting.pos));
+                        ss.emplace_back(state.forceStringNoCtx(*elem, setting.pos, ""));
                     }
                     flake.config.settings.emplace(state.symbols[setting.name], ss);
                 }
@@ -353,7 +353,7 @@ LockedFlake lockFlake(

     std::function<void(
         const FlakeInputs & flakeInputs,
-        std::shared_ptr<Node> node,
+        ref<Node> node,
         const InputPath & inputPathPrefix,
         std::shared_ptr<const Node> oldNode,
         const InputPath & lockRootPath,

@@ -362,9 +362,15 @@ LockedFlake lockFlake(
         computeLocks;

     computeLocks = [&](
+        /* The inputs of this node, either from flake.nix or
+           flake.lock. */
         const FlakeInputs & flakeInputs,
-        std::shared_ptr<Node> node,
+        /* The node whose locks are to be updated.*/
+        ref<Node> node,
+        /* The path to this node in the lock file graph. */
         const InputPath & inputPathPrefix,
+        /* The old node, if any, from which locks can be
+           copied. */
         std::shared_ptr<const Node> oldNode,
         const InputPath & lockRootPath,
         const Path & parentPath,

@@ -452,7 +458,7 @@ LockedFlake lockFlake(
                 /* Copy the input from the old lock since its flakeref
                    didn't change and there is no override from a
                    higher level flake. */
-                auto childNode = std::make_shared<LockedNode>(
+                auto childNode = make_ref<LockedNode>(
                     oldLock->lockedRef, oldLock->originalRef, oldLock->isFlake);

                 node->inputs.insert_or_assign(id, childNode);
@@ -481,14 +487,14 @@ LockedFlake lockFlake(
                             .isFlake = (*lockedNode)->isFlake,
                         });
                     } else if (auto follows = std::get_if<1>(&i.second)) {
-                        if (! trustLock) {
+                        if (!trustLock) {
                             // It is possible that the flake has changed,
-                            // so we must confirm all the follows that are in the lockfile are also in the flake.
+                            // so we must confirm all the follows that are in the lock file are also in the flake.
                             auto overridePath(inputPath);
                             overridePath.push_back(i.first);
                             auto o = overrides.find(overridePath);
                             // If the override disappeared, we have to refetch the flake,
-                            // since some of the inputs may not be present in the lockfile.
+                            // since some of the inputs may not be present in the lock file.
                             if (o == overrides.end()) {
                                 mustRefetch = true;
                                 // There's no point populating the rest of the fake inputs,

@@ -521,8 +527,8 @@ LockedFlake lockFlake(
                    this input. */
                 debug("creating new input '%s'", inputPathS);

-                if (!lockFlags.allowMutable && !input.ref->input.isLocked())
-                    throw Error("cannot update flake input '%s' in pure mode", inputPathS);
+                if (!lockFlags.allowUnlocked && !input.ref->input.isLocked())
+                    throw Error("cannot update unlocked flake input '%s' in pure mode", inputPathS);

                 /* Note: in case of an --override-input, we use
                    the *original* ref (input2.ref) for the

@@ -544,7 +550,7 @@ LockedFlake lockFlake(

                     auto inputFlake = getFlake(state, localRef, useRegistries, flakeCache, inputPath);

-                    auto childNode = std::make_shared<LockedNode>(inputFlake.lockedRef, ref);
+                    auto childNode = make_ref<LockedNode>(inputFlake.lockedRef, ref);

                     node->inputs.insert_or_assign(id, childNode);
@ -564,15 +570,19 @@ LockedFlake lockFlake(
|
||||||
oldLock
|
oldLock
|
||||||
? std::dynamic_pointer_cast<const Node>(oldLock)
|
? std::dynamic_pointer_cast<const Node>(oldLock)
|
||||||
: LockFile::read(
|
: LockFile::read(
|
||||||
inputFlake.sourceInfo->actualPath + "/" + inputFlake.lockedRef.subdir + "/flake.lock").root,
|
inputFlake.sourceInfo->actualPath + "/" + inputFlake.lockedRef.subdir + "/flake.lock").root.get_ptr(),
|
||||||
oldLock ? lockRootPath : inputPath, localPath, false);
|
oldLock ? lockRootPath : inputPath,
|
||||||
|
localPath,
|
||||||
|
false);
|
||||||
}
|
}
|
||||||
|
|
||||||
else {
|
else {
|
||||||
auto [sourceInfo, resolvedRef, lockedRef] = fetchOrSubstituteTree(
|
auto [sourceInfo, resolvedRef, lockedRef] = fetchOrSubstituteTree(
|
||||||
state, *input.ref, useRegistries, flakeCache);
|
state, *input.ref, useRegistries, flakeCache);
|
||||||
node->inputs.insert_or_assign(id,
|
|
||||||
std::make_shared<LockedNode>(lockedRef, ref, false));
|
auto childNode = make_ref<LockedNode>(lockedRef, ref, false);
|
||||||
|
|
||||||
|
node->inputs.insert_or_assign(id, childNode);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -587,8 +597,13 @@ LockedFlake lockFlake(
|
||||||
auto parentPath = canonPath(flake.sourceInfo->actualPath + "/" + flake.lockedRef.subdir, true);
|
auto parentPath = canonPath(flake.sourceInfo->actualPath + "/" + flake.lockedRef.subdir, true);
|
||||||
|
|
||||||
computeLocks(
|
computeLocks(
|
||||||
flake.inputs, newLockFile.root, {},
|
flake.inputs,
|
||||||
lockFlags.recreateLockFile ? nullptr : oldLockFile.root, {}, parentPath, false);
|
newLockFile.root,
|
||||||
|
{},
|
||||||
|
lockFlags.recreateLockFile ? nullptr : oldLockFile.root.get_ptr(),
|
||||||
|
{},
|
||||||
|
parentPath,
|
||||||
|
false);
|
||||||
|
|
||||||
for (auto & i : lockFlags.inputOverrides)
|
for (auto & i : lockFlags.inputOverrides)
|
||||||
if (!overridesUsed.count(i.first))
|
if (!overridesUsed.count(i.first))
|
||||||
|
@ -611,9 +626,9 @@ LockedFlake lockFlake(
|
||||||
|
|
||||||
if (lockFlags.writeLockFile) {
|
if (lockFlags.writeLockFile) {
|
||||||
if (auto sourcePath = topRef.input.getSourcePath()) {
|
if (auto sourcePath = topRef.input.getSourcePath()) {
|
||||||
if (!newLockFile.isImmutable()) {
|
if (auto unlockedInput = newLockFile.isUnlocked()) {
|
||||||
if (fetchSettings.warnDirty)
|
if (fetchSettings.warnDirty)
|
||||||
warn("will not write lock file of flake '%s' because it has a mutable input", topRef);
|
warn("will not write lock file of flake '%s' because it has an unlocked input ('%s')", topRef, *unlockedInput);
|
||||||
} else {
|
} else {
|
||||||
if (!lockFlags.updateLockFile)
|
if (!lockFlags.updateLockFile)
|
||||||
throw Error("flake '%s' requires lock file changes but they're not allowed due to '--no-update-lock-file'", topRef);
|
throw Error("flake '%s' requires lock file changes but they're not allowed due to '--no-update-lock-file'", topRef);
|
||||||
|
@ -726,7 +741,7 @@ void callFlake(EvalState & state,
|
||||||
|
|
||||||
static void prim_getFlake(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
static void prim_getFlake(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||||
{
|
{
|
||||||
std::string flakeRefS(state.forceStringNoCtx(*args[0], pos));
|
std::string flakeRefS(state.forceStringNoCtx(*args[0], pos, "while evaluating the argument passed to builtins.getFlake"));
|
||||||
auto flakeRef = parseFlakeRef(flakeRefS, {}, true);
|
auto flakeRef = parseFlakeRef(flakeRefS, {}, true);
|
||||||
if (evalSettings.pureEval && !flakeRef.input.isLocked())
|
if (evalSettings.pureEval && !flakeRef.input.isLocked())
|
||||||
throw Error("cannot call 'getFlake' on unlocked flake reference '%s', at %s (use --impure to override)", flakeRefS, state.positions[pos]);
|
throw Error("cannot call 'getFlake' on unlocked flake reference '%s', at %s (use --impure to override)", flakeRefS, state.positions[pos]);
|
||||||
|
@ -737,7 +752,7 @@ static void prim_getFlake(EvalState & state, const PosIdx pos, Value * * args, V
|
||||||
.updateLockFile = false,
|
.updateLockFile = false,
|
||||||
.writeLockFile = false,
|
.writeLockFile = false,
|
||||||
.useRegistries = !evalSettings.pureEval && fetchSettings.useRegistries,
|
.useRegistries = !evalSettings.pureEval && fetchSettings.useRegistries,
|
||||||
.allowMutable = !evalSettings.pureEval,
|
.allowUnlocked = !evalSettings.pureEval,
|
||||||
}),
|
}),
|
||||||
v);
|
v);
|
||||||
}
|
}
|
||||||
|
|
|
@@ -108,11 +108,11 @@ struct LockFlags

    bool applyNixConfig = false;

-   /* Whether mutable flake references (i.e. those without a Git
+   /* Whether unlocked flake references (i.e. those without a Git
       revision or similar) without a corresponding lock are
-      allowed. Mutable flake references with a lock are always
+      allowed. Unlocked flake references with a lock are always
       allowed. */
-   bool allowMutable = true;
+   bool allowUnlocked = true;

    /* Whether to commit changes to flake.lock. */
    bool commitLockFile = false;
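Note (illustration, not part of the diff): the renamed flag is set the same way the old allowMutable flag was. A minimal sketch of a caller under pure evaluation, using only fields that appear in this diff:

    LockFlags lockFlags {
        .updateLockFile = false,
        .writeLockFile = false,
        .useRegistries = false,
        .allowUnlocked = false,   // inputs without a lock are rejected, as pure eval does
    };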
@@ -238,15 +238,15 @@ std::pair<fetchers::Tree, FlakeRef> FlakeRef::fetchTree(ref<Store> store) const
    return {std::move(tree), FlakeRef(std::move(lockedInput), subdir)};
}

-std::tuple<FlakeRef, std::string, OutputsSpec> parseFlakeRefWithFragmentAndOutputsSpec(
+std::tuple<FlakeRef, std::string, ExtendedOutputsSpec> parseFlakeRefWithFragmentAndExtendedOutputsSpec(
    const std::string & url,
    const std::optional<Path> & baseDir,
    bool allowMissing,
    bool isFlake)
{
-   auto [prefix, outputsSpec] = parseOutputsSpec(url);
-   auto [flakeRef, fragment] = parseFlakeRefWithFragment(prefix, baseDir, allowMissing, isFlake);
-   return {std::move(flakeRef), fragment, outputsSpec};
+   auto [prefix, extendedOutputsSpec] = ExtendedOutputsSpec::parse(url);
+   auto [flakeRef, fragment] = parseFlakeRefWithFragment(std::string { prefix }, baseDir, allowMissing, isFlake);
+   return {std::move(flakeRef), fragment, extendedOutputsSpec};
}

}
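Note (hedged usage sketch, not part of the diff): the renamed helper splits an installable URL into its flake reference, attribute fragment, and extended outputs spec. The example URL and the output selection syntax are assumptions for illustration only:

    auto [flakeRef, fragment, extendedOutputsSpec] =
        parseFlakeRefWithFragmentAndExtendedOutputsSpec("github:NixOS/nixpkgs#hello^out");
    // flakeRef            -> the nixpkgs input
    // fragment            -> "hello"
    // extendedOutputsSpec -> the requested outputs ("out"), if any were given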
@@ -3,7 +3,7 @@
#include "types.hh"
#include "hash.hh"
#include "fetchers.hh"
-#include "path-with-outputs.hh"
+#include "outputs-spec.hh"

#include <variant>

@@ -35,7 +35,7 @@ typedef std::string FlakeId;

struct FlakeRef
{
-   /* fetcher-specific representation of the input, sufficient to
+   /* Fetcher-specific representation of the input, sufficient to
       perform the fetch operation. */
    fetchers::Input input;

@@ -80,7 +80,7 @@ std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
std::optional<std::pair<FlakeRef, std::string>> maybeParseFlakeRefWithFragment(
    const std::string & url, const std::optional<Path> & baseDir = {});

-std::tuple<FlakeRef, std::string, OutputsSpec> parseFlakeRefWithFragmentAndOutputsSpec(
+std::tuple<FlakeRef, std::string, ExtendedOutputsSpec> parseFlakeRefWithFragmentAndExtendedOutputsSpec(
    const std::string & url,
    const std::optional<Path> & baseDir = {},
    bool allowMissing = false,
@@ -31,12 +31,12 @@ FlakeRef getFlakeRef(
}

LockedNode::LockedNode(const nlohmann::json & json)
-   : lockedRef(getFlakeRef(json, "locked", "info"))
+   : lockedRef(getFlakeRef(json, "locked", "info")) // FIXME: remove "info"
    , originalRef(getFlakeRef(json, "original", nullptr))
    , isFlake(json.find("flake") != json.end() ? (bool) json["flake"] : true)
{
    if (!lockedRef.input.isLocked())
-       throw Error("lockfile contains mutable lock '%s'",
+       throw Error("lock file contains mutable lock '%s'",
            fetchers::attrsToJSON(lockedRef.input.toAttrs()));
}

@@ -49,15 +49,15 @@ std::shared_ptr<Node> LockFile::findInput(const InputPath & path)
{
    auto pos = root;

-   if (!pos) return {};
-
    for (auto & elem : path) {
        if (auto i = get(pos->inputs, elem)) {
            if (auto node = std::get_if<0>(&*i))
                pos = *node;
            else if (auto follows = std::get_if<1>(&*i)) {
-               pos = findInput(*follows);
-               if (!pos) return {};
+               if (auto p = findInput(*follows))
+                   pos = ref(p);
+               else
+                   return {};
            }
        } else
            return {};

@@ -72,7 +72,7 @@ LockFile::LockFile(const nlohmann::json & json, const Path & path)
    if (version < 5 || version > 7)
        throw Error("lock file '%s' has unsupported version %d", path, version);

-   std::unordered_map<std::string, std::shared_ptr<Node>> nodeMap;
+   std::map<std::string, ref<Node>> nodeMap;

    std::function<void(Node & node, const nlohmann::json & jsonNode)> getInputs;

@@ -93,12 +93,12 @@ LockFile::LockFile(const nlohmann::json & json, const Path & path)
                auto jsonNode2 = nodes.find(inputKey);
                if (jsonNode2 == nodes.end())
                    throw Error("lock file references missing node '%s'", inputKey);
-               auto input = std::make_shared<LockedNode>(*jsonNode2);
+               auto input = make_ref<LockedNode>(*jsonNode2);
                k = nodeMap.insert_or_assign(inputKey, input).first;
                getInputs(*input, *jsonNode2);
            }
-           if (auto child = std::dynamic_pointer_cast<LockedNode>(k->second))
-               node.inputs.insert_or_assign(i.key(), child);
+           if (auto child = k->second.dynamic_pointer_cast<LockedNode>())
+               node.inputs.insert_or_assign(i.key(), ref(child));
            else
                // FIXME: replace by follows node
                throw Error("lock file contains cycle to root node");

@@ -122,9 +122,9 @@ nlohmann::json LockFile::toJSON() const
    std::unordered_map<std::shared_ptr<const Node>, std::string> nodeKeys;
    std::unordered_set<std::string> keys;

-   std::function<std::string(const std::string & key, std::shared_ptr<const Node> node)> dumpNode;
+   std::function<std::string(const std::string & key, ref<const Node> node)> dumpNode;

-   dumpNode = [&](std::string key, std::shared_ptr<const Node> node) -> std::string
+   dumpNode = [&](std::string key, ref<const Node> node) -> std::string
    {
        auto k = nodeKeys.find(node);
        if (k != nodeKeys.end())

@@ -159,10 +159,11 @@ nlohmann::json LockFile::toJSON() const
            n["inputs"] = std::move(inputs);
        }

-       if (auto lockedNode = std::dynamic_pointer_cast<const LockedNode>(node)) {
+       if (auto lockedNode = node.dynamic_pointer_cast<const LockedNode>()) {
            n["original"] = fetchers::attrsToJSON(lockedNode->originalRef.toAttrs());
            n["locked"] = fetchers::attrsToJSON(lockedNode->lockedRef.toAttrs());
-           if (!lockedNode->isFlake) n["flake"] = false;
+           if (!lockedNode->isFlake)
+               n["flake"] = false;
        }

        nodes[key] = std::move(n);

@@ -201,13 +202,13 @@ void LockFile::write(const Path & path) const
    writeFile(path, fmt("%s\n", *this));
}

-bool LockFile::isImmutable() const
+std::optional<FlakeRef> LockFile::isUnlocked() const
{
-   std::unordered_set<std::shared_ptr<const Node>> nodes;
+   std::set<ref<const Node>> nodes;

-   std::function<void(std::shared_ptr<const Node> node)> visit;
+   std::function<void(ref<const Node> node)> visit;

-   visit = [&](std::shared_ptr<const Node> node)
+   visit = [&](ref<const Node> node)
    {
        if (!nodes.insert(node).second) return;
        for (auto & i : node->inputs)

@@ -219,11 +220,12 @@ bool LockFile::isImmutable() const

    for (auto & i : nodes) {
        if (i == root) continue;
-       auto lockedNode = std::dynamic_pointer_cast<const LockedNode>(i);
-       if (lockedNode && !lockedNode->lockedRef.input.isLocked()) return false;
+       auto node = i.dynamic_pointer_cast<const LockedNode>();
+       if (node && !node->lockedRef.input.isLocked())
+           return node->lockedRef;
    }

-   return true;
+   return {};
}

bool LockFile::operator ==(const LockFile & other) const

@@ -247,12 +249,12 @@ InputPath parseInputPath(std::string_view s)

std::map<InputPath, Node::Edge> LockFile::getAllInputs() const
{
-   std::unordered_set<std::shared_ptr<Node>> done;
+   std::set<ref<Node>> done;
    std::map<InputPath, Node::Edge> res;

-   std::function<void(const InputPath & prefix, std::shared_ptr<Node> node)> recurse;
+   std::function<void(const InputPath & prefix, ref<Node> node)> recurse;

-   recurse = [&](const InputPath & prefix, std::shared_ptr<Node> node)
+   recurse = [&](const InputPath & prefix, ref<Node> node)
    {
        if (!done.insert(node).second) return;

@@ -20,7 +20,7 @@ struct LockedNode;
   type LockedNode. */
struct Node : std::enable_shared_from_this<Node>
{
-   typedef std::variant<std::shared_ptr<LockedNode>, InputPath> Edge;
+   typedef std::variant<ref<LockedNode>, InputPath> Edge;

    std::map<FlakeId, Edge> inputs;

@@ -47,11 +47,13 @@ struct LockedNode

struct LockFile
{
-   std::shared_ptr<Node> root = std::make_shared<Node>();
+   ref<Node> root = make_ref<Node>();

    LockFile() {};
    LockFile(const nlohmann::json & json, const Path & path);

+   typedef std::map<ref<const Node>, std::string> KeyMap;
+
    nlohmann::json toJSON() const;

    std::string to_string() const;

@@ -60,7 +62,8 @@ struct LockFile

    void write(const Path & path) const;

-   bool isImmutable() const;
+   /* Check whether this lock file has any unlocked inputs. */
+   std::optional<FlakeRef> isUnlocked() const;

    bool operator ==(const LockFile & other) const;

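Note (illustration, not part of the diff): the lock-file graph now uses nix's ref<T>, a non-null wrapper around std::shared_ptr<T>. The members used throughout this diff behave roughly as sketched below:

    ref<Node> root = make_ref<Node>();          // construction, never null
    std::shared_ptr<Node> p = root.get_ptr();   // escape hatch back to shared_ptr
    if (auto locked = root.dynamic_pointer_cast<LockedNode>())
        ;                                       // shared_ptr<LockedNode>, empty if the cast fails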
@@ -51,7 +51,7 @@ std::string DrvInfo::queryName() const
    if (name == "" && attrs) {
        auto i = attrs->find(state->sName);
        if (i == attrs->end()) throw TypeError("derivation name missing");
-       name = state->forceStringNoCtx(*i->value);
+       name = state->forceStringNoCtx(*i->value, noPos, "while evaluating the 'name' attribute of a derivation");
    }
    return name;
}

@@ -61,7 +61,7 @@ std::string DrvInfo::querySystem() const
{
    if (system == "" && attrs) {
        auto i = attrs->find(state->sSystem);
-       system = i == attrs->end() ? "unknown" : state->forceStringNoCtx(*i->value, i->pos);
+       system = i == attrs->end() ? "unknown" : state->forceStringNoCtx(*i->value, i->pos, "while evaluating the 'system' attribute of a derivation");
    }
    return system;
}

@@ -75,7 +75,7 @@ std::optional<StorePath> DrvInfo::queryDrvPath() const
        if (i == attrs->end())
            drvPath = {std::nullopt};
        else
-           drvPath = {state->coerceToStorePath(i->pos, *i->value, context)};
+           drvPath = {state->coerceToStorePath(i->pos, *i->value, context, "while evaluating the 'drvPath' attribute of a derivation")};
    }
    return drvPath.value_or(std::nullopt);
}

@@ -95,7 +95,7 @@ StorePath DrvInfo::queryOutPath() const
        Bindings::iterator i = attrs->find(state->sOutPath);
        PathSet context;
        if (i != attrs->end())
-           outPath = state->coerceToStorePath(i->pos, *i->value, context);
+           outPath = state->coerceToStorePath(i->pos, *i->value, context, "while evaluating the output path of a derivation");
    }
    if (!outPath)
        throw UnimplementedError("CA derivations are not yet supported");

@@ -109,23 +109,23 @@ DrvInfo::Outputs DrvInfo::queryOutputs(bool withPaths, bool onlyOutputsToInstall
    /* Get the ‘outputs’ list. */
    Bindings::iterator i;
    if (attrs && (i = attrs->find(state->sOutputs)) != attrs->end()) {
-       state->forceList(*i->value, i->pos);
+       state->forceList(*i->value, i->pos, "while evaluating the 'outputs' attribute of a derivation");

        /* For each output... */
        for (auto elem : i->value->listItems()) {
-           std::string output(state->forceStringNoCtx(*elem, i->pos));
+           std::string output(state->forceStringNoCtx(*elem, i->pos, "while evaluating the name of an output of a derivation"));

            if (withPaths) {
                /* Evaluate the corresponding set. */
                Bindings::iterator out = attrs->find(state->symbols.create(output));
                if (out == attrs->end()) continue; // FIXME: throw error?
-               state->forceAttrs(*out->value, i->pos);
+               state->forceAttrs(*out->value, i->pos, "while evaluating an output of a derivation");

                /* And evaluate its ‘outPath’ attribute. */
                Bindings::iterator outPath = out->value->attrs->find(state->sOutPath);
                if (outPath == out->value->attrs->end()) continue; // FIXME: throw error?
                PathSet context;
-               outputs.emplace(output, state->coerceToStorePath(outPath->pos, *outPath->value, context));
+               outputs.emplace(output, state->coerceToStorePath(outPath->pos, *outPath->value, context, "while evaluating an output path of a derivation"));
            } else
                outputs.emplace(output, std::nullopt);
        }

@@ -137,7 +137,7 @@ DrvInfo::Outputs DrvInfo::queryOutputs(bool withPaths, bool onlyOutputsToInstall
        return outputs;

    Bindings::iterator i;
-   if (attrs && (i = attrs->find(state->sOutputSpecified)) != attrs->end() && state->forceBool(*i->value, i->pos)) {
+   if (attrs && (i = attrs->find(state->sOutputSpecified)) != attrs->end() && state->forceBool(*i->value, i->pos, "while evaluating the 'outputSpecified' attribute of a derivation")) {
        Outputs result;
        auto out = outputs.find(queryOutputName());
        if (out == outputs.end())

@@ -150,7 +150,7 @@ DrvInfo::Outputs DrvInfo::queryOutputs(bool withPaths, bool onlyOutputsToInstall
    /* Check for `meta.outputsToInstall` and return `outputs` reduced to that. */
    const Value * outTI = queryMeta("outputsToInstall");
    if (!outTI) return outputs;
-   const auto errMsg = Error("this derivation has bad 'meta.outputsToInstall'");
+   auto errMsg = Error("this derivation has bad 'meta.outputsToInstall'");
    /* ^ this shows during `nix-env -i` right under the bad derivation */
    if (!outTI->isList()) throw errMsg;
    Outputs result;

@@ -169,7 +169,7 @@ std::string DrvInfo::queryOutputName() const
{
    if (outputName == "" && attrs) {
        Bindings::iterator i = attrs->find(state->sOutputName);
-       outputName = i != attrs->end() ? state->forceStringNoCtx(*i->value) : "";
+       outputName = i != attrs->end() ? state->forceStringNoCtx(*i->value, noPos, "while evaluating the output name of a derivation") : "";
    }
    return outputName;
}

@@ -181,7 +181,7 @@ Bindings * DrvInfo::getMeta()
    if (!attrs) return 0;
    Bindings::iterator a = attrs->find(state->sMeta);
    if (a == attrs->end()) return 0;
-   state->forceAttrs(*a->value, a->pos);
+   state->forceAttrs(*a->value, a->pos, "while evaluating the 'meta' attribute of a derivation");
    meta = a->value->attrs;
    return meta;
}

@@ -382,7 +382,7 @@ static void getDerivations(EvalState & state, Value & vIn,
       `recurseForDerivations = true' attribute. */
    if (i->value->type() == nAttrs) {
        Bindings::iterator j = i->value->attrs->find(state.sRecurseForDerivations);
-       if (j != i->value->attrs->end() && state.forceBool(*j->value, j->pos))
+       if (j != i->value->attrs->end() && state.forceBool(*j->value, j->pos, "while evaluating the attribute `recurseForDerivations`"))
            getDerivations(state, *i->value, pathPrefix2, autoArgs, drvs, done, ignoreAssertionFailures);
    }
}
@@ -6,6 +6,7 @@ libexpr_DIR := $(d)

libexpr_SOURCES := \
  $(wildcard $(d)/*.cc) \
+ $(wildcard $(d)/value/*.cc) \
  $(wildcard $(d)/primops/*.cc) \
  $(wildcard $(d)/flake/*.cc) \
  $(d)/lexer-tab.cc \

@@ -37,6 +38,8 @@ clean-files += $(d)/parser-tab.cc $(d)/parser-tab.hh $(d)/lexer-tab.cc $(d)/lexe

$(eval $(call install-file-in, $(d)/nix-expr.pc, $(libdir)/pkgconfig, 0644))

+$(foreach i, $(wildcard src/libexpr/value/*.hh), \
+  $(eval $(call install-file-in, $(i), $(includedir)/nix/value, 0644)))
$(foreach i, $(wildcard src/libexpr/flake/*.hh), \
  $(eval $(call install-file-in, $(i), $(includedir)/nix/flake, 0644)))

@@ -8,6 +8,58 @@

namespace nix {

+struct PosAdapter : AbstractPos
+{
+    Pos::Origin origin;
+
+    PosAdapter(Pos::Origin origin)
+        : origin(std::move(origin))
+    {
+    }
+
+    std::optional<std::string> getSource() const override
+    {
+        return std::visit(overloaded {
+            [](const Pos::none_tag &) -> std::optional<std::string> {
+                return std::nullopt;
+            },
+            [](const Pos::Stdin & s) -> std::optional<std::string> {
+                // Get rid of the null terminators added by the parser.
+                return std::string(s.source->c_str());
+            },
+            [](const Pos::String & s) -> std::optional<std::string> {
+                // Get rid of the null terminators added by the parser.
+                return std::string(s.source->c_str());
+            },
+            [](const Path & path) -> std::optional<std::string> {
+                try {
+                    return readFile(path);
+                } catch (Error &) {
+                    return std::nullopt;
+                }
+            }
+        }, origin);
+    }
+
+    void print(std::ostream & out) const override
+    {
+        std::visit(overloaded {
+            [&](const Pos::none_tag &) { out << "«none»"; },
+            [&](const Pos::Stdin &) { out << "«stdin»"; },
+            [&](const Pos::String & s) { out << "«string»"; },
+            [&](const Path & path) { out << path; }
+        }, origin);
+    }
+};
+
+Pos::operator std::shared_ptr<AbstractPos>() const
+{
+    auto pos = std::make_shared<PosAdapter>(origin);
+    pos->line = line;
+    pos->column = column;
+    return pos;
+}
+
/* Displaying abstract syntax trees. */

static void showString(std::ostream & str, std::string_view s)

@@ -248,24 +300,10 @@ void ExprPos::show(const SymbolTable & symbols, std::ostream & str) const

std::ostream & operator << (std::ostream & str, const Pos & pos)
{
-   if (!pos)
+   if (auto pos2 = (std::shared_ptr<AbstractPos>) pos) {
+       str << *pos2;
+   } else
        str << "undefined position";
-   else
-   {
-       auto f = format(ANSI_BOLD "%1%" ANSI_NORMAL ":%2%:%3%");
-       switch (pos.origin) {
-           case foFile:
-               f % (const std::string &) pos.file;
-               break;
-           case foStdin:
-           case foString:
-               f % "(string)";
-               break;
-           default:
-               throw Error("unhandled Pos origin!");
-       }
-       str << (f % pos.line % pos.column).str();
-   }

    return str;
}

@@ -289,7 +327,6 @@ std::string showAttrPath(const SymbolTable & symbols, const AttrPath & attrPath)
}

-

/* Computing levels/displacements for variables. */

void Expr::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env)
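Note (minimal sketch, not part of the diff): the new conversion operator lets a Pos be handed around as an AbstractPos and printed through the adapter above. The PosIdx and the output stream are illustrative assumptions:

    Pos pos = positions[somePosIdx];                        // hypothetical PosIdx
    if (auto abstract = (std::shared_ptr<AbstractPos>) pos)
        std::cerr << *abstract << "\n";                     // origin («string», «stdin» or a path) plus line/column
    else
        std::cerr << "undefined position\n";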
@@ -8,7 +8,6 @@
#include "error.hh"
#include "chunked-vector.hh"

-

namespace nix {


@@ -23,15 +22,22 @@ MakeError(MissingArgumentError, EvalError);
MakeError(RestrictedPathError, Error);

/* Position objects. */

struct Pos
{
-   std::string file;
-   FileOrigin origin;
    uint32_t line;
    uint32_t column;

+   struct none_tag { };
+   struct Stdin { ref<std::string> source; };
+   struct String { ref<std::string> source; };
+
+   typedef std::variant<none_tag, Stdin, String, Path> Origin;
+
+   Origin origin;
+
    explicit operator bool() const { return line > 0; }

+   operator std::shared_ptr<AbstractPos>() const;
};

class PosIdx {

@@ -47,7 +53,11 @@ public:

    explicit operator bool() const { return id > 0; }

-   bool operator<(const PosIdx other) const { return id < other.id; }
+   bool operator <(const PosIdx other) const { return id < other.id; }
+
+   bool operator ==(const PosIdx other) const { return id == other.id; }
+
+   bool operator !=(const PosIdx other) const { return id != other.id; }
};

class PosTable

@@ -61,13 +71,13 @@ public:
        // current origins.back() can be reused or not.
        mutable uint32_t idx = std::numeric_limits<uint32_t>::max();

-       explicit Origin(uint32_t idx): idx(idx), file{}, origin{} {}
+       // Used for searching in PosTable::[].
+       explicit Origin(uint32_t idx): idx(idx), origin{Pos::none_tag()} {}

    public:
-       const std::string file;
-       const FileOrigin origin;
+       const Pos::Origin origin;

-       Origin(std::string file, FileOrigin origin): file(std::move(file)), origin(origin) {}
+       Origin(Pos::Origin origin): origin(origin) {}
    };

    struct Offset {

@@ -107,7 +117,7 @@ public:
            [] (const auto & a, const auto & b) { return a.idx < b.idx; });
        const auto origin = *std::prev(pastOrigin);
        const auto offset = offsets[idx];
-       return {origin.file, origin.origin, offset.line, offset.column};
+       return {offset.line, offset.column, origin.origin};
    }
};

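Note (illustration, not part of the diff): code that consumes the new Origin variant can pattern-match on it with std::visit and the overloaded helper, mirroring the PosAdapter added in the previous file. The source string here is an assumption for illustration:

    Pos::Origin origin = Pos::String{.source = make_ref<std::string>(std::string("x: x"))};
    std::visit(overloaded {
        [](const Pos::none_tag &) { /* no origin recorded */ },
        [](const Pos::Stdin &)    { /* came from standard input */ },
        [](const Pos::String &)   { /* came from an in-memory string */ },
        [](const Path & path)     { /* came from a file on disk */ },
    }, origin);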
@@ -34,11 +34,6 @@ namespace nix {
    Path basePath;
    PosTable::Origin origin;
    std::optional<ErrorInfo> error;
-   ParseData(EvalState & state, PosTable::Origin origin)
-       : state(state)
-       , symbols(state.symbols)
-       , origin(std::move(origin))
-   { };
};

struct ParserFormals {

@@ -405,21 +400,21 @@ expr_op
  | '-' expr_op %prec NEGATE { $$ = new ExprCall(CUR_POS, new ExprVar(data->symbols.create("__sub")), {new ExprInt(0), $2}); }
  | expr_op EQ expr_op { $$ = new ExprOpEq($1, $3); }
  | expr_op NEQ expr_op { $$ = new ExprOpNEq($1, $3); }
- | expr_op '<' expr_op { $$ = new ExprCall(CUR_POS, new ExprVar(data->symbols.create("__lessThan")), {$1, $3}); }
- | expr_op LEQ expr_op { $$ = new ExprOpNot(new ExprCall(CUR_POS, new ExprVar(data->symbols.create("__lessThan")), {$3, $1})); }
- | expr_op '>' expr_op { $$ = new ExprCall(CUR_POS, new ExprVar(data->symbols.create("__lessThan")), {$3, $1}); }
- | expr_op GEQ expr_op { $$ = new ExprOpNot(new ExprCall(CUR_POS, new ExprVar(data->symbols.create("__lessThan")), {$1, $3})); }
- | expr_op AND expr_op { $$ = new ExprOpAnd(CUR_POS, $1, $3); }
- | expr_op OR expr_op { $$ = new ExprOpOr(CUR_POS, $1, $3); }
- | expr_op IMPL expr_op { $$ = new ExprOpImpl(CUR_POS, $1, $3); }
- | expr_op UPDATE expr_op { $$ = new ExprOpUpdate(CUR_POS, $1, $3); }
+ | expr_op '<' expr_op { $$ = new ExprCall(makeCurPos(@2, data), new ExprVar(data->symbols.create("__lessThan")), {$1, $3}); }
+ | expr_op LEQ expr_op { $$ = new ExprOpNot(new ExprCall(makeCurPos(@2, data), new ExprVar(data->symbols.create("__lessThan")), {$3, $1})); }
+ | expr_op '>' expr_op { $$ = new ExprCall(makeCurPos(@2, data), new ExprVar(data->symbols.create("__lessThan")), {$3, $1}); }
+ | expr_op GEQ expr_op { $$ = new ExprOpNot(new ExprCall(makeCurPos(@2, data), new ExprVar(data->symbols.create("__lessThan")), {$1, $3})); }
+ | expr_op AND expr_op { $$ = new ExprOpAnd(makeCurPos(@2, data), $1, $3); }
+ | expr_op OR expr_op { $$ = new ExprOpOr(makeCurPos(@2, data), $1, $3); }
+ | expr_op IMPL expr_op { $$ = new ExprOpImpl(makeCurPos(@2, data), $1, $3); }
+ | expr_op UPDATE expr_op { $$ = new ExprOpUpdate(makeCurPos(@2, data), $1, $3); }
  | expr_op '?' attrpath { $$ = new ExprOpHasAttr($1, *$3); }
  | expr_op '+' expr_op
-   { $$ = new ExprConcatStrings(CUR_POS, false, new std::vector<std::pair<PosIdx, Expr *>>({{makeCurPos(@1, data), $1}, {makeCurPos(@3, data), $3}})); }
- | expr_op '-' expr_op { $$ = new ExprCall(CUR_POS, new ExprVar(data->symbols.create("__sub")), {$1, $3}); }
- | expr_op '*' expr_op { $$ = new ExprCall(CUR_POS, new ExprVar(data->symbols.create("__mul")), {$1, $3}); }
- | expr_op '/' expr_op { $$ = new ExprCall(CUR_POS, new ExprVar(data->symbols.create("__div")), {$1, $3}); }
- | expr_op CONCAT expr_op { $$ = new ExprOpConcatLists(CUR_POS, $1, $3); }
+   { $$ = new ExprConcatStrings(makeCurPos(@2, data), false, new std::vector<std::pair<PosIdx, Expr *> >({{makeCurPos(@1, data), $1}, {makeCurPos(@3, data), $3}})); }
+ | expr_op '-' expr_op { $$ = new ExprCall(makeCurPos(@2, data), new ExprVar(data->symbols.create("__sub")), {$1, $3}); }
+ | expr_op '*' expr_op { $$ = new ExprCall(makeCurPos(@2, data), new ExprVar(data->symbols.create("__mul")), {$1, $3}); }
+ | expr_op '/' expr_op { $$ = new ExprCall(makeCurPos(@2, data), new ExprVar(data->symbols.create("__div")), {$1, $3}); }
+ | expr_op CONCAT expr_op { $$ = new ExprOpConcatLists(makeCurPos(@2, data), $1, $3); }
  | expr_app
  ;

@@ -643,29 +638,26 @@ formal
#include "filetransfer.hh"
#include "fetchers.hh"
#include "store-api.hh"
+#include "flake/flake.hh"


namespace nix {


-Expr * EvalState::parse(char * text, size_t length, FileOrigin origin,
-   const PathView path, const PathView basePath, std::shared_ptr<StaticEnv> & staticEnv)
+Expr * EvalState::parse(
+   char * text,
+   size_t length,
+   Pos::Origin origin,
+   Path basePath,
+   std::shared_ptr<StaticEnv> & staticEnv)
{
    yyscan_t scanner;
-   std::string file;
-   switch (origin) {
-       case foFile:
-           file = path;
-           break;
-       case foStdin:
-       case foString:
-           file = text;
-           break;
-       default:
-           assert(false);
-   }
-   ParseData data(*this, {file, origin});
-   data.basePath = basePath;
+   ParseData data {
+       .state = *this,
+       .symbols = symbols,
+       .basePath = std::move(basePath),
+       .origin = {origin},
+   };

    yylex_init(&scanner);
    yy_scan_buffer(text, length, scanner);

@@ -717,14 +709,15 @@ Expr * EvalState::parseExprFromFile(const Path & path, std::shared_ptr<StaticEnv
    auto buffer = readFile(path);
    // readFile should have left some extra space for terminators
    buffer.append("\0\0", 2);
-   return parse(buffer.data(), buffer.size(), foFile, path, dirOf(path), staticEnv);
+   return parse(buffer.data(), buffer.size(), path, dirOf(path), staticEnv);
}


-Expr * EvalState::parseExprFromString(std::string s, const Path & basePath, std::shared_ptr<StaticEnv> & staticEnv)
+Expr * EvalState::parseExprFromString(std::string s_, const Path & basePath, std::shared_ptr<StaticEnv> & staticEnv)
{
-   s.append("\0\0", 2);
-   return parse(s.data(), s.size(), foString, "", basePath, staticEnv);
+   auto s = make_ref<std::string>(std::move(s_));
+   s->append("\0\0", 2);
+   return parse(s->data(), s->size(), Pos::String{.source = s}, basePath, staticEnv);
}


@@ -740,7 +733,8 @@ Expr * EvalState::parseStdin()
    auto buffer = drainFD(0);
    // drainFD should have left some extra space for terminators
    buffer.append("\0\0", 2);
-   return parse(buffer.data(), buffer.size(), foStdin, "", absPath("."), staticBaseEnv);
+   auto s = make_ref<std::string>(std::move(buffer));
+   return parse(s->data(), s->size(), Pos::Stdin{.source = s}, absPath("."), staticBaseEnv);
}


@@ -788,13 +782,13 @@ Path EvalState::findFile(SearchPath & searchPath, const std::string_view path, c
    if (hasPrefix(path, "nix/"))
        return concatStrings(corepkgsPrefix, path.substr(4));

-   debugThrowLastTrace(ThrownError({
+   debugThrow(ThrownError({
        .msg = hintfmt(evalSettings.pureEval
            ? "cannot look up '<%s>' in pure evaluation mode (use '--impure' to override)"
            : "file '%s' was not found in the Nix search path (add it using $NIX_PATH or -I)",
            path),
        .errPos = positions[pos]
-   }));
+   }), 0, 0);
}


@@ -805,17 +799,28 @@ std::pair<bool, std::string> EvalState::resolveSearchPathElem(const SearchPathEl

    std::pair<bool, std::string> res;

-   if (isUri(elem.second)) {
+   if (EvalSettings::isPseudoUrl(elem.second)) {
        try {
-           res = { true, store->toRealPath(fetchers::downloadTarball(
-               store, resolveUri(elem.second), "source", false).first.storePath) };
+           auto storePath = fetchers::downloadTarball(
+               store, EvalSettings::resolvePseudoUrl(elem.second), "source", false).first.storePath;
+           res = { true, store->toRealPath(storePath) };
        } catch (FileTransferError & e) {
            logWarning({
                .msg = hintfmt("Nix search path entry '%1%' cannot be downloaded, ignoring", elem.second)
            });
            res = { false, "" };
        }
-   } else {
+   }

+   else if (hasPrefix(elem.second, "flake:")) {
+       settings.requireExperimentalFeature(Xp::Flakes);
+       auto flakeRef = parseFlakeRef(elem.second.substr(6), {}, true, false);
+       debug("fetching flake search path element '%s''", elem.second);
+       auto storePath = flakeRef.resolve(store).fetchTree(store).first.storePath;
+       res = { true, store->toRealPath(storePath) };
+   }
+
+   else {
        auto path = absPath(elem.second);
        if (pathExists(path))
            res = { true, path };
File diff suppressed because it is too large
@@ -8,7 +8,7 @@ namespace nix {
static void prim_unsafeDiscardStringContext(EvalState & state, const PosIdx pos, Value * * args, Value & v)
{
    PathSet context;
-   auto s = state.coerceToString(pos, *args[0], context);
+   auto s = state.coerceToString(pos, *args[0], context, "while evaluating the argument passed to builtins.unsafeDiscardStringContext");
    v.mkString(*s);
}

@@ -18,7 +18,7 @@ static RegisterPrimOp primop_unsafeDiscardStringContext("__unsafeDiscardStringCo
static void prim_hasContext(EvalState & state, const PosIdx pos, Value * * args, Value & v)
{
    PathSet context;
-   state.forceString(*args[0], context, pos);
+   state.forceString(*args[0], context, pos, "while evaluating the argument passed to builtins.hasContext");
    v.mkBool(!context.empty());
}

@@ -34,11 +34,18 @@ static RegisterPrimOp primop_hasContext("__hasContext", 1, prim_hasContext);
static void prim_unsafeDiscardOutputDependency(EvalState & state, const PosIdx pos, Value * * args, Value & v)
{
    PathSet context;
-   auto s = state.coerceToString(pos, *args[0], context);
+   auto s = state.coerceToString(pos, *args[0], context, "while evaluating the argument passed to builtins.unsafeDiscardOutputDependency");

    PathSet context2;
-   for (auto & p : context)
-       context2.insert(p.at(0) == '=' ? std::string(p, 1) : p);
+   for (auto && p : context) {
+       auto c = NixStringContextElem::parse(*state.store, p);
+       if (auto * ptr = std::get_if<NixStringContextElem::DrvDeep>(&c)) {
+           context2.emplace(state.store->printStorePath(ptr->drvPath));
+       } else {
+           /* Can reuse original item */
+           context2.emplace(std::move(p));
+       }
+   }

    v.mkString(*s, context2);
}

@@ -73,35 +80,21 @@ static void prim_getContext(EvalState & state, const PosIdx pos, Value * * args,
        Strings outputs;
    };
    PathSet context;
-   state.forceString(*args[0], context, pos);
-   auto contextInfos = std::map<Path, ContextInfo>();
+   state.forceString(*args[0], context, pos, "while evaluating the argument passed to builtins.getContext");
+   auto contextInfos = std::map<StorePath, ContextInfo>();
    for (const auto & p : context) {
-       Path drv;
-       std::string output;
-       const Path * path = &p;
-       if (p.at(0) == '=') {
-           drv = std::string(p, 1);
-           path = &drv;
-       } else if (p.at(0) == '!') {
-           NixStringContextElem ctx = decodeContext(*state.store, p);
-           drv = state.store->printStorePath(ctx.first);
-           output = ctx.second;
-           path = &drv;
-       }
-       auto isPath = drv.empty();
-       auto isAllOutputs = (!drv.empty()) && output.empty();
-
-       auto iter = contextInfos.find(*path);
-       if (iter == contextInfos.end()) {
-           contextInfos.emplace(*path, ContextInfo{isPath, isAllOutputs, output.empty() ? Strings{} : Strings{std::move(output)}});
-       } else {
-           if (isPath)
-               iter->second.path = true;
-           else if (isAllOutputs)
-               iter->second.allOutputs = true;
-           else
-               iter->second.outputs.emplace_back(std::move(output));
-       }
+       NixStringContextElem ctx = NixStringContextElem::parse(*state.store, p);
+       std::visit(overloaded {
+           [&](NixStringContextElem::DrvDeep & d) {
+               contextInfos[d.drvPath].allOutputs = true;
+           },
+           [&](NixStringContextElem::Built & b) {
+               contextInfos[b.drvPath].outputs.emplace_back(std::move(b.output));
+           },
+           [&](NixStringContextElem::Opaque & o) {
+               contextInfos[o.path].path = true;
+           },
+       }, ctx.raw());
    }

    auto attrs = state.buildBindings(contextInfos.size());

@@ -120,7 +113,7 @@ static void prim_getContext(EvalState & state, const PosIdx pos, Value * * args,
            for (const auto & [i, output] : enumerate(info.second.outputs))
                (outputsVal.listElems()[i] = state.allocValue())->mkString(output);
        }
-       attrs.alloc(info.first).mkAttrs(infoAttrs);
+       attrs.alloc(state.store->printStorePath(info.first)).mkAttrs(infoAttrs);
    }

    v.mkAttrs(attrs);

@@ -137,9 +130,9 @@ static RegisterPrimOp primop_getContext("__getContext", 1, prim_getContext);
static void prim_appendContext(EvalState & state, const PosIdx pos, Value * * args, Value & v)
{
    PathSet context;
-   auto orig = state.forceString(*args[0], context, pos);
+   auto orig = state.forceString(*args[0], context, noPos, "while evaluating the first argument passed to builtins.appendContext");

-   state.forceAttrs(*args[1], pos);
+   state.forceAttrs(*args[1], pos, "while evaluating the second argument passed to builtins.appendContext");

    auto sPath = state.symbols.create("path");
    auto sAllOutputs = state.symbols.create("allOutputs");

@@ -147,24 +140,24 @@ static void prim_appendContext(EvalState & state, const PosIdx pos, Value * * ar
        const auto & name = state.symbols[i.name];
        if (!state.store->isStorePath(name))
            throw EvalError({
-               .msg = hintfmt("Context key '%s' is not a store path", name),
+               .msg = hintfmt("context key '%s' is not a store path", name),
                .errPos = state.positions[i.pos]
            });
        if (!settings.readOnlyMode)
            state.store->ensurePath(state.store->parseStorePath(name));
-       state.forceAttrs(*i.value, i.pos);
+       state.forceAttrs(*i.value, i.pos, "while evaluating the value of a string context");
        auto iter = i.value->attrs->find(sPath);
        if (iter != i.value->attrs->end()) {
-           if (state.forceBool(*iter->value, iter->pos))
+           if (state.forceBool(*iter->value, iter->pos, "while evaluating the `path` attribute of a string context"))
                context.emplace(name);
        }

        iter = i.value->attrs->find(sAllOutputs);
        if (iter != i.value->attrs->end()) {
-           if (state.forceBool(*iter->value, iter->pos)) {
+           if (state.forceBool(*iter->value, iter->pos, "while evaluating the `allOutputs` attribute of a string context")) {
                if (!isDerivation(name)) {
                    throw EvalError({
-                       .msg = hintfmt("Tried to add all-outputs context of %s, which is not a derivation, to a string", name),
+                       .msg = hintfmt("tried to add all-outputs context of %s, which is not a derivation, to a string", name),
                        .errPos = state.positions[i.pos]
                    });
                }

@@ -174,15 +167,15 @@ static void prim_appendContext(EvalState & state, const PosIdx pos, Value * * ar

        iter = i.value->attrs->find(state.sOutputs);
        if (iter != i.value->attrs->end()) {
-           state.forceList(*iter->value, iter->pos);
+           state.forceList(*iter->value, iter->pos, "while evaluating the `outputs` attribute of a string context");
            if (iter->value->listSize() && !isDerivation(name)) {
                throw EvalError({
-                   .msg = hintfmt("Tried to add derivation output context of %s, which is not a derivation, to a string", name),
+                   .msg = hintfmt("tried to add derivation output context of %s, which is not a derivation, to a string", name),
                    .errPos = state.positions[i.pos]
                });
            }
            for (auto elem : iter->value->listItems()) {
-               auto outputName = state.forceStringNoCtx(*elem, iter->pos);
+               auto outputName = state.forceStringNoCtx(*elem, iter->pos, "while evaluating an output name within a string context");
                context.insert(concatStrings("!", outputName, "!", name));
            }
        }
    }

@@ -7,7 +7,7 @@ namespace nix {

static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * args, Value & v)
{
-   state.forceAttrs(*args[0], pos);
+   state.forceAttrs(*args[0], pos, "while evaluating the argument passed to builtins.fetchClosure");

    std::optional<std::string> fromStoreUrl;
    std::optional<StorePath> fromPath;

@@ -19,7 +19,8 @@ static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * arg

        if (attrName == "fromPath") {
            PathSet context;
-           fromPath = state.coerceToStorePath(attr.pos, *attr.value, context);
+           fromPath = state.coerceToStorePath(attr.pos, *attr.value, context,
+               "while evaluating the 'fromPath' attribute passed to builtins.fetchClosure");
        }

        else if (attrName == "toPath") {

@@ -27,12 +28,14 @@ static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * arg
            toCA = true;
            if (attr.value->type() != nString || attr.value->string.s != std::string("")) {
                PathSet context;
-               toPath = state.coerceToStorePath(attr.pos, *attr.value, context);
+               toPath = state.coerceToStorePath(attr.pos, *attr.value, context,
+                   "while evaluating the 'toPath' attribute passed to builtins.fetchClosure");
            }
        }

        else if (attrName == "fromStore")
-           fromStoreUrl = state.forceStringNoCtx(*attr.value, attr.pos);
+           fromStoreUrl = state.forceStringNoCtx(*attr.value, attr.pos,
+               "while evaluating the 'fromStore' attribute passed to builtins.fetchClosure");

        else
            throw Error({

@@ -19,23 +19,23 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a

    if (args[0]->type() == nAttrs) {

-       state.forceAttrs(*args[0], pos);
-
        for (auto & attr : *args[0]->attrs) {
            std::string_view n(state.symbols[attr.name]);
            if (n == "url")
-               url = state.coerceToString(attr.pos, *attr.value, context, false, false).toOwned();
+               url = state.coerceToString(attr.pos, *attr.value, context,
+                   "while evaluating the `url` attribute passed to builtins.fetchMercurial",
+                   false, false).toOwned();
            else if (n == "rev") {
                // Ugly: unlike fetchGit, here the "rev" attribute can
                // be both a revision or a branch/tag name.
-               auto value = state.forceStringNoCtx(*attr.value, attr.pos);
+               auto value = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `rev` attribute passed to builtins.fetchMercurial");
                if (std::regex_match(value.begin(), value.end(), revRegex))
                    rev = Hash::parseAny(value, htSHA1);
                else
                    ref = value;
            }
            else if (n == "name")
-               name = state.forceStringNoCtx(*attr.value, attr.pos);
+               name = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `name` attribute passed to builtins.fetchMercurial");
            else
                throw EvalError({
                    .msg = hintfmt("unsupported argument '%s' to 'fetchMercurial'", state.symbols[attr.name]),

@@ -50,7 +50,9 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a
        });

    } else
-       url = state.coerceToString(pos, *args[0], context, false, false).toOwned();
+       url = state.coerceToString(pos, *args[0], context,
+           "while evaluating the first argument passed to builtins.fetchMercurial",
+           false, false).toOwned();

    // FIXME: git externals probably can be used to bypass the URI
    // whitelist. Ah well.
Loading…
Reference in a new issue