Merge remote-tracking branch 'origin/master' into coerce-string

Eelco Dolstra 2023-01-02 20:53:39 +01:00
commit 6b69652385
233 changed files with 5278 additions and 2874 deletions

.github/CODEOWNERS (new file)

@@ -0,0 +1,15 @@
# Pull requests concerning the listed files will automatically invite the respective maintainers as reviewers.
# This file is not used for denoting any kind of ownership, but is merely a tool for handling notifications.
#
# Merge permissions are required for maintaining an entry in this file.
# For documentation on this mechanism, see https://help.github.com/articles/about-codeowners/
# Default reviewers if nothing else matches
* @edolstra @thufschmitt
# This file
.github/CODEOWNERS @edolstra
# Public documentation
/doc @fricklerhandwerk
*.md @fricklerhandwerk

@@ -30,3 +30,7 @@ A clear and concise description of what you expected to happen.
 **Additional context**
 Add any other context about the problem here.
+
+**Priorities**
+
+Add :+1: to [issues you find important](https://github.com/NixOS/nix/issues?q=is%3Aissue+is%3Aopen+sort%3Areactions-%2B1-desc).

@@ -18,3 +18,7 @@ A clear and concise description of any alternative solutions or features you've considered.
 **Additional context**
 Add any other context or screenshots about the feature request here.
+
+**Priorities**
+
+Add :+1: to [issues you find important](https://github.com/NixOS/nix/issues?q=is%3Aissue+is%3Aopen+sort%3Areactions-%2B1-desc).

.github/ISSUE_TEMPLATE/installer.md (new file)

@@ -0,0 +1,36 @@
---
name: Installer issue
about: Report problems with installation
title: ''
labels: installer
assignees: ''
---
## Platform
<!-- select the platform on which you tried to install Nix -->
- [ ] Linux: <!-- state your distribution, e.g. Arch Linux, Ubuntu, ... -->
- [ ] macOS
- [ ] WSL
## Additional information
<!-- state special circumstances on your system or additional steps you have taken prior to installation -->
## Output
<details><summary>Output</summary>
```log
<!-- paste console output here and remove this comment -->
```
</details>
## Priorities
Add :+1: to [issues you find important](https://github.com/NixOS/nix/issues?q=is%3Aissue+is%3Aopen+sort%3Areactions-%2B1-desc).

@@ -26,3 +26,6 @@ assignees: ''
 <!-- propose a solution -->
+## Priorities
+
+Add :+1: to [issues you find important](https://github.com/NixOS/nix/issues?q=is%3Aissue+is%3Aopen+sort%3Areactions-%2B1-desc).

@@ -5,3 +5,7 @@ Please include relevant [release notes](https://github.com/NixOS/nix/blob/master
 **Testing**
 If this issue is a regression or something that should block release, please consider including a test either in the [testsuite](https://github.com/NixOS/nix/tree/master/tests) or as a [hydraJob]( https://github.com/NixOS/nix/blob/master/flake.nix#L396) so that it can be part of the [automatic checks](https://hydra.nixos.org/jobset/nix/master).
+
+**Priorities**
+
+Add :+1: to [pull requests you find important](https://github.com/NixOS/nix/pulls?q=is%3Aopen+sort%3Areactions-%2B1-desc).

@@ -21,7 +21,7 @@ jobs:
          fetch-depth: 0
      - name: Create backport PRs
        # should be kept in sync with `version`
-       uses: zeebe-io/backport-action@v0.0.8
+       uses: zeebe-io/backport-action@v0.0.9
        with:
          # Config README: https://github.com/zeebe-io/backport-action#backport-action
          github_token: ${{ secrets.GITHUB_TOKEN }}

@@ -19,9 +19,9 @@
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
-     - uses: cachix/install-nix-action@v17
+     - uses: cachix/install-nix-action@v18
      - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
-     - uses: cachix/cachix-action@v10
+     - uses: cachix/cachix-action@v12
        if: needs.check_secrets.outputs.cachix == 'true'
        with:
          name: '${{ env.CACHIX_NAME }}'
@@ -58,8 +58,8 @@
        with:
          fetch-depth: 0
      - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
-     - uses: cachix/install-nix-action@v17
-     - uses: cachix/cachix-action@v10
+     - uses: cachix/install-nix-action@v18
+     - uses: cachix/cachix-action@v12
        with:
          name: '${{ env.CACHIX_NAME }}'
          signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}'
@@ -77,7 +77,7 @@
      steps:
      - uses: actions/checkout@v3
      - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
-     - uses: cachix/install-nix-action@v17
+     - uses: cachix/install-nix-action@v18
        with:
          install_url: '${{needs.installer.outputs.installerURL}}'
          install_options: "--tarball-url-prefix https://${{ env.CACHIX_NAME }}.cachix.org/serve"
@@ -102,10 +102,10 @@
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
-     - uses: cachix/install-nix-action@v17
+     - uses: cachix/install-nix-action@v18
      - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
      - run: echo NIX_VERSION="$(nix --experimental-features 'nix-command flakes' eval .\#default.version | tr -d \")" >> $GITHUB_ENV
-     - uses: cachix/cachix-action@v10
+     - uses: cachix/cachix-action@v12
        if: needs.check_secrets.outputs.cachix == 'true'
        with:
          name: '${{ env.CACHIX_NAME }}'

@@ -1 +1 @@
-2.12.0
+2.13.0

@@ -1,77 +0,0 @@
diff --git a/darwin_stop_world.c b/darwin_stop_world.c
index 3dbaa3fb..36a1d1f7 100644
--- a/darwin_stop_world.c
+++ b/darwin_stop_world.c
@@ -352,6 +352,7 @@ GC_INNER void GC_push_all_stacks(void)
int nthreads = 0;
word total_size = 0;
mach_msg_type_number_t listcount = (mach_msg_type_number_t)THREAD_TABLE_SZ;
+ size_t stack_limit;
if (!EXPECT(GC_thr_initialized, TRUE))
GC_thr_init();
@@ -407,6 +408,19 @@ GC_INNER void GC_push_all_stacks(void)
GC_push_all_stack_sections(lo, hi, p->traced_stack_sect);
}
if (altstack_lo) {
+ // When a thread goes into a coroutine, we lose its original sp until
+ // control flow returns to the thread.
+ // While in the coroutine, the sp points outside the thread stack,
+ // so we can detect this and push the entire thread stack instead,
+ // as an approximation.
+ // We assume that the coroutine has similarly added its entire stack.
+ // This could be made accurate by cooperating with the application
+ // via new functions and/or callbacks.
+ stack_limit = pthread_get_stacksize_np(p->id);
+ if (altstack_lo >= altstack_hi || altstack_lo < altstack_hi - stack_limit) { // sp outside stack
+ altstack_lo = altstack_hi - stack_limit;
+ }
+
total_size += altstack_hi - altstack_lo;
GC_push_all_stack(altstack_lo, altstack_hi);
}
diff --git a/pthread_stop_world.c b/pthread_stop_world.c
index 4b2c429..1fb4c52 100644
--- a/pthread_stop_world.c
+++ b/pthread_stop_world.c
@@ -673,6 +673,8 @@ GC_INNER void GC_push_all_stacks(void)
struct GC_traced_stack_sect_s *traced_stack_sect;
pthread_t self = pthread_self();
word total_size = 0;
+ size_t stack_limit;
+ pthread_attr_t pattr;
if (!EXPECT(GC_thr_initialized, TRUE))
GC_thr_init();
@@ -722,6 +724,31 @@ GC_INNER void GC_push_all_stacks(void)
hi = p->altstack + p->altstack_size;
/* FIXME: Need to scan the normal stack too, but how ? */
/* FIXME: Assume stack grows down */
+ } else {
+ if (pthread_getattr_np(p->id, &pattr)) {
+ ABORT("GC_push_all_stacks: pthread_getattr_np failed!");
+ }
+ if (pthread_attr_getstacksize(&pattr, &stack_limit)) {
+ ABORT("GC_push_all_stacks: pthread_attr_getstacksize failed!");
+ }
+ if (pthread_attr_destroy(&pattr)) {
+ ABORT("GC_push_all_stacks: pthread_attr_destroy failed!");
+ }
+ // When a thread goes into a coroutine, we lose its original sp until
+ // control flow returns to the thread.
+ // While in the coroutine, the sp points outside the thread stack,
+ // so we can detect this and push the entire thread stack instead,
+ // as an approximation.
+ // We assume that the coroutine has similarly added its entire stack.
+ // This could be made accurate by cooperating with the application
+ // via new functions and/or callbacks.
+ #ifndef STACK_GROWS_UP
+ if (lo >= hi || lo < hi - stack_limit) { // sp outside stack
+ lo = hi - stack_limit;
+ }
+ #else
+ #error "STACK_GROWS_UP not supported in boost_coroutine2 (as of june 2021), so we don't support it in Nix."
+ #endif
}
GC_push_all_stack_sections(lo, hi, traced_stack_sect);
# ifdef STACK_GROWS_UP

@@ -41,8 +41,6 @@ AC_DEFINE_UNQUOTED(SYSTEM, ["$system"], [platform identifier ('cpu-os')])
 test "$localstatedir" = '${prefix}/var' && localstatedir=/nix/var

-CFLAGS=
-CXXFLAGS=
 AC_PROG_CC
 AC_PROG_CXX
 AC_PROG_CPP
@@ -177,7 +175,7 @@ fi
 PKG_CHECK_MODULES([OPENSSL], [libcrypto], [CXXFLAGS="$OPENSSL_CFLAGS $CXXFLAGS"])
-# Checks for libarchive
+# Look for libarchive.
 PKG_CHECK_MODULES([LIBARCHIVE], [libarchive >= 3.1.2], [CXXFLAGS="$LIBARCHIVE_CFLAGS $CXXFLAGS"])
 # Workaround until https://github.com/libarchive/libarchive/issues/1446 is fixed
 if test "$shared" != yes; then

@@ -1,6 +1,11 @@
+[book]
+title = "Nix Reference Manual"
+
 [output.html]
 additional-css = ["custom.css"]
 additional-js = ["redirects.js"]
+edit-url-template = "https://github.com/NixOS/nix/tree/master/doc/manual/{path}"
+git-repository-url = "https://github.com/NixOS/nix"

 [preprocessor.anchors]
 renderers = ["html"]

@@ -1,16 +1,20 @@
-with builtins;
-with import ./utils.nix;
-
-builtins:
-
-concatStrings (map
-  (name:
-    let builtin = builtins.${name}; in
-    "<dt id=\"builtins-${name}\"><a href=\"#builtins-${name}\"><code>${name} "
-    + concatStringsSep " " (map (s: "<var>${s}</var>") builtin.args)
-    + "</code></a></dt>"
-    + "<dd>\n\n"
-    + builtin.doc
-    + "\n\n</dd>"
-  )
-  (attrNames builtins))
+builtinsDump:
+let
+  showBuiltin = name:
+    let
+      inherit (builtinsDump.${name}) doc args;
+    in
+    ''
+      <dt id="builtins-${name}">
+        <a href="#builtins-${name}"><code>${name} ${listArgs args}</code></a>
+      </dt>
+      <dd>
+
+        ${doc}
+
+      </dd>
+    '';
+  listArgs = args: builtins.concatStringsSep " " (map (s: "<var>${s}</var>") args);
+in
+
+with builtins; concatStringsSep "\n" (map showBuiltin (attrNames builtinsDump))
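
For orientation, a minimal sketch of how this generator might be invoked. The file path and the sample entry are illustrative assumptions (the generator's own file name is not shown in this hunk); the input is an attribute set mapping each builtin name to its `args` and `doc`, as in the JSON dump the manual build obtains from the nix binary.

```nix
# Sketch only; the path and the sample entry are assumptions, not part of the diff.
import ./generate-builtins.nix {
  add = {
    args = [ "e1" "e2" ];
    doc = "Return the sum of the numbers *e1* and *e2*.";
  };
}
```

The result is the `<dt>`/`<dd>` markup that ends up in the manual's list of builtins.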

@@ -99,6 +99,7 @@ let
     in [ cmd ] ++ concatMap subcommand (attrNames details.commands or {});
   parsedToplevel = builtins.fromJSON toplevel;
   manpages = processCommand {
     command = "nix";
     details = parsedToplevel;

@@ -1,29 +1,41 @@
-with builtins;
-with import ./utils.nix;
-
-options:
-
-concatStrings (map
-  (name:
-    let option = options.${name}; in
-    "  - [`${name}`](#conf-${name})"
-    + "<p id=\"conf-${name}\"></p>\n\n"
-    + concatStrings (map (s: "    ${s}\n") (splitLines option.description)) + "\n\n"
-    + (if option.documentDefault
-       then "  **Default:** " + (
-         if option.value == "" || option.value == []
-         then "*empty*"
-         else if isBool option.value
-         then (if option.value then "`true`" else "`false`")
-         else
-           # n.b. a StringMap value type is specified as a string, but
-           # this shows the value type. The empty stringmap is "null" in
-           # JSON, but that converts to "{ }" here.
-           (if isAttrs option.value then "`\"\"`"
-            else "`" + toString option.value + "`")) + "\n\n"
-       else "  **Default:** *machine-specific*\n")
-    + (if option.aliases != []
-       then "  **Deprecated alias:** " + (concatStringsSep ", " (map (s: "`${s}`") option.aliases)) + "\n\n"
-       else "")
-  )
-  (attrNames options))
+let
+  inherit (builtins) attrNames concatStringsSep isAttrs isBool;
+  inherit (import ./utils.nix) concatStrings squash splitLines;
+in
+
+optionsInfo:
+let
+  showOption = name:
+    let
+      inherit (optionsInfo.${name}) description documentDefault defaultValue aliases;
+      result = squash ''
+        - <span id="conf-${name}">[`${name}`](#conf-${name})</span>
+
+          ${indent "  " body}
+      '';
+      # separate body to cleanly handle indentation
+      body = ''
+        ${description}
+
+        **Default:** ${showDefault documentDefault defaultValue}
+
+        ${showAliases aliases}
+      '';
+      showDefault = documentDefault: defaultValue:
+        if documentDefault then
+          # a StringMap value type is specified as a string, but
+          # this shows the value type. The empty stringmap is `null` in
+          # JSON, but that converts to `{ }` here.
+          if defaultValue == "" || defaultValue == [] || isAttrs defaultValue
+          then "*empty*"
+          else if isBool defaultValue then
+            if defaultValue then "`true`" else "`false`"
+          else "`${toString defaultValue}`"
+        else "*machine-specific*";
+      showAliases = aliases:
+        if aliases == [] then "" else
+          "**Deprecated alias:** ${(concatStringsSep ", " (map (s: "`${s}`") aliases))}";
+      indent = prefix: s:
+        concatStringsSep "\n" (map (x: if x == "" then x else "${prefix}${x}") (splitLines s));
+    in result;
+in concatStrings (map showOption (attrNames optionsInfo))
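
For context, a minimal sketch of how this generator might be called. The path, the option name, and the field values are illustrative assumptions; the real input is the JSON settings dump the manual build feeds in, one attribute per option with `description`, `documentDefault`, `defaultValue`, and `aliases`.

```nix
# Sketch only; path and sample data are assumptions, not part of the diff.
import ./generate-options.nix {
  cores = {
    description = "Number of CPU cores to use while building.";  # illustrative text
    documentDefault = true;
    defaultValue = 0;
    aliases = [ "build-cores" ];
  };
}
```

With `documentDefault = true` this renders a `**Default:** ` line with the literal value; options whose default is machine-specific set it to `false` and render `*machine-specific*` instead.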

@@ -29,19 +29,19 @@ nix-eval = $(dummy-env) $(bindir)/nix eval --experimental-features nix-command -
$(d)/%.1: $(d)/src/command-ref/%.md
	@printf "Title: %s\n\n" "$$(basename $@ .1)" > $^.tmp
	@cat $^ >> $^.tmp
-	$(trace-gen) lowdown -sT man -M section=1 $^.tmp -o $@
+	$(trace-gen) lowdown -sT man --nroff-nolinks -M section=1 $^.tmp -o $@
	@rm $^.tmp

$(d)/%.8: $(d)/src/command-ref/%.md
	@printf "Title: %s\n\n" "$$(basename $@ .8)" > $^.tmp
	@cat $^ >> $^.tmp
-	$(trace-gen) lowdown -sT man -M section=8 $^.tmp -o $@
+	$(trace-gen) lowdown -sT man --nroff-nolinks -M section=8 $^.tmp -o $@
	@rm $^.tmp

$(d)/nix.conf.5: $(d)/src/command-ref/conf-file.md
	@printf "Title: %s\n\n" "$$(basename $@ .5)" > $^.tmp
	@cat $^ >> $^.tmp
-	$(trace-gen) lowdown -sT man -M section=5 $^.tmp -o $@
+	$(trace-gen) lowdown -sT man --nroff-nolinks -M section=5 $^.tmp -o $@
	@rm $^.tmp

$(d)/src/SUMMARY.md: $(d)/src/SUMMARY.md.in $(d)/src/command-ref/new-cli

@@ -1,330 +1,421 @@
// Redirects from old DocBook manual. // redirect rules for anchors ensure backwards compatibility of URLs.
var redirects = { // this must be done on the client side, as web servers do not see the anchor part of the URL.
"#part-advanced-topics": "advanced-topics/advanced-topics.html",
"#chap-tuning-cores-and-jobs": "advanced-topics/cores-vs-jobs.html", // redirections are declared as follows:
"#chap-diff-hook": "advanced-topics/diff-hook.html", // each entry has as its key a path matching the requested URL path, relative to the mdBook document root.
"#check-dirs-are-unregistered": "advanced-topics/diff-hook.html#check-dirs-are-unregistered", //
"#chap-distributed-builds": "advanced-topics/distributed-builds.html", // IMPORTANT: it must specify the full path with file name and suffix
"#chap-post-build-hook": "advanced-topics/post-build-hook.html", //
"#chap-post-build-hook-caveats": "advanced-topics/post-build-hook.html#implementation-caveats", // each entry is itself a set of key-value pairs, where
"#part-command-ref": "command-ref/command-ref.html", // - keys are anchors on the matched path.
"#conf-allow-import-from-derivation": "command-ref/conf-file.html#conf-allow-import-from-derivation", // - values are redirection targets relative to the current path.
"#conf-allow-new-privileges": "command-ref/conf-file.html#conf-allow-new-privileges",
"#conf-allowed-uris": "command-ref/conf-file.html#conf-allowed-uris", const redirects = {
"#conf-allowed-users": "command-ref/conf-file.html#conf-allowed-users", "index.html": {
"#conf-auto-optimise-store": "command-ref/conf-file.html#conf-auto-optimise-store", "part-advanced-topics": "advanced-topics/advanced-topics.html",
"#conf-binary-cache-public-keys": "command-ref/conf-file.html#conf-binary-cache-public-keys", "chap-tuning-cores-and-jobs": "advanced-topics/cores-vs-jobs.html",
"#conf-binary-caches": "command-ref/conf-file.html#conf-binary-caches", "chap-diff-hook": "advanced-topics/diff-hook.html",
"#conf-build-compress-log": "command-ref/conf-file.html#conf-build-compress-log", "check-dirs-are-unregistered": "advanced-topics/diff-hook.html#check-dirs-are-unregistered",
"#conf-build-cores": "command-ref/conf-file.html#conf-build-cores", "chap-distributed-builds": "advanced-topics/distributed-builds.html",
"#conf-build-extra-chroot-dirs": "command-ref/conf-file.html#conf-build-extra-chroot-dirs", "chap-post-build-hook": "advanced-topics/post-build-hook.html",
"#conf-build-extra-sandbox-paths": "command-ref/conf-file.html#conf-build-extra-sandbox-paths", "chap-post-build-hook-caveats": "advanced-topics/post-build-hook.html#implementation-caveats",
"#conf-build-fallback": "command-ref/conf-file.html#conf-build-fallback", "part-command-ref": "command-ref/command-ref.html",
"#conf-build-max-jobs": "command-ref/conf-file.html#conf-build-max-jobs", "conf-allow-import-from-derivation": "command-ref/conf-file.html#conf-allow-import-from-derivation",
"#conf-build-max-log-size": "command-ref/conf-file.html#conf-build-max-log-size", "conf-allow-new-privileges": "command-ref/conf-file.html#conf-allow-new-privileges",
"#conf-build-max-silent-time": "command-ref/conf-file.html#conf-build-max-silent-time", "conf-allowed-uris": "command-ref/conf-file.html#conf-allowed-uris",
"#conf-build-repeat": "command-ref/conf-file.html#conf-build-repeat", "conf-allowed-users": "command-ref/conf-file.html#conf-allowed-users",
"#conf-build-timeout": "command-ref/conf-file.html#conf-build-timeout", "conf-auto-optimise-store": "command-ref/conf-file.html#conf-auto-optimise-store",
"#conf-build-use-chroot": "command-ref/conf-file.html#conf-build-use-chroot", "conf-binary-cache-public-keys": "command-ref/conf-file.html#conf-binary-cache-public-keys",
"#conf-build-use-sandbox": "command-ref/conf-file.html#conf-build-use-sandbox", "conf-binary-caches": "command-ref/conf-file.html#conf-binary-caches",
"#conf-build-use-substitutes": "command-ref/conf-file.html#conf-build-use-substitutes", "conf-build-compress-log": "command-ref/conf-file.html#conf-build-compress-log",
"#conf-build-users-group": "command-ref/conf-file.html#conf-build-users-group", "conf-build-cores": "command-ref/conf-file.html#conf-build-cores",
"#conf-builders": "command-ref/conf-file.html#conf-builders", "conf-build-extra-chroot-dirs": "command-ref/conf-file.html#conf-build-extra-chroot-dirs",
"#conf-builders-use-substitutes": "command-ref/conf-file.html#conf-builders-use-substitutes", "conf-build-extra-sandbox-paths": "command-ref/conf-file.html#conf-build-extra-sandbox-paths",
"#conf-compress-build-log": "command-ref/conf-file.html#conf-compress-build-log", "conf-build-fallback": "command-ref/conf-file.html#conf-build-fallback",
"#conf-connect-timeout": "command-ref/conf-file.html#conf-connect-timeout", "conf-build-max-jobs": "command-ref/conf-file.html#conf-build-max-jobs",
"#conf-cores": "command-ref/conf-file.html#conf-cores", "conf-build-max-log-size": "command-ref/conf-file.html#conf-build-max-log-size",
"#conf-diff-hook": "command-ref/conf-file.html#conf-diff-hook", "conf-build-max-silent-time": "command-ref/conf-file.html#conf-build-max-silent-time",
"#conf-enforce-determinism": "command-ref/conf-file.html#conf-enforce-determinism", "conf-build-timeout": "command-ref/conf-file.html#conf-build-timeout",
"#conf-env-keep-derivations": "command-ref/conf-file.html#conf-env-keep-derivations", "conf-build-use-chroot": "command-ref/conf-file.html#conf-build-use-chroot",
"#conf-extra-binary-caches": "command-ref/conf-file.html#conf-extra-binary-caches", "conf-build-use-sandbox": "command-ref/conf-file.html#conf-build-use-sandbox",
"#conf-extra-platforms": "command-ref/conf-file.html#conf-extra-platforms", "conf-build-use-substitutes": "command-ref/conf-file.html#conf-build-use-substitutes",
"#conf-extra-sandbox-paths": "command-ref/conf-file.html#conf-extra-sandbox-paths", "conf-build-users-group": "command-ref/conf-file.html#conf-build-users-group",
"#conf-extra-substituters": "command-ref/conf-file.html#conf-extra-substituters", "conf-builders": "command-ref/conf-file.html#conf-builders",
"#conf-fallback": "command-ref/conf-file.html#conf-fallback", "conf-builders-use-substitutes": "command-ref/conf-file.html#conf-builders-use-substitutes",
"#conf-fsync-metadata": "command-ref/conf-file.html#conf-fsync-metadata", "conf-compress-build-log": "command-ref/conf-file.html#conf-compress-build-log",
"#conf-gc-keep-derivations": "command-ref/conf-file.html#conf-gc-keep-derivations", "conf-connect-timeout": "command-ref/conf-file.html#conf-connect-timeout",
"#conf-gc-keep-outputs": "command-ref/conf-file.html#conf-gc-keep-outputs", "conf-cores": "command-ref/conf-file.html#conf-cores",
"#conf-hashed-mirrors": "command-ref/conf-file.html#conf-hashed-mirrors", "conf-diff-hook": "command-ref/conf-file.html#conf-diff-hook",
"#conf-http-connections": "command-ref/conf-file.html#conf-http-connections", "conf-env-keep-derivations": "command-ref/conf-file.html#conf-env-keep-derivations",
"#conf-keep-build-log": "command-ref/conf-file.html#conf-keep-build-log", "conf-extra-binary-caches": "command-ref/conf-file.html#conf-extra-binary-caches",
"#conf-keep-derivations": "command-ref/conf-file.html#conf-keep-derivations", "conf-extra-platforms": "command-ref/conf-file.html#conf-extra-platforms",
"#conf-keep-env-derivations": "command-ref/conf-file.html#conf-keep-env-derivations", "conf-extra-sandbox-paths": "command-ref/conf-file.html#conf-extra-sandbox-paths",
"#conf-keep-outputs": "command-ref/conf-file.html#conf-keep-outputs", "conf-extra-substituters": "command-ref/conf-file.html#conf-extra-substituters",
"#conf-max-build-log-size": "command-ref/conf-file.html#conf-max-build-log-size", "conf-fallback": "command-ref/conf-file.html#conf-fallback",
"#conf-max-free": "command-ref/conf-file.html#conf-max-free", "conf-fsync-metadata": "command-ref/conf-file.html#conf-fsync-metadata",
"#conf-max-jobs": "command-ref/conf-file.html#conf-max-jobs", "conf-gc-keep-derivations": "command-ref/conf-file.html#conf-gc-keep-derivations",
"#conf-max-silent-time": "command-ref/conf-file.html#conf-max-silent-time", "conf-gc-keep-outputs": "command-ref/conf-file.html#conf-gc-keep-outputs",
"#conf-min-free": "command-ref/conf-file.html#conf-min-free", "conf-hashed-mirrors": "command-ref/conf-file.html#conf-hashed-mirrors",
"#conf-narinfo-cache-negative-ttl": "command-ref/conf-file.html#conf-narinfo-cache-negative-ttl", "conf-http-connections": "command-ref/conf-file.html#conf-http-connections",
"#conf-narinfo-cache-positive-ttl": "command-ref/conf-file.html#conf-narinfo-cache-positive-ttl", "conf-keep-build-log": "command-ref/conf-file.html#conf-keep-build-log",
"#conf-netrc-file": "command-ref/conf-file.html#conf-netrc-file", "conf-keep-derivations": "command-ref/conf-file.html#conf-keep-derivations",
"#conf-plugin-files": "command-ref/conf-file.html#conf-plugin-files", "conf-keep-env-derivations": "command-ref/conf-file.html#conf-keep-env-derivations",
"#conf-post-build-hook": "command-ref/conf-file.html#conf-post-build-hook", "conf-keep-outputs": "command-ref/conf-file.html#conf-keep-outputs",
"#conf-pre-build-hook": "command-ref/conf-file.html#conf-pre-build-hook", "conf-max-build-log-size": "command-ref/conf-file.html#conf-max-build-log-size",
"#conf-repeat": "command-ref/conf-file.html#conf-repeat", "conf-max-free": "command-ref/conf-file.html#conf-max-free",
"#conf-require-sigs": "command-ref/conf-file.html#conf-require-sigs", "conf-max-jobs": "command-ref/conf-file.html#conf-max-jobs",
"#conf-restrict-eval": "command-ref/conf-file.html#conf-restrict-eval", "conf-max-silent-time": "command-ref/conf-file.html#conf-max-silent-time",
"#conf-run-diff-hook": "command-ref/conf-file.html#conf-run-diff-hook", "conf-min-free": "command-ref/conf-file.html#conf-min-free",
"#conf-sandbox": "command-ref/conf-file.html#conf-sandbox", "conf-narinfo-cache-negative-ttl": "command-ref/conf-file.html#conf-narinfo-cache-negative-ttl",
"#conf-sandbox-dev-shm-size": "command-ref/conf-file.html#conf-sandbox-dev-shm-size", "conf-narinfo-cache-positive-ttl": "command-ref/conf-file.html#conf-narinfo-cache-positive-ttl",
"#conf-sandbox-paths": "command-ref/conf-file.html#conf-sandbox-paths", "conf-netrc-file": "command-ref/conf-file.html#conf-netrc-file",
"#conf-secret-key-files": "command-ref/conf-file.html#conf-secret-key-files", "conf-plugin-files": "command-ref/conf-file.html#conf-plugin-files",
"#conf-show-trace": "command-ref/conf-file.html#conf-show-trace", "conf-post-build-hook": "command-ref/conf-file.html#conf-post-build-hook",
"#conf-stalled-download-timeout": "command-ref/conf-file.html#conf-stalled-download-timeout", "conf-pre-build-hook": "command-ref/conf-file.html#conf-pre-build-hook",
"#conf-substitute": "command-ref/conf-file.html#conf-substitute", "conf-require-sigs": "command-ref/conf-file.html#conf-require-sigs",
"#conf-substituters": "command-ref/conf-file.html#conf-substituters", "conf-restrict-eval": "command-ref/conf-file.html#conf-restrict-eval",
"#conf-system": "command-ref/conf-file.html#conf-system", "conf-run-diff-hook": "command-ref/conf-file.html#conf-run-diff-hook",
"#conf-system-features": "command-ref/conf-file.html#conf-system-features", "conf-sandbox": "command-ref/conf-file.html#conf-sandbox",
"#conf-tarball-ttl": "command-ref/conf-file.html#conf-tarball-ttl", "conf-sandbox-dev-shm-size": "command-ref/conf-file.html#conf-sandbox-dev-shm-size",
"#conf-timeout": "command-ref/conf-file.html#conf-timeout", "conf-sandbox-paths": "command-ref/conf-file.html#conf-sandbox-paths",
"#conf-trace-function-calls": "command-ref/conf-file.html#conf-trace-function-calls", "conf-secret-key-files": "command-ref/conf-file.html#conf-secret-key-files",
"#conf-trusted-binary-caches": "command-ref/conf-file.html#conf-trusted-binary-caches", "conf-show-trace": "command-ref/conf-file.html#conf-show-trace",
"#conf-trusted-public-keys": "command-ref/conf-file.html#conf-trusted-public-keys", "conf-stalled-download-timeout": "command-ref/conf-file.html#conf-stalled-download-timeout",
"#conf-trusted-substituters": "command-ref/conf-file.html#conf-trusted-substituters", "conf-substitute": "command-ref/conf-file.html#conf-substitute",
"#conf-trusted-users": "command-ref/conf-file.html#conf-trusted-users", "conf-substituters": "command-ref/conf-file.html#conf-substituters",
"#extra-sandbox-paths": "command-ref/conf-file.html#extra-sandbox-paths", "conf-system": "command-ref/conf-file.html#conf-system",
"#sec-conf-file": "command-ref/conf-file.html", "conf-system-features": "command-ref/conf-file.html#conf-system-features",
"#env-NIX_PATH": "command-ref/env-common.html#env-NIX_PATH", "conf-tarball-ttl": "command-ref/conf-file.html#conf-tarball-ttl",
"#env-common": "command-ref/env-common.html", "conf-timeout": "command-ref/conf-file.html#conf-timeout",
"#envar-remote": "command-ref/env-common.html#env-NIX_REMOTE", "conf-trace-function-calls": "command-ref/conf-file.html#conf-trace-function-calls",
"#sec-common-env": "command-ref/env-common.html", "conf-trusted-binary-caches": "command-ref/conf-file.html#conf-trusted-binary-caches",
"#ch-files": "command-ref/files.html", "conf-trusted-public-keys": "command-ref/conf-file.html#conf-trusted-public-keys",
"#ch-main-commands": "command-ref/main-commands.html", "conf-trusted-substituters": "command-ref/conf-file.html#conf-trusted-substituters",
"#opt-out-link": "command-ref/nix-build.html#opt-out-link", "conf-trusted-users": "command-ref/conf-file.html#conf-trusted-users",
"#sec-nix-build": "command-ref/nix-build.html", "extra-sandbox-paths": "command-ref/conf-file.html#extra-sandbox-paths",
"#sec-nix-channel": "command-ref/nix-channel.html", "sec-conf-file": "command-ref/conf-file.html",
"#sec-nix-collect-garbage": "command-ref/nix-collect-garbage.html", "env-NIX_PATH": "command-ref/env-common.html#env-NIX_PATH",
"#sec-nix-copy-closure": "command-ref/nix-copy-closure.html", "env-common": "command-ref/env-common.html",
"#sec-nix-daemon": "command-ref/nix-daemon.html", "envar-remote": "command-ref/env-common.html#env-NIX_REMOTE",
"#refsec-nix-env-install-examples": "command-ref/nix-env.html#examples", "sec-common-env": "command-ref/env-common.html",
"#rsec-nix-env-install": "command-ref/nix-env.html#operation---install", "ch-files": "command-ref/files.html",
"#rsec-nix-env-set": "command-ref/nix-env.html#operation---set", "ch-main-commands": "command-ref/main-commands.html",
"#rsec-nix-env-set-flag": "command-ref/nix-env.html#operation---set-flag", "opt-out-link": "command-ref/nix-build.html#opt-out-link",
"#rsec-nix-env-upgrade": "command-ref/nix-env.html#operation---upgrade", "sec-nix-build": "command-ref/nix-build.html",
"#sec-nix-env": "command-ref/nix-env.html", "sec-nix-channel": "command-ref/nix-channel.html",
"#ssec-version-comparisons": "command-ref/nix-env.html#versions", "sec-nix-collect-garbage": "command-ref/nix-collect-garbage.html",
"#sec-nix-hash": "command-ref/nix-hash.html", "sec-nix-copy-closure": "command-ref/nix-copy-closure.html",
"#sec-nix-instantiate": "command-ref/nix-instantiate.html", "sec-nix-daemon": "command-ref/nix-daemon.html",
"#sec-nix-prefetch-url": "command-ref/nix-prefetch-url.html", "refsec-nix-env-install-examples": "command-ref/nix-env.html#examples",
"#sec-nix-shell": "command-ref/nix-shell.html", "rsec-nix-env-install": "command-ref/nix-env.html#operation---install",
"#ssec-nix-shell-shebang": "command-ref/nix-shell.html#use-as-a--interpreter", "rsec-nix-env-set": "command-ref/nix-env.html#operation---set",
"#nixref-queries": "command-ref/nix-store.html#queries", "rsec-nix-env-set-flag": "command-ref/nix-env.html#operation---set-flag",
"#opt-add-root": "command-ref/nix-store.html#opt-add-root", "rsec-nix-env-upgrade": "command-ref/nix-env.html#operation---upgrade",
"#refsec-nix-store-dump": "command-ref/nix-store.html#operation---dump", "sec-nix-env": "command-ref/nix-env.html",
"#refsec-nix-store-export": "command-ref/nix-store.html#operation---export", "ssec-version-comparisons": "command-ref/nix-env.html#versions",
"#refsec-nix-store-import": "command-ref/nix-store.html#operation---import", "sec-nix-hash": "command-ref/nix-hash.html",
"#refsec-nix-store-query": "command-ref/nix-store.html#operation---query", "sec-nix-instantiate": "command-ref/nix-instantiate.html",
"#refsec-nix-store-verify": "command-ref/nix-store.html#operation---verify", "sec-nix-prefetch-url": "command-ref/nix-prefetch-url.html",
"#rsec-nix-store-gc": "command-ref/nix-store.html#operation---gc", "sec-nix-shell": "command-ref/nix-shell.html",
"#rsec-nix-store-generate-binary-cache-key": "command-ref/nix-store.html#operation---generate-binary-cache-key", "ssec-nix-shell-shebang": "command-ref/nix-shell.html#use-as-a--interpreter",
"#rsec-nix-store-realise": "command-ref/nix-store.html#operation---realise", "nixref-queries": "command-ref/nix-store.html#queries",
"#rsec-nix-store-serve": "command-ref/nix-store.html#operation---serve", "opt-add-root": "command-ref/nix-store.html#opt-add-root",
"#sec-nix-store": "command-ref/nix-store.html", "refsec-nix-store-dump": "command-ref/nix-store.html#operation---dump",
"#opt-I": "command-ref/opt-common.html#opt-I", "refsec-nix-store-export": "command-ref/nix-store.html#operation---export",
"#opt-attr": "command-ref/opt-common.html#opt-attr", "refsec-nix-store-import": "command-ref/nix-store.html#operation---import",
"#opt-common": "command-ref/opt-common.html", "refsec-nix-store-query": "command-ref/nix-store.html#operation---query",
"#opt-cores": "command-ref/opt-common.html#opt-cores", "refsec-nix-store-verify": "command-ref/nix-store.html#operation---verify",
"#opt-log-format": "command-ref/opt-common.html#opt-log-format", "rsec-nix-store-gc": "command-ref/nix-store.html#operation---gc",
"#opt-max-jobs": "command-ref/opt-common.html#opt-max-jobs", "rsec-nix-store-generate-binary-cache-key": "command-ref/nix-store.html#operation---generate-binary-cache-key",
"#opt-max-silent-time": "command-ref/opt-common.html#opt-max-silent-time", "rsec-nix-store-realise": "command-ref/nix-store.html#operation---realise",
"#opt-timeout": "command-ref/opt-common.html#opt-timeout", "rsec-nix-store-serve": "command-ref/nix-store.html#operation---serve",
"#sec-common-options": "command-ref/opt-common.html", "sec-nix-store": "command-ref/nix-store.html",
"#ch-utilities": "command-ref/utilities.html", "opt-I": "command-ref/opt-common.html#opt-I",
"#chap-hacking": "contributing/hacking.html", "opt-attr": "command-ref/opt-common.html#opt-attr",
"#adv-attr-allowSubstitutes": "language/advanced-attributes.html#adv-attr-allowSubstitutes", "opt-common": "command-ref/opt-common.html",
"#adv-attr-allowedReferences": "language/advanced-attributes.html#adv-attr-allowedReferences", "opt-cores": "command-ref/opt-common.html#opt-cores",
"#adv-attr-allowedRequisites": "language/advanced-attributes.html#adv-attr-allowedRequisites", "opt-log-format": "command-ref/opt-common.html#opt-log-format",
"#adv-attr-disallowedReferences": "language/advanced-attributes.html#adv-attr-disallowedReferences", "opt-max-jobs": "command-ref/opt-common.html#opt-max-jobs",
"#adv-attr-disallowedRequisites": "language/advanced-attributes.html#adv-attr-disallowedRequisites", "opt-max-silent-time": "command-ref/opt-common.html#opt-max-silent-time",
"#adv-attr-exportReferencesGraph": "language/advanced-attributes.html#adv-attr-exportReferencesGraph", "opt-timeout": "command-ref/opt-common.html#opt-timeout",
"#adv-attr-impureEnvVars": "language/advanced-attributes.html#adv-attr-impureEnvVars", "sec-common-options": "command-ref/opt-common.html",
"#adv-attr-outputHash": "language/advanced-attributes.html#adv-attr-outputHash", "ch-utilities": "command-ref/utilities.html",
"#adv-attr-outputHashAlgo": "language/advanced-attributes.html#adv-attr-outputHashAlgo", "chap-hacking": "contributing/hacking.html",
"#adv-attr-outputHashMode": "language/advanced-attributes.html#adv-attr-outputHashMode", "adv-attr-allowSubstitutes": "language/advanced-attributes.html#adv-attr-allowSubstitutes",
"#adv-attr-passAsFile": "language/advanced-attributes.html#adv-attr-passAsFile", "adv-attr-allowedReferences": "language/advanced-attributes.html#adv-attr-allowedReferences",
"#adv-attr-preferLocalBuild": "language/advanced-attributes.html#adv-attr-preferLocalBuild", "adv-attr-allowedRequisites": "language/advanced-attributes.html#adv-attr-allowedRequisites",
"#fixed-output-drvs": "language/advanced-attributes.html#adv-attr-outputHash", "adv-attr-disallowedReferences": "language/advanced-attributes.html#adv-attr-disallowedReferences",
"#sec-advanced-attributes": "language/advanced-attributes.html", "adv-attr-disallowedRequisites": "language/advanced-attributes.html#adv-attr-disallowedRequisites",
"#builtin-abort": "language/builtins.html#builtins-abort", "adv-attr-exportReferencesGraph": "language/advanced-attributes.html#adv-attr-exportReferencesGraph",
"#builtin-add": "language/builtins.html#builtins-add", "adv-attr-impureEnvVars": "language/advanced-attributes.html#adv-attr-impureEnvVars",
"#builtin-all": "language/builtins.html#builtins-all", "adv-attr-outputHash": "language/advanced-attributes.html#adv-attr-outputHash",
"#builtin-any": "language/builtins.html#builtins-any", "adv-attr-outputHashAlgo": "language/advanced-attributes.html#adv-attr-outputHashAlgo",
"#builtin-attrNames": "language/builtins.html#builtins-attrNames", "adv-attr-outputHashMode": "language/advanced-attributes.html#adv-attr-outputHashMode",
"#builtin-attrValues": "language/builtins.html#builtins-attrValues", "adv-attr-passAsFile": "language/advanced-attributes.html#adv-attr-passAsFile",
"#builtin-baseNameOf": "language/builtins.html#builtins-baseNameOf", "adv-attr-preferLocalBuild": "language/advanced-attributes.html#adv-attr-preferLocalBuild",
"#builtin-bitAnd": "language/builtins.html#builtins-bitAnd", "fixed-output-drvs": "language/advanced-attributes.html#adv-attr-outputHash",
"#builtin-bitOr": "language/builtins.html#builtins-bitOr", "sec-advanced-attributes": "language/advanced-attributes.html",
"#builtin-bitXor": "language/builtins.html#builtins-bitXor", "builtin-abort": "language/builtins.html#builtins-abort",
"#builtin-builtins": "language/builtins.html#builtins-builtins", "builtin-add": "language/builtins.html#builtins-add",
"#builtin-compareVersions": "language/builtins.html#builtins-compareVersions", "builtin-all": "language/builtins.html#builtins-all",
"#builtin-concatLists": "language/builtins.html#builtins-concatLists", "builtin-any": "language/builtins.html#builtins-any",
"#builtin-concatStringsSep": "language/builtins.html#builtins-concatStringsSep", "builtin-attrNames": "language/builtins.html#builtins-attrNames",
"#builtin-currentSystem": "language/builtins.html#builtins-currentSystem", "builtin-attrValues": "language/builtins.html#builtins-attrValues",
"#builtin-deepSeq": "language/builtins.html#builtins-deepSeq", "builtin-baseNameOf": "language/builtins.html#builtins-baseNameOf",
"#builtin-derivation": "language/builtins.html#builtins-derivation", "builtin-bitAnd": "language/builtins.html#builtins-bitAnd",
"#builtin-dirOf": "language/builtins.html#builtins-dirOf", "builtin-bitOr": "language/builtins.html#builtins-bitOr",
"#builtin-div": "language/builtins.html#builtins-div", "builtin-bitXor": "language/builtins.html#builtins-bitXor",
"#builtin-elem": "language/builtins.html#builtins-elem", "builtin-builtins": "language/builtins.html#builtins-builtins",
"#builtin-elemAt": "language/builtins.html#builtins-elemAt", "builtin-compareVersions": "language/builtins.html#builtins-compareVersions",
"#builtin-fetchGit": "language/builtins.html#builtins-fetchGit", "builtin-concatLists": "language/builtins.html#builtins-concatLists",
"#builtin-fetchTarball": "language/builtins.html#builtins-fetchTarball", "builtin-concatStringsSep": "language/builtins.html#builtins-concatStringsSep",
"#builtin-fetchurl": "language/builtins.html#builtins-fetchurl", "builtin-currentSystem": "language/builtins.html#builtins-currentSystem",
"#builtin-filterSource": "language/builtins.html#builtins-filterSource", "builtin-deepSeq": "language/builtins.html#builtins-deepSeq",
"#builtin-foldl-prime": "language/builtins.html#builtins-foldl-prime", "builtin-derivation": "language/builtins.html#builtins-derivation",
"#builtin-fromJSON": "language/builtins.html#builtins-fromJSON", "builtin-dirOf": "language/builtins.html#builtins-dirOf",
"#builtin-functionArgs": "language/builtins.html#builtins-functionArgs", "builtin-div": "language/builtins.html#builtins-div",
"#builtin-genList": "language/builtins.html#builtins-genList", "builtin-elem": "language/builtins.html#builtins-elem",
"#builtin-getAttr": "language/builtins.html#builtins-getAttr", "builtin-elemAt": "language/builtins.html#builtins-elemAt",
"#builtin-getEnv": "language/builtins.html#builtins-getEnv", "builtin-fetchGit": "language/builtins.html#builtins-fetchGit",
"#builtin-hasAttr": "language/builtins.html#builtins-hasAttr", "builtin-fetchTarball": "language/builtins.html#builtins-fetchTarball",
"#builtin-hashFile": "language/builtins.html#builtins-hashFile", "builtin-fetchurl": "language/builtins.html#builtins-fetchurl",
"#builtin-hashString": "language/builtins.html#builtins-hashString", "builtin-filterSource": "language/builtins.html#builtins-filterSource",
"#builtin-head": "language/builtins.html#builtins-head", "builtin-foldl-prime": "language/builtins.html#builtins-foldl-prime",
"#builtin-import": "language/builtins.html#builtins-import", "builtin-fromJSON": "language/builtins.html#builtins-fromJSON",
"#builtin-intersectAttrs": "language/builtins.html#builtins-intersectAttrs", "builtin-functionArgs": "language/builtins.html#builtins-functionArgs",
"#builtin-isAttrs": "language/builtins.html#builtins-isAttrs", "builtin-genList": "language/builtins.html#builtins-genList",
"#builtin-isBool": "language/builtins.html#builtins-isBool", "builtin-getAttr": "language/builtins.html#builtins-getAttr",
"#builtin-isFloat": "language/builtins.html#builtins-isFloat", "builtin-getEnv": "language/builtins.html#builtins-getEnv",
"#builtin-isFunction": "language/builtins.html#builtins-isFunction", "builtin-hasAttr": "language/builtins.html#builtins-hasAttr",
"#builtin-isInt": "language/builtins.html#builtins-isInt", "builtin-hashFile": "language/builtins.html#builtins-hashFile",
"#builtin-isList": "language/builtins.html#builtins-isList", "builtin-hashString": "language/builtins.html#builtins-hashString",
"#builtin-isNull": "language/builtins.html#builtins-isNull", "builtin-head": "language/builtins.html#builtins-head",
"#builtin-isString": "language/builtins.html#builtins-isString", "builtin-import": "language/builtins.html#builtins-import",
"#builtin-length": "language/builtins.html#builtins-length", "builtin-intersectAttrs": "language/builtins.html#builtins-intersectAttrs",
"#builtin-lessThan": "language/builtins.html#builtins-lessThan", "builtin-isAttrs": "language/builtins.html#builtins-isAttrs",
"#builtin-listToAttrs": "language/builtins.html#builtins-listToAttrs", "builtin-isBool": "language/builtins.html#builtins-isBool",
"#builtin-map": "language/builtins.html#builtins-map", "builtin-isFloat": "language/builtins.html#builtins-isFloat",
"#builtin-match": "language/builtins.html#builtins-match", "builtin-isFunction": "language/builtins.html#builtins-isFunction",
"#builtin-mul": "language/builtins.html#builtins-mul", "builtin-isInt": "language/builtins.html#builtins-isInt",
"#builtin-parseDrvName": "language/builtins.html#builtins-parseDrvName", "builtin-isList": "language/builtins.html#builtins-isList",
"#builtin-path": "language/builtins.html#builtins-path", "builtin-isNull": "language/builtins.html#builtins-isNull",
"#builtin-pathExists": "language/builtins.html#builtins-pathExists", "builtin-isString": "language/builtins.html#builtins-isString",
"#builtin-placeholder": "language/builtins.html#builtins-placeholder", "builtin-length": "language/builtins.html#builtins-length",
"#builtin-readDir": "language/builtins.html#builtins-readDir", "builtin-lessThan": "language/builtins.html#builtins-lessThan",
"#builtin-readFile": "language/builtins.html#builtins-readFile", "builtin-listToAttrs": "language/builtins.html#builtins-listToAttrs",
"#builtin-removeAttrs": "language/builtins.html#builtins-removeAttrs", "builtin-map": "language/builtins.html#builtins-map",
"#builtin-replaceStrings": "language/builtins.html#builtins-replaceStrings", "builtin-match": "language/builtins.html#builtins-match",
"#builtin-seq": "language/builtins.html#builtins-seq", "builtin-mul": "language/builtins.html#builtins-mul",
"#builtin-sort": "language/builtins.html#builtins-sort", "builtin-parseDrvName": "language/builtins.html#builtins-parseDrvName",
"#builtin-split": "language/builtins.html#builtins-split", "builtin-path": "language/builtins.html#builtins-path",
"#builtin-splitVersion": "language/builtins.html#builtins-splitVersion", "builtin-pathExists": "language/builtins.html#builtins-pathExists",
"#builtin-stringLength": "language/builtins.html#builtins-stringLength", "builtin-placeholder": "language/builtins.html#builtins-placeholder",
"#builtin-sub": "language/builtins.html#builtins-sub", "builtin-readDir": "language/builtins.html#builtins-readDir",
"#builtin-substring": "language/builtins.html#builtins-substring", "builtin-readFile": "language/builtins.html#builtins-readFile",
"#builtin-tail": "language/builtins.html#builtins-tail", "builtin-removeAttrs": "language/builtins.html#builtins-removeAttrs",
"#builtin-throw": "language/builtins.html#builtins-throw", "builtin-replaceStrings": "language/builtins.html#builtins-replaceStrings",
"#builtin-toFile": "language/builtins.html#builtins-toFile", "builtin-seq": "language/builtins.html#builtins-seq",
"#builtin-toJSON": "language/builtins.html#builtins-toJSON", "builtin-sort": "language/builtins.html#builtins-sort",
"#builtin-toPath": "language/builtins.html#builtins-toPath", "builtin-split": "language/builtins.html#builtins-split",
"#builtin-toString": "language/builtins.html#builtins-toString", "builtin-splitVersion": "language/builtins.html#builtins-splitVersion",
"#builtin-toXML": "language/builtins.html#builtins-toXML", "builtin-stringLength": "language/builtins.html#builtins-stringLength",
"#builtin-trace": "language/builtins.html#builtins-trace", "builtin-sub": "language/builtins.html#builtins-sub",
"#builtin-tryEval": "language/builtins.html#builtins-tryEval", "builtin-substring": "language/builtins.html#builtins-substring",
"#builtin-typeOf": "language/builtins.html#builtins-typeOf", "builtin-tail": "language/builtins.html#builtins-tail",
"#ssec-builtins": "language/builtins.html", "builtin-throw": "language/builtins.html#builtins-throw",
"#attr-system": "language/derivations.html#attr-system", "builtin-toFile": "language/builtins.html#builtins-toFile",
"#ssec-derivation": "language/derivations.html", "builtin-toJSON": "language/builtins.html#builtins-toJSON",
"#ch-expression-language": "language/index.html", "builtin-toPath": "language/builtins.html#builtins-toPath",
"#sec-constructs": "language/constructs.html", "builtin-toString": "language/builtins.html#builtins-toString",
"#sect-let-language": "language/constructs.html#let-language", "builtin-toXML": "language/builtins.html#builtins-toXML",
"#ss-functions": "language/constructs.html#functions", "builtin-trace": "language/builtins.html#builtins-trace",
"#sec-language-operators": "language/operators.html", "builtin-tryEval": "language/builtins.html#builtins-tryEval",
"#table-operators": "language/operators.html", "builtin-typeOf": "language/builtins.html#builtins-typeOf",
"#ssec-values": "language/values.html", "ssec-builtins": "language/builtins.html",
"#gloss-closure": "glossary.html#gloss-closure", "attr-system": "language/derivations.html#attr-system",
"#gloss-derivation": "glossary.html#gloss-derivation", "ssec-derivation": "language/derivations.html",
"#gloss-deriver": "glossary.html#gloss-deriver", "ch-expression-language": "language/index.html",
"#gloss-nar": "glossary.html#gloss-nar", "sec-constructs": "language/constructs.html",
"#gloss-output-path": "glossary.html#gloss-output-path", "sect-let-language": "language/constructs.html#let-language",
"#gloss-profile": "glossary.html#gloss-profile", "ss-functions": "language/constructs.html#functions",
"#gloss-reachable": "glossary.html#gloss-reachable", "sec-language-operators": "language/operators.html",
"#gloss-reference": "glossary.html#gloss-reference", "table-operators": "language/operators.html",
"#gloss-substitute": "glossary.html#gloss-substitute", "ssec-values": "language/values.html",
"#gloss-user-env": "glossary.html#gloss-user-env", "gloss-closure": "glossary.html#gloss-closure",
"#gloss-validity": "glossary.html#gloss-validity", "gloss-derivation": "glossary.html#gloss-derivation",
"#part-glossary": "glossary.html", "gloss-deriver": "glossary.html#gloss-deriver",
"#sec-building-source": "installation/building-source.html", "gloss-nar": "glossary.html#gloss-nar",
"#ch-env-variables": "installation/env-variables.html", "gloss-output-path": "glossary.html#gloss-output-path",
"#sec-installer-proxy-settings": "installation/env-variables.html#proxy-environment-variables", "gloss-profile": "glossary.html#gloss-profile",
"#sec-nix-ssl-cert-file": "installation/env-variables.html#nix_ssl_cert_file", "gloss-reachable": "glossary.html#gloss-reachable",
"#sec-nix-ssl-cert-file-with-nix-daemon-and-macos": "installation/env-variables.html#nix_ssl_cert_file-with-macos-and-the-nix-daemon", "gloss-reference": "glossary.html#gloss-reference",
"#chap-installation": "installation/installation.html", "gloss-substitute": "glossary.html#gloss-substitute",
"#ch-installing-binary": "installation/installing-binary.html", "gloss-user-env": "glossary.html#gloss-user-env",
"#sect-macos-installation": "installation/installing-binary.html#macos-installation", "gloss-validity": "glossary.html#gloss-validity",
"#sect-macos-installation-change-store-prefix": "installation/installing-binary.html#macos-installation", "part-glossary": "glossary.html",
"#sect-macos-installation-encrypted-volume": "installation/installing-binary.html#macos-installation", "sec-building-source": "installation/building-source.html",
"#sect-macos-installation-recommended-notes": "installation/installing-binary.html#macos-installation", "ch-env-variables": "installation/env-variables.html",
"#sect-macos-installation-symlink": "installation/installing-binary.html#macos-installation", "sec-installer-proxy-settings": "installation/env-variables.html#proxy-environment-variables",
"#sect-multi-user-installation": "installation/installing-binary.html#multi-user-installation", "sec-nix-ssl-cert-file": "installation/env-variables.html#nix_ssl_cert_file",
"#sect-nix-install-binary-tarball": "installation/installing-binary.html#installing-from-a-binary-tarball", "sec-nix-ssl-cert-file-with-nix-daemon-and-macos": "installation/env-variables.html#nix_ssl_cert_file-with-macos-and-the-nix-daemon",
"#sect-nix-install-pinned-version-url": "installation/installing-binary.html#installing-a-pinned-nix-version-from-a-url", "chap-installation": "installation/installation.html",
"#sect-single-user-installation": "installation/installing-binary.html#single-user-installation", "ch-installing-binary": "installation/installing-binary.html",
"#ch-installing-source": "installation/installing-source.html", "sect-macos-installation": "installation/installing-binary.html#macos-installation",
"#ssec-multi-user": "installation/multi-user.html", "sect-macos-installation-change-store-prefix": "installation/installing-binary.html#macos-installation",
"#ch-nix-security": "installation/nix-security.html", "sect-macos-installation-encrypted-volume": "installation/installing-binary.html#macos-installation",
"#sec-obtaining-source": "installation/obtaining-source.html", "sect-macos-installation-recommended-notes": "installation/installing-binary.html#macos-installation",
"#sec-prerequisites-source": "installation/prerequisites-source.html", "sect-macos-installation-symlink": "installation/installing-binary.html#macos-installation",
"#sec-single-user": "installation/single-user.html", "sect-multi-user-installation": "installation/installing-binary.html#multi-user-installation",
"#ch-supported-platforms": "installation/supported-platforms.html", "sect-nix-install-binary-tarball": "installation/installing-binary.html#installing-from-a-binary-tarball",
"#ch-upgrading-nix": "installation/upgrading.html", "sect-nix-install-pinned-version-url": "installation/installing-binary.html#installing-a-pinned-nix-version-from-a-url",
"#ch-about-nix": "introduction.html", "sect-single-user-installation": "installation/installing-binary.html#single-user-installation",
"#chap-introduction": "introduction.html", "ch-installing-source": "installation/installing-source.html",
"#ch-basic-package-mgmt": "package-management/basic-package-mgmt.html", "ssec-multi-user": "installation/multi-user.html",
"#ssec-binary-cache-substituter": "package-management/binary-cache-substituter.html", "ch-nix-security": "installation/nix-security.html",
"#sec-channels": "package-management/channels.html", "sec-obtaining-source": "installation/obtaining-source.html",
"#ssec-copy-closure": "package-management/copy-closure.html", "sec-prerequisites-source": "installation/prerequisites-source.html",
"#sec-garbage-collection": "package-management/garbage-collection.html", "sec-single-user": "installation/single-user.html",
"#ssec-gc-roots": "package-management/garbage-collector-roots.html", "ch-supported-platforms": "installation/supported-platforms.html",
"#chap-package-management": "package-management/package-management.html", "ch-upgrading-nix": "installation/upgrading.html",
"#sec-profiles": "package-management/profiles.html", "ch-about-nix": "introduction.html",
"#ssec-s3-substituter": "package-management/s3-substituter.html", "chap-introduction": "introduction.html",
"#ssec-s3-substituter-anonymous-reads": "package-management/s3-substituter.html#anonymous-reads-to-your-s3-compatible-binary-cache", "ch-basic-package-mgmt": "package-management/basic-package-mgmt.html",
"#ssec-s3-substituter-authenticated-reads": "package-management/s3-substituter.html#authenticated-reads-to-your-s3-binary-cache", "ssec-binary-cache-substituter": "package-management/binary-cache-substituter.html",
"#ssec-s3-substituter-authenticated-writes": "package-management/s3-substituter.html#authenticated-writes-to-your-s3-compatible-binary-cache", "sec-channels": "package-management/channels.html",
"#sec-sharing-packages": "package-management/sharing-packages.html", "ssec-copy-closure": "package-management/copy-closure.html",
"#ssec-ssh-substituter": "package-management/ssh-substituter.html", "sec-garbage-collection": "package-management/garbage-collection.html",
"#chap-quick-start": "quick-start.html", "ssec-gc-roots": "package-management/garbage-collector-roots.html",
"#sec-relnotes": "release-notes/release-notes.html", "chap-package-management": "package-management/package-management.html",
"#ch-relnotes-0.10.1": "release-notes/rl-0.10.1.html", "sec-profiles": "package-management/profiles.html",
"#ch-relnotes-0.10": "release-notes/rl-0.10.html", "ssec-s3-substituter": "package-management/s3-substituter.html",
"#ssec-relnotes-0.11": "release-notes/rl-0.11.html", "ssec-s3-substituter-anonymous-reads": "package-management/s3-substituter.html#anonymous-reads-to-your-s3-compatible-binary-cache",
"#ssec-relnotes-0.12": "release-notes/rl-0.12.html", "ssec-s3-substituter-authenticated-reads": "package-management/s3-substituter.html#authenticated-reads-to-your-s3-binary-cache",
"#ssec-relnotes-0.13": "release-notes/rl-0.13.html", "ssec-s3-substituter-authenticated-writes": "package-management/s3-substituter.html#authenticated-writes-to-your-s3-compatible-binary-cache",
"#ssec-relnotes-0.14": "release-notes/rl-0.14.html", "sec-sharing-packages": "package-management/sharing-packages.html",
"#ssec-relnotes-0.15": "release-notes/rl-0.15.html", "ssec-ssh-substituter": "package-management/ssh-substituter.html",
"#ssec-relnotes-0.16": "release-notes/rl-0.16.html", "chap-quick-start": "quick-start.html",
"#ch-relnotes-0.5": "release-notes/rl-0.5.html", "sec-relnotes": "release-notes/release-notes.html",
"#ch-relnotes-0.6": "release-notes/rl-0.6.html", "ch-relnotes-0.10.1": "release-notes/rl-0.10.1.html",
"#ch-relnotes-0.7": "release-notes/rl-0.7.html", "ch-relnotes-0.10": "release-notes/rl-0.10.html",
"#ch-relnotes-0.8.1": "release-notes/rl-0.8.1.html", "ssec-relnotes-0.11": "release-notes/rl-0.11.html",
"#ch-relnotes-0.8": "release-notes/rl-0.8.html", "ssec-relnotes-0.12": "release-notes/rl-0.12.html",
"#ch-relnotes-0.9.1": "release-notes/rl-0.9.1.html", "ssec-relnotes-0.13": "release-notes/rl-0.13.html",
"#ch-relnotes-0.9.2": "release-notes/rl-0.9.2.html", "ssec-relnotes-0.14": "release-notes/rl-0.14.html",
"#ch-relnotes-0.9": "release-notes/rl-0.9.html", "ssec-relnotes-0.15": "release-notes/rl-0.15.html",
"#ssec-relnotes-1.0": "release-notes/rl-1.0.html", "ssec-relnotes-0.16": "release-notes/rl-0.16.html",
"#ssec-relnotes-1.1": "release-notes/rl-1.1.html", "ch-relnotes-0.5": "release-notes/rl-0.5.html",
"#ssec-relnotes-1.10": "release-notes/rl-1.10.html", "ch-relnotes-0.6": "release-notes/rl-0.6.html",
"#ssec-relnotes-1.11.10": "release-notes/rl-1.11.10.html", "ch-relnotes-0.7": "release-notes/rl-0.7.html",
"#ssec-relnotes-1.11": "release-notes/rl-1.11.html", "ch-relnotes-0.8.1": "release-notes/rl-0.8.1.html",
"#ssec-relnotes-1.2": "release-notes/rl-1.2.html", "ch-relnotes-0.8": "release-notes/rl-0.8.html",
"#ssec-relnotes-1.3": "release-notes/rl-1.3.html", "ch-relnotes-0.9.1": "release-notes/rl-0.9.1.html",
"#ssec-relnotes-1.4": "release-notes/rl-1.4.html", "ch-relnotes-0.9.2": "release-notes/rl-0.9.2.html",
"#ssec-relnotes-1.5.1": "release-notes/rl-1.5.1.html", "ch-relnotes-0.9": "release-notes/rl-0.9.html",
"#ssec-relnotes-1.5.2": "release-notes/rl-1.5.2.html", "ssec-relnotes-1.0": "release-notes/rl-1.0.html",
"#ssec-relnotes-1.5": "release-notes/rl-1.5.html", "ssec-relnotes-1.1": "release-notes/rl-1.1.html",
"#ssec-relnotes-1.6.1": "release-notes/rl-1.6.1.html", "ssec-relnotes-1.10": "release-notes/rl-1.10.html",
"#ssec-relnotes-1.6.0": "release-notes/rl-1.6.html", "ssec-relnotes-1.11.10": "release-notes/rl-1.11.10.html",
"#ssec-relnotes-1.7": "release-notes/rl-1.7.html", "ssec-relnotes-1.11": "release-notes/rl-1.11.html",
"#ssec-relnotes-1.8": "release-notes/rl-1.8.html", "ssec-relnotes-1.2": "release-notes/rl-1.2.html",
"#ssec-relnotes-1.9": "release-notes/rl-1.9.html", "ssec-relnotes-1.3": "release-notes/rl-1.3.html",
"#ssec-relnotes-2.0": "release-notes/rl-2.0.html", "ssec-relnotes-1.4": "release-notes/rl-1.4.html",
"#ssec-relnotes-2.1": "release-notes/rl-2.1.html", "ssec-relnotes-1.5.1": "release-notes/rl-1.5.1.html",
"#ssec-relnotes-2.2": "release-notes/rl-2.2.html", "ssec-relnotes-1.5.2": "release-notes/rl-1.5.2.html",
"#ssec-relnotes-2.3": "release-notes/rl-2.3.html" "ssec-relnotes-1.5": "release-notes/rl-1.5.html",
"ssec-relnotes-1.6.1": "release-notes/rl-1.6.1.html",
"ssec-relnotes-1.6.0": "release-notes/rl-1.6.html",
"ssec-relnotes-1.7": "release-notes/rl-1.7.html",
"ssec-relnotes-1.8": "release-notes/rl-1.8.html",
"ssec-relnotes-1.9": "release-notes/rl-1.9.html",
"ssec-relnotes-2.0": "release-notes/rl-2.0.html",
"ssec-relnotes-2.1": "release-notes/rl-2.1.html",
"ssec-relnotes-2.2": "release-notes/rl-2.2.html",
"ssec-relnotes-2.3": "release-notes/rl-2.3.html"
},
"language/values.html": {
"simple-values": "#primitives",
"lists": "#list",
"strings": "#string",
"lists": "#list",
"attribute-sets": "#attribute-set"
}
}; };
var isRoot = (document.location.pathname.endsWith('/') || document.location.pathname.endsWith('/index.html')) && path_to_root === ''; // the following code matches the current page's URL against the set of redirects.
if (isRoot && redirects[document.location.hash]) { //
document.location.href = path_to_root + redirects[document.location.hash]; // it is written to minimize the latency between page load and redirect.
// therefore we avoid function calls, copying data, and unnecessary loops.
// IMPORTANT: we use stateful array operations and their order matters!
//
// matching URLs is more involved than it should be:
//
// 1. `document.location.pathname` can have an arbitrary prefix.
//
// 2. `path_to_root` is set by mdBook. it consists only of `../`s and
// determines the depth of `<path>` relative to the prefix:
//
// `document.location.pathname`
// |------------------------------|
// /<prefix>/<path>/[<file>[.html]][#<anchor>]
// |----|
// `path_to_root` has same number of path segments
//
// source: https://phaiax.github.io/mdBook/format/theme/index-hbs.html#data
//
// 3. the following paths are equivalent:
//
// /foo/bar/
// /foo/bar/index.html
// /foo/bar/index
//
// 4. the following paths are also equivalent:
//
// /foo/bar/baz
// /foo/bar/baz.html
//
let segments = document.location.pathname.split('/');
let file = segments.pop();
// normalize file name
if (file === '') { file = "index.html"; }
else if (!file.endsWith('.html')) { file = file + '.html'; }
segments.push(file);
// use `path_to_root` to discern prefix from path.
const depth = path_to_root.split('/').length;
// remove segments containing prefix. the following works because
// 1. the original `document.location.pathname` is absolute,
// hence first element of `segments` is always empty.
// 2. last element of splitting `path_to_root` is also always empty.
// 3. last element of `segments` is the file name.
//
// visual example:
//
// '/foo/bar/baz.html'.split('/') -> [ '', 'foo', 'bar', 'baz.html' ]
// '../'.split('/') -> [ '..', '' ]
//
// the following operations will then result in
//
// path = 'bar/baz.html'
//
segments.splice(0, segments.length - depth);
const path = segments.join('/');
// anchor starts with the hash character (`#`),
// but our redirect declarations don't, so we strip it.
// example:
// document.location.hash -> '#foo'
// document.location.hash.substring(1) -> 'foo'
const anchor = document.location.hash.substring(1);
const redirect = redirects[path];
if (redirect) {
const target = redirect[anchor];
if (target) {
document.location.href = target;
}
} }

View file

@ -29,6 +29,7 @@
- [Nix Language](language/index.md) - [Nix Language](language/index.md)
- [Data Types](language/values.md) - [Data Types](language/values.md)
- [Language Constructs](language/constructs.md) - [Language Constructs](language/constructs.md)
- [String interpolation](language/string-interpolation.md)
- [Operators](language/operators.md) - [Operators](language/operators.md)
- [Derivations](language/derivations.md) - [Derivations](language/derivations.md)
- [Advanced Attributes](language/advanced-attributes.md) - [Advanced Attributes](language/advanced-attributes.md)
@ -59,12 +60,14 @@
@manpages@ @manpages@
- [Files](command-ref/files.md) - [Files](command-ref/files.md)
- [nix.conf](command-ref/conf-file.md) - [nix.conf](command-ref/conf-file.md)
- [Architecture](architecture/architecture.md)
- [Glossary](glossary.md) - [Glossary](glossary.md)
- [Contributing](contributing/contributing.md) - [Contributing](contributing/contributing.md)
- [Hacking](contributing/hacking.md) - [Hacking](contributing/hacking.md)
- [CLI guideline](contributing/cli-guideline.md) - [CLI guideline](contributing/cli-guideline.md)
- [Release Notes](release-notes/release-notes.md) - [Release Notes](release-notes/release-notes.md)
- [Release X.Y (202?-??-??)](release-notes/rl-next.md) - [Release X.Y (202?-??-??)](release-notes/rl-next.md)
- [Release 2.12 (2022-12-06)](release-notes/rl-2.12.md)
- [Release 2.11 (2022-08-25)](release-notes/rl-2.11.md) - [Release 2.11 (2022-08-25)](release-notes/rl-2.11.md)
- [Release 2.10 (2022-07-11)](release-notes/rl-2.10.md) - [Release 2.10 (2022-07-11)](release-notes/rl-2.10.md)
- [Release 2.9 (2022-05-30)](release-notes/rl-2.9.md) - [Release 2.9 (2022-05-30)](release-notes/rl-2.9.md)

View file

@ -121,37 +121,3 @@ error:
are not valid, so checking is not possible are not valid, so checking is not possible
Run the build without `--check`, and then try with `--check` again. Run the build without `--check`, and then try with `--check` again.
# Automatic and Optionally Enforced Determinism Verification
Automatically verify every build at build time by executing the build
multiple times.
Setting `repeat` and `enforce-determinism` in your `nix.conf` permits
the automated verification of every build Nix performs.
The following configuration will run each build three times, and will
require the build to be deterministic:
enforce-determinism = true
repeat = 2
Setting `enforce-determinism` to false as in the following
configuration will run the build multiple times, execute the build
hook, but will allow the build to succeed even if it does not build
reproducibly:
enforce-determinism = false
repeat = 1
An example output of this configuration:
```console
$ nix-build ./test.nix -A unstable
this derivation will be built:
/nix/store/ch6llwpr2h8c3jmnf3f2ghkhx59aa97f-unstable.drv
building '/nix/store/ch6llwpr2h8c3jmnf3f2ghkhx59aa97f-unstable.drv' (round 1/2)...
building '/nix/store/ch6llwpr2h8c3jmnf3f2ghkhx59aa97f-unstable.drv' (round 2/2)...
output '/nix/store/6xg356v9gl03hpbbg8gws77n19qanh02-unstable' of '/nix/store/ch6llwpr2h8c3jmnf3f2ghkhx59aa97f-unstable.drv' differs from '/nix/store/6xg356v9gl03hpbbg8gws77n19qanh02-unstable.check' from previous round
/nix/store/6xg356v9gl03hpbbg8gws77n19qanh02-unstable
```

View file

@ -33,12 +33,17 @@ distribute the public key for verifying the authenticity of the paths.
example-nix-cache-1:1/cKDz3QCCOmwcztD2eV6Coggp6rqc9DGjWv7C0G+rM= example-nix-cache-1:1/cKDz3QCCOmwcztD2eV6Coggp6rqc9DGjWv7C0G+rM=
``` ```
Then, add the public key and the cache URL to your `nix.conf`'s Then update [`nix.conf`](../command-ref/conf-file.md) on any machine that will access the cache.
`trusted-public-keys` and `substituters` options: Add the cache URL to [`substituters`](../command-ref/conf-file.md#conf-substituters) and the public key to [`trusted-public-keys`](../command-ref/conf-file.md#conf-trusted-public-keys):
substituters = https://cache.nixos.org/ s3://example-nix-cache substituters = https://cache.nixos.org/ s3://example-nix-cache
trusted-public-keys = cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY= example-nix-cache-1:1/cKDz3QCCOmwcztD2eV6Coggp6rqc9DGjWv7C0G+rM= trusted-public-keys = cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY= example-nix-cache-1:1/cKDz3QCCOmwcztD2eV6Coggp6rqc9DGjWv7C0G+rM=
Machines that build for the cache must sign derivations using the private key.
On those machines, add the path to the key file to the [`secret-key-files`](../command-ref/conf-file.md#conf-secret-key-files) field in their [`nix.conf`](../command-ref/conf-file.md):
secret-key-files = /etc/nix/key.private
We will restart the Nix daemon in a later step. We will restart the Nix daemon in a later step.
# Implementing the build hook # Implementing the build hook
@ -52,10 +57,8 @@ set -eu
set -f # disable globbing set -f # disable globbing
export IFS=' ' export IFS=' '
echo "Signing paths" $OUT_PATHS
nix store sign --key-file /etc/nix/key.private $OUT_PATHS
echo "Uploading paths" $OUT_PATHS echo "Uploading paths" $OUT_PATHS
exec nix copy --to 's3://example-nix-cache' $OUT_PATHS exec nix copy --to "s3://example-nix-cache" $OUT_PATHS
``` ```
> **Note** > **Note**

View file

@ -0,0 +1,115 @@
# Architecture
This chapter describes how Nix works.
It should help users understand why Nix behaves as it does, and it should help developers understand how to modify Nix and how to write similar tools.
## Overview
Nix consists of [hierarchical layers].
[hierarchical layers]: https://en.m.wikipedia.org/wiki/Multitier_architecture#Layers
The following [concept map] shows its main components (rectangles), the objects they operate on (rounded rectangles), and their interactions (connecting phrases):
[concept map]: https://en.m.wikipedia.org/wiki/Concept_map
```
.----------------.
| Nix expression |----------.
'----------------' |
| passed to
| |
+----------|-------------------|--------------------------------+
| Nix | V |
| | +------------------------+ |
| | | command line interface |------. |
| | +------------------------+ | |
| | | | |
| evaluated by calls manages |
| | | | |
| | V | |
| | +--------------------+ | |
| '-------->| language evaluator | | |
| +--------------------+ | |
| | | |
| produces | |
| | V |
| +----------------------------|------------------------------+ |
| | store | | |
| | referenced by V builds | |
| | .-------------. .------------. .--------------. | |
| | | build input |----->| build plan |----->| build result | | |
| | '-------------' '------------' '--------------' | |
| +-------------------------------------------------|---------+ |
+---------------------------------------------------|-----------+
|
represented as
|
V
.---------------.
| file |
'---------------'
```
At the top is the [command line interface](../command-ref/command-ref.md) that drives the underlying layers.
The [Nix language](../language/index.md) evaluator transforms Nix expressions into self-contained *build plans*, which are used to derive *build results* from referenced *build inputs*.
The command line interface and Nix expressions are what users deal with most.
> **Note**
> The Nix language itself does not have a notion of *packages* or *configurations*.
> As far as we are concerned here, the inputs and results of a build plan are just data.
Underlying the command line interface and the Nix language evaluator is the [Nix store](../glossary.md#gloss-store), a mechanism to keep track of build plans, data, and references between them.
It can also execute build plans to produce new data, which are made available to the operating system as files.
A build plan itself is a series of *build tasks*, together with their build inputs.
> **Important**
> A build task in Nix is called a [derivation](../glossary.md#gloss-derivation).
Each build task has a special build input executed as *build instructions* in order to perform the build.
The result of a build task can be input to another build task.
The following [data flow diagram] shows a build plan for illustration.
Build inputs used as instructions to a build task are marked accordingly:
[data flow diagram]: https://en.m.wikipedia.org/wiki/Data-flow_diagram
```
+--------------------------------------------------------------------+
| build plan |
| |
| .-------------. |
| | build input |---------. |
| '-------------' | |
| instructions |
| | |
| v |
| .-------------. .----------. |
| | build input |-->( build task )-------. |
| '-------------' '----------' | |
| instructions |
| | |
| v |
| .-------------. .----------. .--------------. |
| | build input |---------. ( build task )--->| build result | |
| '-------------' | '----------' '--------------' |
| instructions ^ |
| | | |
| v | |
| .-------------. .----------. | |
| | build input |-->( build task )-------' |
| '-------------' '----------' |
| ^ |
| | |
| | |
| .-------------. | |
| | build input |---------' |
| '-------------' |
| |
+--------------------------------------------------------------------+
```
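To make this concrete, here is a minimal sketch in the Nix language of two build tasks where the result of the first is a build input of the second. It assumes an `x86_64-linux` system and that `/bin/sh` is usable by the builder; the names and commands are illustrative only, not taken from the manual:

```nix
let
  # first build task: produce a store object containing a greeting
  greeting = derivation {
    name = "greeting";
    system = "x86_64-linux";
    builder = "/bin/sh";
    args = [ "-c" "echo hello > $out" ];
  };
in
# second build task: interpolating `greeting` into the command makes the
# first task's result a build input of this one
derivation {
  name = "use-greeting";
  system = "x86_64-linux";
  builder = "/bin/sh";
  args = [ "-c" "cat ${greeting} > $out" ];
}
```

Evaluating this expression yields the build plan; executing it (for example with `nix-build`) produces the build results as store objects.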

View file

@ -7,42 +7,11 @@ Most Nix commands interpret the following environment variables:
`nix-shell`. It can have the values `pure` or `impure`. `nix-shell`. It can have the values `pure` or `impure`.
- [`NIX_PATH`]{#env-NIX_PATH}\ - [`NIX_PATH`]{#env-NIX_PATH}\
A colon-separated list of directories used to look up Nix A colon-separated list of directories used to look up the location of Nix
expressions enclosed in angle brackets (i.e., `<path>`). For expressions using [paths](../language/values.md#type-path)
instance, the value enclosed in angle brackets (i.e., `<path>`),
e.g. `/home/eelco/Dev:/etc/nixos`. It can be extended using the
/home/eelco/Dev:/etc/nixos [`-I` option](./opt-common.md#opt-I).
will cause Nix to look for paths relative to `/home/eelco/Dev` and
`/etc/nixos`, in this order. It is also possible to match paths
against a prefix. For example, the value
nixpkgs=/home/eelco/Dev/nixpkgs-branch:/etc/nixos
will cause Nix to search for `<nixpkgs/path>` in
`/home/eelco/Dev/nixpkgs-branch/path` and `/etc/nixos/nixpkgs/path`.
If a path in the Nix search path starts with `http://` or
`https://`, it is interpreted as the URL of a tarball that will be
downloaded and unpacked to a temporary location. The tarball must
consist of a single top-level directory. For example, setting
`NIX_PATH` to
nixpkgs=https://github.com/NixOS/nixpkgs/archive/master.tar.gz
tells Nix to download and use the current contents of the
`master` branch in the `nixpkgs` repository.
The URLs of the tarballs from the official nixos.org channels (see
[the manual for `nix-channel`](nix-channel.md)) can be abbreviated
as `channel:<channel-name>`. For instance, the following two
values of `NIX_PATH` are equivalent:
nixpkgs=channel:nixos-21.05
nixpkgs=https://nixos.org/channels/nixos-21.05/nixexprs.tar.xz
The Nix search path can also be extended using the `-I` option to
many Nix commands, which takes precedence over `NIX_PATH`.
- [`NIX_IGNORE_SYMLINK_STORE`]{#env-NIX_IGNORE_SYMLINK_STORE}\ - [`NIX_IGNORE_SYMLINK_STORE`]{#env-NIX_IGNORE_SYMLINK_STORE}\
Normally, the Nix store directory (typically `/nix/store`) is not Normally, the Nix store directory (typically `/nix/store`) is not

View file

@ -37,10 +37,12 @@ directory containing at least a file named `default.nix`.
`nix-build` is essentially a wrapper around `nix-build` is essentially a wrapper around
[`nix-instantiate`](nix-instantiate.md) (to translate a high-level Nix [`nix-instantiate`](nix-instantiate.md) (to translate a high-level Nix
expression to a low-level store derivation) and [`nix-store expression to a low-level [store derivation]) and [`nix-store
--realise`](nix-store.md#operation---realise) (to build the store --realise`](nix-store.md#operation---realise) (to build the store
derivation). derivation).
[store derivation]: ../glossary.md#gloss-store-derivation
> **Warning** > **Warning**
> >
> The result of the build is automatically registered as a root of the > The result of the build is automatically registered as a root of the
@ -53,16 +55,18 @@ All options not listed here are passed to `nix-store
--realise`, except for `--arg` and `--attr` / `-A` which are passed to --realise`, except for `--arg` and `--attr` / `-A` which are passed to
`nix-instantiate`. `nix-instantiate`.
- [`--no-out-link`]{#opt-no-out-link}\ - <span id="opt-no-out-link">[`--no-out-link`](#opt-no-out-link)<span>
Do not create a symlink to the output path. Note that as a result Do not create a symlink to the output path. Note that as a result
the output does not become a root of the garbage collector, and so the output does not become a root of the garbage collector, and so
might be deleted by `nix-store might be deleted by `nix-store --gc`.
--gc`.
- <span id="opt-dry-run">[`--dry-run`](#opt-dry-run)</span>
- [`--dry-run`]{#opt-dry-run}\
Show what store paths would be built or downloaded. Show what store paths would be built or downloaded.
- [`--out-link`]{#opt-out-link} / `-o` *outlink*\ - <span id="opt-out-link">[`--out-link`](#opt-out-link)</span> / `-o` *outlink*
Change the name of the symlink to the output path created from Change the name of the symlink to the output path created from
`result` to *outlink*. `result` to *outlink*.

View file

@ -47,7 +47,9 @@ authentication, you can avoid typing the passphrase with `ssh-agent`.
Enable compression of the SSH connection. Enable compression of the SSH connection.
- `--include-outputs`\ - `--include-outputs`\
Also copy the outputs of store derivations included in the closure. Also copy the outputs of [store derivation]s included in the closure.
[store derivation]: ../../glossary.md#gloss-store-derivation
- `--use-substitutes` / `-s`\ - `--use-substitutes` / `-s`\
Attempt to download missing paths on the target machine using Nixs Attempt to download missing paths on the target machine using Nixs

View file

@ -8,6 +8,6 @@
# Description # Description
The Nix daemon is necessary in multi-user Nix installations. It performs The Nix daemon is necessary in multi-user Nix installations. It runs
build actions and other operations on the Nix store on behalf of build tasks and other operations on the Nix store on behalf of
unprivileged users. unprivileged users.

View file

@ -205,10 +205,12 @@ a number of possible ways:
unambiguous way, which is necessary if there are multiple unambiguous way, which is necessary if there are multiple
derivations with the same name. derivations with the same name.
- If *args* are store derivations, then these are - If *args* are [store derivation]s, then these are
[realised](nix-store.md#operation---realise), and the resulting output paths [realised](nix-store.md#operation---realise), and the resulting output paths
are installed. are installed.
[store derivation]: ../glossary.md#gloss-store-derivation
- If *args* are store paths that are not store derivations, then these - If *args* are store paths that are not store derivations, then these
are [realised](nix-store.md#operation---realise) and installed. are [realised](nix-store.md#operation---realise) and installed.
@ -280,7 +282,7 @@ To copy the store path with symbolic name `gcc` from another profile:
$ nix-env -i --from-profile /nix/var/nix/profiles/foo gcc $ nix-env -i --from-profile /nix/var/nix/profiles/foo gcc
``` ```
To install a specific store derivation (typically created by To install a specific [store derivation] (typically created by
`nix-instantiate`): `nix-instantiate`):
```console ```console
@ -665,7 +667,7 @@ derivation is shown unless `--no-name` is specified.
Print the `system` attribute of the derivation. Print the `system` attribute of the derivation.
- `--drv-path`\ - `--drv-path`\
Print the path of the store derivation. Print the path of the [store derivation].
- `--out-path`\ - `--out-path`\
Print the output path of the derivation. Print the output path of the derivation.

View file

@ -17,13 +17,14 @@
# Description # Description
The command `nix-instantiate` generates [store The command `nix-instantiate` produces [store derivation]s from (high-level) Nix expressions.
derivations](../glossary.md) from (high-level) Nix expressions. It It evaluates the Nix expressions in each of *files* (which defaults to
evaluates the Nix expressions in each of *files* (which defaults to
*./default.nix*). Each top-level expression should evaluate to a *./default.nix*). Each top-level expression should evaluate to a
derivation, a list of derivations, or a set of derivations. The paths derivation, a list of derivations, or a set of derivations. The paths
of the resulting store derivations are printed on standard output. of the resulting store derivations are printed on standard output.
[store derivation]: ../glossary.md#gloss-store-derivation
If *files* is the character `-`, then a Nix expression will be read from If *files* is the character `-`, then a Nix expression will be read from
standard input. standard input.
@ -79,8 +80,7 @@ standard input.
# Examples # Examples
Instantiating store derivations from a Nix expression, and building them Instantiate [store derivation]s from a Nix expression, and build them using `nix-store`:
using `nix-store`:
```console ```console
$ nix-instantiate test.nix (instantiate) $ nix-instantiate test.nix (instantiate)

View file

@ -22,7 +22,8 @@ This section lists the options that are common to all operations. These
options are allowed for every subcommand, though they may not always options are allowed for every subcommand, though they may not always
have an effect. have an effect.
- [`--add-root`]{#opt-add-root} *path*\ - <span id="opt-add-root">[`--add-root`](#opt-add-root)</span> *path*
Causes the result of a realisation (`--realise` and Causes the result of a realisation (`--realise` and
`--force-realise`) to be registered as a root of the garbage `--force-realise`) to be registered as a root of the garbage
collector. *path* will be created as a symlink to the resulting collector. *path* will be created as a symlink to the resulting
@ -71,7 +72,7 @@ paths. Realisation is a somewhat overloaded term:
outputs are already valid, in which case we are done outputs are already valid, in which case we are done
immediately. Otherwise, there may be [substitutes](../glossary.md) immediately. Otherwise, there may be [substitutes](../glossary.md)
that produce the outputs (e.g., by downloading them). Finally, the that produce the outputs (e.g., by downloading them). Finally, the
outputs can be produced by performing the build action described outputs can be produced by running the build task described
by the derivation. by the derivation.
- If the store path is not a derivation, realisation ensures that the - If the store path is not a derivation, realisation ensures that the
@ -104,10 +105,6 @@ The following flags are available:
previous build, the new output path is left in previous build, the new output path is left in
`/nix/store/name.check.` `/nix/store/name.check.`
See also the `build-repeat` configuration option, which repeats a
derivation a number of times and prevents its outputs from being
registered as “valid” in the Nix store unless they are identical.
Special exit codes: Special exit codes:
- `100`\ - `100`\
@ -140,8 +137,10 @@ or.
## Examples ## Examples
This operation is typically used to build store derivations produced by This operation is typically used to build [store derivation]s produced by
[`nix-instantiate`](nix-instantiate.md): [`nix-instantiate`](./nix-instantiate.md):
[store derivation]: ../glossary.md#gloss-store-derivation
```console ```console
$ nix-store -r $(nix-instantiate ./test.nix) $ nix-store -r $(nix-instantiate ./test.nix)
@ -301,7 +300,7 @@ symlink.
## Common query options ## Common query options
- `--use-output`; `-u`\ - `--use-output`; `-u`\
For each argument to the query that is a store derivation, apply the For each argument to the query that is a [store derivation], apply the
query to the output path of the derivation instead. query to the output path of the derivation instead.
- `--force-realise`; `-f`\ - `--force-realise`; `-f`\
@ -321,7 +320,7 @@ symlink.
This query has one option: This query has one option:
- `--include-outputs` - `--include-outputs`
Also include the existing output paths of store derivations, Also include the existing output paths of [store derivation]s,
and their closures. and their closures.
This query can be used to implement various kinds of deployment. A This query can be used to implement various kinds of deployment. A
@ -375,12 +374,12 @@ symlink.
Prints the references graph of the store paths *paths* in the Prints the references graph of the store paths *paths* in the
[GraphML](http://graphml.graphdrawing.org/) file format. This can be [GraphML](http://graphml.graphdrawing.org/) file format. This can be
used to visualise dependency graphs. To obtain a build-time used to visualise dependency graphs. To obtain a build-time
dependency graph, apply this to a store derivation. To obtain a dependency graph, apply this to a [store derivation]. To obtain a
runtime dependency graph, apply it to an output path. runtime dependency graph, apply it to an output path.
- `--binding` *name*; `-b` *name*\ - `--binding` *name*; `-b` *name*\
Prints the value of the attribute *name* (i.e., environment Prints the value of the attribute *name* (i.e., environment
variable) of the store derivations *paths*. It is an error for a variable) of the [store derivation]s *paths*. It is an error for a
derivation to not have the specified attribute. derivation to not have the specified attribute.
- `--hash`\ - `--hash`\

View file

@ -99,8 +99,79 @@ You can run the whole testsuite with `make check`, or the tests for a specific c
### Functional tests ### Functional tests
The functional tests reside under the `tests` directory and are listed in `tests/local.mk`. The functional tests reside under the `tests` directory and are listed in `tests/local.mk`.
The whole testsuite can be run with `make install && make installcheck`. Each test is a bash script.
Individual tests can be run with `make tests/{testName}.sh.test`.
The whole test suite can be run with:
```shell-session
$ make install && make installcheck
ran test tests/foo.sh... [PASS]
ran test tests/bar.sh... [PASS]
...
```
Individual tests can be run with `make`:
```shell-session
$ make tests/${testName}.sh.test
ran test tests/${testName}.sh... [PASS]
```
or without `make`:
```shell-session
$ ./mk/run-test.sh tests/${testName}.sh
ran test tests/${testName}.sh... [PASS]
```
To see the complete output, one can also run:
```shell-session
$ ./mk/debug-test.sh tests/${testName}.sh
+ foo
output from foo
+ bar
output from bar
...
```
The test script will then be traced with `set -x` and the output displayed as it happens, regardless of whether the test succeeds or fails.
#### Debugging failing functional tests
When a functional test fails, it usually does so somewhere in the middle of the script.
To figure out what's wrong, it is convenient to run the test regularly up to the failing `nix` command, and then run that command with a debugger like GDB.
For example, if the script looks like:
```bash
foo
nix blah blub
bar
```
edit it like so:
```diff
foo
-nix blah blub
+gdb --args nix blah blub
bar
```
Then, running the test with `./mk/debug-test.sh` will drop you into GDB once the script reaches that point:
```shell-session
$ ./mk/debug-test.sh tests/${testName}.sh
...
+ gdb --args nix blah blub
GNU gdb (GDB) 12.1
...
(gdb)
```
One can debug the Nix invocation in all the usual ways.
For example, enter `run` to start the Nix invocation.
### Integration tests ### Integration tests

View file

@ -1,20 +1,32 @@
# Glossary # Glossary
- [derivation]{#gloss-derivation}\ - [derivation]{#gloss-derivation}\
A description of a build action. The result of a derivation is a A description of a build task. The result of a derivation is a
store object. Derivations are typically specified in Nix expressions store object. Derivations are typically specified in Nix expressions
using the [`derivation` primitive](language/derivations.md). These are using the [`derivation` primitive](./language/derivations.md). These are
translated into low-level *store derivations* (implicitly by translated into low-level *store derivations* (implicitly by
`nix-env` and `nix-build`, or explicitly by `nix-instantiate`). `nix-env` and `nix-build`, or explicitly by `nix-instantiate`).
[derivation]: #gloss-derivation
- [store derivation]{#gloss-store-derivation}\
A [derivation] represented as a `.drv` file in the [store].
It has a [store path], like any [store object].
Example: `/nix/store/g946hcz4c8mdvq2g8vxx42z51qb71rvp-git-2.38.1.drv`
See [`nix show-derivation`](./command-ref/new-cli/nix3-show-derivation.md) (experimental) for displaying the contents of store derivations.
[store derivation]: #gloss-store-derivation
- [content-addressed derivation]{#gloss-content-addressed-derivation}\ - [content-addressed derivation]{#gloss-content-addressed-derivation}\
A derivation which has the A derivation which has the
[`__contentAddressed`](language/advanced-attributes.md#adv-attr-__contentAddressed) [`__contentAddressed`](./language/advanced-attributes.md#adv-attr-__contentAddressed)
attribute set to `true`. attribute set to `true`.
- [fixed-output derivation]{#gloss-fixed-output-derivation}\ - [fixed-output derivation]{#gloss-fixed-output-derivation}\
A derivation which includes the A derivation which includes the
[`outputHash`](language/advanced-attributes.md#adv-attr-outputHash) attribute. [`outputHash`](./language/advanced-attributes.md#adv-attr-outputHash) attribute.
- [store]{#gloss-store}\ - [store]{#gloss-store}\
The location in the file system where store objects live. Typically The location in the file system where store objects live. Typically
@ -34,6 +46,8 @@
directory on another machine, accessed via `ssh` or directory on another machine, accessed via `ssh` or
served by the `nix-serve` Perl script. served by the `nix-serve` Perl script.
[store]: #gloss-store
- [chroot store]{#gloss-chroot-store}\ - [chroot store]{#gloss-chroot-store}\
A local store whose canonical path is anything other than `/nix/store`. A local store whose canonical path is anything other than `/nix/store`.
@ -46,15 +60,21 @@
cache](https://cache.nixos.org). cache](https://cache.nixos.org).
- [store path]{#gloss-store-path}\ - [store path]{#gloss-store-path}\
The location in the file system of a store object, i.e., an The location of a [store object] in the file system, i.e., an
immediate child of the Nix store directory. immediate child of the Nix store directory.
Example: `/nix/store/a040m110amc4h71lds2jmr8qrkj2jhxd-git-2.38.1`
[store path]: #gloss-store-path
- [store object]{#gloss-store-object}\ - [store object]{#gloss-store-object}\
A file that is an immediate child of the Nix store directory. These A file that is an immediate child of the Nix store directory. These
can be regular files, but also entire directory trees. Store objects can be regular files, but also entire directory trees. Store objects
can be sources (objects copied from outside of the store), can be sources (objects copied from outside of the store),
derivation outputs (objects produced by running a build action), or derivation outputs (objects produced by running a build task), or
derivations (files describing a build action). derivations (files describing a build task).
[store object]: #gloss-store-object
- [input-addressed store object]{#gloss-input-addressed-store-object}\ - [input-addressed store object]{#gloss-input-addressed-store-object}\
A store object produced by building a A store object produced by building a
@ -79,7 +99,7 @@
- [substituter]{#gloss-substituter}\ - [substituter]{#gloss-substituter}\
A *substituter* is an additional store from which Nix will A *substituter* is an additional store from which Nix will
copy store objects it doesn't have. For details, see the copy store objects it doesn't have. For details, see the
[`substituters` option](command-ref/conf-file.html#conf-substituters). [`substituters` option](./command-ref/conf-file.md#conf-substituters).
- [purity]{#gloss-purity}\ - [purity]{#gloss-purity}\
The assumption that equal Nix derivations when run always produce The assumption that equal Nix derivations when run always produce
@ -124,7 +144,9 @@
references `R` then `R` is also in the closure of `P`. references `R` then `R` is also in the closure of `P`.
- [output path]{#gloss-output-path}\ - [output path]{#gloss-output-path}\
A store path produced by a derivation. A [store path] produced by a [derivation].
[output path]: #gloss-output-path
- [deriver]{#gloss-deriver}\ - [deriver]{#gloss-deriver}\
The deriver of an *output path* is the store The deriver of an *output path* is the store
@ -139,7 +161,7 @@
An automatically generated store object that consists of a set of An automatically generated store object that consists of a set of
symlinks to “active” applications, i.e., other store paths. These symlinks to “active” applications, i.e., other store paths. These
are generated automatically by are generated automatically by
[`nix-env`](command-ref/nix-env.md). See *profiles*. [`nix-env`](./command-ref/nix-env.md). See *profiles*.
- [profile]{#gloss-profile}\ - [profile]{#gloss-profile}\
A symlink to the current *user environment* of a user, e.g., A symlink to the current *user environment* of a user, e.g.,
@ -150,7 +172,18 @@
store. It can contain regular files, directories and symbolic store. It can contain regular files, directories and symbolic
links. NARs are generated and unpacked using `nix-store --dump` links. NARs are generated and unpacked using `nix-store --dump`
and `nix-store --restore`. and `nix-store --restore`.
- [`∅`]{#gloss-emtpy-set}\ - [`∅`]{#gloss-emtpy-set}\
The empty set symbol. In the context of profile history, this denotes a package is not present in a particular version of the profile. The empty set symbol. In the context of profile history, this denotes a package is not present in a particular version of the profile.
- [`ε`]{#gloss-epsilon}\ - [`ε`]{#gloss-epsilon}\
The epsilon symbol. In the context of a package, this means the version is empty. More precisely, the derivation does not have a version attribute. The epsilon symbol. In the context of a package, this means the version is empty. More precisely, the derivation does not have a version attribute.
- [string interpolation]{#gloss-string-interpolation}\
Expanding expressions enclosed in `${ }` within a [string], [path], or [attribute name].
See [String interpolation](./language/string-interpolation.md) for details.
[string]: ./language/values.md#type-string
[path]: ./language/values.md#type-path
[attribute name]: ./language/values.md#attribute-set

View file

@ -88,19 +88,51 @@ extension. The installer will also create `/etc/profile.d/nix.sh`.
### Linux ### Linux
```console If you are on Linux with systemd:
sudo rm -rf /etc/profile/nix.sh /etc/nix /nix ~root/.nix-profile ~root/.nix-defexpr ~root/.nix-channels ~/.nix-profile ~/.nix-defexpr ~/.nix-channels
# If you are on Linux with systemd, you will need to run: 1. Remove the Nix daemon service:
sudo systemctl stop nix-daemon.socket
sudo systemctl stop nix-daemon.service ```console
sudo systemctl disable nix-daemon.socket sudo systemctl stop nix-daemon.service
sudo systemctl disable nix-daemon.service sudo systemctl disable nix-daemon.socket nix-daemon.service
sudo systemctl daemon-reload sudo systemctl daemon-reload
```
1. Remove systemd service files:
```console
sudo rm /etc/systemd/system/nix-daemon.service /etc/systemd/system/nix-daemon.socket
```
1. The installer script uses systemd-tmpfiles to create the socket directory.
You may also want to remove the configuration for that:
```console
sudo rm /etc/tmpfiles.d/nix-daemon.conf
```
Remove files created by Nix:
```console
sudo rm -rf /nix /etc/nix /etc/profile.d/nix.sh ~root/.nix-profile ~root/.nix-defexpr ~root/.nix-channels ~/.nix-profile ~/.nix-defexpr ~/.nix-channels
``` ```
There may also be references to Nix in `/etc/profile`, `/etc/bashrc`, Remove build users and their group:
and `/etc/zshrc` which you may remove.
```console
# the installer creates 32 build users named nixbld1..nixbld32 in group nixbld
for i in $(seq 1 32); do
  sudo userdel nixbld$i
done
sudo groupdel nixbld
```
There may also be references to Nix in
- `/etc/profile`
- `/etc/bashrc`
- `/etc/zshrc`
which you may remove.
### macOS ### macOS

View file

@ -104,7 +104,7 @@ a currently running program.
Packages are built from _Nix expressions_, which is a simple Packages are built from _Nix expressions_, which is a simple
functional language. A Nix expression describes everything that goes functional language. A Nix expression describes everything that goes
into a package build action (a “derivation”): other packages, sources, into a package build task (a “derivation”): other packages, sources,
the build script, environment variables for the build script, etc. the build script, environment variables for the build script, etc.
Nix tries very hard to ensure that Nix expressions are Nix tries very hard to ensure that Nix expressions are
_deterministic_: building a Nix expression twice should yield the same _deterministic_: building a Nix expression twice should yield the same

View file

@ -1,7 +1,7 @@
# Derivations # Derivations
The most important built-in function is `derivation`, which is used to The most important built-in function is `derivation`, which is used to
describe a single derivation (a build action). It takes as input a set, describe a single derivation (a build task). It takes as input a set,
the attributes of which specify the inputs of the build. the attributes of which specify the inputs of the build.
- There must be an attribute named [`system`]{#attr-system} whose value must be a - There must be an attribute named [`system`]{#attr-system} whose value must be a

View file

@ -31,3 +31,551 @@ The Nix language is
Type errors are only detected when expressions are evaluated. Type errors are only detected when expressions are evaluated.
# Overview
This is an incomplete overview of language features, by example.
<table>
<tr>
<th>
Example
</th>
<th>
Description
</th>
</tr>
<tr>
<td>
*Basic values*
</td>
<td>
</td>
</tr>
<tr>
<td>
`"hello world"`
</td>
<td>
A string
</td>
</tr>
<tr>
<td>
```
''
multi
line
string
''
```
</td>
<td>
A multi-line string. Strips common prefixed whitespace. Evaluates to `"multi\n line\n string"`.
</td>
</tr>
<tr>
<td>
`"hello ${ { a = "world" }.a }"`
`"1 2 ${toString 3}"`
`"${pkgs.bash}/bin/sh"`
</td>
<td>
String interpolation (expands to `"hello world"`, `"1 2 3"`, `"/nix/store/<hash>-bash-<version>/bin/sh"`)
</td>
</tr>
<tr>
<td>
`true`, `false`
</td>
<td>
Booleans
</td>
</tr>
<tr>
<td>
`null`
</td>
<td>
Null value
</td>
</tr>
<tr>
<td>
`123`
</td>
<td>
An integer
</td>
</tr>
<tr>
<td>
`3.141`
</td>
<td>
A floating point number
</td>
</tr>
<tr>
<td>
`/etc`
</td>
<td>
An absolute path
</td>
</tr>
<tr>
<td>
`./foo.png`
</td>
<td>
A path relative to the file containing this Nix expression
</td>
</tr>
<tr>
<td>
`~/.config`
</td>
<td>
A home path. Evaluates to `"<user's home directory>/.config"`.
</td>
</tr>
<tr>
<td>
`<nixpkgs>`
</td>
<td>
Search path. Value determined by [`$NIX_PATH` environment variable](../command-ref/env-common.md#env-NIX_PATH).
</td>
</tr>
<tr>
<td>
*Compound values*
</td>
<td>
</td>
</tr>
<tr>
<td>
`{ x = 1; y = 2; }`
</td>
<td>
A set with attributes named `x` and `y`
</td>
</tr>
<tr>
<td>
`{ foo.bar = 1; }`
</td>
<td>
A nested set, equivalent to `{ foo = { bar = 1; }; }`
</td>
</tr>
<tr>
<td>
`rec { x = "foo"; y = x + "bar"; }`
</td>
<td>
A recursive set, equivalent to `{ x = "foo"; y = "foobar"; }`
</td>
</tr>
<tr>
<td>
`[ "foo" "bar" "baz" ]`
`[ 1 2 3 ]`
`[ (f 1) { a = 1; b = 2; } [ "c" ] ]`
</td>
<td>
Lists with three elements.
</td>
</tr>
<tr>
<td>
*Operators*
</td>
<td>
</td>
</tr>
<tr>
<td>
`"foo" + "bar"`
</td>
<td>
String concatenation
</td>
</tr>
<tr>
<td>
`1 + 2`
</td>
<td>
Integer addition
</td>
</tr>
<tr>
<td>
`"foo" == "f" + "oo"`
</td>
<td>
Equality test (evaluates to `true`)
</td>
</tr>
<tr>
<td>
`"foo" != "bar"`
</td>
<td>
Inequality test (evaluates to `true`)
</td>
</tr>
<tr>
<td>
`!true`
</td>
<td>
Boolean negation
</td>
</tr>
<tr>
<td>
`{ x = 1; y = 2; }.x`
</td>
<td>
Attribute selection (evaluates to `1`)
</td>
</tr>
<tr>
<td>
`{ x = 1; y = 2; }.z or 3`
</td>
<td>
Attribute selection with default (evaluates to `3`)
</td>
</tr>
<tr>
<td>
`{ x = 1; y = 2; } // { z = 3; }`
</td>
<td>
Merge two sets (attributes in the right-hand set taking precedence)
</td>
</tr>
<tr>
<td>
*Control structures*
</td>
<td>
</td>
</tr>
<tr>
<td>
`if 1 + 1 == 2 then "yes!" else "no!"`
</td>
<td>
Conditional expression
</td>
</tr>
<tr>
<td>
`assert 1 + 1 == 2; "yes!"`
</td>
<td>
Assertion check (evaluates to `"yes!"`).
</td>
</tr>
<tr>
<td>
`let x = "foo"; y = "bar"; in x + y`
</td>
<td>
Variable definition
</td>
</tr>
<tr>
<td>
`with builtins; head [ 1 2 3 ]`
</td>
<td>
Add all attributes from the given set to the scope (evaluates to `1`)
</td>
</tr>
<tr>
<td>
*Functions (lambdas)*
</td>
<td>
</td>
</tr>
<tr>
<td>
`x: x + 1`
</td>
<td>
A function that expects an integer and returns it increased by 1
</td>
</tr>
<tr>
<td>
`x: y: x + y`
</td>
<td>
Curried function, equivalent to `x: (y: x + y)`. Can be used like a function that takes two arguments and returns their sum.
</td>
</tr>
<tr>
<td>
`(x: x + 1) 100`
</td>
<td>
A function call (evaluates to 101)
</td>
</tr>
<tr>
<td>
`let inc = x: x + 1; in inc (inc (inc 100))`
</td>
<td>
A function bound to a variable and subsequently called by name (evaluates to 103)
</td>
</tr>
<tr>
<td>
`{ x, y }: x + y`
</td>
<td>
A function that expects a set with required attributes `x` and `y` and concatenates them
</td>
</tr>
<tr>
<td>
`{ x, y ? "bar" }: x + y`
</td>
<td>
A function that expects a set with required attribute `x` and optional `y`, using `"bar"` as default value for `y`
</td>
</tr>
<tr>
<td>
`{ x, y, ... }: x + y`
</td>
<td>
A function that expects a set with required attributes `x` and `y` and ignores any other attributes
</td>
</tr>
<tr>
<td>
`{ x, y } @ args: x + y`
`args @ { x, y }: x + y`
</td>
<td>
A function that expects a set with required attributes `x` and `y`, and binds the whole set to `args`
</td>
</tr>
<tr>
<td>
*Built-in functions*
</td>
<td>
</td>
</tr>
<tr>
<td>
`import ./foo.nix`
</td>
<td>
Load and return Nix expression in given file
</td>
</tr>
<tr>
<td>
`map (x: x + x) [ 1 2 3 ]`
</td>
<td>
Apply a function to every element of a list (evaluates to `[ 2 4 6 ]`)
</td>
</tr>
</table>

View file

@ -0,0 +1,82 @@
# String interpolation
String interpolation is a language feature where a [string], [path], or [attribute name] can contain expressions enclosed in `${ }` (dollar-sign with curly brackets).
Such a string is an *interpolated string*, and an expression inside is an *interpolated expression*.
Interpolated expressions must evaluate to one of the following:
- a [string]
- a [path]
- a [derivation]
[string]: ./values.md#type-string
[path]: ./values.md#type-path
[attribute name]: ./values.md#attribute-set
[derivation]: ../glossary.md#gloss-derivation
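Values of any other type must be converted explicitly before they can be interpolated. As a brief sketch (not one of the original examples below), an integer has to be turned into a string first, for instance with `toString`:

```nix
let port = 8080; in
# "listening on port ${port}" would fail: an integer cannot be coerced to a string
"listening on port ${toString port}"
```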
## Examples
### String
Rather than writing
```nix
"--with-freetype2-library=" + freetype + "/lib"
```
(where `freetype` is a [derivation]), you can instead write
```nix
"--with-freetype2-library=${freetype}/lib"
```
The latter is automatically translated to the former.
A more complicated example (from the Nix expression for [Qt](http://www.trolltech.com/products/qt)):
```nix
configureFlags = "
-system-zlib -system-libpng -system-libjpeg
${if openglSupport then "-dlopen-opengl
-L${mesa}/lib -I${mesa}/include
-L${libXmu}/lib -I${libXmu}/include" else ""}
${if threadSupport then "-thread" else "-no-thread"}
";
```
Note that Nix expressions and strings can be arbitrarily nested;
in this case the outer string contains various interpolated expressions that themselves contain strings (e.g., `"-thread"`), some of which in turn contain interpolated expressions (e.g., `${mesa}`).
### Path
Rather than writing
```nix
./. + "/" + foo + "-" + bar + ".nix"
```
or
```nix
./. + "/${foo}-${bar}.nix"
```
you can instead write
```nix
./${foo}-${bar}.nix
```
### Attribute name
Attribute names can be created dynamically with string interpolation:
```nix
let name = "foo"; in
{
${name} = "bar";
}
```
will result in `{ foo = "bar"; }`.

View file

@ -13,41 +13,9 @@
returns and tabs can be written as `\n`, `\r` and `\t`, returns and tabs can be written as `\n`, `\r` and `\t`,
respectively. respectively.
You can include the result of an expression into a string by You can include the results of other expressions into a string by enclosing them in `${ }`, a feature known as [string interpolation].
enclosing it in `${...}`, a feature known as *antiquotation*. The
enclosed expression must evaluate to something that can be coerced
into a string (meaning that it must be a string, a path, or a
derivation). For instance, rather than writing
```nix [string interpolation]: ./string-interpolation.md
"--with-freetype2-library=" + freetype + "/lib"
```
(where `freetype` is a derivation), you can instead write the more
natural
```nix
"--with-freetype2-library=${freetype}/lib"
```
The latter is automatically translated to the former. A more
complicated example (from the Nix expression for
[Qt](http://www.trolltech.com/products/qt)):
```nix
configureFlags = "
-system-zlib -system-libpng -system-libjpeg
${if openglSupport then "-dlopen-opengl
-L${mesa}/lib -I${mesa}/include
-L${libXmu}/lib -I${libXmu}/include" else ""}
${if threadSupport then "-thread" else "-no-thread"}
";
```
Note that Nix expressions and strings can be arbitrarily nested; in
this case the outer string contains various antiquotations that
themselves contain strings (e.g., `"-thread"`), some of which in
turn contain expressions (e.g., `${mesa}`).
The second way to write string literals is as an *indented string*, The second way to write string literals is as an *indented string*,
which is enclosed between pairs of *double single-quotes*, like so: which is enclosed between pairs of *double single-quotes*, like so:
@ -75,7 +43,7 @@
Note that the whitespace and newline following the opening `''` is Note that the whitespace and newline following the opening `''` is
ignored if there is no non-whitespace text on the initial line. ignored if there is no non-whitespace text on the initial line.
Antiquotation (`${expr}`) is supported in indented strings. Indented strings support [string interpolation].
Since `${` and `''` have special meaning in indented strings, you Since `${` and `''` have special meaning in indented strings, you
need a way to quote them. `$` can be escaped by prefixing it with need a way to quote them. `$` can be escaped by prefixing it with
@ -143,12 +111,23 @@
environment variable `NIX_PATH` will be searched for the given file environment variable `NIX_PATH` will be searched for the given file
or directory name. or directory name.
Antiquotation is supported in any paths except those in angle brackets. When an [interpolated string][string interpolation] evaluates to a path, the path is first copied into the Nix store and the resulting string is the [store path] of the newly created [store object].
`./${foo}-${bar}.nix` is a more convenient way of writing
`./. + "/" + foo + "-" + bar + ".nix"` or `./. + "/${foo}-${bar}.nix"`. At [store path]: ../glossary.md#gloss-store-path
least one slash must appear *before* any antiquotations for this to be [store object]: ../glossary.md#gloss-store-object
recognized as a path. `a.${foo}/b.${bar}` is a syntactically valid division
operation. `./a.${foo}/b.${bar}` is a path. For instance, evaluating `"${./foo.txt}"` will cause `foo.txt` in the current directory to be copied into the Nix store and result in the string `"/nix/store/<hash>-foo.txt"`.
Note that the Nix language assumes that all input files will remain _unchanged_ while evaluating a Nix expression.
For example, assume you used a file path in an interpolated string during a `nix repl` session.
Later in the same session, after having changed the file contents, evaluating the interpolated string with the file path again might not return a new store path, since Nix might not re-read the file contents.
Paths themselves, except those in angle brackets (`< >`), support [string interpolation].
At least one slash (`/`) must appear *before* any interpolated expression for the result to be recognized as a path.
`a.${foo}/b.${bar}` is a syntactically valid division operation.
`./a.${foo}/b.${bar}` is a path.
- <a id="type-boolean" href="#type-boolean">Boolean</a> - <a id="type-boolean" href="#type-boolean">Boolean</a>
@ -221,23 +200,33 @@ will evaluate to `"Xyzzy"` because there is no `c` attribute in the set.
You can use arbitrary double-quoted strings as attribute names: You can use arbitrary double-quoted strings as attribute names:
```nix ```nix
{ "foo ${bar}" = 123; "nix-1.0" = 456; }."foo ${bar}" { "$!@#?" = 123; }."$!@#?"
``` ```
This will evaluate to `123` (Assuming `bar` is antiquotable). In the
case where an attribute name is just a single antiquotation, the quotes
can be dropped:
```nix ```nix
{ foo = 123; }.${bar} or 456 let bar = "bar";
{ "foo ${bar}" = 123; }."foo ${bar}"
``` ```
This will evaluate to `123` if `bar` evaluates to `"foo"` when coerced Both will evaluate to `123`.
to a string and `456` otherwise (again assuming `bar` is antiquotable).
Attribute names support [string interpolation]:
```nix
let bar = "foo"; in
{ foo = 123; }.${bar}
```
```nix
let bar = "foo"; in
{ ${bar} = 123; }.foo
```
Both will evaluate to `123`.
In the special case where an attribute name inside of a set declaration In the special case where an attribute name inside of a set declaration
evaluates to `null` (which is normally an error, as `null` is not evaluates to `null` (which is normally an error, as `null` cannot be coerced to
antiquotable), that attribute is simply not added to the set: a string), that attribute is simply not added to the set:
```nix ```nix
{ ${if foo then "bar" else null} = true; } { ${if foo then "bar" else null} = true; }

View file

@ -0,0 +1,43 @@
# Release 2.12 (2022-12-06)
* On Linux, Nix can now run builds in a user namespace where they run
as root (UID 0) and have 65,536 UIDs available.
<!-- FIXME: move this to its own section about system features -->
This is primarily useful for running containers such as `systemd-nspawn`
inside a Nix build. For an example, see [`tests/systemd-nspawn.nix`][nspawn].
[nspawn]: https://github.com/NixOS/nix/blob/67bcb99700a0da1395fa063d7c6586740b304598/tests/systemd-nspawn.nix
A build can enable this by setting the derivation attribute:
```
requiredSystemFeatures = [ "uid-range" ];
```
The `uid-range` [system feature] requires the [`auto-allocate-uids`]
setting to be enabled.
[system feature]: ../command-ref/conf-file.md#conf-system-features
* Nix can now automatically pick UIDs for builds, removing the need to
create `nixbld*` user accounts. See [`auto-allocate-uids`].
[`auto-allocate-uids`]: ../command-ref/conf-file.md#conf-auto-allocate-uids
* On Linux, Nix has experimental support for running builds inside a
cgroup. See
[`use-cgroups`](../command-ref/conf-file.md#conf-use-cgroups).
* `<nix/fetchurl.nix>` now accepts an additional argument `impure` which
defaults to `false`. If it is set to `true`, the `hash` and `sha256`
arguments will be ignored and the resulting derivation will have
`__impure` set to `true`, making it an impure derivation (see the sketch after this list).
* If `builtins.readFile` is called on a file with context, then only
the parts of the context that appear in the content of the file are
retained. This avoids a lot of spurious errors where strings end up
having a context just because they are read from a store path
([#7260](https://github.com/NixOS/nix/pull/7260)).
* `nix build --json` now prints some statistics about top-level
derivations, such as CPU statistics when cgroups are enabled.
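As a hedged sketch of the `impure` argument to `<nix/fetchurl.nix>` described above (the URL and file name are placeholders, not taken from the release notes):

```nix
import <nix/fetchurl.nix> {
  url = "https://example.org/data.tar.gz";
  # with impure = true, the hash/sha256 arguments are ignored and the
  # resulting derivation has __impure set to true
  impure = true;
}
```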

View file

@ -1,9 +1,23 @@
# Release X.Y (202?-??-??) # Release X.Y (202?-??-??)
* `<nix/fetchurl.nix>` now accepts an additional argument `impure` which * The `repeat` and `enforce-determinism` options have been removed
defaults to `false`. If it is set to `true`, the `hash` and `sha256` since they had been broken under many circumstances for a long time.
arguments will be ignored and the resulting derivation will have
`__impure` set to `true`, making it an impure derivation. * You can now use [flake references] in the [old command line interface], e.g.
[flake references]: ../command-ref/new-cli/nix3-flake.md#flake-references
[old command line interface]: ../command-ref/main-commands.md
```
# nix-build flake:nixpkgs -A hello
# nix-build -I nixpkgs=flake:github:NixOS/nixpkgs/nixos-22.05 \
'<nixpkgs>' -A hello
# NIX_PATH=nixpkgs=flake:nixpkgs nix-build '<nixpkgs>' -A hello
```
* Instead of "antiquotation", the more common term [string interpolation](../language/string-interpolation.md) is now used consistently.
Historical release notes were not changed.
* Error traces have been reworked to provide detailed explanations and more * Error traces have been reworked to provide detailed explanations and more
accurate error locations. A short excerpt of the trace is now shown by accurate error locations. A short excerpt of the trace is now shown by
default when an error occurs. default when an error occurs.

View file

@ -36,6 +36,17 @@ let
shell = "${pkgs.bashInteractive}/bin/bash"; shell = "${pkgs.bashInteractive}/bin/bash";
home = "/root"; home = "/root";
gid = 0; gid = 0;
groups = [ "root" ];
description = "System administrator";
};
nobody = {
uid = 65534;
shell = "${pkgs.shadow}/bin/nologin";
home = "/var/empty";
gid = 65534;
groups = [ "nobody" ];
description = "Unprivileged account (don't use!)";
}; };
} // lib.listToAttrs ( } // lib.listToAttrs (
@ -57,6 +68,7 @@ let
groups = { groups = {
root.gid = 0; root.gid = 0;
nixbld.gid = 30000; nixbld.gid = 30000;
nobody.gid = 65534;
}; };
userToPasswd = ( userToPasswd = (

View file

@ -18,16 +18,16 @@
}, },
"nixpkgs": { "nixpkgs": {
"locked": { "locked": {
"lastModified": 1657693803, "lastModified": 1670461440,
"narHash": "sha256-G++2CJ9u0E7NNTAi9n5G8TdDmGJXcIjkJ3NF8cetQB8=", "narHash": "sha256-jy1LB8HOMKGJEGXgzFRLDU1CBGL0/LlkolgnqIsF0D8=",
"owner": "NixOS", "owner": "NixOS",
"repo": "nixpkgs", "repo": "nixpkgs",
"rev": "365e1b3a859281cf11b94f87231adeabbdd878a2", "rev": "04a75b2eecc0acf6239acf9dd04485ff8d14f425",
"type": "github" "type": "github"
}, },
"original": { "original": {
"owner": "NixOS", "owner": "NixOS",
"ref": "nixos-22.05-small", "ref": "nixos-22.11-small",
"repo": "nixpkgs", "repo": "nixpkgs",
"type": "github" "type": "github"
} }

View file

@ -1,7 +1,7 @@
{ {
description = "The purely functional package manager"; description = "The purely functional package manager";
inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-22.05-small"; inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-22.11-small";
inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2"; inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2";
inputs.lowdown-src = { url = "github:kristapsdz/lowdown"; flake = false; }; inputs.lowdown-src = { url = "github:kristapsdz/lowdown"; flake = false; };
@ -9,14 +9,14 @@
let let
version = builtins.readFile ./.version + versionSuffix; officialRelease = false;
version = nixpkgs.lib.fileContents ./.version + versionSuffix;
versionSuffix = versionSuffix =
if officialRelease if officialRelease
then "" then ""
else "pre${builtins.substring 0 8 (self.lastModifiedDate or self.lastModified or "19700101")}_${self.shortRev or "dirty"}"; else "pre${builtins.substring 0 8 (self.lastModifiedDate or self.lastModified or "19700101")}_${self.shortRev or "dirty"}";
officialRelease = false;
linux64BitSystems = [ "x86_64-linux" "aarch64-linux" ]; linux64BitSystems = [ "x86_64-linux" "aarch64-linux" ];
linuxSystems = linux64BitSystems ++ [ "i686-linux" ]; linuxSystems = linux64BitSystems ++ [ "i686-linux" ];
systems = linuxSystems ++ [ "x86_64-darwin" "aarch64-darwin" ]; systems = linuxSystems ++ [ "x86_64-darwin" "aarch64-darwin" ];
@ -108,7 +108,7 @@
++ lib.optionals stdenv.hostPlatform.isLinux [(buildPackages.util-linuxMinimal or buildPackages.utillinuxMinimal)]; ++ lib.optionals stdenv.hostPlatform.isLinux [(buildPackages.util-linuxMinimal or buildPackages.utillinuxMinimal)];
buildDeps = buildDeps =
[ (curl.override { patchNetrcRegression = true; }) [ curl
bzip2 xz brotli editline bzip2 xz brotli editline
openssl sqlite openssl sqlite
libarchive libarchive
@ -127,13 +127,9 @@
}); });
propagatedDeps = propagatedDeps =
[ ((boehmgc.override { [ (boehmgc.override {
enableLargeConfig = true; enableLargeConfig = true;
}).overrideAttrs(o: { })
patches = (o.patches or []) ++ [
./boehmgc-coroutine-sp-fallback.diff
];
}))
nlohmann_json nlohmann_json
]; ];
}; };
@ -364,7 +360,7 @@
buildInputs = buildInputs =
[ nix [ nix
(curl.override { patchNetrcRegression = true; }) curl
bzip2 bzip2
xz xz
pkgs.perl pkgs.perl
@ -420,6 +416,8 @@
buildCross = nixpkgs.lib.genAttrs crossSystems (crossSystem: buildCross = nixpkgs.lib.genAttrs crossSystems (crossSystem:
nixpkgs.lib.genAttrs ["x86_64-linux"] (system: self.packages.${system}."nix-${crossSystem}")); nixpkgs.lib.genAttrs ["x86_64-linux"] (system: self.packages.${system}."nix-${crossSystem}"));
buildNoGc = nixpkgs.lib.genAttrs systems (system: self.packages.${system}.nix.overrideAttrs (a: { configureFlags = (a.configureFlags or []) ++ ["--enable-gc=no"];}));
# Perl bindings for various platforms. # Perl bindings for various platforms.
perlBindings = nixpkgs.lib.genAttrs systems (system: self.packages.${system}.nix.perl-bindings); perlBindings = nixpkgs.lib.genAttrs systems (system: self.packages.${system}.nix.perl-bindings);
@ -506,6 +504,12 @@
overlay = self.overlays.default; overlay = self.overlays.default;
}); });
tests.containers = (import ./tests/containers.nix rec {
system = "x86_64-linux";
inherit nixpkgs;
overlay = self.overlays.default;
});
tests.setuid = nixpkgs.lib.genAttrs tests.setuid = nixpkgs.lib.genAttrs
["i686-linux" "x86_64-linux"] ["i686-linux" "x86_64-linux"]
(system: (system:

maintainers/README.md (new file, 79 lines added)

@ -0,0 +1,79 @@
# Nix maintainers team
## Motivation
The goal of the team is to help other people to contribute to Nix.
## Members
- Eelco Dolstra (@edolstra) Team lead
- Théophane Hufschmitt (@thufschmitt)
- Valentin Gagarin (@fricklerhandwerk)
- Thomas Bereknyei (@tomberek)
- Robert Hensing (@roberth)
## Meeting protocol
The team meets twice a week:
- Discussion meeting: [Fridays 13:00-14:00 CET](https://calendar.google.com/calendar/event?eid=MHNtOGVuNWtrZXNpZHR2bW1sM3QyN2ZjaGNfMjAyMjExMjVUMTIwMDAwWiBiOW81MmZvYnFqYWs4b3E4bGZraGczdDBxZ0Bn)
1. Triage issues and pull requests from the _No Status_ column (30 min)
2. Discuss issues and pull requests from the _To discuss_ column (30 min)
- Work meeting: [Mondays 13:00-15:00 CET](https://calendar.google.com/calendar/event?eid=NTM1MG1wNGJnOGpmOTZhYms3bTB1bnY5cWxfMjAyMjExMjFUMTIwMDAwWiBiOW81MmZvYnFqYWs4b3E4bGZraGczdDBxZ0Bn)
1. Code review on pull requests from _In review_.
2. Other chores and tasks.
Meeting notes are collected on a [collaborative scratchpad](https://pad.lassul.us/Cv7FpYx-Ri-4VjUykQOLAw), and published on Discourse under the [Nix category](https://discourse.nixos.org/c/dev/nix/50).
## Project board protocol
The team uses a [GitHub project board](https://github.com/orgs/NixOS/projects/19/views/1) for tracking its work.
Issues on the board progress through the following states:
- No Status
Team members can add pull requests or issues to discuss or review together.
During the discussion meeting, the team triages new items.
If there is disagreement on the general idea behind an issue or pull request, it is moved to _To discuss_, otherwise to _In review_.
- To discuss
Pull requests and issues that are important and controversial are discussed by the team during discussion meetings.
This may be where the merit of the change itself or the implementation strategy is contested by a team member.
- In review
Pull requests in this column are reviewed together during work meetings.
This is both for spreading implementation knowledge and for establishing common values in code reviews.
When the overall direction is agreed upon, even when further changes are required, the pull request is assigned to one team member.
- Assigned for merging
One team member is assigned to each of these pull requests.
They will communicate with the authors, and make the final approval once all remaining issues are addressed.
If more substantive issues arise, the assignee can move the pull request back to _To discuss_ to involve the team again.
The process is illustrated in the following diagram:
```mermaid
flowchart TD
discuss[To discuss]
review[To review]
New --> |Disagreement on idea| discuss
New & discuss --> |Consensus on idea| review
review --> |Consensus on implementation| Assigned
Assigned --> |Implementation issues arise| review
Assigned --> |Remaining issues fixed| Merged
```


@ -115,10 +115,6 @@ sub downloadFile {
write_file("$tmpFile.sha256", $sha256_actual); write_file("$tmpFile.sha256", $sha256_actual);
if (! -e "$tmpFile.asc") {
system("gpg2 --detach-sign --armor $tmpFile") == 0 or die "unable to sign $tmpFile\n";
}
return $sha256_expected; return $sha256_expected;
} }
@ -194,7 +190,7 @@ for my $fn (glob "$tmpDir/*") {
my $configuration = (); my $configuration = ();
$configuration->{content_type} = "application/octet-stream"; $configuration->{content_type} = "application/octet-stream";
if ($fn =~ /.sha256|.asc|install/) { if ($fn =~ /.sha256|install/) {
# Text files # Text files
$configuration->{content_type} = "text/plain"; $configuration->{content_type} = "text/plain";
} }


@ -28,7 +28,7 @@
<key>SoftResourceLimits</key> <key>SoftResourceLimits</key>
<dict> <dict>
<key>NumberOfFiles</key> <key>NumberOfFiles</key>
<integer>4096</integer> <integer>1048576</integer>
</dict> </dict>
</dict> </dict>
</plist> </plist>


@ -9,7 +9,7 @@ ConditionPathIsReadWrite=@localstatedir@/nix/daemon-socket
[Service] [Service]
ExecStart=@@bindir@/nix-daemon nix-daemon --daemon ExecStart=@@bindir@/nix-daemon nix-daemon --daemon
KillMode=process KillMode=process
LimitNOFILE=4096 LimitNOFILE=1048576
[Install] [Install]
WantedBy=multi-user.target WantedBy=multi-user.target

mk/common-test.sh (new file, 11 lines added)

@ -0,0 +1,11 @@
TESTS_ENVIRONMENT=("TEST_NAME=${test%.*}" 'NIX_REMOTE=')
: ${BASH:=/usr/bin/env bash}
init_test () {
cd tests && env "${TESTS_ENVIRONMENT[@]}" $BASH -e init.sh 2>/dev/null > /dev/null
}
run_test_proper () {
cd $(dirname $test) && env "${TESTS_ENVIRONMENT[@]}" $BASH -e $(basename $test)
}

mk/debug-test.sh (new executable file, 11 lines added)

@ -0,0 +1,11 @@
#!/usr/bin/env bash
set -eu
test=$1
dir="$(dirname "${BASH_SOURCE[0]}")"
source "$dir/common-test.sh"
(init_test)
run_test_proper


@ -1,4 +1,4 @@
#!/bin/sh #!/usr/bin/env bash
set -u set -u
@ -7,7 +7,12 @@ green=""
yellow="" yellow=""
normal="" normal=""
post_run_msg="ran test $1..." test=$1
dir="$(dirname "${BASH_SOURCE[0]}")"
source "$dir/common-test.sh"
post_run_msg="ran test $test..."
if [ -t 1 ]; then if [ -t 1 ]; then
red="" red=""
green="" green=""
@ -16,12 +21,12 @@ if [ -t 1 ]; then
fi fi
run_test () { run_test () {
(cd tests && env ${TESTS_ENVIRONMENT} init.sh 2>/dev/null > /dev/null) (init_test 2>/dev/null > /dev/null)
log="$(cd $(dirname $1) && env ${TESTS_ENVIRONMENT} $(basename $1) 2>&1)" log="$(run_test_proper 2>&1)"
status=$? status=$?
} }
run_test "$1" run_test
# Hack: Retry the test if it fails with “unexpected EOF reading a line” as these # Hack: Retry the test if it fails with “unexpected EOF reading a line” as these
# appear randomly without anyone knowing why. # appear randomly without anyone knowing why.
@ -32,7 +37,7 @@ if [[ $status -ne 0 && $status -ne 99 && \
]]; then ]]; then
echo "$post_run_msg [${yellow}FAIL$normal] (possibly flaky, so will be retried)" echo "$post_run_msg [${yellow}FAIL$normal] (possibly flaky, so will be retried)"
echo "$log" | sed 's/^/ /' echo "$log" | sed 's/^/ /'
run_test "$1" run_test
fi fi
if [ $status -eq 0 ]; then if [ $status -eq 0 ]; then


@ -8,7 +8,11 @@ define run-install-test
.PHONY: $1.test .PHONY: $1.test
$1.test: $1 $(test-deps) $1.test: $1 $(test-deps)
@env TEST_NAME=$(basename $1) TESTS_ENVIRONMENT="$(tests-environment)" mk/run_test.sh $1 < /dev/null @env BASH=$(bash) $(bash) mk/run-test.sh $1 < /dev/null
.PHONY: $1.test-debug
$1.test-debug: $1 $(test-deps)
@env BASH=$(bash) $(bash) mk/debug-test.sh $1 < /dev/null
endef endef
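Assuming a test file registered with `run-install-test` (the test name below is only an example), both targets can then be invoked directly from `make`:

```
$ make tests/fetchurl.sh.test         # capture output, retry flaky failures (mk/run-test.sh)
$ make tests/fetchurl.sh.test-debug   # run uncaptured, for interactive debugging (mk/debug-test.sh)
```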


@ -58,7 +58,7 @@ readonly EXTRACTED_NIX_PATH="$(dirname "$0")"
readonly ROOT_HOME=~root readonly ROOT_HOME=~root
if [ -t 0 ]; then if [ -t 0 ] && [ -z "${NIX_INSTALLER_YES:-}" ]; then
readonly IS_HEADLESS='no' readonly IS_HEADLESS='no'
else else
readonly IS_HEADLESS='yes' readonly IS_HEADLESS='yes'
@ -97,13 +97,10 @@ is_os_darwin() {
} }
contact_us() { contact_us() {
echo "You can open an issue at https://github.com/nixos/nix/issues" echo "You can open an issue at"
echo "https://github.com/NixOS/nix/issues/new?labels=installer&template=installer.md"
echo "" echo ""
echo "Or feel free to contact the team:" echo "Or get in touch with the community: https://nixos.org/community"
echo " - Matrix: #nix:nixos.org"
echo " - IRC: in #nixos on irc.libera.chat"
echo " - twitter: @nixos_org"
echo " - forum: https://discourse.nixos.org"
} }
get_help() { get_help() {
echo "We'd love to help if you need it." echo "We'd love to help if you need it."
@ -578,7 +575,7 @@ EOF
# to extract _just_ the user's note, instead it is prefixed with # to extract _just_ the user's note, instead it is prefixed with
# some plist junk. This was causing the user note to always be set, # some plist junk. This was causing the user note to always be set,
# even if there was no reason for it. # even if there was no reason for it.
if ! poly_user_note_get "$username" | grep -q "Nix build user $coreid"; then if poly_user_note_get "$username" | grep -q "Nix build user $coreid"; then
row " Note" "Nix build user $coreid" row " Note" "Nix build user $coreid"
else else
poly_user_note_set "$username" "Nix build user $coreid" poly_user_note_set "$username" "Nix build user $coreid"


@ -71,6 +71,8 @@ while [ $# -gt 0 ]; do
# # intentional tail space # # intentional tail space
# ACTIONS="${ACTIONS}uninstall " # ACTIONS="${ACTIONS}uninstall "
# ;; # ;;
--yes)
export NIX_INSTALLER_YES=1;;
--no-channel-add) --no-channel-add)
export NIX_INSTALLER_NO_CHANNEL_ADD=1;; export NIX_INSTALLER_NO_CHANNEL_ADD=1;;
--daemon-user-count) --daemon-user-count)
@ -90,7 +92,7 @@ while [ $# -gt 0 ]; do
shift;; shift;;
*) *)
{ {
echo "Nix Installer [--daemon|--no-daemon] [--daemon-user-count INT] [--no-channel-add] [--no-modify-profile] [--nix-extra-conf-file FILE]" echo "Nix Installer [--daemon|--no-daemon] [--daemon-user-count INT] [--yes] [--no-channel-add] [--no-modify-profile] [--nix-extra-conf-file FILE]"
echo "Choose installation method." echo "Choose installation method."
echo "" echo ""
@ -104,6 +106,8 @@ while [ $# -gt 0 ]; do
echo " trivial to uninstall." echo " trivial to uninstall."
echo " (default)" echo " (default)"
echo "" echo ""
echo " --yes: Run the script non-interactively, accepting all prompts."
echo ""
echo " --no-channel-add: Don't add any channels. nixpkgs-unstable is installed by default." echo " --no-channel-add: Don't add any channels. nixpkgs-unstable is installed by default."
echo "" echo ""
echo " --no-modify-profile: Don't modify the user profile to automatically load nix." echo " --no-modify-profile: Don't modify the user profile to automatically load nix."


@ -28,7 +28,9 @@ if test -n "$HOME" && test -n "$USER"
# Only use MANPATH if it is already set. In general `man` will just simply # Only use MANPATH if it is already set. In general `man` will just simply
# pick up `.nix-profile/share/man` because is it close to `.nix-profile/bin` # pick up `.nix-profile/share/man` because is it close to `.nix-profile/bin`
# which is in the $PATH. For more info, run `manpath -d`. # which is in the $PATH. For more info, run `manpath -d`.
if set --query MANPATH
set --export --prepend --path MANPATH "$NIX_LINK/share/man" set --export --prepend --path MANPATH "$NIX_LINK/share/man"
end
fish_add_path --prepend --global "$NIX_LINK/bin" fish_add_path --prepend --global "$NIX_LINK/bin"
set --erase NIX_LINK set --erase NIX_LINK


@ -186,12 +186,12 @@ static int main_build_remote(int argc, char * * argv)
// build the hint template. // build the hint template.
std::string errorText = std::string errorText =
"Failed to find a machine for remote build!\n" "Failed to find a machine for remote build!\n"
"derivation: %s\nrequired (system, features): (%s, %s)"; "derivation: %s\nrequired (system, features): (%s, [%s])";
errorText += "\n%s available machines:"; errorText += "\n%s available machines:";
errorText += "\n(systems, maxjobs, supportedFeatures, mandatoryFeatures)"; errorText += "\n(systems, maxjobs, supportedFeatures, mandatoryFeatures)";
for (unsigned int i = 0; i < machines.size(); ++i) for (unsigned int i = 0; i < machines.size(); ++i)
errorText += "\n(%s, %s, %s, %s)"; errorText += "\n([%s], %s, [%s], [%s])";
// add the template values. // add the template values.
std::string drvstr; std::string drvstr;


@ -226,7 +226,7 @@ MixProfile::MixProfile()
{ {
addFlag({ addFlag({
.longName = "profile", .longName = "profile",
.description = "The profile to update.", .description = "The profile to operate on.",
.labels = {"path"}, .labels = {"path"},
.handler = {&profile}, .handler = {&profile},
.completer = completePath .completer = completePath


@ -32,7 +32,77 @@ MixEvalArgs::MixEvalArgs()
addFlag({ addFlag({
.longName = "include", .longName = "include",
.shortName = 'I', .shortName = 'I',
.description = "Add *path* to the list of locations used to look up `<...>` file names.", .description = R"(
Add *path* to the Nix search path. The Nix search path is
initialized from the colon-separated [`NIX_PATH`](./env-common.md#env-NIX_PATH) environment
variable, and is used to look up the location of Nix expressions using [paths](../language/values.md#type-path) enclosed in angle
brackets (i.e., `<nixpkgs>`).
For instance, passing
```
-I /home/eelco/Dev
-I /etc/nixos
```
will cause Nix to look for paths relative to `/home/eelco/Dev` and
`/etc/nixos`, in that order. This is equivalent to setting the
`NIX_PATH` environment variable to
```
/home/eelco/Dev:/etc/nixos
```
It is also possible to match paths against a prefix. For example,
passing
```
-I nixpkgs=/home/eelco/Dev/nixpkgs-branch
-I /etc/nixos
```
will cause Nix to search for `<nixpkgs/path>` in
`/home/eelco/Dev/nixpkgs-branch/path` and `/etc/nixos/nixpkgs/path`.
If a path in the Nix search path starts with `http://` or `https://`,
it is interpreted as the URL of a tarball that will be downloaded and
unpacked to a temporary location. The tarball must consist of a single
top-level directory. For example, passing
```
-I nixpkgs=https://github.com/NixOS/nixpkgs/archive/master.tar.gz
```
tells Nix to download and use the current contents of the `master`
branch in the `nixpkgs` repository.
The URLs of the tarballs from the official `nixos.org` channels
(see [the manual page for `nix-channel`](../nix-channel.md)) can be
abbreviated as `channel:<channel-name>`. For instance, the
following two flags are equivalent:
```
-I nixpkgs=channel:nixos-21.05
-I nixpkgs=https://nixos.org/channels/nixos-21.05/nixexprs.tar.xz
```
You can also fetch source trees using [flake URLs](./nix3-flake.md#url-like-syntax) and add them to the
search path. For instance,
```
-I nixpkgs=flake:nixpkgs
```
specifies that the prefix `nixpkgs` shall refer to the source tree
downloaded from the `nixpkgs` entry in the flake registry. Similarly,
```
-I nixpkgs=flake:github:NixOS/nixpkgs/nixos-22.05
```
makes `<nixpkgs>` refer to a particular branch of the
`NixOS/nixpkgs` repository on GitHub.
)",
.category = category, .category = category,
.labels = {"path"}, .labels = {"path"},
.handler = {[&](std::string s) { searchPath.push_back(s); }} .handler = {[&](std::string s) { searchPath.push_back(s); }}
@ -89,14 +159,25 @@ Bindings * MixEvalArgs::getAutoArgs(EvalState & state)
Path lookupFileArg(EvalState & state, std::string_view s) Path lookupFileArg(EvalState & state, std::string_view s)
{ {
if (isUri(s)) { if (EvalSettings::isPseudoUrl(s)) {
return state.store->toRealPath( auto storePath = fetchers::downloadTarball(
fetchers::downloadTarball( state.store, EvalSettings::resolvePseudoUrl(s), "source", false).first.storePath;
state.store, resolveUri(s), "source", false).first.storePath); return state.store->toRealPath(storePath);
} else if (s.size() > 2 && s.at(0) == '<' && s.at(s.size() - 1) == '>') { }
else if (hasPrefix(s, "flake:")) {
settings.requireExperimentalFeature(Xp::Flakes);
auto flakeRef = parseFlakeRef(std::string(s.substr(6)), {}, true, false);
auto storePath = flakeRef.resolve(state.store).fetchTree(state.store).first.storePath;
return state.store->toRealPath(storePath);
}
else if (s.size() > 2 && s.at(0) == '<' && s.at(s.size() - 1) == '>') {
Path p(s.substr(1, s.size() - 2)); Path p(s.substr(1, s.size() - 2));
return state.findFile(p); return state.findFile(p);
} else }
else
return absPath(std::string(s)); return absPath(std::string(s));
} }


@ -168,7 +168,7 @@ SourceExprCommand::SourceExprCommand(bool supportReadOnlyMode)
addFlag({ addFlag({
.longName = "derivation", .longName = "derivation",
.description = "Operate on the store derivation rather than its outputs.", .description = "Operate on the [store derivation](../../glossary.md#gloss-store-derivation) rather than its outputs.",
.category = installablesCategory, .category = installablesCategory,
.handler = {&operateOn, OperateOn::Derivation}, .handler = {&operateOn, OperateOn::Derivation},
}); });
@ -207,6 +207,7 @@ Strings SourceExprCommand::getDefaultFlakeAttrPathPrefixes()
void SourceExprCommand::completeInstallable(std::string_view prefix) void SourceExprCommand::completeInstallable(std::string_view prefix)
{ {
try {
if (file) { if (file) {
completionType = ctAttrs; completionType = ctAttrs;
@ -257,6 +258,9 @@ void SourceExprCommand::completeInstallable(std::string_view prefix)
getDefaultFlakeAttrPaths(), getDefaultFlakeAttrPaths(),
prefix); prefix);
} }
} catch (EvalError&) {
// Don't want eval errors to mess-up with the completion engine, so let's just swallow them
}
} }
void completeFlakeRefWithFragment( void completeFlakeRefWithFragment(
@ -395,44 +399,56 @@ static StorePath getDeriver(
struct InstallableStorePath : Installable struct InstallableStorePath : Installable
{ {
ref<Store> store; ref<Store> store;
StorePath storePath; DerivedPath req;
InstallableStorePath(ref<Store> store, StorePath && storePath) InstallableStorePath(ref<Store> store, StorePath && storePath)
: store(store), storePath(std::move(storePath)) { } : store(store),
req(storePath.isDerivation()
? (DerivedPath) DerivedPath::Built {
.drvPath = std::move(storePath),
.outputs = {},
}
: (DerivedPath) DerivedPath::Opaque {
.path = std::move(storePath),
})
{ }
std::string what() const override { return store->printStorePath(storePath); } InstallableStorePath(ref<Store> store, DerivedPath && req)
: store(store), req(std::move(req))
{ }
std::string what() const override
{
return req.to_string(*store);
}
DerivedPaths toDerivedPaths() override DerivedPaths toDerivedPaths() override
{ {
if (storePath.isDerivation()) { return { req };
auto drv = store->readDerivation(storePath);
return {
DerivedPath::Built {
.drvPath = storePath,
.outputs = drv.outputNames(),
}
};
} else {
return {
DerivedPath::Opaque {
.path = storePath,
}
};
}
} }
StorePathSet toDrvPaths(ref<Store> store) override StorePathSet toDrvPaths(ref<Store> store) override
{ {
if (storePath.isDerivation()) { return std::visit(overloaded {
return {storePath}; [&](const DerivedPath::Built & bfd) -> StorePathSet {
} else { return { bfd.drvPath };
return {getDeriver(store, *this, storePath)}; },
} [&](const DerivedPath::Opaque & bo) -> StorePathSet {
return { getDeriver(store, *this, bo.path) };
},
}, req.raw());
} }
std::optional<StorePath> getStorePath() override std::optional<StorePath> getStorePath() override
{ {
return storePath; return std::visit(overloaded {
[&](const DerivedPath::Built & bfd) {
return bfd.drvPath;
},
[&](const DerivedPath::Opaque & bo) {
return bo.path;
},
}, req.raw());
} }
}; };
@ -777,7 +793,8 @@ std::vector<std::shared_ptr<Installable>> SourceExprCommand::parseInstallables(
if (file == "-") { if (file == "-") {
auto e = state->parseStdin(); auto e = state->parseStdin();
state->eval(e, *vFile); state->eval(e, *vFile);
} else if (file) }
else if (file)
state->evalFile(lookupFileArg(*state, *file), *vFile); state->evalFile(lookupFileArg(*state, *file), *vFile);
else { else {
auto e = state->parseExprFromString(*expr, absPath(".")); auto e = state->parseExprFromString(*expr, absPath("."));
@ -798,7 +815,22 @@ std::vector<std::shared_ptr<Installable>> SourceExprCommand::parseInstallables(
for (auto & s : ss) { for (auto & s : ss) {
std::exception_ptr ex; std::exception_ptr ex;
if (s.find('/') != std::string::npos) { auto found = s.rfind('^');
if (found != std::string::npos) {
try {
result.push_back(std::make_shared<InstallableStorePath>(
store,
DerivedPath::Built::parse(*store, s.substr(0, found), s.substr(found + 1))));
continue;
} catch (BadStorePath &) {
} catch (...) {
if (!ex)
ex = std::current_exception();
}
}
found = s.find('/');
if (found != std::string::npos) {
try { try {
result.push_back(std::make_shared<InstallableStorePath>(store, store->followLinksToStorePath(s))); result.push_back(std::make_shared<InstallableStorePath>(store, store->followLinksToStorePath(s)));
continue; continue;
@ -840,20 +872,20 @@ std::shared_ptr<Installable> SourceExprCommand::parseInstallable(
return installables.front(); return installables.front();
} }
BuiltPaths Installable::build( std::vector<BuiltPathWithResult> Installable::build(
ref<Store> evalStore, ref<Store> evalStore,
ref<Store> store, ref<Store> store,
Realise mode, Realise mode,
const std::vector<std::shared_ptr<Installable>> & installables, const std::vector<std::shared_ptr<Installable>> & installables,
BuildMode bMode) BuildMode bMode)
{ {
BuiltPaths res; std::vector<BuiltPathWithResult> res;
for (auto & [_, builtPath] : build2(evalStore, store, mode, installables, bMode)) for (auto & [_, builtPathWithResult] : build2(evalStore, store, mode, installables, bMode))
res.push_back(builtPath); res.push_back(builtPathWithResult);
return res; return res;
} }
std::vector<std::pair<std::shared_ptr<Installable>, BuiltPath>> Installable::build2( std::vector<std::pair<std::shared_ptr<Installable>, BuiltPathWithResult>> Installable::build2(
ref<Store> evalStore, ref<Store> evalStore,
ref<Store> store, ref<Store> store,
Realise mode, Realise mode,
@ -873,7 +905,7 @@ std::vector<std::pair<std::shared_ptr<Installable>, BuiltPath>> Installable::bui
} }
} }
std::vector<std::pair<std::shared_ptr<Installable>, BuiltPath>> res; std::vector<std::pair<std::shared_ptr<Installable>, BuiltPathWithResult>> res;
switch (mode) { switch (mode) {
@ -914,10 +946,10 @@ std::vector<std::pair<std::shared_ptr<Installable>, BuiltPath>> Installable::bui
output, *drvOutput->second); output, *drvOutput->second);
} }
} }
res.push_back({installable, BuiltPath::Built { bfd.drvPath, outputs }}); res.push_back({installable, {.path = BuiltPath::Built { bfd.drvPath, outputs }}});
}, },
[&](const DerivedPath::Opaque & bo) { [&](const DerivedPath::Opaque & bo) {
res.push_back({installable, BuiltPath::Opaque { bo.path }}); res.push_back({installable, {.path = BuiltPath::Opaque { bo.path }}});
}, },
}, path.raw()); }, path.raw());
} }
@ -939,10 +971,10 @@ std::vector<std::pair<std::shared_ptr<Installable>, BuiltPath>> Installable::bui
std::map<std::string, StorePath> outputs; std::map<std::string, StorePath> outputs;
for (auto & path : buildResult.builtOutputs) for (auto & path : buildResult.builtOutputs)
outputs.emplace(path.first.outputName, path.second.outPath); outputs.emplace(path.first.outputName, path.second.outPath);
res.push_back({installable, BuiltPath::Built { bfd.drvPath, outputs }}); res.push_back({installable, {.path = BuiltPath::Built { bfd.drvPath, outputs }, .result = buildResult}});
}, },
[&](const DerivedPath::Opaque & bo) { [&](const DerivedPath::Opaque & bo) {
res.push_back({installable, BuiltPath::Opaque { bo.path }}); res.push_back({installable, {.path = BuiltPath::Opaque { bo.path }, .result = buildResult}});
}, },
}, buildResult.path.raw()); }, buildResult.path.raw());
} }
@ -965,9 +997,12 @@ BuiltPaths Installable::toBuiltPaths(
OperateOn operateOn, OperateOn operateOn,
const std::vector<std::shared_ptr<Installable>> & installables) const std::vector<std::shared_ptr<Installable>> & installables)
{ {
if (operateOn == OperateOn::Output) if (operateOn == OperateOn::Output) {
return Installable::build(evalStore, store, mode, installables); BuiltPaths res;
else { for (auto & p : Installable::build(evalStore, store, mode, installables))
res.push_back(p.path);
return res;
} else {
if (mode == Realise::Nothing) if (mode == Realise::Nothing)
settings.readOnlyMode = true; settings.readOnlyMode = true;


@ -7,6 +7,7 @@
#include "eval.hh" #include "eval.hh"
#include "store-api.hh" #include "store-api.hh"
#include "flake/flake.hh" #include "flake/flake.hh"
#include "build-result.hh"
#include <optional> #include <optional>
@ -51,6 +52,12 @@ enum class OperateOn {
Derivation Derivation
}; };
struct BuiltPathWithResult
{
BuiltPath path;
std::optional<BuildResult> result;
};
struct Installable struct Installable
{ {
virtual ~Installable() { } virtual ~Installable() { }
@ -91,14 +98,14 @@ struct Installable
return FlakeRef::fromAttrs({{"type","indirect"}, {"id", "nixpkgs"}}); return FlakeRef::fromAttrs({{"type","indirect"}, {"id", "nixpkgs"}});
} }
static BuiltPaths build( static std::vector<BuiltPathWithResult> build(
ref<Store> evalStore, ref<Store> evalStore,
ref<Store> store, ref<Store> store,
Realise mode, Realise mode,
const std::vector<std::shared_ptr<Installable>> & installables, const std::vector<std::shared_ptr<Installable>> & installables,
BuildMode bMode = bmNormal); BuildMode bMode = bmNormal);
static std::vector<std::pair<std::shared_ptr<Installable>, BuiltPath>> build2( static std::vector<std::pair<std::shared_ptr<Installable>, BuiltPathWithResult>> build2(
ref<Store> evalStore, ref<Store> evalStore,
ref<Store> store, ref<Store> store,
Realise mode, Realise mode,


@ -8,7 +8,7 @@ libcmd_SOURCES := $(wildcard $(d)/*.cc)
libcmd_CXXFLAGS += -I src/libutil -I src/libstore -I src/libexpr -I src/libmain -I src/libfetchers -I src/nix libcmd_CXXFLAGS += -I src/libutil -I src/libstore -I src/libexpr -I src/libmain -I src/libfetchers -I src/nix
libcmd_LDFLAGS = $(EDITLINE_LIBS) -llowdown -pthread libcmd_LDFLAGS = $(EDITLINE_LIBS) $(LOWDOWN_LIBS) -pthread
libcmd_LIBS = libstore libutil libexpr libmain libfetchers libcmd_LIBS = libstore libutil libexpr libmain libfetchers


@ -215,17 +215,15 @@ static std::ostream & showDebugTrace(std::ostream & out, const PosTable & positi
out << dt.hint.str() << "\n"; out << dt.hint.str() << "\n";
// prefer direct pos, but if noPos then try the expr. // prefer direct pos, but if noPos then try the expr.
auto pos = *dt.pos auto pos = dt.pos
? *dt.pos ? dt.pos
: positions[dt.expr.getPos() ? dt.expr.getPos() : noPos]; : static_cast<std::shared_ptr<AbstractPos>>(positions[dt.expr.getPos() ? dt.expr.getPos() : noPos]);
if (pos) { if (pos) {
printAtPos(pos, out); out << pos;
if (auto loc = pos->getCodeLines()) {
auto loc = getCodeLines(pos);
if (loc.has_value()) {
out << "\n"; out << "\n";
printCodeLines(out, "", pos, *loc); printCodeLines(out, "", *pos, *loc);
out << "\n"; out << "\n";
} }
} }
@ -270,6 +268,7 @@ void NixRepl::mainLoop()
// ctrl-D should exit the debugger. // ctrl-D should exit the debugger.
state->debugStop = false; state->debugStop = false;
state->debugQuit = true; state->debugQuit = true;
logger->cout("");
break; break;
} }
try { try {
@ -384,6 +383,10 @@ StringSet NixRepl::completePrefix(const std::string & prefix)
i++; i++;
} }
} else { } else {
/* Temporarily disable the debugger, to avoid re-entering readline. */
auto debug_repl = state->debugRepl;
state->debugRepl = nullptr;
Finally restoreDebug([&]() { state->debugRepl = debug_repl; });
try { try {
/* This is an expression that should evaluate to an /* This is an expression that should evaluate to an
attribute set. Evaluate it to get the names of the attribute set. Evaluate it to get the names of the
@ -584,15 +587,17 @@ bool NixRepl::processLine(std::string line)
Value v; Value v;
evalString(arg, v); evalString(arg, v);
const auto [file, line] = [&] () -> std::pair<std::string, uint32_t> { const auto [path, line] = [&] () -> std::pair<Path, uint32_t> {
if (v.type() == nPath || v.type() == nString) { if (v.type() == nPath || v.type() == nString) {
PathSet context; PathSet context;
auto filename = state->coerceToString(noPos, v, context, "while evaluating the filename to edit").toOwned(); auto path = state->coerceToPath(noPos, v, context, "while evaluating the filename to edit");
state->symbols.create(filename); return {path, 0};
return {filename, 0};
} else if (v.isLambda()) { } else if (v.isLambda()) {
auto pos = state->positions[v.lambda.fun->pos]; auto pos = state->positions[v.lambda.fun->pos];
return {pos.file, pos.line}; if (auto path = std::get_if<Path>(&pos.origin))
return {*path, pos.line};
else
throw Error("'%s' cannot be shown in an editor", pos);
} else { } else {
// assume it's a derivation // assume it's a derivation
return findPackageFilename(*state, v, arg); return findPackageFilename(*state, v, arg);
@ -600,7 +605,7 @@ bool NixRepl::processLine(std::string line)
}(); }();
// Open in EDITOR // Open in EDITOR
auto args = editorFor(file, line); auto args = editorFor(path, line);
auto editor = args.front(); auto editor = args.front();
args.pop_front(); args.pop_front();
@ -782,7 +787,7 @@ void NixRepl::loadFlake(const std::string & flakeRefS)
flake::LockFlags { flake::LockFlags {
.updateLockFile = false, .updateLockFile = false,
.useRegistries = !evalSettings.pureEval, .useRegistries = !evalSettings.pureEval,
.allowMutable = !evalSettings.pureEval, .allowUnlocked = !evalSettings.pureEval,
}), }),
v); v);
addAttrsToScope(v); addAttrsToScope(v);


@ -645,17 +645,17 @@ NixInt AttrCursor::getInt()
cachedValue = root->db->getAttr(getKey()); cachedValue = root->db->getAttr(getKey());
if (cachedValue && !std::get_if<placeholder_t>(&cachedValue->second)) { if (cachedValue && !std::get_if<placeholder_t>(&cachedValue->second)) {
if (auto i = std::get_if<int_t>(&cachedValue->second)) { if (auto i = std::get_if<int_t>(&cachedValue->second)) {
debug("using cached Integer attribute '%s'", getAttrPathStr()); debug("using cached integer attribute '%s'", getAttrPathStr());
return i->x; return i->x;
} else } else
throw TypeError("'%s' is not an Integer", getAttrPathStr()); throw TypeError("'%s' is not an integer", getAttrPathStr());
} }
} }
auto & v = forceValue(); auto & v = forceValue();
if (v.type() != nInt) if (v.type() != nInt)
throw TypeError("'%s' is not an Integer", getAttrPathStr()); throw TypeError("'%s' is not an integer", getAttrPathStr());
return v.integer; return v.integer;
} }


@ -7,7 +7,6 @@
#include "globals.hh" #include "globals.hh"
#include "eval-inline.hh" #include "eval-inline.hh"
#include "filetransfer.hh" #include "filetransfer.hh"
#include "json.hh"
#include "function-trace.hh" #include "function-trace.hh"
#include <algorithm> #include <algorithm>
@ -21,6 +20,7 @@
#include <functional> #include <functional>
#include <sys/resource.h> #include <sys/resource.h>
#include <nlohmann/json.hpp>
#if HAVE_BOEHMGC #if HAVE_BOEHMGC
@ -35,6 +35,8 @@
#endif #endif
using json = nlohmann::json;
namespace nix { namespace nix {
static char * allocString(size_t size) static char * allocString(size_t size)
@ -43,7 +45,7 @@ static char * allocString(size_t size)
#if HAVE_BOEHMGC #if HAVE_BOEHMGC
t = (char *) GC_MALLOC_ATOMIC(size); t = (char *) GC_MALLOC_ATOMIC(size);
#else #else
t = malloc(size); t = (char *) malloc(size);
#endif #endif
if (!t) throw std::bad_alloc(); if (!t) throw std::bad_alloc();
return t; return t;
@ -65,26 +67,19 @@ static char * dupString(const char * s)
// When there's no need to write to the string, we can optimize away empty // When there's no need to write to the string, we can optimize away empty
// string allocations. // string allocations.
// This function handles makeImmutableStringWithLen(null, 0) by returning the // This function handles makeImmutableString(std::string_view()) by returning
// empty string. // the empty string.
static const char * makeImmutableStringWithLen(const char * s, size_t size) static const char * makeImmutableString(std::string_view s)
{ {
char * t; const size_t size = s.size();
if (size == 0) if (size == 0)
return ""; return "";
#if HAVE_BOEHMGC auto t = allocString(size + 1);
t = GC_STRNDUP(s, size); memcpy(t, s.data(), size);
#else t[size] = '\0';
t = strndup(s, size);
#endif
if (!t) throw std::bad_alloc();
return t; return t;
} }
static inline const char * makeImmutableString(std::string_view s) {
return makeImmutableStringWithLen(s.data(), s.size());
}
RootValue allocRootValue(Value * v) RootValue allocRootValue(Value * v)
{ {
@ -404,7 +399,8 @@ static Strings parseNixPath(const std::string & s)
} }
if (*p == ':') { if (*p == ':') {
if (isUri(std::string(start2, s.end()))) { auto prefix = std::string(start2, s.end());
if (EvalSettings::isPseudoUrl(prefix) || hasPrefix(prefix, "flake:")) {
++p; ++p;
while (p != s.end() && *p != ':') ++p; while (p != s.end() && *p != ':') ++p;
} }
@ -436,19 +432,21 @@ ErrorBuilder & ErrorBuilder::withFrameTrace(PosIdx pos, const std::string_view t
return *this; return *this;
} }
ErrorBuilder & ErrorBuilder::withSuggestions(Suggestions & s) { ErrorBuilder & ErrorBuilder::withSuggestions(Suggestions & s)
{
info.suggestions = s; info.suggestions = s;
return *this; return *this;
} }
ErrorBuilder & ErrorBuilder::withFrame(const Env & env, const Expr & expr) { ErrorBuilder & ErrorBuilder::withFrame(const Env & env, const Expr & expr)
{
// NOTE: This is abusing side-effects. // NOTE: This is abusing side-effects.
// TODO: check compatibility with nested debugger calls. // TODO: check compatibility with nested debugger calls.
state.debugTraces.push_front(DebugTrace { state.debugTraces.push_front(DebugTrace {
.pos = std::nullopt, .pos = nullptr,
.expr = expr, .expr = expr,
.env = env, .env = env,
.hint = hintformat("Fake frame for debugg{ing,er} purposes"), .hint = hintformat("Fake frame for debugging purposes"),
.isError = true .isError = true
}); });
return *this; return *this;
@ -508,9 +506,6 @@ EvalState::EvalState(
#if HAVE_BOEHMGC #if HAVE_BOEHMGC
, valueAllocCache(std::allocate_shared<void *>(traceable_allocator<void *>(), nullptr)) , valueAllocCache(std::allocate_shared<void *>(traceable_allocator<void *>(), nullptr))
, env1AllocCache(std::allocate_shared<void *>(traceable_allocator<void *>(), nullptr)) , env1AllocCache(std::allocate_shared<void *>(traceable_allocator<void *>(), nullptr))
#else
, valueAllocCache(std::make_shared<void *>(nullptr))
, env1AllocCache(std::make_shared<void *>(nullptr))
#endif #endif
, baseEnv(allocEnv(128)) , baseEnv(allocEnv(128))
, staticBaseEnv{std::make_shared<StaticEnv>(false, nullptr)} , staticBaseEnv{std::make_shared<StaticEnv>(false, nullptr)}
@ -842,7 +837,7 @@ void EvalState::runDebugRepl(const Error * error, const Env & env, const Expr &
? std::make_unique<DebugTraceStacker>( ? std::make_unique<DebugTraceStacker>(
*this, *this,
DebugTrace { DebugTrace {
.pos = error->info().errPos ? *error->info().errPos : positions[expr.getPos()], .pos = error->info().errPos ? error->info().errPos : static_cast<std::shared_ptr<AbstractPos>>(positions[expr.getPos()]),
.expr = expr, .expr = expr,
.env = env, .env = env,
.hint = error->info().msg, .hint = error->info().msg,
@ -869,7 +864,7 @@ void EvalState::runDebugRepl(const Error * error, const Env & env, const Expr &
void EvalState::addErrorTrace(Error & e, const char * s, const std::string & s2) const void EvalState::addErrorTrace(Error & e, const char * s, const std::string & s2) const
{ {
e.addTrace(std::nullopt, s, s2); e.addTrace(nullptr, s, s2);
} }
void EvalState::addErrorTrace(Error & e, const PosIdx pos, const char * s, const std::string & s2, bool frame) const void EvalState::addErrorTrace(Error & e, const PosIdx pos, const char * s, const std::string & s2, bool frame) const
@ -881,13 +876,13 @@ static std::unique_ptr<DebugTraceStacker> makeDebugTraceStacker(
EvalState & state, EvalState & state,
Expr & expr, Expr & expr,
Env & env, Env & env,
std::optional<ErrPos> pos, std::shared_ptr<AbstractPos> && pos,
const char * s, const char * s,
const std::string & s2) const std::string & s2)
{ {
return std::make_unique<DebugTraceStacker>(state, return std::make_unique<DebugTraceStacker>(state,
DebugTrace { DebugTrace {
.pos = pos, .pos = std::move(pos),
.expr = expr, .expr = expr,
.env = env, .env = env,
.hint = hintfmt(s, s2), .hint = hintfmt(s, s2),
@ -993,9 +988,9 @@ void EvalState::mkThunk_(Value & v, Expr * expr)
void EvalState::mkPos(Value & v, PosIdx p) void EvalState::mkPos(Value & v, PosIdx p)
{ {
auto pos = positions[p]; auto pos = positions[p];
if (!pos.file.empty()) { if (auto path = std::get_if<Path>(&pos.origin)) {
auto attrs = buildBindings(3); auto attrs = buildBindings(3);
attrs.alloc(sFile).mkString(pos.file); attrs.alloc(sFile).mkString(*path);
attrs.alloc(sLine).mkInt(pos.line); attrs.alloc(sLine).mkInt(pos.line);
attrs.alloc(sColumn).mkInt(pos.column); attrs.alloc(sColumn).mkInt(pos.column);
v.mkAttrs(attrs); v.mkAttrs(attrs);
@ -1103,7 +1098,7 @@ void EvalState::cacheFile(
*this, *this,
*e, *e,
this->baseEnv, this->baseEnv,
e->getPos() ? std::optional(ErrPos(positions[e->getPos()])) : std::nullopt, e->getPos() ? static_cast<std::shared_ptr<AbstractPos>>(positions[e->getPos()]) : nullptr,
"while evaluating the file '%1%':", resolvedPath) "while evaluating the file '%1%':", resolvedPath)
: nullptr; : nullptr;
@ -1373,10 +1368,13 @@ void ExprSelect::eval(EvalState & state, Env & env, Value & v)
state.forceValue(*vAttrs, (pos2 ? pos2 : this->pos ) ); state.forceValue(*vAttrs, (pos2 ? pos2 : this->pos ) );
} catch (Error & e) { } catch (Error & e) {
if (pos2) {
auto pos2r = state.positions[pos2]; auto pos2r = state.positions[pos2];
if (pos2 && pos2r.file != state.derivationNixPath) auto origin = std::get_if<Path>(&pos2r.origin);
if (!(origin && *origin == state.derivationNixPath))
state.addErrorTrace(e, pos2, "while evaluating the attribute '%1%'", state.addErrorTrace(e, pos2, "while evaluating the attribute '%1%'",
showAttrPath(state, env, attrPath)); showAttrPath(state, env, attrPath));
}
throw; throw;
} }
@ -1519,7 +1517,7 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value &
auto dts = debugRepl auto dts = debugRepl
? makeDebugTraceStacker( ? makeDebugTraceStacker(
*this, *lambda.body, env2, positions[lambda.pos], *this, *lambda.body, env2, positions[lambda.pos],
"while evaluating %s", "while calling %s",
lambda.name lambda.name
? concatStrings("'", symbols[lambda.name], "'") ? concatStrings("'", symbols[lambda.name], "'")
: "anonymous lambda") : "anonymous lambda")
@ -1528,9 +1526,10 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value &
lambda.body->eval(*this, env2, vCur); lambda.body->eval(*this, env2, vCur);
} catch (Error & e) { } catch (Error & e) {
if (loggerSettings.showTrace.get()) { if (loggerSettings.showTrace.get()) {
addErrorTrace(e, addErrorTrace(
e,
lambda.pos, lambda.pos,
"while evaluating %s", "while calling %s",
lambda.name lambda.name
? concatStrings("'", symbols[lambda.name], "'") ? concatStrings("'", symbols[lambda.name], "'")
: "anonymous lambda", : "anonymous lambda",
@ -1704,7 +1703,7 @@ void EvalState::autoCallFunction(Bindings & args, Value & fun, Value & res)
Nix attempted to evaluate a function as a top level expression; in Nix attempted to evaluate a function as a top level expression; in
this case it must have its arguments supplied either by default this case it must have its arguments supplied either by default
values, or passed explicitly with '--arg' or '--argstr'. See values, or passed explicitly with '--arg' or '--argstr'. See
https://nixos.org/manual/nix/stable/expressions/language-constructs.html#functions.)", symbols[i.name]) https://nixos.org/manual/nix/stable/language/constructs.html#functions.)", symbols[i.name])
.atPos(i.pos).withFrame(*fun.lambda.env, *fun.lambda.fun).debugThrow<MissingArgumentError>(); .atPos(i.pos).withFrame(*fun.lambda.env, *fun.lambda.fun).debugThrow<MissingArgumentError>();
} }
} }
@ -2357,97 +2356,99 @@ void EvalState::printStats()
std::fstream fs; std::fstream fs;
if (outPath != "-") if (outPath != "-")
fs.open(outPath, std::fstream::out); fs.open(outPath, std::fstream::out);
JSONObject topObj(outPath == "-" ? std::cerr : fs, true); json topObj = json::object();
topObj.attr("cpuTime",cpuTime); topObj["cpuTime"] = cpuTime;
{ topObj["envs"] = {
auto envs = topObj.object("envs"); {"number", nrEnvs},
envs.attr("number", nrEnvs); {"elements", nrValuesInEnvs},
envs.attr("elements", nrValuesInEnvs); {"bytes", bEnvs},
envs.attr("bytes", bEnvs); };
} topObj["list"] = {
{ {"elements", nrListElems},
auto lists = topObj.object("list"); {"bytes", bLists},
lists.attr("elements", nrListElems); {"concats", nrListConcats},
lists.attr("bytes", bLists); };
lists.attr("concats", nrListConcats); topObj["values"] = {
} {"number", nrValues},
{ {"bytes", bValues},
auto values = topObj.object("values"); };
values.attr("number", nrValues); topObj["symbols"] = {
values.attr("bytes", bValues); {"number", symbols.size()},
} {"bytes", symbols.totalSize()},
{ };
auto syms = topObj.object("symbols"); topObj["sets"] = {
syms.attr("number", symbols.size()); {"number", nrAttrsets},
syms.attr("bytes", symbols.totalSize()); {"bytes", bAttrsets},
} {"elements", nrAttrsInAttrsets},
{ };
auto sets = topObj.object("sets"); topObj["sizes"] = {
sets.attr("number", nrAttrsets); {"Env", sizeof(Env)},
sets.attr("bytes", bAttrsets); {"Value", sizeof(Value)},
sets.attr("elements", nrAttrsInAttrsets); {"Bindings", sizeof(Bindings)},
} {"Attr", sizeof(Attr)},
{ };
auto sizes = topObj.object("sizes"); topObj["nrOpUpdates"] = nrOpUpdates;
sizes.attr("Env", sizeof(Env)); topObj["nrOpUpdateValuesCopied"] = nrOpUpdateValuesCopied;
sizes.attr("Value", sizeof(Value)); topObj["nrThunks"] = nrThunks;
sizes.attr("Bindings", sizeof(Bindings)); topObj["nrAvoided"] = nrAvoided;
sizes.attr("Attr", sizeof(Attr)); topObj["nrLookups"] = nrLookups;
} topObj["nrPrimOpCalls"] = nrPrimOpCalls;
topObj.attr("nrOpUpdates", nrOpUpdates); topObj["nrFunctionCalls"] = nrFunctionCalls;
topObj.attr("nrOpUpdateValuesCopied", nrOpUpdateValuesCopied);
topObj.attr("nrThunks", nrThunks);
topObj.attr("nrAvoided", nrAvoided);
topObj.attr("nrLookups", nrLookups);
topObj.attr("nrPrimOpCalls", nrPrimOpCalls);
topObj.attr("nrFunctionCalls", nrFunctionCalls);
#if HAVE_BOEHMGC #if HAVE_BOEHMGC
{ topObj["gc"] = {
auto gc = topObj.object("gc"); {"heapSize", heapSize},
gc.attr("heapSize", heapSize); {"totalBytes", totalBytes},
gc.attr("totalBytes", totalBytes); };
}
#endif #endif
if (countCalls) { if (countCalls) {
topObj["primops"] = primOpCalls;
{ {
auto obj = topObj.object("primops"); auto& list = topObj["functions"];
for (auto & i : primOpCalls) list = json::array();
obj.attr(i.first, i.second);
}
{
auto list = topObj.list("functions");
for (auto & [fun, count] : functionCalls) { for (auto & [fun, count] : functionCalls) {
auto obj = list.object(); json obj = json::object();
if (fun->name) if (fun->name)
obj.attr("name", (std::string_view) symbols[fun->name]); obj["name"] = (std::string_view) symbols[fun->name];
else else
obj.attr("name", nullptr); obj["name"] = nullptr;
if (auto pos = positions[fun->pos]) { if (auto pos = positions[fun->pos]) {
obj.attr("file", (std::string_view) pos.file); if (auto path = std::get_if<Path>(&pos.origin))
obj.attr("line", pos.line); obj["file"] = *path;
obj.attr("column", pos.column); obj["line"] = pos.line;
obj["column"] = pos.column;
} }
obj.attr("count", count); obj["count"] = count;
list.push_back(obj);
} }
} }
{ {
auto list = topObj.list("attributes"); auto list = topObj["attributes"];
list = json::array();
for (auto & i : attrSelects) { for (auto & i : attrSelects) {
auto obj = list.object(); json obj = json::object();
if (auto pos = positions[i.first]) { if (auto pos = positions[i.first]) {
obj.attr("file", (const std::string &) pos.file); if (auto path = std::get_if<Path>(&pos.origin))
obj.attr("line", pos.line); obj["file"] = *path;
obj.attr("column", pos.column); obj["line"] = pos.line;
obj["column"] = pos.column;
} }
obj.attr("count", i.second); obj["count"] = i.second;
list.push_back(obj);
} }
} }
} }
if (getEnv("NIX_SHOW_SYMBOLS").value_or("0") != "0") { if (getEnv("NIX_SHOW_SYMBOLS").value_or("0") != "0") {
auto list = topObj.list("symbols"); // XXX: overrides earlier assignment
symbols.dump([&](const std::string & s) { list.elem(s); }); topObj["symbols"] = json::array();
auto &list = topObj["symbols"];
symbols.dump([&](const std::string & s) { list.emplace_back(s); });
}
if (outPath == "-") {
std::cerr << topObj.dump(2) << std::endl;
} else {
fs << topObj.dump(2) << std::endl;
} }
} }
} }
@ -2502,6 +2503,23 @@ Strings EvalSettings::getDefaultNixPath()
return res; return res;
} }
bool EvalSettings::isPseudoUrl(std::string_view s)
{
if (s.compare(0, 8, "channel:") == 0) return true;
size_t pos = s.find("://");
if (pos == std::string::npos) return false;
std::string scheme(s, 0, pos);
return scheme == "http" || scheme == "https" || scheme == "file" || scheme == "channel" || scheme == "git" || scheme == "s3" || scheme == "ssh";
}
std::string EvalSettings::resolvePseudoUrl(std::string_view url)
{
if (hasPrefix(url, "channel:"))
return "https://nixos.org/channels/" + std::string(url.substr(8)) + "/nixexprs.tar.xz";
else
return std::string(url);
}
EvalSettings evalSettings; EvalSettings evalSettings;
static GlobalConfig::Register rEvalSettings(&evalSettings); static GlobalConfig::Register rEvalSettings(&evalSettings);
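Read together with the `-I` documentation above, `resolvePseudoUrl` implies that a `channel:` prefix in the search path expands to the corresponding `nixexprs.tar.xz` URL. A hedged usage sketch (the channel name is arbitrary):

```
$ nix-build -I nixpkgs=channel:nixos-22.11 '<nixpkgs>' -A hello
# behaves like -I nixpkgs=https://nixos.org/channels/nixos-22.11/nixexprs.tar.xz
```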


@ -60,7 +60,6 @@ void copyContext(const Value & v, PathSet & context);
typedef std::map<Path, StorePath> SrcToStore; typedef std::map<Path, StorePath> SrcToStore;
std::ostream & printValue(const EvalState & state, std::ostream & str, const Value & v);
std::string printValue(const EvalState & state, const Value & v); std::string printValue(const EvalState & state, const Value & v);
std::ostream & operator << (std::ostream & os, const ValueType t); std::ostream & operator << (std::ostream & os, const ValueType t);
@ -78,7 +77,7 @@ struct RegexCache;
std::shared_ptr<RegexCache> makeRegexCache(); std::shared_ptr<RegexCache> makeRegexCache();
struct DebugTrace { struct DebugTrace {
std::optional<ErrPos> pos; std::shared_ptr<AbstractPos> pos;
const Expr & expr; const Expr & expr;
const Env & env; const Env & env;
hintformat hint; hintformat hint;
@ -437,8 +436,12 @@ private:
friend struct ExprAttrs; friend struct ExprAttrs;
friend struct ExprLet; friend struct ExprLet;
Expr * parse(char * text, size_t length, FileOrigin origin, const PathView path, Expr * parse(
const PathView basePath, std::shared_ptr<StaticEnv> & staticEnv); char * text,
size_t length,
Pos::Origin origin,
Path basePath,
std::shared_ptr<StaticEnv> & staticEnv);
public: public:
@ -570,6 +573,10 @@ struct EvalSettings : Config
static Strings getDefaultNixPath(); static Strings getDefaultNixPath();
static bool isPseudoUrl(std::string_view s);
static std::string resolvePseudoUrl(std::string_view url);
Setting<bool> enableNativeCode{this, false, "allow-unsafe-native-code-during-evaluation", Setting<bool> enableNativeCode{this, false, "allow-unsafe-native-code-during-evaluation",
"Whether builtin functions that allow executing native code should be enabled."}; "Whether builtin functions that allow executing native code should be enabled."};


@ -43,7 +43,7 @@ let
outputs = flake.outputs (inputs // { self = result; }); outputs = flake.outputs (inputs // { self = result; });
result = outputs // sourceInfo // { inherit inputs; inherit outputs; inherit sourceInfo; }; result = outputs // sourceInfo // { inherit inputs; inherit outputs; inherit sourceInfo; _type = "flake"; };
in in
if node.flake or true then if node.flake or true then
assert builtins.isFunction flake.outputs; assert builtins.isFunction flake.outputs;


@ -56,7 +56,7 @@ void ConfigFile::apply()
auto tlname = get(trustedList, name); auto tlname = get(trustedList, name);
if (auto saved = tlname ? get(*tlname, valueS) : nullptr) { if (auto saved = tlname ? get(*tlname, valueS) : nullptr) {
trusted = *saved; trusted = *saved;
warn("Using saved setting for '%s = %s' from ~/.local/share/nix/trusted-settings.json.", name,valueS); printInfo("Using saved setting for '%s = %s' from ~/.local/share/nix/trusted-settings.json.", name, valueS);
} else { } else {
// FIXME: filter ANSI escapes, newlines, \r, etc. // FIXME: filter ANSI escapes, newlines, \r, etc.
if (std::tolower(logger->ask(fmt("do you want to allow configuration setting '%s' to be set to '" ANSI_RED "%s" ANSI_NORMAL "' (y/N)?", name, valueS)).value_or('n')) == 'y') { if (std::tolower(logger->ask(fmt("do you want to allow configuration setting '%s' to be set to '" ANSI_RED "%s" ANSI_NORMAL "' (y/N)?", name, valueS)).value_or('n')) == 'y') {


@ -143,7 +143,7 @@ static FlakeInput parseFlakeInput(EvalState & state,
} catch (Error & e) { } catch (Error & e) {
e.addTrace( e.addTrace(
state.positions[attr.pos], state.positions[attr.pos],
hintfmt("in flake attribute '%s'", state.symbols[attr.name])); hintfmt("while evaluating flake attribute '%s'", state.symbols[attr.name]));
throw; throw;
} }
} }
@ -152,7 +152,7 @@ static FlakeInput parseFlakeInput(EvalState & state,
try { try {
input.ref = FlakeRef::fromAttrs(attrs); input.ref = FlakeRef::fromAttrs(attrs);
} catch (Error & e) { } catch (Error & e) {
e.addTrace(state.positions[pos], hintfmt("in flake input")); e.addTrace(state.positions[pos], hintfmt("while evaluating flake input"));
throw; throw;
} }
else { else {
@ -220,7 +220,7 @@ static Flake getFlake(
Value vInfo; Value vInfo;
state.evalFile(flakeFile, vInfo, true); // FIXME: symlink attack state.evalFile(flakeFile, vInfo, true); // FIXME: symlink attack
expectType(state, nAttrs, vInfo, state.positions.add({flakeFile, foFile}, 0, 0)); expectType(state, nAttrs, vInfo, state.positions.add({flakeFile}, 1, 1));
if (auto description = vInfo.attrs->get(state.sDescription)) { if (auto description = vInfo.attrs->get(state.sDescription)) {
expectType(state, nString, *description->value, description->pos); expectType(state, nString, *description->value, description->pos);
@ -353,7 +353,7 @@ LockedFlake lockFlake(
std::function<void( std::function<void(
const FlakeInputs & flakeInputs, const FlakeInputs & flakeInputs,
std::shared_ptr<Node> node, ref<Node> node,
const InputPath & inputPathPrefix, const InputPath & inputPathPrefix,
std::shared_ptr<const Node> oldNode, std::shared_ptr<const Node> oldNode,
const InputPath & lockRootPath, const InputPath & lockRootPath,
@ -362,9 +362,15 @@ LockedFlake lockFlake(
computeLocks; computeLocks;
computeLocks = [&]( computeLocks = [&](
/* The inputs of this node, either from flake.nix or
flake.lock. */
const FlakeInputs & flakeInputs, const FlakeInputs & flakeInputs,
std::shared_ptr<Node> node, /* The node whose locks are to be updated.*/
ref<Node> node,
/* The path to this node in the lock file graph. */
const InputPath & inputPathPrefix, const InputPath & inputPathPrefix,
/* The old node, if any, from which locks can be
copied. */
std::shared_ptr<const Node> oldNode, std::shared_ptr<const Node> oldNode,
const InputPath & lockRootPath, const InputPath & lockRootPath,
const Path & parentPath, const Path & parentPath,
@ -452,7 +458,7 @@ LockedFlake lockFlake(
/* Copy the input from the old lock since its flakeref /* Copy the input from the old lock since its flakeref
didn't change and there is no override from a didn't change and there is no override from a
higher level flake. */ higher level flake. */
auto childNode = std::make_shared<LockedNode>( auto childNode = make_ref<LockedNode>(
oldLock->lockedRef, oldLock->originalRef, oldLock->isFlake); oldLock->lockedRef, oldLock->originalRef, oldLock->isFlake);
node->inputs.insert_or_assign(id, childNode); node->inputs.insert_or_assign(id, childNode);
@ -481,7 +487,7 @@ LockedFlake lockFlake(
.isFlake = (*lockedNode)->isFlake, .isFlake = (*lockedNode)->isFlake,
}); });
} else if (auto follows = std::get_if<1>(&i.second)) { } else if (auto follows = std::get_if<1>(&i.second)) {
if (! trustLock) { if (!trustLock) {
// It is possible that the flake has changed, // It is possible that the flake has changed,
// so we must confirm all the follows that are in the lock file are also in the flake. // so we must confirm all the follows that are in the lock file are also in the flake.
auto overridePath(inputPath); auto overridePath(inputPath);
@ -521,8 +527,8 @@ LockedFlake lockFlake(
this input. */ this input. */
debug("creating new input '%s'", inputPathS); debug("creating new input '%s'", inputPathS);
if (!lockFlags.allowMutable && !input.ref->input.isLocked()) if (!lockFlags.allowUnlocked && !input.ref->input.isLocked())
throw Error("cannot update flake input '%s' in pure mode", inputPathS); throw Error("cannot update unlocked flake input '%s' in pure mode", inputPathS);
/* Note: in case of an --override-input, we use /* Note: in case of an --override-input, we use
the *original* ref (input2.ref) for the the *original* ref (input2.ref) for the
@ -544,7 +550,7 @@ LockedFlake lockFlake(
auto inputFlake = getFlake(state, localRef, useRegistries, flakeCache, inputPath); auto inputFlake = getFlake(state, localRef, useRegistries, flakeCache, inputPath);
auto childNode = std::make_shared<LockedNode>(inputFlake.lockedRef, ref); auto childNode = make_ref<LockedNode>(inputFlake.lockedRef, ref);
node->inputs.insert_or_assign(id, childNode); node->inputs.insert_or_assign(id, childNode);
@ -564,15 +570,19 @@ LockedFlake lockFlake(
oldLock oldLock
? std::dynamic_pointer_cast<const Node>(oldLock) ? std::dynamic_pointer_cast<const Node>(oldLock)
: LockFile::read( : LockFile::read(
inputFlake.sourceInfo->actualPath + "/" + inputFlake.lockedRef.subdir + "/flake.lock").root, inputFlake.sourceInfo->actualPath + "/" + inputFlake.lockedRef.subdir + "/flake.lock").root.get_ptr(),
oldLock ? lockRootPath : inputPath, localPath, false); oldLock ? lockRootPath : inputPath,
localPath,
false);
} }
else { else {
auto [sourceInfo, resolvedRef, lockedRef] = fetchOrSubstituteTree( auto [sourceInfo, resolvedRef, lockedRef] = fetchOrSubstituteTree(
state, *input.ref, useRegistries, flakeCache); state, *input.ref, useRegistries, flakeCache);
node->inputs.insert_or_assign(id,
std::make_shared<LockedNode>(lockedRef, ref, false)); auto childNode = make_ref<LockedNode>(lockedRef, ref, false);
node->inputs.insert_or_assign(id, childNode);
} }
} }
@ -587,8 +597,13 @@ LockedFlake lockFlake(
auto parentPath = canonPath(flake.sourceInfo->actualPath + "/" + flake.lockedRef.subdir, true); auto parentPath = canonPath(flake.sourceInfo->actualPath + "/" + flake.lockedRef.subdir, true);
computeLocks( computeLocks(
flake.inputs, newLockFile.root, {}, flake.inputs,
lockFlags.recreateLockFile ? nullptr : oldLockFile.root, {}, parentPath, false); newLockFile.root,
{},
lockFlags.recreateLockFile ? nullptr : oldLockFile.root.get_ptr(),
{},
parentPath,
false);
for (auto & i : lockFlags.inputOverrides) for (auto & i : lockFlags.inputOverrides)
if (!overridesUsed.count(i.first)) if (!overridesUsed.count(i.first))
@ -611,9 +626,9 @@ LockedFlake lockFlake(
if (lockFlags.writeLockFile) { if (lockFlags.writeLockFile) {
if (auto sourcePath = topRef.input.getSourcePath()) { if (auto sourcePath = topRef.input.getSourcePath()) {
if (!newLockFile.isImmutable()) { if (auto unlockedInput = newLockFile.isUnlocked()) {
if (fetchSettings.warnDirty) if (fetchSettings.warnDirty)
warn("will not write lock file of flake '%s' because it has a mutable input", topRef); warn("will not write lock file of flake '%s' because it has an unlocked input ('%s')", topRef, *unlockedInput);
} else { } else {
if (!lockFlags.updateLockFile) if (!lockFlags.updateLockFile)
throw Error("flake '%s' requires lock file changes but they're not allowed due to '--no-update-lock-file'", topRef); throw Error("flake '%s' requires lock file changes but they're not allowed due to '--no-update-lock-file'", topRef);
@ -737,7 +752,7 @@ static void prim_getFlake(EvalState & state, const PosIdx pos, Value * * args, V
.updateLockFile = false, .updateLockFile = false,
.writeLockFile = false, .writeLockFile = false,
.useRegistries = !evalSettings.pureEval && fetchSettings.useRegistries, .useRegistries = !evalSettings.pureEval && fetchSettings.useRegistries,
.allowMutable = !evalSettings.pureEval, .allowUnlocked = !evalSettings.pureEval,
}), }),
v); v);
} }

View file

@ -108,11 +108,11 @@ struct LockFlags
bool applyNixConfig = false; bool applyNixConfig = false;
/* Whether mutable flake references (i.e. those without a Git
   revision or similar) without a corresponding lock are
   allowed. Mutable flake references with a lock are always
   allowed. */
bool allowMutable = true;

/* Whether unlocked flake references (i.e. those without a Git
   revision or similar) without a corresponding lock are
   allowed. Unlocked flake references with a lock are always
   allowed. */
bool allowUnlocked = true;
/* Whether to commit changes to flake.lock. */ /* Whether to commit changes to flake.lock. */
bool commitLockFile = false; bool commitLockFile = false;

View file

@ -35,7 +35,7 @@ typedef std::string FlakeId;
struct FlakeRef struct FlakeRef
{ {
/* fetcher-specific representation of the input, sufficient to /* Fetcher-specific representation of the input, sufficient to
perform the fetch operation. */ perform the fetch operation. */
fetchers::Input input; fetchers::Input input;

View file

@ -31,7 +31,7 @@ FlakeRef getFlakeRef(
} }
LockedNode::LockedNode(const nlohmann::json & json) LockedNode::LockedNode(const nlohmann::json & json)
: lockedRef(getFlakeRef(json, "locked", "info")) : lockedRef(getFlakeRef(json, "locked", "info")) // FIXME: remove "info"
, originalRef(getFlakeRef(json, "original", nullptr)) , originalRef(getFlakeRef(json, "original", nullptr))
, isFlake(json.find("flake") != json.end() ? (bool) json["flake"] : true) , isFlake(json.find("flake") != json.end() ? (bool) json["flake"] : true)
{ {
@ -49,15 +49,15 @@ std::shared_ptr<Node> LockFile::findInput(const InputPath & path)
{ {
auto pos = root; auto pos = root;
if (!pos) return {};
for (auto & elem : path) { for (auto & elem : path) {
if (auto i = get(pos->inputs, elem)) { if (auto i = get(pos->inputs, elem)) {
if (auto node = std::get_if<0>(&*i)) if (auto node = std::get_if<0>(&*i))
pos = *node; pos = *node;
else if (auto follows = std::get_if<1>(&*i)) { else if (auto follows = std::get_if<1>(&*i)) {
pos = findInput(*follows); if (auto p = findInput(*follows))
if (!pos) return {}; pos = ref(p);
else
return {};
} }
} else } else
return {}; return {};
@ -72,7 +72,7 @@ LockFile::LockFile(const nlohmann::json & json, const Path & path)
if (version < 5 || version > 7) if (version < 5 || version > 7)
throw Error("lock file '%s' has unsupported version %d", path, version); throw Error("lock file '%s' has unsupported version %d", path, version);
std::unordered_map<std::string, std::shared_ptr<Node>> nodeMap; std::map<std::string, ref<Node>> nodeMap;
std::function<void(Node & node, const nlohmann::json & jsonNode)> getInputs; std::function<void(Node & node, const nlohmann::json & jsonNode)> getInputs;
@ -93,12 +93,12 @@ LockFile::LockFile(const nlohmann::json & json, const Path & path)
auto jsonNode2 = nodes.find(inputKey); auto jsonNode2 = nodes.find(inputKey);
if (jsonNode2 == nodes.end()) if (jsonNode2 == nodes.end())
throw Error("lock file references missing node '%s'", inputKey); throw Error("lock file references missing node '%s'", inputKey);
auto input = std::make_shared<LockedNode>(*jsonNode2); auto input = make_ref<LockedNode>(*jsonNode2);
k = nodeMap.insert_or_assign(inputKey, input).first; k = nodeMap.insert_or_assign(inputKey, input).first;
getInputs(*input, *jsonNode2); getInputs(*input, *jsonNode2);
} }
if (auto child = std::dynamic_pointer_cast<LockedNode>(k->second)) if (auto child = k->second.dynamic_pointer_cast<LockedNode>())
node.inputs.insert_or_assign(i.key(), child); node.inputs.insert_or_assign(i.key(), ref(child));
else else
// FIXME: replace by follows node // FIXME: replace by follows node
throw Error("lock file contains cycle to root node"); throw Error("lock file contains cycle to root node");
@ -122,9 +122,9 @@ nlohmann::json LockFile::toJSON() const
std::unordered_map<std::shared_ptr<const Node>, std::string> nodeKeys; std::unordered_map<std::shared_ptr<const Node>, std::string> nodeKeys;
std::unordered_set<std::string> keys; std::unordered_set<std::string> keys;
std::function<std::string(const std::string & key, std::shared_ptr<const Node> node)> dumpNode; std::function<std::string(const std::string & key, ref<const Node> node)> dumpNode;
dumpNode = [&](std::string key, std::shared_ptr<const Node> node) -> std::string dumpNode = [&](std::string key, ref<const Node> node) -> std::string
{ {
auto k = nodeKeys.find(node); auto k = nodeKeys.find(node);
if (k != nodeKeys.end()) if (k != nodeKeys.end())
@ -159,10 +159,11 @@ nlohmann::json LockFile::toJSON() const
n["inputs"] = std::move(inputs); n["inputs"] = std::move(inputs);
} }
if (auto lockedNode = std::dynamic_pointer_cast<const LockedNode>(node)) { if (auto lockedNode = node.dynamic_pointer_cast<const LockedNode>()) {
n["original"] = fetchers::attrsToJSON(lockedNode->originalRef.toAttrs()); n["original"] = fetchers::attrsToJSON(lockedNode->originalRef.toAttrs());
n["locked"] = fetchers::attrsToJSON(lockedNode->lockedRef.toAttrs()); n["locked"] = fetchers::attrsToJSON(lockedNode->lockedRef.toAttrs());
if (!lockedNode->isFlake) n["flake"] = false; if (!lockedNode->isFlake)
n["flake"] = false;
} }
nodes[key] = std::move(n); nodes[key] = std::move(n);
@ -201,13 +202,13 @@ void LockFile::write(const Path & path) const
writeFile(path, fmt("%s\n", *this)); writeFile(path, fmt("%s\n", *this));
} }
bool LockFile::isImmutable() const std::optional<FlakeRef> LockFile::isUnlocked() const
{ {
std::unordered_set<std::shared_ptr<const Node>> nodes; std::set<ref<const Node>> nodes;
std::function<void(std::shared_ptr<const Node> node)> visit; std::function<void(ref<const Node> node)> visit;
visit = [&](std::shared_ptr<const Node> node) visit = [&](ref<const Node> node)
{ {
if (!nodes.insert(node).second) return; if (!nodes.insert(node).second) return;
for (auto & i : node->inputs) for (auto & i : node->inputs)
@ -219,11 +220,12 @@ bool LockFile::isImmutable() const
for (auto & i : nodes) { for (auto & i : nodes) {
if (i == root) continue; if (i == root) continue;
auto lockedNode = std::dynamic_pointer_cast<const LockedNode>(i); auto node = i.dynamic_pointer_cast<const LockedNode>();
if (lockedNode && !lockedNode->lockedRef.input.isLocked()) return false; if (node && !node->lockedRef.input.isLocked())
return node->lockedRef;
} }
return true; return {};
} }
bool LockFile::operator ==(const LockFile & other) const bool LockFile::operator ==(const LockFile & other) const
@ -247,12 +249,12 @@ InputPath parseInputPath(std::string_view s)
std::map<InputPath, Node::Edge> LockFile::getAllInputs() const std::map<InputPath, Node::Edge> LockFile::getAllInputs() const
{ {
std::unordered_set<std::shared_ptr<Node>> done; std::set<ref<Node>> done;
std::map<InputPath, Node::Edge> res; std::map<InputPath, Node::Edge> res;
std::function<void(const InputPath & prefix, std::shared_ptr<Node> node)> recurse; std::function<void(const InputPath & prefix, ref<Node> node)> recurse;
recurse = [&](const InputPath & prefix, std::shared_ptr<Node> node) recurse = [&](const InputPath & prefix, ref<Node> node)
{ {
if (!done.insert(node).second) return; if (!done.insert(node).second) return;

View file

@ -20,7 +20,7 @@ struct LockedNode;
type LockedNode. */ type LockedNode. */
struct Node : std::enable_shared_from_this<Node> struct Node : std::enable_shared_from_this<Node>
{ {
typedef std::variant<std::shared_ptr<LockedNode>, InputPath> Edge; typedef std::variant<ref<LockedNode>, InputPath> Edge;
std::map<FlakeId, Edge> inputs; std::map<FlakeId, Edge> inputs;
@ -47,11 +47,13 @@ struct LockedNode : Node
struct LockFile struct LockFile
{ {
std::shared_ptr<Node> root = std::make_shared<Node>(); ref<Node> root = make_ref<Node>();
LockFile() {}; LockFile() {};
LockFile(const nlohmann::json & json, const Path & path); LockFile(const nlohmann::json & json, const Path & path);
typedef std::map<ref<const Node>, std::string> KeyMap;
nlohmann::json toJSON() const; nlohmann::json toJSON() const;
std::string to_string() const; std::string to_string() const;
@ -60,7 +62,8 @@ struct LockFile
void write(const Path & path) const; void write(const Path & path) const;
bool isImmutable() const; /* Check whether this lock file has any unlocked inputs. */
std::optional<FlakeRef> isUnlocked() const;
bool operator ==(const LockFile & other) const; bool operator ==(const LockFile & other) const;

View file

@ -150,7 +150,7 @@ DrvInfo::Outputs DrvInfo::queryOutputs(bool withPaths, bool onlyOutputsToInstall
/* Check for `meta.outputsToInstall` and return `outputs` reduced to that. */ /* Check for `meta.outputsToInstall` and return `outputs` reduced to that. */
const Value * outTI = queryMeta("outputsToInstall"); const Value * outTI = queryMeta("outputsToInstall");
if (!outTI) return outputs; if (!outTI) return outputs;
const auto errMsg = Error("this derivation has bad 'meta.outputsToInstall'"); auto errMsg = Error("this derivation has bad 'meta.outputsToInstall'");
/* ^ this shows during `nix-env -i` right under the bad derivation */ /* ^ this shows during `nix-env -i` right under the bad derivation */
if (!outTI->isList()) throw errMsg; if (!outTI->isList()) throw errMsg;
Outputs result; Outputs result;

View file

@ -8,6 +8,58 @@
namespace nix { namespace nix {
struct PosAdapter : AbstractPos
{
Pos::Origin origin;
PosAdapter(Pos::Origin origin)
: origin(std::move(origin))
{
}
std::optional<std::string> getSource() const override
{
return std::visit(overloaded {
[](const Pos::none_tag &) -> std::optional<std::string> {
return std::nullopt;
},
[](const Pos::Stdin & s) -> std::optional<std::string> {
// Get rid of the null terminators added by the parser.
return std::string(s.source->c_str());
},
[](const Pos::String & s) -> std::optional<std::string> {
// Get rid of the null terminators added by the parser.
return std::string(s.source->c_str());
},
[](const Path & path) -> std::optional<std::string> {
try {
return readFile(path);
} catch (Error &) {
return std::nullopt;
}
}
}, origin);
}
void print(std::ostream & out) const override
{
std::visit(overloaded {
[&](const Pos::none_tag &) { out << "«none»"; },
[&](const Pos::Stdin &) { out << "«stdin»"; },
[&](const Pos::String & s) { out << "«string»"; },
[&](const Path & path) { out << path; }
}, origin);
}
};
Pos::operator std::shared_ptr<AbstractPos>() const
{
auto pos = std::make_shared<PosAdapter>(origin);
pos->line = line;
pos->column = column;
return pos;
}
/* Displaying abstract syntax trees. */ /* Displaying abstract syntax trees. */
static void showString(std::ostream & str, std::string_view s) static void showString(std::ostream & str, std::string_view s)
@ -248,24 +300,10 @@ void ExprPos::show(const SymbolTable & symbols, std::ostream & str) const
std::ostream & operator << (std::ostream & str, const Pos & pos) std::ostream & operator << (std::ostream & str, const Pos & pos)
{ {
if (!pos) if (auto pos2 = (std::shared_ptr<AbstractPos>) pos) {
str << *pos2;
} else
str << "undefined position"; str << "undefined position";
else
{
auto f = format(ANSI_BOLD "%1%" ANSI_NORMAL ":%2%:%3%");
switch (pos.origin) {
case foFile:
f % (const std::string &) pos.file;
break;
case foStdin:
case foString:
f % "(string)";
break;
default:
throw Error("unhandled Pos origin!");
}
str << (f % pos.line % pos.column).str();
}
return str; return str;
} }
@ -289,7 +327,6 @@ std::string showAttrPath(const SymbolTable & symbols, const AttrPath & attrPath)
} }
/* Computing levels/displacements for variables. */ /* Computing levels/displacements for variables. */
void Expr::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env) void Expr::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env)

View file

@ -22,15 +22,22 @@ MakeError(MissingArgumentError, EvalError);
MakeError(RestrictedPathError, Error); MakeError(RestrictedPathError, Error);
/* Position objects. */ /* Position objects. */
struct Pos struct Pos
{ {
std::string file;
FileOrigin origin;
uint32_t line; uint32_t line;
uint32_t column; uint32_t column;
struct none_tag { };
struct Stdin { ref<std::string> source; };
struct String { ref<std::string> source; };
typedef std::variant<none_tag, Stdin, String, Path> Origin;
Origin origin;
explicit operator bool() const { return line > 0; } explicit operator bool() const { return line > 0; }
operator std::shared_ptr<AbstractPos>() const;
}; };
class PosIdx { class PosIdx {
@ -46,7 +53,11 @@ public:
explicit operator bool() const { return id > 0; } explicit operator bool() const { return id > 0; }
bool operator<(const PosIdx other) const { return id < other.id; } bool operator <(const PosIdx other) const { return id < other.id; }
bool operator ==(const PosIdx other) const { return id == other.id; }
bool operator !=(const PosIdx other) const { return id != other.id; }
}; };
class PosTable class PosTable
@ -60,13 +71,13 @@ public:
// current origins.back() can be reused or not. // current origins.back() can be reused or not.
mutable uint32_t idx = std::numeric_limits<uint32_t>::max(); mutable uint32_t idx = std::numeric_limits<uint32_t>::max();
explicit Origin(uint32_t idx): idx(idx), file{}, origin{} {} // Used for searching in PosTable::[].
explicit Origin(uint32_t idx): idx(idx), origin{Pos::none_tag()} {}
public: public:
const std::string file; const Pos::Origin origin;
const FileOrigin origin;
Origin(std::string file, FileOrigin origin): file(std::move(file)), origin(origin) {} Origin(Pos::Origin origin): origin(origin) {}
}; };
struct Offset { struct Offset {
@ -106,7 +117,7 @@ public:
[] (const auto & a, const auto & b) { return a.idx < b.idx; }); [] (const auto & a, const auto & b) { return a.idx < b.idx; });
const auto origin = *std::prev(pastOrigin); const auto origin = *std::prev(pastOrigin);
const auto offset = offsets[idx]; const auto offset = offsets[idx];
return {origin.file, origin.origin, offset.line, offset.column}; return {offset.line, offset.column, origin.origin};
} }
}; };

View file

@ -34,11 +34,6 @@ namespace nix {
Path basePath; Path basePath;
PosTable::Origin origin; PosTable::Origin origin;
std::optional<ErrorInfo> error; std::optional<ErrorInfo> error;
ParseData(EvalState & state, PosTable::Origin origin)
: state(state)
, symbols(state.symbols)
, origin(std::move(origin))
{ };
}; };
struct ParserFormals { struct ParserFormals {
@ -643,29 +638,26 @@ formal
#include "filetransfer.hh" #include "filetransfer.hh"
#include "fetchers.hh" #include "fetchers.hh"
#include "store-api.hh" #include "store-api.hh"
#include "flake/flake.hh"
namespace nix { namespace nix {
Expr * EvalState::parse(char * text, size_t length, FileOrigin origin, Expr * EvalState::parse(
const PathView path, const PathView basePath, std::shared_ptr<StaticEnv> & staticEnv) char * text,
size_t length,
Pos::Origin origin,
Path basePath,
std::shared_ptr<StaticEnv> & staticEnv)
{ {
yyscan_t scanner; yyscan_t scanner;
std::string file; ParseData data {
switch (origin) { .state = *this,
case foFile: .symbols = symbols,
file = path; .basePath = std::move(basePath),
break; .origin = {origin},
case foStdin: };
case foString:
file = text;
break;
default:
assert(false);
}
ParseData data(*this, {file, origin});
data.basePath = basePath;
yylex_init(&scanner); yylex_init(&scanner);
yy_scan_buffer(text, length, scanner); yy_scan_buffer(text, length, scanner);
@ -717,14 +709,15 @@ Expr * EvalState::parseExprFromFile(const Path & path, std::shared_ptr<StaticEnv
auto buffer = readFile(path); auto buffer = readFile(path);
// readFile should have left some extra space for terminators // readFile should have left some extra space for terminators
buffer.append("\0\0", 2); buffer.append("\0\0", 2);
return parse(buffer.data(), buffer.size(), foFile, path, dirOf(path), staticEnv); return parse(buffer.data(), buffer.size(), path, dirOf(path), staticEnv);
} }
Expr * EvalState::parseExprFromString(std::string s, const Path & basePath, std::shared_ptr<StaticEnv> & staticEnv) Expr * EvalState::parseExprFromString(std::string s_, const Path & basePath, std::shared_ptr<StaticEnv> & staticEnv)
{ {
s.append("\0\0", 2); auto s = make_ref<std::string>(std::move(s_));
return parse(s.data(), s.size(), foString, "", basePath, staticEnv); s->append("\0\0", 2);
return parse(s->data(), s->size(), Pos::String{.source = s}, basePath, staticEnv);
} }
@ -740,7 +733,8 @@ Expr * EvalState::parseStdin()
auto buffer = drainFD(0); auto buffer = drainFD(0);
// drainFD should have left some extra space for terminators // drainFD should have left some extra space for terminators
buffer.append("\0\0", 2); buffer.append("\0\0", 2);
return parse(buffer.data(), buffer.size(), foStdin, "", absPath("."), staticBaseEnv); auto s = make_ref<std::string>(std::move(buffer));
return parse(s->data(), s->size(), Pos::Stdin{.source = s}, absPath("."), staticBaseEnv);
} }
@ -805,17 +799,28 @@ std::pair<bool, std::string> EvalState::resolveSearchPathElem(const SearchPathEl
std::pair<bool, std::string> res; std::pair<bool, std::string> res;
if (isUri(elem.second)) { if (EvalSettings::isPseudoUrl(elem.second)) {
try { try {
res = { true, store->toRealPath(fetchers::downloadTarball( auto storePath = fetchers::downloadTarball(
store, resolveUri(elem.second), "source", false).first.storePath) }; store, EvalSettings::resolvePseudoUrl(elem.second), "source", false).first.storePath;
res = { true, store->toRealPath(storePath) };
} catch (FileTransferError & e) { } catch (FileTransferError & e) {
logWarning({ logWarning({
.msg = hintfmt("Nix search path entry '%1%' cannot be downloaded, ignoring", elem.second) .msg = hintfmt("Nix search path entry '%1%' cannot be downloaded, ignoring", elem.second)
}); });
res = { false, "" }; res = { false, "" };
} }
} else { }
else if (hasPrefix(elem.second, "flake:")) {
settings.requireExperimentalFeature(Xp::Flakes);
auto flakeRef = parseFlakeRef(elem.second.substr(6), {}, true, false);
debug("fetching flake search path element '%s''", elem.second);
auto storePath = flakeRef.resolve(store).fetchTree(store).first.storePath;
res = { true, store->toRealPath(storePath) };
}
else {
auto path = absPath(elem.second); auto path = absPath(elem.second);
if (pathExists(path)) if (pathExists(path))
res = { true, path }; res = { true, path };
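The new `flake:` branch above lets a Nix search path entry refer to a flake, which is resolved through the registry and fetched into the store. A minimal sketch of how such an entry might be used; the `nixpkgs=flake:nixpkgs` entry is an illustrative assumption, not something added by this diff:

```nix
# Assuming the search path contains an entry like `nixpkgs=flake:nixpkgs`
# (e.g. via the nix-path setting or the NIX_PATH environment variable),
# <nixpkgs> now resolves by fetching the nixpkgs flake instead of a channel.
import <nixpkgs> { }
```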

View file

@ -5,14 +5,15 @@
#include "globals.hh" #include "globals.hh"
#include "json-to-value.hh" #include "json-to-value.hh"
#include "names.hh" #include "names.hh"
#include "references.hh"
#include "store-api.hh" #include "store-api.hh"
#include "util.hh" #include "util.hh"
#include "json.hh"
#include "value-to-json.hh" #include "value-to-json.hh"
#include "value-to-xml.hh" #include "value-to-xml.hh"
#include "primops.hh" #include "primops.hh"
#include <boost/container/small_vector.hpp> #include <boost/container/small_vector.hpp>
#include <nlohmann/json.hpp>
#include <sys/types.h> #include <sys/types.h>
#include <sys/stat.h> #include <sys/stat.h>
@ -361,8 +362,7 @@ void prim_exec(EvalState & state, const PosIdx pos, Value * * args, Value & v)
auto output = runProgram(program, true, commandArgs); auto output = runProgram(program, true, commandArgs);
Expr * parsed; Expr * parsed;
try { try {
auto base = state.positions[pos]; parsed = state.parseExprFromString(std::move(output), "/");
parsed = state.parseExprFromString(std::move(output), base.file);
} catch (Error & e) { } catch (Error & e) {
e.addTrace(state.positions[pos], "while parsing the output from '%1%'", program); e.addTrace(state.positions[pos], "while parsing the output from '%1%'", program);
throw; throw;
@ -585,7 +585,7 @@ struct CompareValues
state.error("cannot compare %s with %s; values of that type are incomparable", showType(*v1), showType(*v2)).debugThrow<EvalError>(); state.error("cannot compare %s with %s; values of that type are incomparable", showType(*v1), showType(*v2)).debugThrow<EvalError>();
} }
} catch (Error & e) { } catch (Error & e) {
e.addTrace(std::nullopt, errorCtx); e.addTrace(nullptr, errorCtx);
throw; throw;
} }
} }
@ -788,7 +788,7 @@ static void prim_addErrorContext(EvalState & state, const PosIdx pos, Value * *
v = *args[1]; v = *args[1];
} catch (Error & e) { } catch (Error & e) {
PathSet context; PathSet context;
e.addTrace(std::nullopt, state.coerceToString(pos, *args[0], context, e.addTrace(nullptr, state.coerceToString(pos, *args[0], context,
"while evaluating the error message passed to builtins.addErrorContext").toOwned()); "while evaluating the error message passed to builtins.addErrorContext").toOwned());
throw; throw;
} }
@ -1003,6 +1003,7 @@ static void prim_second(EvalState & state, const PosIdx pos, Value * * args, Val
derivation. */ derivation. */
static void prim_derivationStrict(EvalState & state, const PosIdx pos, Value * * args, Value & v) static void prim_derivationStrict(EvalState & state, const PosIdx pos, Value * * args, Value & v)
{ {
using nlohmann::json;
state.forceAttrs(*args[0], pos, "while evaluating the argument passed to builtins.derivationStrict"); state.forceAttrs(*args[0], pos, "while evaluating the argument passed to builtins.derivationStrict");
/* Figure out the name first (for stack backtraces). */ /* Figure out the name first (for stack backtraces). */
@ -1018,11 +1019,10 @@ static void prim_derivationStrict(EvalState & state, const PosIdx pos, Value * *
} }
/* Check whether attributes should be passed as a JSON file. */ /* Check whether attributes should be passed as a JSON file. */
std::ostringstream jsonBuf; std::optional<json> jsonObject;
std::unique_ptr<JSONObject> jsonObject;
attr = args[0]->attrs->find(state.sStructuredAttrs); attr = args[0]->attrs->find(state.sStructuredAttrs);
if (attr != args[0]->attrs->end() && state.forceBool(*attr->value, pos, "while evaluating the `__structuredAttrs` attribute passed to builtins.derivationStrict")) if (attr != args[0]->attrs->end() && state.forceBool(*attr->value, pos, "while evaluating the `__structuredAttrs` attribute passed to builtins.derivationStrict"))
jsonObject = std::make_unique<JSONObject>(jsonBuf); jsonObject = json::object();
/* Check whether null attributes should be ignored. */ /* Check whether null attributes should be ignored. */
bool ignoreNulls = false; bool ignoreNulls = false;
@ -1128,8 +1128,7 @@ static void prim_derivationStrict(EvalState & state, const PosIdx pos, Value * *
if (i->name == state.sStructuredAttrs) continue; if (i->name == state.sStructuredAttrs) continue;
auto placeholder(jsonObject->placeholder(key)); (*jsonObject)[key] = printValueAsJSON(state, true, *i->value, pos, context);
printValueAsJSON(state, true, *i->value, pos, placeholder, context);
if (i->name == state.sBuilder) if (i->name == state.sBuilder)
drv.builder = state.forceString(*i->value, context, posDrvName, "while evaluating the `builder` attribute passed to builtins.derivationStrict"); drv.builder = state.forceString(*i->value, context, posDrvName, "while evaluating the `builder` attribute passed to builtins.derivationStrict");
@ -1173,8 +1172,8 @@ static void prim_derivationStrict(EvalState & state, const PosIdx pos, Value * *
} }
if (jsonObject) { if (jsonObject) {
drv.env.emplace("__json", jsonObject->dump());
jsonObject.reset(); jsonObject.reset();
drv.env.emplace("__json", jsonBuf.str());
} }
/* Everything in the context of the strings in the derivation /* Everything in the context of the strings in the derivation
@ -1533,6 +1532,10 @@ static void prim_readFile(EvalState & state, const PosIdx pos, Value * * args, V
refs = state.store->queryPathInfo(state.store->toStorePath(path).first)->references; refs = state.store->queryPathInfo(state.store->toStorePath(path).first)->references;
} catch (Error &) { // FIXME: should be InvalidPathError } catch (Error &) { // FIXME: should be InvalidPathError
} }
// Re-scan references to filter down to just the ones that actually occur in the file.
auto refsSink = PathRefScanSink::fromPaths(refs);
refsSink << s;
refs = refsSink.getResultPaths();
} }
auto context = state.store->printStorePathSet(refs); auto context = state.store->printStorePathSet(refs);
v.mkString(s, context); v.mkString(s, context);
@ -1913,8 +1916,8 @@ static RegisterPrimOp primop_toFile({
"; ";
``` ```
Note that `${configFile}` is an [antiquotation](language-values.md), so the result of the
Note that `${configFile}` is a [string interpolation](language/values.md#type-string), so the result of the
expression `configFile`
(i.e., a path like `/nix/store/m7p7jfny445k...-foo.conf`) will be
spliced into the resulting string.
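A minimal self-contained sketch of that splicing, separate from the larger example above (the `foo.conf` name and contents are illustrative):

```nix
let
  configFile = builtins.toFile "foo.conf" "x = 1\n";
in
  # evaluates to a string of the form "include /nix/store/<hash>-foo.conf"
  "include ${configFile}"
```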
@ -2379,12 +2382,18 @@ static RegisterPrimOp primop_listToAttrs({
Construct a set from a list specifying the names and values of each
attribute. Each element of the list should be a set consisting of a
string-valued attribute `name` specifying the name of the attribute,
and an attribute `value` specifying its value. Example:
and an attribute `value` specifying its value.

In case of duplicate occurrences of the same name, the first
takes precedence.

Example:

```nix
builtins.listToAttrs
  [ { name = "foo"; value = 123; }
    { name = "bar"; value = 456; }
    { name = "bar"; value = 420; }
  ]
```
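Given the first-occurrence rule described above, the example evaluates to an attribute set equivalent to:

```nix
{ foo = 123; bar = 456; }
```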
@ -2402,12 +2411,62 @@ static void prim_intersectAttrs(EvalState & state, const PosIdx pos, Value * * a
state.forceAttrs(*args[0], pos, "while evaluating the first argument passed to builtins.intersectAttrs"); state.forceAttrs(*args[0], pos, "while evaluating the first argument passed to builtins.intersectAttrs");
state.forceAttrs(*args[1], pos, "while evaluating the second argument passed to builtins.intersectAttrs"); state.forceAttrs(*args[1], pos, "while evaluating the second argument passed to builtins.intersectAttrs");
auto attrs = state.buildBindings(std::min(args[0]->attrs->size(), args[1]->attrs->size()));
for (auto & i : *args[0]->attrs) {
    Bindings::iterator j = args[1]->attrs->find(i.name);
    if (j != args[1]->attrs->end())
        attrs.insert(*j);

Bindings &left = *args[0]->attrs;
Bindings &right = *args[1]->attrs;
auto attrs = state.buildBindings(std::min(left.size(), right.size()));

// The current implementation has good asymptotic complexity and is reasonably
// simple. Further optimization may be possible, but does not seem productive,
// considering the state of eval performance in 2022.
//
// I have looked for reusable and/or standard solutions and these are my
// findings:
//
// STL
// ===
// std::set_intersection is not suitable, as it only performs a simultaneous
// linear scan; not taking advantage of random access. This is O(n + m), so
// linear in the largest set, which is not acceptable for callPackage in Nixpkgs.
//
// Simultaneous scan, with alternating simple binary search
// ===
// One alternative algorithm scans the attrsets simultaneously, jumping
// forward using `lower_bound` in case of inequality. This should perform
// well on very similar sets, having a local and predictable access pattern.
// On dissimilar sets, it seems to need more comparisons than the current
// algorithm, as few consecutive attrs match. `lower_bound` could take
// advantage of the decreasing remaining search space, but this causes
// the medians to move, which can mean that they don't stay in the cache
// like they would with the current naive `find`.
//
// Double binary search
// ===
// The optimal algorithm may be "Double binary search", which doesn't
// scan at all, but rather divides both sets simultaneously.
// See "Fast Intersection Algorithms for Sorted Sequences" by Baeza-Yates et al.
// https://cs.uwaterloo.ca/~ajsaling/papers/intersection_alg_app10.pdf
// The only downsides I can think of are not having a linear access pattern
// for similar sets, and having to maintain a more intricate algorithm.
//
// Adaptive
// ===
// Finally one could run try a simultaneous scan, count misses and fall back
// to double binary search when the counter hit some threshold and/or ratio.
if (left.size() < right.size()) {
for (auto & l : left) {
Bindings::iterator r = right.find(l.name);
if (r != right.end())
attrs.insert(*r);
}
}
else {
for (auto & r : right) {
Bindings::iterator l = left.find(r.name);
if (l != left.end())
attrs.insert(r);
}
} }
v.mkAttrs(attrs.alreadySorted()); v.mkAttrs(attrs.alreadySorted());
@ -2419,6 +2478,8 @@ static RegisterPrimOp primop_intersectAttrs({
.doc = R"( .doc = R"(
Return a set consisting of the attributes in the set *e2* which have the
same name as some attribute in *e1*.

Performs in O(*n* log *m*) where *n* is the size of the smaller set and *m* the larger set's size.
)", )",
.fun = prim_intersectAttrs, .fun = prim_intersectAttrs,
}); });
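A small usage sketch of the documented semantics (attribute values always come from the second set):

```nix
builtins.intersectAttrs { a = 1; b = 2; } { b = 10; c = 20; }
# => { b = 10; }
```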
@ -3999,7 +4060,7 @@ void EvalState::createBaseEnv()
// the parser needs two NUL bytes as terminators; one of them // the parser needs two NUL bytes as terminators; one of them
// is implied by being a C string. // is implied by being a C string.
"\0"; "\0";
eval(parse(code, sizeof(code), foFile, derivationNixPath, "/", staticBaseEnv), *vDerivation); eval(parse(code, sizeof(code), derivationNixPath, "/", staticBaseEnv), *vDerivation);
} }

View file

@ -218,8 +218,6 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
} else } else
url = state.forceStringNoCtx(*args[0], pos, "while evaluating the url we should fetch"); url = state.forceStringNoCtx(*args[0], pos, "while evaluating the url we should fetch");
url = resolveUri(*url);
state.checkURI(*url); state.checkURI(*url);
if (name == "") if (name == "")

View file

@ -12,6 +12,7 @@ namespace nix {
class LibExprTest : public ::testing::Test { class LibExprTest : public ::testing::Test {
public: public:
static void SetUpTestSuite() { static void SetUpTestSuite() {
initLibStore();
initGC(); initGC();
} }
@ -123,7 +124,7 @@ namespace nix {
MATCHER_P(IsAttrsOfSize, n, fmt("Is a set of size [%1%]", n)) { MATCHER_P(IsAttrsOfSize, n, fmt("Is a set of size [%1%]", n)) {
if (arg.type() != nAttrs) { if (arg.type() != nAttrs) {
*result_listener << "Expexted set got " << arg.type(); *result_listener << "Expected set got " << arg.type();
return false; return false;
} else if (arg.attrs->size() != (size_t)n) { } else if (arg.attrs->size() != (size_t)n) {
*result_listener << "Expected a set with " << n << " attributes but got " << arg.attrs->size(); *result_listener << "Expected a set with " << n << " attributes but got " << arg.attrs->size();

View file

@ -151,20 +151,7 @@ namespace nix {
// The `y` attribute is at position // The `y` attribute is at position
const char* expr = "builtins.unsafeGetAttrPos \"y\" { y = \"x\"; }"; const char* expr = "builtins.unsafeGetAttrPos \"y\" { y = \"x\"; }";
auto v = eval(expr); auto v = eval(expr);
ASSERT_THAT(v, IsAttrsOfSize(3)); ASSERT_THAT(v, IsNull());
auto file = v.attrs->find(createSymbol("file"));
ASSERT_NE(file, nullptr);
// FIXME: The file when running these tests is the input string?!?
ASSERT_THAT(*file->value, IsStringEq(expr));
auto line = v.attrs->find(createSymbol("line"));
ASSERT_NE(line, nullptr);
ASSERT_THAT(*line->value, IsIntEq(1));
auto column = v.attrs->find(createSymbol("column"));
ASSERT_NE(column, nullptr);
ASSERT_THAT(*column->value, IsIntEq(33));
} }
TEST_F(PrimOpTest, hasAttr) { TEST_F(PrimOpTest, hasAttr) {
@ -617,7 +604,7 @@ namespace nix {
TEST_F(PrimOpTest, storeDir) { TEST_F(PrimOpTest, storeDir) {
auto v = eval("builtins.storeDir"); auto v = eval("builtins.storeDir");
ASSERT_THAT(v, IsStringEq("/nix/store")); ASSERT_THAT(v, IsStringEq(settings.nixStore));
} }
TEST_F(PrimOpTest, nixVersion) { TEST_F(PrimOpTest, nixVersion) {

View file

@ -1,84 +1,82 @@
#include "value-to-json.hh" #include "value-to-json.hh"
#include "json.hh"
#include "eval-inline.hh" #include "eval-inline.hh"
#include "util.hh" #include "util.hh"
#include <cstdlib> #include <cstdlib>
#include <iomanip> #include <iomanip>
#include <nlohmann/json.hpp>
namespace nix { namespace nix {
using json = nlohmann::json;
void printValueAsJSON(EvalState & state, bool strict, json printValueAsJSON(EvalState & state, bool strict,
Value & v, const PosIdx pos, JSONPlaceholder & out, PathSet & context, bool copyToStore) Value & v, const PosIdx pos, PathSet & context, bool copyToStore)
{ {
checkInterrupt(); checkInterrupt();
if (strict) state.forceValue(v, pos); if (strict) state.forceValue(v, pos);
json out;
switch (v.type()) { switch (v.type()) {
case nInt: case nInt:
out.write(v.integer); out = v.integer;
break; break;
case nBool: case nBool:
out.write(v.boolean); out = v.boolean;
break; break;
case nString: case nString:
copyContext(v, context); copyContext(v, context);
out.write(v.string.s); out = v.string.s;
break; break;
case nPath: case nPath:
if (copyToStore) if (copyToStore)
out.write(state.copyPathToStore(context, v.path)); out = state.copyPathToStore(context, v.path);
else else
out.write(v.path); out = v.path;
break; break;
case nNull: case nNull:
out.write(nullptr);
break; break;
case nAttrs: { case nAttrs: {
auto maybeString = state.tryAttrsToString(pos, v, context, false, false); auto maybeString = state.tryAttrsToString(pos, v, context, false, false);
if (maybeString) { if (maybeString) {
out.write(*maybeString); out = *maybeString;
break; break;
} }
auto i = v.attrs->find(state.sOutPath); auto i = v.attrs->find(state.sOutPath);
if (i == v.attrs->end()) { if (i == v.attrs->end()) {
auto obj(out.object()); out = json::object();
StringSet names; StringSet names;
for (auto & j : *v.attrs) for (auto & j : *v.attrs)
names.emplace(state.symbols[j.name]); names.emplace(state.symbols[j.name]);
for (auto & j : names) { for (auto & j : names) {
Attr & a(*v.attrs->find(state.symbols.create(j))); Attr & a(*v.attrs->find(state.symbols.create(j)));
auto placeholder(obj.placeholder(j)); out[j] = printValueAsJSON(state, strict, *a.value, a.pos, context, copyToStore);
printValueAsJSON(state, strict, *a.value, a.pos, placeholder, context, copyToStore);
} }
} else } else
printValueAsJSON(state, strict, *i->value, i->pos, out, context, copyToStore); return printValueAsJSON(state, strict, *i->value, i->pos, context, copyToStore);
break; break;
} }
case nList: { case nList: {
auto list(out.list()); out = json::array();
for (auto elem : v.listItems()) { for (auto elem : v.listItems())
auto placeholder(list.placeholder()); out.push_back(printValueAsJSON(state, strict, *elem, pos, context, copyToStore));
printValueAsJSON(state, strict, *elem, pos, placeholder, context, copyToStore);
}
break; break;
} }
case nExternal: case nExternal:
v.external->printValueAsJSON(state, strict, out, context, copyToStore); return v.external->printValueAsJSON(state, strict, context, copyToStore);
break; break;
case nFloat: case nFloat:
out.write(v.fpoint); out = v.fpoint;
break; break;
case nThunk: case nThunk:
@ -91,17 +89,17 @@ void printValueAsJSON(EvalState & state, bool strict,
state.debugThrowLastTrace(e); state.debugThrowLastTrace(e);
throw e; throw e;
} }
return out;
} }
void printValueAsJSON(EvalState & state, bool strict, void printValueAsJSON(EvalState & state, bool strict,
Value & v, const PosIdx pos, std::ostream & str, PathSet & context, bool copyToStore) Value & v, const PosIdx pos, std::ostream & str, PathSet & context, bool copyToStore)
{ {
JSONPlaceholder out(str); str << printValueAsJSON(state, strict, v, pos, context, copyToStore);
printValueAsJSON(state, strict, v, pos, out, context, copyToStore);
} }
void ExternalValueBase::printValueAsJSON(EvalState & state, bool strict, json ExternalValueBase::printValueAsJSON(EvalState & state, bool strict,
JSONPlaceholder & out, PathSet & context, bool copyToStore) const PathSet & context, bool copyToStore) const
{ {
state.debugThrowLastTrace(TypeError("cannot convert %1% to JSON", showType())); state.debugThrowLastTrace(TypeError("cannot convert %1% to JSON", showType()));
} }

View file

@ -5,13 +5,12 @@
#include <string> #include <string>
#include <map> #include <map>
#include <nlohmann/json_fwd.hpp>
namespace nix { namespace nix {
class JSONPlaceholder; nlohmann::json printValueAsJSON(EvalState & state, bool strict,
Value & v, const PosIdx pos, PathSet & context, bool copyToStore = true);
void printValueAsJSON(EvalState & state, bool strict,
Value & v, const PosIdx pos, JSONPlaceholder & out, PathSet & context, bool copyToStore = true);
void printValueAsJSON(EvalState & state, bool strict, void printValueAsJSON(EvalState & state, bool strict,
Value & v, const PosIdx pos, std::ostream & str, PathSet & context, bool copyToStore = true); Value & v, const PosIdx pos, std::ostream & str, PathSet & context, bool copyToStore = true);

View file

@ -24,7 +24,8 @@ static void printValueAsXML(EvalState & state, bool strict, bool location,
static void posToXML(EvalState & state, XMLAttrs & xmlAttrs, const Pos & pos) static void posToXML(EvalState & state, XMLAttrs & xmlAttrs, const Pos & pos)
{ {
xmlAttrs["path"] = pos.file; if (auto path = std::get_if<Path>(&pos.origin))
xmlAttrs["path"] = *path;
xmlAttrs["line"] = (format("%1%") % pos.line).str(); xmlAttrs["line"] = (format("%1%") % pos.line).str();
xmlAttrs["column"] = (format("%1%") % pos.column).str(); xmlAttrs["column"] = (format("%1%") % pos.column).str();
} }

View file

@ -7,6 +7,7 @@
#if HAVE_BOEHMGC #if HAVE_BOEHMGC
#include <gc/gc_allocator.h> #include <gc/gc_allocator.h>
#endif #endif
#include <nlohmann/json_fwd.hpp>
namespace nix { namespace nix {
@ -62,7 +63,6 @@ class StorePath;
class Store; class Store;
class EvalState; class EvalState;
class XMLWriter; class XMLWriter;
class JSONPlaceholder;
typedef int64_t NixInt; typedef int64_t NixInt;
@ -98,8 +98,8 @@ class ExternalValueBase
virtual bool operator ==(const ExternalValueBase & b) const; virtual bool operator ==(const ExternalValueBase & b) const;
/* Print the value as JSON. Defaults to unconvertable, i.e. throws an error */ /* Print the value as JSON. Defaults to unconvertable, i.e. throws an error */
virtual void printValueAsJSON(EvalState & state, bool strict, virtual nlohmann::json printValueAsJSON(EvalState & state, bool strict,
JSONPlaceholder & out, PathSet & context, bool copyToStore = true) const; PathSet & context, bool copyToStore = true) const;
/* Print the value as XML. Defaults to unevaluated */ /* Print the value as XML. Defaults to unevaluated */
virtual void printValueAsXML(EvalState & state, bool strict, bool location, virtual void printValueAsXML(EvalState & state, bool strict, bool location,

View file

@ -71,7 +71,12 @@ struct FetchSettings : public Config
"Whether to warn about dirty Git/Mercurial trees."}; "Whether to warn about dirty Git/Mercurial trees."};
Setting<std::string> flakeRegistry{this, "https://channels.nixos.org/flake-registry.json", "flake-registry", Setting<std::string> flakeRegistry{this, "https://channels.nixos.org/flake-registry.json", "flake-registry",
"Path or URI of the global flake registry."}; R"(
Path or URI of the global flake registry.
When empty, disables the global flake registry.
)"};
Setting<bool> useRegistries{this, true, "use-registries", Setting<bool> useRegistries{this, true, "use-registries",
"Whether to use flake registries to resolve flake references."}; "Whether to use flake registries to resolve flake references."};

View file

@ -266,7 +266,7 @@ std::optional<time_t> Input::getLastModified() const
return {}; return {};
} }
ParsedURL InputScheme::toURL(const Input & input) ParsedURL InputScheme::toURL(const Input & input) const
{ {
throw Error("don't know how to convert input '%s' to a URL", attrsToJSON(input.attrs)); throw Error("don't know how to convert input '%s' to a URL", attrsToJSON(input.attrs));
} }
@ -274,7 +274,7 @@ ParsedURL InputScheme::toURL(const Input & input)
Input InputScheme::applyOverrides( Input InputScheme::applyOverrides(
const Input & input, const Input & input,
std::optional<std::string> ref, std::optional<std::string> ref,
std::optional<Hash> rev) std::optional<Hash> rev) const
{ {
if (ref) if (ref)
throw Error("don't know how to set branch/tag name of input '%s' to '%s'", input.to_string(), *ref); throw Error("don't know how to set branch/tag name of input '%s' to '%s'", input.to_string(), *ref);
@ -293,7 +293,7 @@ void InputScheme::markChangedFile(const Input & input, std::string_view file, st
assert(false); assert(false);
} }
void InputScheme::clone(const Input & input, const Path & destDir) void InputScheme::clone(const Input & input, const Path & destDir) const
{ {
throw Error("do not know how to clone input '%s'", input.to_string()); throw Error("do not know how to clone input '%s'", input.to_string());
} }

View file

@ -107,26 +107,25 @@ public:
* recognized. The Input object contains the information the fetcher * recognized. The Input object contains the information the fetcher
* needs to actually perform the "fetch()" when called. * needs to actually perform the "fetch()" when called.
*/ */
struct InputScheme struct InputScheme
{ {
virtual ~InputScheme() virtual ~InputScheme()
{ } { }
virtual std::optional<Input> inputFromURL(const ParsedURL & url) = 0; virtual std::optional<Input> inputFromURL(const ParsedURL & url) const = 0;
virtual std::optional<Input> inputFromAttrs(const Attrs & attrs) = 0; virtual std::optional<Input> inputFromAttrs(const Attrs & attrs) const = 0;
virtual ParsedURL toURL(const Input & input); virtual ParsedURL toURL(const Input & input) const;
virtual bool hasAllInfo(const Input & input) = 0; virtual bool hasAllInfo(const Input & input) const = 0;
virtual Input applyOverrides( virtual Input applyOverrides(
const Input & input, const Input & input,
std::optional<std::string> ref, std::optional<std::string> ref,
std::optional<Hash> rev); std::optional<Hash> rev) const;
virtual void clone(const Input & input, const Path & destDir); virtual void clone(const Input & input, const Path & destDir) const;
virtual std::optional<Path> getSourcePath(const Input & input); virtual std::optional<Path> getSourcePath(const Input & input);

View file

@ -18,6 +18,7 @@
using namespace std::string_literals; using namespace std::string_literals;
namespace nix::fetchers { namespace nix::fetchers {
namespace { namespace {
// Explicit initial branch of our bare repo to suppress warnings from new version of git. // Explicit initial branch of our bare repo to suppress warnings from new version of git.
@ -26,12 +27,12 @@ namespace {
// old version of git, which will ignore unrecognized `-c` options. // old version of git, which will ignore unrecognized `-c` options.
const std::string gitInitialBranch = "__nix_dummy_branch"; const std::string gitInitialBranch = "__nix_dummy_branch";
bool isCacheFileWithinTtl(const time_t now, const struct stat & st) bool isCacheFileWithinTtl(time_t now, const struct stat & st)
{ {
return st.st_mtime + settings.tarballTtl > now; return st.st_mtime + settings.tarballTtl > now;
} }
bool touchCacheFile(const Path& path, const time_t& touch_time) bool touchCacheFile(const Path & path, time_t touch_time)
{ {
struct timeval times[2]; struct timeval times[2];
times[0].tv_sec = touch_time; times[0].tv_sec = touch_time;
@ -42,7 +43,7 @@ bool touchCacheFile(const Path& path, const time_t& touch_time)
return lutimes(path.c_str(), times) == 0; return lutimes(path.c_str(), times) == 0;
} }
Path getCachePath(std::string key) Path getCachePath(std::string_view key)
{ {
return getCacheDir() + "/nix/gitv3/" + return getCacheDir() + "/nix/gitv3/" +
hashString(htSHA256, key).to_string(Base32, false); hashString(htSHA256, key).to_string(Base32, false);
@ -57,13 +58,12 @@ Path getCachePath(std::string key)
// ... // ...
std::optional<std::string> readHead(const Path & path) std::optional<std::string> readHead(const Path & path)
{ {
auto [exit_code, output] = runProgram(RunOptions { auto [status, output] = runProgram(RunOptions {
.program = "git", .program = "git",
// FIXME: use 'HEAD' to avoid returning all refs
.args = {"ls-remote", "--symref", path}, .args = {"ls-remote", "--symref", path},
}); });
if (exit_code != 0) { if (status != 0) return std::nullopt;
return std::nullopt;
}
std::string_view line = output; std::string_view line = output;
line = line.substr(0, line.find("\n")); line = line.substr(0, line.find("\n"));
@ -82,12 +82,11 @@ std::optional<std::string> readHead(const Path & path)
} }
// Persist the HEAD ref from the remote repo in the local cached repo. // Persist the HEAD ref from the remote repo in the local cached repo.
bool storeCachedHead(const std::string& actualUrl, const std::string& headRef) bool storeCachedHead(const std::string & actualUrl, const std::string & headRef)
{ {
Path cacheDir = getCachePath(actualUrl); Path cacheDir = getCachePath(actualUrl);
auto gitDir = ".";
try { try {
runProgram("git", true, { "-C", cacheDir, "--git-dir", gitDir, "symbolic-ref", "--", "HEAD", headRef }); runProgram("git", true, { "-C", cacheDir, "--git-dir", ".", "symbolic-ref", "--", "HEAD", headRef });
} catch (ExecError &e) { } catch (ExecError &e) {
if (!WIFEXITED(e.status)) throw; if (!WIFEXITED(e.status)) throw;
return false; return false;
@ -96,7 +95,7 @@ bool storeCachedHead(const std::string& actualUrl, const std::string& headRef)
return true; return true;
} }
std::optional<std::string> readHeadCached(const std::string& actualUrl) std::optional<std::string> readHeadCached(const std::string & actualUrl)
{ {
// Create a cache path to store the branch of the HEAD ref. Append something // Create a cache path to store the branch of the HEAD ref. Append something
// in front of the URL to prevent collision with the repository itself. // in front of the URL to prevent collision with the repository itself.
@ -110,16 +109,15 @@ std::optional<std::string> readHeadCached(const std::string& actualUrl)
cachedRef = readHead(cacheDir); cachedRef = readHead(cacheDir);
if (cachedRef != std::nullopt && if (cachedRef != std::nullopt &&
*cachedRef != gitInitialBranch && *cachedRef != gitInitialBranch &&
isCacheFileWithinTtl(now, st)) { isCacheFileWithinTtl(now, st))
{
debug("using cached HEAD ref '%s' for repo '%s'", *cachedRef, actualUrl); debug("using cached HEAD ref '%s' for repo '%s'", *cachedRef, actualUrl);
return cachedRef; return cachedRef;
} }
} }
auto ref = readHead(actualUrl); auto ref = readHead(actualUrl);
if (ref) { if (ref) return ref;
return ref;
}
if (cachedRef) { if (cachedRef) {
// If the cached git ref is expired in fetch() below, and the 'git fetch' // If the cached git ref is expired in fetch() below, and the 'git fetch'
@ -250,7 +248,7 @@ std::pair<StorePath, Input> fetchFromWorkdir(ref<Store> store, Input & input, co
struct GitInputScheme : InputScheme struct GitInputScheme : InputScheme
{ {
std::optional<Input> inputFromURL(const ParsedURL & url) override std::optional<Input> inputFromURL(const ParsedURL & url) const override
{ {
if (url.scheme != "git" && if (url.scheme != "git" &&
url.scheme != "git+http" && url.scheme != "git+http" &&
@ -265,7 +263,7 @@ struct GitInputScheme : InputScheme
Attrs attrs; Attrs attrs;
attrs.emplace("type", "git"); attrs.emplace("type", "git");
for (auto &[name, value] : url.query) { for (auto & [name, value] : url.query) {
if (name == "rev" || name == "ref") if (name == "rev" || name == "ref")
attrs.emplace(name, value); attrs.emplace(name, value);
else if (name == "shallow" || name == "submodules") else if (name == "shallow" || name == "submodules")
@ -279,7 +277,7 @@ struct GitInputScheme : InputScheme
return inputFromAttrs(attrs); return inputFromAttrs(attrs);
} }
std::optional<Input> inputFromAttrs(const Attrs & attrs) override std::optional<Input> inputFromAttrs(const Attrs & attrs) const override
{ {
if (maybeGetStrAttr(attrs, "type") != "git") return {}; if (maybeGetStrAttr(attrs, "type") != "git") return {};
@ -302,7 +300,7 @@ struct GitInputScheme : InputScheme
return input; return input;
} }
ParsedURL toURL(const Input & input) override ParsedURL toURL(const Input & input) const override
{ {
auto url = parseURL(getStrAttr(input.attrs, "url")); auto url = parseURL(getStrAttr(input.attrs, "url"));
if (url.scheme != "git") url.scheme = "git+" + url.scheme; if (url.scheme != "git") url.scheme = "git+" + url.scheme;
@ -313,7 +311,7 @@ struct GitInputScheme : InputScheme
return url; return url;
} }
bool hasAllInfo(const Input & input) override bool hasAllInfo(const Input & input) const override
{ {
bool maybeDirty = !input.getRef(); bool maybeDirty = !input.getRef();
bool shallow = maybeGetBoolAttr(input.attrs, "shallow").value_or(false); bool shallow = maybeGetBoolAttr(input.attrs, "shallow").value_or(false);
@ -325,7 +323,7 @@ struct GitInputScheme : InputScheme
Input applyOverrides( Input applyOverrides(
const Input & input, const Input & input,
std::optional<std::string> ref, std::optional<std::string> ref,
std::optional<Hash> rev) override std::optional<Hash> rev) const override
{ {
auto res(input); auto res(input);
if (rev) res.attrs.insert_or_assign("rev", rev->gitRev()); if (rev) res.attrs.insert_or_assign("rev", rev->gitRev());
@ -335,7 +333,7 @@ struct GitInputScheme : InputScheme
return res; return res;
} }
void clone(const Input & input, const Path & destDir) override void clone(const Input & input, const Path & destDir) const override
{ {
auto [isLocal, actualUrl] = getActualUrl(input); auto [isLocal, actualUrl] = getActualUrl(input);
@ -485,6 +483,10 @@ struct GitInputScheme : InputScheme
} }
input.attrs.insert_or_assign("ref", *head); input.attrs.insert_or_assign("ref", *head);
unlockedAttrs.insert_or_assign("ref", *head); unlockedAttrs.insert_or_assign("ref", *head);
} else {
if (!input.getRev()) {
unlockedAttrs.insert_or_assign("ref", input.getRef().value());
}
} }
if (auto res = getCache()->lookup(store, unlockedAttrs)) { if (auto res = getCache()->lookup(store, unlockedAttrs)) {

View file

@ -26,11 +26,11 @@ std::regex hostRegex(hostRegexS, std::regex::ECMAScript);
struct GitArchiveInputScheme : InputScheme struct GitArchiveInputScheme : InputScheme
{ {
virtual std::string type() = 0; virtual std::string type() const = 0;
virtual std::optional<std::pair<std::string, std::string>> accessHeaderFromToken(const std::string & token) const = 0; virtual std::optional<std::pair<std::string, std::string>> accessHeaderFromToken(const std::string & token) const = 0;
std::optional<Input> inputFromURL(const ParsedURL & url) override std::optional<Input> inputFromURL(const ParsedURL & url) const override
{ {
if (url.scheme != type()) return {}; if (url.scheme != type()) return {};
@ -100,7 +100,7 @@ struct GitArchiveInputScheme : InputScheme
return input; return input;
} }
std::optional<Input> inputFromAttrs(const Attrs & attrs) override std::optional<Input> inputFromAttrs(const Attrs & attrs) const override
{ {
if (maybeGetStrAttr(attrs, "type") != type()) return {}; if (maybeGetStrAttr(attrs, "type") != type()) return {};
@ -116,7 +116,7 @@ struct GitArchiveInputScheme : InputScheme
return input; return input;
} }
ParsedURL toURL(const Input & input) override ParsedURL toURL(const Input & input) const override
{ {
auto owner = getStrAttr(input.attrs, "owner"); auto owner = getStrAttr(input.attrs, "owner");
auto repo = getStrAttr(input.attrs, "repo"); auto repo = getStrAttr(input.attrs, "repo");
@ -132,7 +132,7 @@ struct GitArchiveInputScheme : InputScheme
}; };
} }
bool hasAllInfo(const Input & input) override bool hasAllInfo(const Input & input) const override
{ {
return input.getRev() && maybeGetIntAttr(input.attrs, "lastModified"); return input.getRev() && maybeGetIntAttr(input.attrs, "lastModified");
} }
@ -140,7 +140,7 @@ struct GitArchiveInputScheme : InputScheme
Input applyOverrides( Input applyOverrides(
const Input & _input, const Input & _input,
std::optional<std::string> ref, std::optional<std::string> ref,
std::optional<Hash> rev) override std::optional<Hash> rev) const override
{ {
auto input(_input); auto input(_input);
if (rev && ref) if (rev && ref)
@ -227,7 +227,7 @@ struct GitArchiveInputScheme : InputScheme
struct GitHubInputScheme : GitArchiveInputScheme struct GitHubInputScheme : GitArchiveInputScheme
{ {
std::string type() override { return "github"; } std::string type() const override { return "github"; }
std::optional<std::pair<std::string, std::string>> accessHeaderFromToken(const std::string & token) const override std::optional<std::pair<std::string, std::string>> accessHeaderFromToken(const std::string & token) const override
{ {
@ -240,14 +240,29 @@ struct GitHubInputScheme : GitArchiveInputScheme
return std::pair<std::string, std::string>("Authorization", fmt("token %s", token)); return std::pair<std::string, std::string>("Authorization", fmt("token %s", token));
} }
std::string getHost(const Input & input) const
{
return maybeGetStrAttr(input.attrs, "host").value_or("github.com");
}
std::string getOwner(const Input & input) const
{
return getStrAttr(input.attrs, "owner");
}
std::string getRepo(const Input & input) const
{
return getStrAttr(input.attrs, "repo");
}
Hash getRevFromRef(nix::ref<Store> store, const Input & input) const override Hash getRevFromRef(nix::ref<Store> store, const Input & input) const override
{ {
auto host = maybeGetStrAttr(input.attrs, "host").value_or("github.com"); auto host = getHost(input);
auto url = fmt( auto url = fmt(
host == "github.com" host == "github.com"
? "https://api.%s/repos/%s/%s/commits/%s" ? "https://api.%s/repos/%s/%s/commits/%s"
: "https://%s/api/v3/repos/%s/%s/commits/%s", : "https://%s/api/v3/repos/%s/%s/commits/%s",
host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), *input.getRef()); host, getOwner(input), getRepo(input), *input.getRef());
Headers headers = makeHeadersWithAuthTokens(host); Headers headers = makeHeadersWithAuthTokens(host);
@ -262,25 +277,30 @@ struct GitHubInputScheme : GitArchiveInputScheme
DownloadUrl getDownloadUrl(const Input & input) const override DownloadUrl getDownloadUrl(const Input & input) const override
{ {
// FIXME: use regular /archive URLs instead? api.github.com auto host = getHost(input);
// might have stricter rate limits.
auto host = maybeGetStrAttr(input.attrs, "host").value_or("github.com");
auto url = fmt(
host == "github.com"
? "https://api.%s/repos/%s/%s/tarball/%s"
: "https://%s/api/v3/repos/%s/%s/tarball/%s",
host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"),
input.getRev()->to_string(Base16, false));
Headers headers = makeHeadersWithAuthTokens(host); Headers headers = makeHeadersWithAuthTokens(host);
// If we have no auth headers then we default to the public archive
// urls so we do not run into rate limits.
const auto urlFmt =
host != "github.com"
? "https://%s/api/v3/repos/%s/%s/tarball/%s"
: headers.empty()
? "https://%s/%s/%s/archive/%s.tar.gz"
: "https://api.%s/repos/%s/%s/tarball/%s";
const auto url = fmt(urlFmt, host, getOwner(input), getRepo(input),
input.getRev()->to_string(Base16, false));
return DownloadUrl { url, headers }; return DownloadUrl { url, headers };
} }
void clone(const Input & input, const Path & destDir) override void clone(const Input & input, const Path & destDir) const override
{ {
auto host = maybeGetStrAttr(input.attrs, "host").value_or("github.com"); auto host = getHost(input);
Input::fromURL(fmt("git+https://%s/%s/%s.git", Input::fromURL(fmt("git+https://%s/%s/%s.git",
host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"))) host, getOwner(input), getRepo(input)))
.applyOverrides(input.getRef(), input.getRev()) .applyOverrides(input.getRef(), input.getRev())
.clone(destDir); .clone(destDir);
} }
@ -288,7 +308,7 @@ struct GitHubInputScheme : GitArchiveInputScheme
struct GitLabInputScheme : GitArchiveInputScheme struct GitLabInputScheme : GitArchiveInputScheme
{ {
std::string type() override { return "gitlab"; } std::string type() const override { return "gitlab"; }
std::optional<std::pair<std::string, std::string>> accessHeaderFromToken(const std::string & token) const override std::optional<std::pair<std::string, std::string>> accessHeaderFromToken(const std::string & token) const override
{ {
@ -343,7 +363,7 @@ struct GitLabInputScheme : GitArchiveInputScheme
return DownloadUrl { url, headers }; return DownloadUrl { url, headers };
} }
void clone(const Input & input, const Path & destDir) override void clone(const Input & input, const Path & destDir) const override
{ {
auto host = maybeGetStrAttr(input.attrs, "host").value_or("gitlab.com"); auto host = maybeGetStrAttr(input.attrs, "host").value_or("gitlab.com");
// FIXME: get username somewhere // FIXME: get username somewhere
@ -356,7 +376,7 @@ struct GitLabInputScheme : GitArchiveInputScheme
struct SourceHutInputScheme : GitArchiveInputScheme struct SourceHutInputScheme : GitArchiveInputScheme
{ {
std::string type() override { return "sourcehut"; } std::string type() const override { return "sourcehut"; }
std::optional<std::pair<std::string, std::string>> accessHeaderFromToken(const std::string & token) const override std::optional<std::pair<std::string, std::string>> accessHeaderFromToken(const std::string & token) const override
{ {
@ -430,7 +450,7 @@ struct SourceHutInputScheme : GitArchiveInputScheme
return DownloadUrl { url, headers }; return DownloadUrl { url, headers };
} }
void clone(const Input & input, const Path & destDir) override void clone(const Input & input, const Path & destDir) const override
{ {
auto host = maybeGetStrAttr(input.attrs, "host").value_or("git.sr.ht"); auto host = maybeGetStrAttr(input.attrs, "host").value_or("git.sr.ht");
Input::fromURL(fmt("git+https://%s/%s/%s", Input::fromURL(fmt("git+https://%s/%s/%s",


@ -7,7 +7,7 @@ std::regex flakeRegex("[a-zA-Z][a-zA-Z0-9_-]*", std::regex::ECMAScript);
struct IndirectInputScheme : InputScheme struct IndirectInputScheme : InputScheme
{ {
std::optional<Input> inputFromURL(const ParsedURL & url) override std::optional<Input> inputFromURL(const ParsedURL & url) const override
{ {
if (url.scheme != "flake") return {}; if (url.scheme != "flake") return {};
@ -50,7 +50,7 @@ struct IndirectInputScheme : InputScheme
return input; return input;
} }
std::optional<Input> inputFromAttrs(const Attrs & attrs) override std::optional<Input> inputFromAttrs(const Attrs & attrs) const override
{ {
if (maybeGetStrAttr(attrs, "type") != "indirect") return {}; if (maybeGetStrAttr(attrs, "type") != "indirect") return {};
@ -68,7 +68,7 @@ struct IndirectInputScheme : InputScheme
return input; return input;
} }
ParsedURL toURL(const Input & input) override ParsedURL toURL(const Input & input) const override
{ {
ParsedURL url; ParsedURL url;
url.scheme = "flake"; url.scheme = "flake";
@ -78,7 +78,7 @@ struct IndirectInputScheme : InputScheme
return url; return url;
} }
bool hasAllInfo(const Input & input) override bool hasAllInfo(const Input & input) const override
{ {
return false; return false;
} }
@ -86,7 +86,7 @@ struct IndirectInputScheme : InputScheme
Input applyOverrides( Input applyOverrides(
const Input & _input, const Input & _input,
std::optional<std::string> ref, std::optional<std::string> ref,
std::optional<Hash> rev) override std::optional<Hash> rev) const override
{ {
auto input(_input); auto input(_input);
if (rev) input.attrs.insert_or_assign("rev", rev->gitRev()); if (rev) input.attrs.insert_or_assign("rev", rev->gitRev());


@ -43,7 +43,7 @@ static std::string runHg(const Strings & args, const std::optional<std::string>
struct MercurialInputScheme : InputScheme struct MercurialInputScheme : InputScheme
{ {
std::optional<Input> inputFromURL(const ParsedURL & url) override std::optional<Input> inputFromURL(const ParsedURL & url) const override
{ {
if (url.scheme != "hg+http" && if (url.scheme != "hg+http" &&
url.scheme != "hg+https" && url.scheme != "hg+https" &&
@ -69,7 +69,7 @@ struct MercurialInputScheme : InputScheme
return inputFromAttrs(attrs); return inputFromAttrs(attrs);
} }
std::optional<Input> inputFromAttrs(const Attrs & attrs) override std::optional<Input> inputFromAttrs(const Attrs & attrs) const override
{ {
if (maybeGetStrAttr(attrs, "type") != "hg") return {}; if (maybeGetStrAttr(attrs, "type") != "hg") return {};
@ -89,7 +89,7 @@ struct MercurialInputScheme : InputScheme
return input; return input;
} }
ParsedURL toURL(const Input & input) override ParsedURL toURL(const Input & input) const override
{ {
auto url = parseURL(getStrAttr(input.attrs, "url")); auto url = parseURL(getStrAttr(input.attrs, "url"));
url.scheme = "hg+" + url.scheme; url.scheme = "hg+" + url.scheme;
@ -98,7 +98,7 @@ struct MercurialInputScheme : InputScheme
return url; return url;
} }
bool hasAllInfo(const Input & input) override bool hasAllInfo(const Input & input) const override
{ {
// FIXME: ugly, need to distinguish between dirty and clean // FIXME: ugly, need to distinguish between dirty and clean
// default trees. // default trees.
@ -108,7 +108,7 @@ struct MercurialInputScheme : InputScheme
Input applyOverrides( Input applyOverrides(
const Input & input, const Input & input,
std::optional<std::string> ref, std::optional<std::string> ref,
std::optional<Hash> rev) override std::optional<Hash> rev) const override
{ {
auto res(input); auto res(input);
if (rev) res.attrs.insert_or_assign("rev", rev->gitRev()); if (rev) res.attrs.insert_or_assign("rev", rev->gitRev());


@ -6,7 +6,7 @@ namespace nix::fetchers {
struct PathInputScheme : InputScheme struct PathInputScheme : InputScheme
{ {
std::optional<Input> inputFromURL(const ParsedURL & url) override std::optional<Input> inputFromURL(const ParsedURL & url) const override
{ {
if (url.scheme != "path") return {}; if (url.scheme != "path") return {};
@ -32,7 +32,7 @@ struct PathInputScheme : InputScheme
return input; return input;
} }
std::optional<Input> inputFromAttrs(const Attrs & attrs) override std::optional<Input> inputFromAttrs(const Attrs & attrs) const override
{ {
if (maybeGetStrAttr(attrs, "type") != "path") return {}; if (maybeGetStrAttr(attrs, "type") != "path") return {};
@ -54,7 +54,7 @@ struct PathInputScheme : InputScheme
return input; return input;
} }
ParsedURL toURL(const Input & input) override ParsedURL toURL(const Input & input) const override
{ {
auto query = attrsToQuery(input.attrs); auto query = attrsToQuery(input.attrs);
query.erase("path"); query.erase("path");
@ -66,7 +66,7 @@ struct PathInputScheme : InputScheme
}; };
} }
bool hasAllInfo(const Input & input) override bool hasAllInfo(const Input & input) const override
{ {
return true; return true;
} }


@ -153,6 +153,9 @@ static std::shared_ptr<Registry> getGlobalRegistry(ref<Store> store)
{ {
static auto reg = [&]() { static auto reg = [&]() {
auto path = fetchSettings.flakeRegistry.get(); auto path = fetchSettings.flakeRegistry.get();
if (path == "") {
return std::make_shared<Registry>(Registry::Global); // empty registry
}
if (!hasPrefix(path, "/")) { if (!hasPrefix(path, "/")) {
auto storePath = downloadFile(store, path, "flake-registry.json", false).storePath; auto storePath = downloadFile(store, path, "flake-registry.json", false).storePath;
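This hunk makes an empty `flake-registry` setting short-circuit to an empty global registry instead of attempting any lookup; a non-absolute value is still downloaded as flake-registry.json and an absolute path is read from disk. A hedged sketch of that three-way decision, with a placeholder Registry type and the fetch/parse steps elided:

```cpp
#include <iostream>
#include <memory>
#include <string>

// Placeholder for the real Registry type.
struct Registry
{
    enum Kind { Global } kind = Global;
    bool empty = false;
};

// Decide how to obtain the global registry from the `flake-registry` setting:
// "" -> empty registry, no network access; non-absolute -> treat as a URL to
// download; absolute path -> read the JSON file directly (both elided here).
std::shared_ptr<Registry> globalRegistryFor(const std::string & setting)
{
    if (setting.empty()) {
        auto reg = std::make_shared<Registry>();
        reg->empty = true; // nothing to resolve against
        return reg;
    }
    if (setting.front() != '/') {
        // would download `setting` as flake-registry.json and parse it
    } else {
        // would parse the local file at `setting`
    }
    return std::make_shared<Registry>();
}

int main()
{
    std::cout << (globalRegistryFor("")->empty ? "empty registry\n" : "fetched registry\n");
}
```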


@ -185,7 +185,7 @@ struct CurlInputScheme : InputScheme
virtual bool isValidURL(const ParsedURL & url) const = 0; virtual bool isValidURL(const ParsedURL & url) const = 0;
std::optional<Input> inputFromURL(const ParsedURL & url) override std::optional<Input> inputFromURL(const ParsedURL & url) const override
{ {
if (!isValidURL(url)) if (!isValidURL(url))
return std::nullopt; return std::nullopt;
@ -203,7 +203,7 @@ struct CurlInputScheme : InputScheme
return input; return input;
} }
std::optional<Input> inputFromAttrs(const Attrs & attrs) override std::optional<Input> inputFromAttrs(const Attrs & attrs) const override
{ {
auto type = maybeGetStrAttr(attrs, "type"); auto type = maybeGetStrAttr(attrs, "type");
if (type != inputType()) return {}; if (type != inputType()) return {};
@ -220,16 +220,17 @@ struct CurlInputScheme : InputScheme
return input; return input;
} }
ParsedURL toURL(const Input & input) override ParsedURL toURL(const Input & input) const override
{ {
auto url = parseURL(getStrAttr(input.attrs, "url")); auto url = parseURL(getStrAttr(input.attrs, "url"));
-     // NAR hashes are preferred over file hashes since tar/zip files // don't have a canonical representation.
+     // NAR hashes are preferred over file hashes since tar/zip
+     // files don't have a canonical representation.
if (auto narHash = input.getNarHash()) if (auto narHash = input.getNarHash())
url.query.insert_or_assign("narHash", narHash->to_string(SRI, true)); url.query.insert_or_assign("narHash", narHash->to_string(SRI, true));
return url; return url;
} }
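Only the comment is rewrapped here, but the surrounding code illustrates the point: when the input carries a NAR hash, toURL round-trips it into the URL as an SRI-formatted narHash query parameter, because tarballs have no canonical representation to hash directly. A small sketch of attaching such a parameter, with a simplified URL type in place of ParsedURL and a hard-coded SRI string in place of Hash::to_string(SRI, true):

```cpp
#include <iostream>
#include <map>
#include <optional>
#include <string>

// Minimal stand-in for ParsedURL's query handling.
struct SimpleUrl
{
    std::string base;
    std::map<std::string, std::string> query;

    std::string render() const
    {
        std::string s = base;
        char sep = '?';
        for (auto & [k, v] : query) { s += sep + k + "=" + v; sep = '&'; }
        return s;
    }
};

int main()
{
    SimpleUrl url{"https://example.org/src.tar.gz", {}};

    // Pretend this came from input.getNarHash(), already rendered in SRI form.
    std::optional<std::string> narHashSri = "sha256-47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=";

    // NAR hashes are preferred over file hashes since tar/zip files
    // don't have a canonical representation.
    if (narHashSri)
        url.query.insert_or_assign("narHash", *narHashSri);

    std::cout << url.render() << "\n";
}
```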
bool hasAllInfo(const Input & input) override bool hasAllInfo(const Input & input) const override
{ {
return true; return true;
} }


@ -132,7 +132,7 @@ public:
log(*state, lvl, fs.s); log(*state, lvl, fs.s);
} }
void logEI(const ErrorInfo &ei) override void logEI(const ErrorInfo & ei) override
{ {
auto state(state_.lock()); auto state(state_.lock());
@ -180,10 +180,12 @@ public:
auto machineName = getS(fields, 1); auto machineName = getS(fields, 1);
if (machineName != "") if (machineName != "")
i->s += fmt(" on " ANSI_BOLD "%s" ANSI_NORMAL, machineName); i->s += fmt(" on " ANSI_BOLD "%s" ANSI_NORMAL, machineName);
-     auto curRound = getI(fields, 2);
-     auto nrRounds = getI(fields, 3);
-     if (nrRounds != 1)
-         i->s += fmt(" (round %d/%d)", curRound, nrRounds);
+     // Used to be curRound and nrRounds, but the
+     // implementation was broken for a long time.
+     if (getI(fields, 2) != 1 || getI(fields, 3) != 1) {
+         throw Error("log message indicated repeating builds, but this is not currently implemented");
+     }
i->name = DrvName(name).name; i->name = DrvName(name).name;
} }


@ -33,6 +33,7 @@
namespace nix { namespace nix {
char * * savedArgv;
static bool gcWarning = true; static bool gcWarning = true;
@ -234,6 +235,7 @@ void initNix()
#endif #endif
preloadNSS(); preloadNSS();
initLibStore();
} }


@ -9,7 +9,6 @@
#include "remote-fs-accessor.hh" #include "remote-fs-accessor.hh"
#include "nar-info-disk-cache.hh" #include "nar-info-disk-cache.hh"
#include "nar-accessor.hh" #include "nar-accessor.hh"
#include "json.hh"
#include "thread-pool.hh" #include "thread-pool.hh"
#include "callback.hh" #include "callback.hh"
@ -194,19 +193,12 @@ ref<const ValidPathInfo> BinaryCacheStore::addToStoreCommon(
/* Optionally write a JSON file containing a listing of the /* Optionally write a JSON file containing a listing of the
contents of the NAR. */ contents of the NAR. */
if (writeNARListing) { if (writeNARListing) {
std::ostringstream jsonOut; nlohmann::json j = {
{"version", 1},
{"root", listNar(ref<FSAccessor>(narAccessor), "", true)},
};
{ upsertFile(std::string(info.path.hashPart()) + ".ls", j.dump(), "application/json");
JSONObject jsonRoot(jsonOut);
jsonRoot.attr("version", 1);
{
auto res = jsonRoot.placeholder("root");
listNar(res, ref<FSAccessor>(narAccessor), "", true);
}
}
upsertFile(std::string(info.path.hashPart()) + ".ls", jsonOut.str(), "application/json");
} }
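The streaming JSONObject/placeholder API is replaced by building an nlohmann::json value and dumping it in one call. A standalone sketch of producing the same shape of .ls document, with a stubbed root listing instead of listNar() over a real NAR accessor:

```cpp
#include <iostream>
#include <nlohmann/json.hpp>

// Stub for what listNar() would return for the NAR root: a recursive
// description of the archive contents.
nlohmann::json fakeRootListing()
{
    return {
        {"type", "directory"},
        {"entries", {
            {"bin", {{"type", "directory"}}},
        }},
    };
}

int main()
{
    // Same overall shape as the .ls file written by addToStoreCommon:
    // a version field plus the listing under "root".
    nlohmann::json j = {
        {"version", 1},
        {"root", fakeRootListing()},
    };

    // The real code uploads j.dump() via upsertFile(<hashPart>.ls, ..., "application/json").
    std::cout << j.dump(2) << std::endl;
}
```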
/* Optionally maintain an index of DWARF debug info files /* Optionally maintain an index of DWARF debug info files
@ -331,6 +323,17 @@ bool BinaryCacheStore::isValidPathUncached(const StorePath & storePath)
return fileExists(narInfoFileFor(storePath)); return fileExists(narInfoFileFor(storePath));
} }
std::optional<StorePath> BinaryCacheStore::queryPathFromHashPart(const std::string & hashPart)
{
auto pseudoPath = StorePath(hashPart + "-" + MissingName);
try {
auto info = queryPathInfo(pseudoPath);
return info->path;
} catch (InvalidPath &) {
return std::nullopt;
}
}
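The new BinaryCacheStore::queryPathFromHashPart fabricates a pseudo store path from the hash part plus MissingName, lets queryPathInfo resolve it (a binary cache only needs the hash part to find the .narinfo), and translates InvalidPath into std::nullopt. A sketch of that probe-and-translate pattern against a toy lookup table (names are illustrative, not the Store API):

```cpp
#include <iostream>
#include <map>
#include <optional>
#include <stdexcept>
#include <string>

struct InvalidPath : std::runtime_error { using std::runtime_error::runtime_error; };

// Toy stand-in for queryPathInfo: keyed by hash part only, like a binary
// cache, which stores one <hashPart>.narinfo per path.
const std::map<std::string, std::string> infos = {
    {"abc123", "abc123-hello-2.12"},
};

std::string queryPathInfo(const std::string & hashPart)
{
    auto it = infos.find(hashPart);
    if (it == infos.end()) throw InvalidPath("no such path: " + hashPart);
    return it->second; // full store path name recorded in the narinfo
}

// Same shape as the new queryPathFromHashPart: try the lookup and turn
// "not found" into an empty optional instead of an exception.
std::optional<std::string> queryPathFromHashPart(const std::string & hashPart)
{
    try {
        return queryPathInfo(hashPart);
    } catch (InvalidPath &) {
        return std::nullopt;
    }
}

int main()
{
    std::cout << queryPathFromHashPart("abc123").value_or("(not found)") << "\n";
    std::cout << queryPathFromHashPart("zzz999").value_or("(not found)") << "\n";
}
```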
void BinaryCacheStore::narFromPath(const StorePath & storePath, Sink & sink) void BinaryCacheStore::narFromPath(const StorePath & storePath, Sink & sink)
{ {
auto info = queryPathInfo(storePath).cast<const NarInfo>(); auto info = queryPathInfo(storePath).cast<const NarInfo>();
@ -343,7 +346,7 @@ void BinaryCacheStore::narFromPath(const StorePath & storePath, Sink & sink)
try { try {
getFile(info->url, *decompressor); getFile(info->url, *decompressor);
} catch (NoSuchBinaryCacheFile & e) { } catch (NoSuchBinaryCacheFile & e) {
throw SubstituteGone(e.info()); throw SubstituteGone(std::move(e.info()));
} }
decompressor->finish(); decompressor->finish();


@ -95,8 +95,7 @@ public:
void queryPathInfoUncached(const StorePath & path, void queryPathInfoUncached(const StorePath & path,
Callback<std::shared_ptr<const ValidPathInfo>> callback) noexcept override; Callback<std::shared_ptr<const ValidPathInfo>> callback) noexcept override;
std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override;
{ unsupported("queryPathFromHashPart"); }
void addToStore(const ValidPathInfo & info, Source & narSource, void addToStore(const ValidPathInfo & info, Source & narSource,
RepairFlag repair, CheckSigsFlag checkSigs) override; RepairFlag repair, CheckSigsFlag checkSigs) override;


@ -5,7 +5,7 @@
#include <string> #include <string>
#include <chrono> #include <chrono>
#include <optional>
namespace nix { namespace nix {
@ -78,6 +78,9 @@ struct BuildResult
was repeated). */ was repeated). */
time_t startTime = 0, stopTime = 0; time_t startTime = 0, stopTime = 0;
/* User and system CPU time the build took. */
std::optional<std::chrono::microseconds> cpuUser, cpuSystem;
bool success() bool success()
{ {
return status == Built || status == Substituted || status == AlreadyValid || status == ResolvesToAlreadyValid; return status == Built || status == Substituted || status == AlreadyValid || status == ResolvesToAlreadyValid;
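BuildResult gains optional user and system CPU times in microseconds; buildDone() further down in this diff prints them when both are present. How the builder fills them in is not shown in this hunk; one plausible source on Unix is getrusage()-style resource accounting, sketched here under that assumption:

```cpp
#include <chrono>
#include <iostream>
#include <optional>
#include <sys/resource.h>
#include <sys/time.h>

// Convert a struct timeval (seconds + microseconds) into std::chrono::microseconds,
// the unit used by BuildResult::cpuUser / cpuSystem.
static std::chrono::microseconds fromTimeval(const timeval & tv)
{
    return std::chrono::seconds(tv.tv_sec) + std::chrono::microseconds(tv.tv_usec);
}

int main()
{
    // Assumption: resource usage comes from getrusage(); the real builder may
    // obtain it differently (e.g. via the child's wait status bookkeeping).
    rusage ru{};
    if (getrusage(RUSAGE_SELF, &ru) != 0) return 1;

    std::optional<std::chrono::microseconds> cpuUser = fromTimeval(ru.ru_utime);
    std::optional<std::chrono::microseconds> cpuSystem = fromTimeval(ru.ru_stime);

    // Matches the debug output added in buildDone(): microseconds -> seconds.
    std::cout << "user CPU "   << (double) cpuUser->count() / 1000000 << "s, "
              << "system CPU " << (double) cpuSystem->count() / 1000000 << "s\n";
}
```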


@ -7,7 +7,6 @@
#include "finally.hh" #include "finally.hh"
#include "util.hh" #include "util.hh"
#include "archive.hh" #include "archive.hh"
#include "json.hh"
#include "compression.hh" #include "compression.hh"
#include "worker-protocol.hh" #include "worker-protocol.hh"
#include "topo-sort.hh" #include "topo-sort.hh"
@ -40,7 +39,6 @@
#include <sys/ioctl.h> #include <sys/ioctl.h>
#include <net/if.h> #include <net/if.h>
#include <netinet/ip.h> #include <netinet/ip.h>
#include <sys/personality.h>
#include <sys/mman.h> #include <sys/mman.h>
#include <sched.h> #include <sched.h>
#include <sys/param.h> #include <sys/param.h>
@ -135,7 +133,7 @@ void DerivationGoal::killChild()
void DerivationGoal::timedOut(Error && ex) void DerivationGoal::timedOut(Error && ex)
{ {
killChild(); killChild();
done(BuildResult::TimedOut, {}, ex); done(BuildResult::TimedOut, {}, std::move(ex));
} }
@ -502,6 +500,14 @@ void DerivationGoal::inputsRealised()
now-known results of dependencies. If so, we become a now-known results of dependencies. If so, we become a
stub goal aliasing that resolved derivation goal. */ stub goal aliasing that resolved derivation goal. */
std::optional attempt = fullDrv.tryResolve(worker.store, inputDrvOutputs); std::optional attempt = fullDrv.tryResolve(worker.store, inputDrvOutputs);
if (!attempt) {
/* TODO (impure derivations-induced tech debt) (see below):
The above attempt should have found it, but because we manage
inputDrvOutputs statefully, sometimes it gets out of sync with
the real source of truth (store). So we query the store
directly if there's a problem. */
attempt = fullDrv.tryResolve(worker.store);
}
assert(attempt); assert(attempt);
Derivation drvResolved { *std::move(attempt) }; Derivation drvResolved { *std::move(attempt) };
@ -528,14 +534,32 @@ void DerivationGoal::inputsRealised()
/* Add the relevant output closures of the input derivation /* Add the relevant output closures of the input derivation
`i' as input paths. Only add the closures of output paths `i' as input paths. Only add the closures of output paths
that are specified as inputs. */ that are specified as inputs. */
for (auto & j : wantedDepOutputs) for (auto & j : wantedDepOutputs) {
if (auto outPath = get(inputDrvOutputs, { depDrvPath, j })) /* TODO (impure derivations-induced tech debt):
Tracking input derivation outputs statefully through the
goals is error prone and has led to bugs.
For a robust nix, we need to move towards the `else` branch,
which does not rely on goal state to match up with the
reality of the store, which is our real source of truth.
However, the impure derivations feature still relies on this
fragile way of doing things, because its builds do not have
a representation in the store, which is a usability problem
in itself */
if (auto outPath = get(inputDrvOutputs, { depDrvPath, j })) {
worker.store.computeFSClosure(*outPath, inputPaths); worker.store.computeFSClosure(*outPath, inputPaths);
else }
else {
auto outMap = worker.evalStore.queryDerivationOutputMap(depDrvPath);
auto outMapPath = outMap.find(j);
if (outMapPath == outMap.end()) {
throw Error( throw Error(
"derivation '%s' requires non-existent output '%s' from input derivation '%s'", "derivation '%s' requires non-existent output '%s' from input derivation '%s'",
worker.store.printStorePath(drvPath), j, worker.store.printStorePath(depDrvPath)); worker.store.printStorePath(drvPath), j, worker.store.printStorePath(depDrvPath));
} }
worker.store.computeFSClosure(outMapPath->second, inputPaths);
}
}
}
} }
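As the TODO comment explains, the loop now prefers the goal-local inputDrvOutputs map but falls back to the store's derivation output map when the stateful tracking is missing an entry, since the store is the real source of truth. A generic sketch of that cache-first, authoritative-source-second lookup, with toy maps standing in for the goal state and the store:

```cpp
#include <iostream>
#include <map>
#include <stdexcept>
#include <string>

using Output = std::string;
using Path = std::string;

// Goal-local, stateful view (can lag behind reality).
std::map<Output, Path> inputDrvOutputs = {{"out", "/nix/store/aaa-out"}};

// Authoritative view, as the store would report it.
std::map<Output, Path> storeOutputs = {{"out", "/nix/store/aaa-out"},
                                       {"dev", "/nix/store/aaa-dev"}};

Path resolveOutput(const Output & name)
{
    // First consult the in-memory tracking, as the goal does.
    if (auto it = inputDrvOutputs.find(name); it != inputDrvOutputs.end())
        return it->second;

    // Fall back to the "store", which is the real source of truth.
    if (auto it = storeOutputs.find(name); it != storeOutputs.end())
        return it->second;

    throw std::runtime_error("requires non-existent output '" + name + "'");
}

int main()
{
    std::cout << resolveOutput("out") << "\n";
    std::cout << resolveOutput("dev") << "\n"; // only the store knows this one
}
```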
/* Second, the input sources. */ /* Second, the input sources. */
@ -546,10 +570,6 @@ void DerivationGoal::inputsRealised()
/* What type of derivation are we building? */ /* What type of derivation are we building? */
derivationType = drv->type(); derivationType = drv->type();
/* Don't repeat fixed-output derivations since they're already
verified by their output hash.*/
nrRounds = derivationType.isFixed() ? 1 : settings.buildRepeat + 1;
/* Okay, try to build. Note that here we don't wait for a build /* Okay, try to build. Note that here we don't wait for a build
slot to become available, since we don't need one if there is a slot to become available, since we don't need one if there is a
build hook. */ build hook. */
@ -564,12 +584,11 @@ void DerivationGoal::started()
auto msg = fmt( auto msg = fmt(
buildMode == bmRepair ? "repairing outputs of '%s'" : buildMode == bmRepair ? "repairing outputs of '%s'" :
buildMode == bmCheck ? "checking outputs of '%s'" : buildMode == bmCheck ? "checking outputs of '%s'" :
nrRounds > 1 ? "building '%s' (round %d/%d)" : "building '%s'", worker.store.printStorePath(drvPath));
"building '%s'", worker.store.printStorePath(drvPath), curRound, nrRounds);
fmt("building '%s'", worker.store.printStorePath(drvPath)); fmt("building '%s'", worker.store.printStorePath(drvPath));
if (hook) msg += fmt(" on '%s'", machineName); if (hook) msg += fmt(" on '%s'", machineName);
act = std::make_unique<Activity>(*logger, lvlInfo, actBuild, msg, act = std::make_unique<Activity>(*logger, lvlInfo, actBuild, msg,
Logger::Fields{worker.store.printStorePath(drvPath), hook ? machineName : "", curRound, nrRounds}); Logger::Fields{worker.store.printStorePath(drvPath), hook ? machineName : "", 1, 1});
mcRunningBuilds = std::make_unique<MaintainCount<uint64_t>>(worker.runningBuilds); mcRunningBuilds = std::make_unique<MaintainCount<uint64_t>>(worker.runningBuilds);
worker.updateProgress(); worker.updateProgress();
} }
@ -869,6 +888,14 @@ void DerivationGoal::buildDone()
cleanupPostChildKill(); cleanupPostChildKill();
if (buildResult.cpuUser && buildResult.cpuSystem) {
debug("builder for '%s' terminated with status %d, user CPU %.3fs, system CPU %.3fs",
worker.store.printStorePath(drvPath),
status,
((double) buildResult.cpuUser->count()) / 1000000,
((double) buildResult.cpuSystem->count()) / 1000000);
}
bool diskFull = false; bool diskFull = false;
try { try {
@ -915,14 +942,6 @@ void DerivationGoal::buildDone()
cleanupPostOutputsRegisteredModeNonCheck(); cleanupPostOutputsRegisteredModeNonCheck();
/* Repeat the build if necessary. */
if (curRound++ < nrRounds) {
outputLocks.unlock();
state = &DerivationGoal::tryToBuild;
worker.wakeUp(shared_from_this());
return;
}
/* It is now safe to delete the lock files, since all future /* It is now safe to delete the lock files, since all future
lockers will see that the output paths are valid; they will lockers will see that the output paths are valid; they will
not create new lock files with the same names as the old not create new lock files with the same names as the old
@ -951,7 +970,7 @@ void DerivationGoal::buildDone()
BuildResult::PermanentFailure; BuildResult::PermanentFailure;
} }
done(st, {}, e); done(st, {}, std::move(e));
return; return;
} }
} }
@ -983,22 +1002,34 @@ void DerivationGoal::resolvedFinished()
throw Error( throw Error(
"derivation '%s' doesn't have expected output '%s' (derivation-goal.cc/resolvedFinished,resolve)", "derivation '%s' doesn't have expected output '%s' (derivation-goal.cc/resolvedFinished,resolve)",
worker.store.printStorePath(drvPath), wantedOutput); worker.store.printStorePath(drvPath), wantedOutput);
auto realisation = get(resolvedResult.builtOutputs, DrvOutput { *resolvedHash, wantedOutput });
if (!realisation) auto realisation = [&]{
auto take1 = get(resolvedResult.builtOutputs, DrvOutput { *resolvedHash, wantedOutput });
if (take1) return *take1;
/* The above `get` should work. But stateful tracking of
outputs in resolvedResult can get out of sync with the
store, which is our actual source of truth. For now we just
check the store directly if it fails. */
auto take2 = worker.evalStore.queryRealisation(DrvOutput { *resolvedHash, wantedOutput });
if (take2) return *take2;
throw Error( throw Error(
"derivation '%s' doesn't have expected output '%s' (derivation-goal.cc/resolvedFinished,realisation)", "derivation '%s' doesn't have expected output '%s' (derivation-goal.cc/resolvedFinished,realisation)",
worker.store.printStorePath(resolvedDrvGoal->drvPath), wantedOutput); worker.store.printStorePath(resolvedDrvGoal->drvPath), wantedOutput);
}();
if (drv->type().isPure()) { if (drv->type().isPure()) {
auto newRealisation = *realisation; auto newRealisation = realisation;
newRealisation.id = DrvOutput { initialOutput->outputHash, wantedOutput }; newRealisation.id = DrvOutput { initialOutput->outputHash, wantedOutput };
newRealisation.signatures.clear(); newRealisation.signatures.clear();
if (!drv->type().isFixed()) if (!drv->type().isFixed())
newRealisation.dependentRealisations = drvOutputReferences(worker.store, *drv, realisation->outPath); newRealisation.dependentRealisations = drvOutputReferences(worker.store, *drv, realisation.outPath);
signRealisation(newRealisation); signRealisation(newRealisation);
worker.store.registerDrvOutput(newRealisation); worker.store.registerDrvOutput(newRealisation);
} }
outputPaths.insert(realisation->outPath); outputPaths.insert(realisation.outPath);
builtOutputs.emplace(realisation->id, *realisation); builtOutputs.emplace(realisation.id, realisation);
} }
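The realisation lookup becomes an immediately-invoked lambda: try the builtOutputs map from the resolved goal, then fall back to queryRealisation on the store, and only throw if both miss. A compact sketch of that idiom with stand-in lookups:

```cpp
#include <iostream>
#include <map>
#include <optional>
#include <stdexcept>
#include <string>

std::map<std::string, std::string> builtOutputs = {};            // goal-local, may be stale
std::map<std::string, std::string> storeRealisations = {{"out", "/nix/store/bbb-out"}};

std::optional<std::string> get(const std::map<std::string, std::string> & m, const std::string & k)
{
    auto it = m.find(k);
    if (it == m.end()) return std::nullopt;
    return it->second;
}

int main()
{
    std::string wantedOutput = "out";

    // Immediately-invoked lambda: the first lookup that succeeds wins, otherwise throw.
    auto realisation = [&] {
        if (auto take1 = get(builtOutputs, wantedOutput)) return *take1;
        if (auto take2 = get(storeRealisations, wantedOutput)) return *take2;
        throw std::runtime_error("derivation doesn't have expected output '" + wantedOutput + "'");
    }();

    std::cout << realisation << "\n";
}
```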
runPostBuildHook( runPostBuildHook(
@ -1402,7 +1433,7 @@ void DerivationGoal::done(
fs << worker.store.printStorePath(drvPath) << "\t" << buildResult.toString() << std::endl; fs << worker.store.printStorePath(drvPath) << "\t" << buildResult.toString() << std::endl;
} }
amDone(buildResult.success() ? ecSuccess : ecFailed, ex); amDone(buildResult.success() ? ecSuccess : ecFailed, std::move(ex));
} }


@ -115,11 +115,6 @@ struct DerivationGoal : public Goal
BuildMode buildMode; BuildMode buildMode;
/* The current round, if we're building multiple times. */
size_t curRound = 1;
size_t nrRounds;
std::unique_ptr<MaintainCount<uint64_t>> mcExpectedBuilds, mcRunningBuilds; std::unique_ptr<MaintainCount<uint64_t>> mcExpectedBuilds, mcRunningBuilds;
std::unique_ptr<Activity> act; std::unique_ptr<Activity> act;


@ -30,7 +30,7 @@ void Store::buildPaths(const std::vector<DerivedPath> & reqs, BuildMode buildMod
if (ex) if (ex)
logError(i->ex->info()); logError(i->ex->info());
else else
ex = i->ex; ex = std::move(i->ex);
} }
if (i->exitCode != Goal::ecSuccess) { if (i->exitCode != Goal::ecSuccess) {
if (auto i2 = dynamic_cast<DerivationGoal *>(i.get())) failed.insert(i2->drvPath); if (auto i2 = dynamic_cast<DerivationGoal *>(i.get())) failed.insert(i2->drvPath);
@ -40,7 +40,7 @@ void Store::buildPaths(const std::vector<DerivedPath> & reqs, BuildMode buildMod
if (failed.size() == 1 && ex) { if (failed.size() == 1 && ex) {
ex->status = worker.exitStatus(); ex->status = worker.exitStatus();
throw *ex; throw std::move(*ex);
} else if (!failed.empty()) { } else if (!failed.empty()) {
if (ex) logError(ex->info()); if (ex) logError(ex->info());
throw Error(worker.exitStatus(), "build of %s failed", showPaths(failed)); throw Error(worker.exitStatus(), "build of %s failed", showPaths(failed));
@ -109,7 +109,7 @@ void Store::ensurePath(const StorePath & path)
if (goal->exitCode != Goal::ecSuccess) { if (goal->exitCode != Goal::ecSuccess) {
if (goal->ex) { if (goal->ex) {
goal->ex->status = worker.exitStatus(); goal->ex->status = worker.exitStatus();
throw *goal->ex; throw std::move(*goal->ex);
} else } else
throw Error(worker.exitStatus(), "path '%s' does not exist and cannot be created", printStorePath(path)); throw Error(worker.exitStatus(), "path '%s' does not exist and cannot be created", printStorePath(path));
} }
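Several call sites switch from copying the stored Error (`ex = i->ex`, `throw *ex`) to moving it, so the exception and its trace data are transferred rather than duplicated when finally rethrown. A tiny sketch of the pattern using std::optional and a standard exception type:

```cpp
#include <iostream>
#include <optional>
#include <stdexcept>
#include <string>
#include <utility>

int main()
{
    // Collected from a failed goal; in the real code this is std::optional<Error>.
    std::optional<std::runtime_error> ex;
    ex = std::runtime_error("build of '/nix/store/...-foo.drv' failed");

    try {
        if (ex)
            // Move the stored exception into the throw expression instead of
            // copying it; the optional is not used again afterwards.
            throw std::move(*ex);
    } catch (std::runtime_error & e) {
        std::cout << e.what() << "\n";
    }
}
```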


@ -16,11 +16,11 @@ HookInstance::HookInstance()
buildHookArgs.pop_front(); buildHookArgs.pop_front();
Strings args; Strings args;
args.push_back(std::string(baseNameOf(buildHook)));
for (auto & arg : buildHookArgs) for (auto & arg : buildHookArgs)
args.push_back(arg); args.push_back(arg);
args.push_back(std::string(baseNameOf(settings.buildHook.get())));
args.push_back(std::to_string(verbosity)); args.push_back(std::to_string(verbosity));
/* Create a pipe to get the output of the child. */ /* Create a pipe to get the output of the child. */
