Merge remote-tracking branch 'nixos/master' into pr-flake-show-foreign

Théophane Hufschmitt 2023-01-27 09:46:46 +01:00
commit ab424a39a9
333 changed files with 10975 additions and 5233 deletions

.github/CODEOWNERS (new file)

@@ -0,0 +1,15 @@
# Pull requests concerning the listed files will automatically invite the respective maintainers as reviewers.
# This file is not used for denoting any kind of ownership, but is merely a tool for handling notifications.
#
# Merge permissions are required for maintaining an entry in this file.
# For documentation on this mechanism, see https://help.github.com/articles/about-codeowners/
# Default reviewers if nothing else matches
* @edolstra @thufschmitt
# This file
.github/CODEOWNERS @edolstra
# Public documentation
/doc @fricklerhandwerk
*.md @fricklerhandwerk

.github/ISSUE_TEMPLATE/bug_report.md

@@ -30,3 +30,7 @@ A clear and concise description of what you expected to happen.
**Additional context**
Add any other context about the problem here.
**Priorities**
Add :+1: to [issues you find important](https://github.com/NixOS/nix/issues?q=is%3Aissue+is%3Aopen+sort%3Areactions-%2B1-desc).

.github/ISSUE_TEMPLATE/feature_request.md

@@ -2,7 +2,7 @@
name: Feature request
about: Suggest an idea for this project
title: ''
labels: improvement
labels: feature
assignees: ''
---
@@ -18,3 +18,7 @@ A clear and concise description of any alternative solutions or features you've
**Additional context**
Add any other context or screenshots about the feature request here.
**Priorities**
Add :+1: to [issues you find important](https://github.com/NixOS/nix/issues?q=is%3Aissue+is%3Aopen+sort%3Areactions-%2B1-desc).

.github/ISSUE_TEMPLATE/installer.md (new file)

@@ -0,0 +1,36 @@
---
name: Installer issue
about: Report problems with installation
title: ''
labels: installer
assignees: ''
---
## Platform
<!-- select the platform on which you tried to install Nix -->
- [ ] Linux: <!-- state your distribution, e.g. Arch Linux, Ubuntu, ... -->
- [ ] macOS
- [ ] WSL
## Additional information
<!-- state special circumstances on your system or additional steps you have taken prior to installation -->
## Output
<details><summary>Output</summary>
```log
<!-- paste console output here and remove this comment -->
```
</details>
## Priorities
Add :+1: to [issues you find important](https://github.com/NixOS/nix/issues?q=is%3Aissue+is%3Aopen+sort%3Areactions-%2B1-desc).

.github/ISSUE_TEMPLATE/missing_documentation.md (new file)

@@ -0,0 +1,31 @@
---
name: Missing or incorrect documentation
about: Help us improve the reference manual
title: ''
labels: documentation
assignees: ''
---
## Problem
<!-- describe your problem -->
## Checklist
<!-- make sure this issue is not redundant or obsolete -->
- [ ] checked [latest Nix manual] \([source])
- [ ] checked [open documentation issues and pull requests] for possible duplicates
[latest Nix manual]: https://nixos.org/manual/nix/unstable/
[source]: https://github.com/NixOS/nix/tree/master/doc/manual/src
[open documentation issues and pull requests]: https://github.com/NixOS/nix/labels/documentation
## Proposal
<!-- propose a solution -->
## Priorities
Add :+1: to [issues you find important](https://github.com/NixOS/nix/issues?q=is%3Aissue+is%3Aopen+sort%3Areactions-%2B1-desc).

.github/PULL_REQUEST_TEMPLATE.md (new file)

@@ -0,0 +1,28 @@
# Motivation
<!-- Briefly explain what the change is about and why it is desirable. -->
# Context
<!-- Provide context. Reference open issues if available. -->
<!-- Non-trivial change: Briefly outline the implementation strategy. -->
<!-- Invasive change: Discuss alternative designs or approaches you considered. -->
<!-- Large change: Provide instructions to reviewers how to read the diff. -->
# Checklist for maintainers
<!-- Contributors: please leave this as is -->
Maintainers: tick if completed or explain if not relevant
- [ ] agreed on idea
- [ ] agreed on implementation strategy
- [ ] tests, as appropriate
- functional tests - `tests/**.sh`
- unit tests - `src/*/tests`
- integration tests
- [ ] documentation in the manual
- [ ] code and comments are self-explanatory
- [ ] commit message explains why the change was made
- [ ] new feature or bug fix: updated release notes

@@ -5,3 +5,7 @@ Please include relevant [release notes](https://github.com/NixOS/nix/blob/master
**Testing**
If this issue is a regression or something that should block release, please consider including a test either in the [testsuite](https://github.com/NixOS/nix/tree/master/tests) or as a [hydraJob]( https://github.com/NixOS/nix/blob/master/flake.nix#L396) so that it can be part of the [automatic checks](https://hydra.nixos.org/jobset/nix/master).
**Priorities**
Add :+1: to [pull requests you find important](https://github.com/NixOS/nix/pulls?q=is%3Aopen+sort%3Areactions-%2B1-desc).

.github/workflows/backport.yml

@@ -21,7 +21,7 @@ jobs:
fetch-depth: 0
- name: Create backport PRs
# should be kept in sync with `version`
uses: zeebe-io/backport-action@v0.0.8
uses: zeebe-io/backport-action@v1.1.0
with:
# Config README: https://github.com/zeebe-io/backport-action#backport-action
github_token: ${{ secrets.GITHUB_TOKEN }}

.github/workflows/ci.yml

@@ -19,9 +19,9 @@ jobs:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- uses: cachix/install-nix-action@v17
- uses: cachix/install-nix-action@v18
- run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
- uses: cachix/cachix-action@v10
- uses: cachix/cachix-action@v12
if: needs.check_secrets.outputs.cachix == 'true'
with:
name: '${{ env.CACHIX_NAME }}'
@@ -58,8 +58,8 @@ jobs:
with:
fetch-depth: 0
- run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
- uses: cachix/install-nix-action@v17
- uses: cachix/cachix-action@v10
- uses: cachix/install-nix-action@v18
- uses: cachix/cachix-action@v12
with:
name: '${{ env.CACHIX_NAME }}'
signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}'
@@ -77,11 +77,18 @@ jobs:
steps:
- uses: actions/checkout@v3
- run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
- uses: cachix/install-nix-action@v17
- uses: cachix/install-nix-action@v18
with:
install_url: '${{needs.installer.outputs.installerURL}}'
install_options: "--tarball-url-prefix https://${{ env.CACHIX_NAME }}.cachix.org/serve"
- run: nix-instantiate -E 'builtins.currentTime' --eval
- run: sudo apt install fish zsh
if: matrix.os == 'ubuntu-latest'
- run: brew install fish
if: matrix.os == 'macos-latest'
- run: exec bash -c "nix-instantiate -E 'builtins.currentTime' --eval"
- run: exec sh -c "nix-instantiate -E 'builtins.currentTime' --eval"
- run: exec zsh -c "nix-instantiate -E 'builtins.currentTime' --eval"
- run: exec fish -c "nix-instantiate -E 'builtins.currentTime' --eval"
docker_push_image:
needs: [check_secrets, tests]
@@ -95,10 +102,10 @@ jobs:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- uses: cachix/install-nix-action@v17
- uses: cachix/install-nix-action@v18
- run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
- run: echo NIX_VERSION="$(nix --experimental-features 'nix-command flakes' eval .\#default.version | tr -d \")" >> $GITHUB_ENV
- uses: cachix/cachix-action@v10
- uses: cachix/cachix-action@v12
if: needs.check_secrets.outputs.cachix == 'true'
with:
name: '${{ env.CACHIX_NAME }}'

.gitignore

@@ -27,6 +27,8 @@ perl/Makefile.config
# /scripts/
/scripts/nix-profile.sh
/scripts/nix-profile-daemon.sh
/scripts/nix-profile.fish
/scripts/nix-profile-daemon.fish
# /src/libexpr/
/src/libexpr/lexer-tab.cc

.version

@@ -1 +1 @@
2.12.0
2.14.0

boehmgc-coroutine-sp-fallback.diff

@@ -1,17 +1,49 @@
diff --git a/darwin_stop_world.c b/darwin_stop_world.c
index 3dbaa3fb..36a1d1f7 100644
--- a/darwin_stop_world.c
+++ b/darwin_stop_world.c
@@ -352,6 +352,7 @@ GC_INNER void GC_push_all_stacks(void)
int nthreads = 0;
word total_size = 0;
mach_msg_type_number_t listcount = (mach_msg_type_number_t)THREAD_TABLE_SZ;
+ size_t stack_limit;
if (!EXPECT(GC_thr_initialized, TRUE))
GC_thr_init();
@@ -407,6 +408,19 @@ GC_INNER void GC_push_all_stacks(void)
GC_push_all_stack_sections(lo, hi, p->traced_stack_sect);
}
if (altstack_lo) {
+ // When a thread goes into a coroutine, we lose its original sp until
+ // control flow returns to the thread.
+ // While in the coroutine, the sp points outside the thread stack,
+ // so we can detect this and push the entire thread stack instead,
+ // as an approximation.
+ // We assume that the coroutine has similarly added its entire stack.
+ // This could be made accurate by cooperating with the application
+ // via new functions and/or callbacks.
+ stack_limit = pthread_get_stacksize_np(p->id);
+ if (altstack_lo >= altstack_hi || altstack_lo < altstack_hi - stack_limit) { // sp outside stack
+ altstack_lo = altstack_hi - stack_limit;
+ }
+
total_size += altstack_hi - altstack_lo;
GC_push_all_stack(altstack_lo, altstack_hi);
}
diff --git a/pthread_stop_world.c b/pthread_stop_world.c
index 4b2c429..1fb4c52 100644
index b5d71e62..aed7b0bf 100644
--- a/pthread_stop_world.c
+++ b/pthread_stop_world.c
@@ -673,6 +673,8 @@ GC_INNER void GC_push_all_stacks(void)
struct GC_traced_stack_sect_s *traced_stack_sect;
pthread_t self = pthread_self();
word total_size = 0;
@@ -768,6 +768,8 @@ STATIC void GC_restart_handler(int sig)
/* world is stopped. Should not fail if it isn't. */
GC_INNER void GC_push_all_stacks(void)
{
+ size_t stack_limit;
+ pthread_attr_t pattr;
if (!EXPECT(GC_thr_initialized, TRUE))
GC_thr_init();
@@ -722,6 +724,31 @@ GC_INNER void GC_push_all_stacks(void)
GC_bool found_me = FALSE;
size_t nthreads = 0;
int i;
@@ -851,6 +853,31 @@ GC_INNER void GC_push_all_stacks(void)
hi = p->altstack + p->altstack_size;
/* FIXME: Need to scan the normal stack too, but how ? */
/* FIXME: Assume stack grows down */

configure.ac

@@ -41,8 +41,6 @@ AC_DEFINE_UNQUOTED(SYSTEM, ["$system"], [platform identifier ('cpu-os')])
test "$localstatedir" = '${prefix}/var' && localstatedir=/nix/var
CFLAGS=
CXXFLAGS=
AC_PROG_CC
AC_PROG_CXX
AC_PROG_CPP
@@ -177,7 +175,7 @@ fi
PKG_CHECK_MODULES([OPENSSL], [libcrypto], [CXXFLAGS="$OPENSSL_CFLAGS $CXXFLAGS"])
# Checks for libarchive
# Look for libarchive.
PKG_CHECK_MODULES([LIBARCHIVE], [libarchive >= 3.1.2], [CXXFLAGS="$LIBARCHIVE_CFLAGS $CXXFLAGS"])
# Workaround until https://github.com/libarchive/libarchive/issues/1446 is fixed
if test "$shared" != yes; then
@@ -276,6 +274,12 @@ fi
PKG_CHECK_MODULES([GTEST], [gtest_main])
# Look for rapidcheck.
# No pkg-config yet, https://github.com/emil-e/rapidcheck/issues/302
AC_CHECK_HEADERS([rapidcheck/gtest.h], [], [], [#include <gtest/gtest.h>])
AC_CHECK_LIB([rapidcheck], [])
# Look for nlohmann/json.
PKG_CHECK_MODULES([NLOHMANN_JSON], [nlohmann_json >= 3.9])

doc/manual/book.toml

@@ -1,7 +1,21 @@
[book]
title = "Nix Reference Manual"
[output.html]
additional-css = ["custom.css"]
additional-js = ["redirects.js"]
edit-url-template = "https://github.com/NixOS/nix/tree/master/doc/manual/{path}"
git-repository-url = "https://github.com/NixOS/nix"
[preprocessor.anchors]
renderers = ["html"]
command = "jq --from-file doc/manual/anchors.jq"
[output.linkcheck]
# no Internet during the build (in the sandbox)
follow-web-links = false
# mdbook-linkcheck does not understand [foo]{#bar} style links, resulting in
# excessive "Potential incomplete link" warnings. No other kind of warning was
# produced at the time of writing.
warning-policy = "ignore"

doc/manual/generate-builtins.nix

@@ -1,16 +1,20 @@
with builtins;
with import ./utils.nix;
builtinsDump:
let
showBuiltin = name:
let
inherit (builtinsDump.${name}) doc args;
in
''
<dt id="builtins-${name}">
<a href="#builtins-${name}"><code>${name} ${listArgs args}</code></a>
</dt>
<dd>
builtins:
${doc}
</dd>
'';
listArgs = args: builtins.concatStringsSep " " (map (s: "<var>${s}</var>") args);
in
with builtins; concatStringsSep "\n" (map showBuiltin (attrNames builtinsDump))
concatStrings (map
(name:
let builtin = builtins.${name}; in
"<dt id=\"builtins-${name}\"><a href=\"#builtins-${name}\"><code>${name} "
+ concatStringsSep " " (map (s: "<var>${s}</var>") builtin.args)
+ "</code></a></dt>"
+ "<dd>\n\n"
+ builtin.doc
+ "\n\n</dd>"
)
(attrNames builtins))

doc/manual/generate-manpage.nix

@@ -1,97 +1,115 @@
{ command }:
{ toplevel }:
with builtins;
with import ./utils.nix;
let
showCommand =
{ command, def, filename }:
''
**Warning**: This program is **experimental** and its interface is subject to change.
''
+ "# Name\n\n"
+ "`${command}` - ${def.description}\n\n"
+ "# Synopsis\n\n"
+ showSynopsis { inherit command; args = def.args; }
+ (if def.commands or {} != {}
then
let
categories = sort (x: y: x.id < y.id) (unique (map (cmd: cmd.category) (attrValues def.commands)));
listCommands = cmds:
concatStrings (map (name:
"* "
+ "[`${command} ${name}`](./${appendName filename name}.md)"
+ " - ${cmds.${name}.description}\n")
(attrNames cmds));
in
"where *subcommand* is one of the following:\n\n"
# FIXME: group by category
+ (if length categories > 1
then
concatStrings (map
(cat:
"**${toString cat.description}:**\n\n"
+ listCommands (filterAttrs (n: v: v.category == cat) def.commands)
+ "\n"
) categories)
+ "\n"
else
listCommands def.commands
+ "\n")
else "")
+ (if def ? doc
then def.doc + "\n\n"
else "")
+ (let s = showOptions def.flags; in
if s != ""
then "# Options\n\n${s}"
else "")
;
showCommand = { command, details, filename, toplevel }:
let
result = ''
> **Warning** \
> This program is **experimental** and its interface is subject to change.
# Name
`${command}` - ${details.description}
# Synopsis
${showSynopsis command details.args}
${maybeSubcommands}
${maybeDocumentation}
${maybeOptions}
'';
showSynopsis = command: args:
let
showArgument = arg: "*${arg.label}*" + (if arg ? arity then "" else "...");
arguments = concatStringsSep " " (map showArgument args);
in ''
`${command}` [*option*...] ${arguments}
'';
maybeSubcommands = if details ? commands && details.commands != {}
then ''
where *subcommand* is one of the following:
${subcommands}
''
else "";
subcommands = if length categories > 1
then listCategories
else listSubcommands details.commands;
categories = sort (x: y: x.id < y.id) (unique (map (cmd: cmd.category) (attrValues details.commands)));
listCategories = concatStrings (map showCategory categories);
showCategory = cat: ''
**${toString cat.description}:**
${listSubcommands (filterAttrs (n: v: v.category == cat) details.commands)}
'';
listSubcommands = cmds: concatStrings (attrValues (mapAttrs showSubcommand cmds));
showSubcommand = name: subcmd: ''
* [`${command} ${name}`](./${appendName filename name}.md) - ${subcmd.description}
'';
maybeDocumentation = if details ? doc then details.doc else "";
maybeOptions = if details.flags == {} then "" else ''
# Options
${showOptions details.flags toplevel.flags}
'';
showOptions = options: commonOptions:
let
allOptions = options // commonOptions;
showCategory = cat: ''
${if cat != "" then "**${cat}:**" else ""}
${listOptions (filterAttrs (n: v: v.category == cat) allOptions)}
'';
listOptions = opts: concatStringsSep "\n" (attrValues (mapAttrs showOption opts));
showOption = name: option:
let
shortName = if option ? shortName then "/ `-${option.shortName}`" else "";
labels = if option ? labels then (concatStringsSep " " (map (s: "*${s}*") option.labels)) else "";
in trim ''
- `--${name}` ${shortName} ${labels}
${option.description}
'';
categories = sort builtins.lessThan (unique (map (cmd: cmd.category) (attrValues allOptions)));
in concatStrings (map showCategory categories);
in squash result;
appendName = filename: name: (if filename == "nix" then "nix3" else filename) + "-" + name;
showOptions = flags:
processCommand = { command, details, filename, toplevel }:
let
categories = sort builtins.lessThan (unique (map (cmd: cmd.category) (attrValues flags)));
in
concatStrings (map
(cat:
(if cat != ""
then "**${cat}:**\n\n"
else "")
+ concatStrings
(map (longName:
let
flag = flags.${longName};
in
" - `--${longName}`"
+ (if flag ? shortName then " / `-${flag.shortName}`" else "")
+ (if flag ? labels then " " + (concatStringsSep " " (map (s: "*${s}*") flag.labels)) else "")
+ " \n"
+ " " + flag.description + "\n\n"
) (attrNames (filterAttrs (n: v: v.category == cat) flags))))
categories);
cmd = {
inherit command;
name = filename + ".md";
value = showCommand { inherit command details filename toplevel; };
};
subcommand = subCmd: processCommand {
command = command + " " + subCmd;
details = details.commands.${subCmd};
filename = appendName filename subCmd;
inherit toplevel;
};
in [ cmd ] ++ concatMap subcommand (attrNames details.commands or {});
showSynopsis =
{ command, args }:
"`${command}` [*option*...] ${concatStringsSep " "
(map (arg: "*${arg.label}*" + (if arg ? arity then "" else "...")) args)}\n\n";
parsedToplevel = builtins.fromJSON toplevel;
manpages = processCommand {
command = "nix";
details = parsedToplevel;
filename = "nix";
toplevel = parsedToplevel;
};
processCommand = { command, def, filename }:
[ { name = filename + ".md"; value = showCommand { inherit command def filename; }; inherit command; } ]
++ concatMap
(name: processCommand {
filename = appendName filename name;
command = command + " " + name;
def = def.commands.${name};
})
(attrNames def.commands or {});
tableOfContents = let
showEntry = page:
" - [${page.command}](command-ref/new-cli/${page.name})";
in concatStringsSep "\n" (map showEntry manpages) + "\n";
in
let
manpages = processCommand { filename = "nix"; command = "nix"; def = builtins.fromJSON command; };
summary = concatStrings (map (manpage: " - [${manpage.command}](command-ref/new-cli/${manpage.name})\n") manpages);
in
(listToAttrs manpages) // { "SUMMARY.md" = summary; }
in (listToAttrs manpages) // { "SUMMARY.md" = tableOfContents; }

doc/manual/generate-options.nix

@@ -1,29 +1,41 @@
with builtins;
with import ./utils.nix;
let
inherit (builtins) attrNames concatStringsSep isAttrs isBool;
inherit (import ./utils.nix) concatStrings squash splitLines;
in
options:
optionsInfo:
let
showOption = name:
let
inherit (optionsInfo.${name}) description documentDefault defaultValue aliases;
result = squash ''
- <span id="conf-${name}">[`${name}`](#conf-${name})</span>
concatStrings (map
(name:
let option = options.${name}; in
" - [`${name}`](#conf-${name})"
+ "<p id=\"conf-${name}\"></p>\n\n"
+ concatStrings (map (s: " ${s}\n") (splitLines option.description)) + "\n\n"
+ (if option.documentDefault
then " **Default:** " + (
if option.value == "" || option.value == []
then "*empty*"
else if isBool option.value
then (if option.value then "`true`" else "`false`")
else
# n.b. a StringMap value type is specified as a string, but
# this shows the value type. The empty stringmap is "null" in
# JSON, but that converts to "{ }" here.
(if isAttrs option.value then "`\"\"`"
else "`" + toString option.value + "`")) + "\n\n"
else " **Default:** *machine-specific*\n")
+ (if option.aliases != []
then " **Deprecated alias:** " + (concatStringsSep ", " (map (s: "`${s}`") option.aliases)) + "\n\n"
else "")
)
(attrNames options))
${indent " " body}
'';
# separate body to cleanly handle indentation
body = ''
${description}
**Default:** ${showDefault documentDefault defaultValue}
${showAliases aliases}
'';
showDefault = documentDefault: defaultValue:
if documentDefault then
# a StringMap value type is specified as a string, but
# this shows the value type. The empty stringmap is `null` in
# JSON, but that converts to `{ }` here.
if defaultValue == "" || defaultValue == [] || isAttrs defaultValue
then "*empty*"
else if isBool defaultValue then
if defaultValue then "`true`" else "`false`"
else "`${toString defaultValue}`"
else "*machine-specific*";
showAliases = aliases:
if aliases == [] then "" else
"**Deprecated alias:** ${(concatStringsSep ", " (map (s: "`${s}`") aliases))}";
indent = prefix: s:
concatStringsSep "\n" (map (x: if x == "" then x else "${prefix}${x}") (splitLines s));
in result;
in concatStrings (map showOption (attrNames optionsInfo))

doc/manual/local.mk

@@ -29,19 +29,19 @@ nix-eval = $(dummy-env) $(bindir)/nix eval --experimental-features nix-command -
$(d)/%.1: $(d)/src/command-ref/%.md
@printf "Title: %s\n\n" "$$(basename $@ .1)" > $^.tmp
@cat $^ >> $^.tmp
$(trace-gen) lowdown -sT man -M section=1 $^.tmp -o $@
$(trace-gen) lowdown -sT man --nroff-nolinks -M section=1 $^.tmp -o $@
@rm $^.tmp
$(d)/%.8: $(d)/src/command-ref/%.md
@printf "Title: %s\n\n" "$$(basename $@ .8)" > $^.tmp
@cat $^ >> $^.tmp
$(trace-gen) lowdown -sT man -M section=8 $^.tmp -o $@
$(trace-gen) lowdown -sT man --nroff-nolinks -M section=8 $^.tmp -o $@
@rm $^.tmp
$(d)/nix.conf.5: $(d)/src/command-ref/conf-file.md
@printf "Title: %s\n\n" "$$(basename $@ .5)" > $^.tmp
@cat $^ >> $^.tmp
$(trace-gen) lowdown -sT man -M section=5 $^.tmp -o $@
$(trace-gen) lowdown -sT man --nroff-nolinks -M section=5 $^.tmp -o $@
@rm $^.tmp
$(d)/src/SUMMARY.md: $(d)/src/SUMMARY.md.in $(d)/src/command-ref/new-cli
@@ -50,11 +50,16 @@ $(d)/src/SUMMARY.md: $(d)/src/SUMMARY.md.in $(d)/src/command-ref/new-cli
$(d)/src/command-ref/new-cli: $(d)/nix.json $(d)/generate-manpage.nix $(bindir)/nix
@rm -rf $@
$(trace-gen) $(nix-eval) --write-to $@ --expr 'import doc/manual/generate-manpage.nix { command = builtins.readFile $<; }'
$(trace-gen) $(nix-eval) --write-to $@.tmp --expr 'import doc/manual/generate-manpage.nix { toplevel = builtins.readFile $<; }'
# @docroot@: https://nixos.org/manual/nix/unstable/contributing/hacking.html#docroot-variable
$(trace-gen) sed -i $@.tmp/*.md -e 's^@docroot@^../..^g'
@mv $@.tmp $@
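# For illustration only (not part of this commit): after the substitution above, a
# generated page under command-ref/new-cli/ that contains a link such as
# [glossary](@docroot@/glossary.md) ends up pointing at ../../glossary.md in the built manual.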
$(d)/src/command-ref/conf-file.md: $(d)/conf-file.json $(d)/generate-options.nix $(d)/src/command-ref/conf-file-prefix.md $(bindir)/nix
@cat doc/manual/src/command-ref/conf-file-prefix.md > $@.tmp
$(trace-gen) $(nix-eval) --expr 'import doc/manual/generate-options.nix (builtins.fromJSON (builtins.readFile $<))' >> $@.tmp
# @docroot@: https://nixos.org/manual/nix/unstable/contributing/hacking.html#docroot-variable
$(trace-gen) $(nix-eval) --expr 'import doc/manual/generate-options.nix (builtins.fromJSON (builtins.readFile $<))' \
| sed -e 's^@docroot@^..^g'>> $@.tmp
@mv $@.tmp $@
$(d)/nix.json: $(bindir)/nix
@@ -67,7 +72,9 @@ $(d)/conf-file.json: $(bindir)/nix
$(d)/src/language/builtins.md: $(d)/builtins.json $(d)/generate-builtins.nix $(d)/src/language/builtins-prefix.md $(bindir)/nix
@cat doc/manual/src/language/builtins-prefix.md > $@.tmp
$(trace-gen) $(nix-eval) --expr 'import doc/manual/generate-builtins.nix (builtins.fromJSON (builtins.readFile $<))' >> $@.tmp
# @docroot@: https://nixos.org/manual/nix/unstable/contributing/hacking.html#docroot-variable
$(trace-gen) $(nix-eval) --expr 'import doc/manual/generate-builtins.nix (builtins.fromJSON (builtins.readFile $<))' \
| sed -e 's^@docroot@^..^g' >> $@.tmp
@cat doc/manual/src/language/builtins-suffix.md >> $@.tmp
@mv $@.tmp $@
@@ -102,6 +109,12 @@ doc/manual/generated/man1/nix3-manpages: $(d)/src/command-ref/new-cli
@touch $@
$(docdir)/manual/index.html: $(MANUAL_SRCS) $(d)/book.toml $(d)/anchors.jq $(d)/custom.css $(d)/src/SUMMARY.md $(d)/src/command-ref/new-cli $(d)/src/command-ref/conf-file.md $(d)/src/language/builtins.md
$(trace-gen) RUST_LOG=warn mdbook build doc/manual -d $(DESTDIR)$(docdir)/manual
$(trace-gen) \
set -euo pipefail; \
RUST_LOG=warn mdbook build doc/manual -d $(DESTDIR)$(docdir)/manual.tmp 2>&1 \
| { grep -Fv "because fragment resolution isn't implemented" || :; }
@rm -rf $(DESTDIR)$(docdir)/manual
@mv $(DESTDIR)$(docdir)/manual.tmp/html $(DESTDIR)$(docdir)/manual
@rm -rf $(DESTDIR)$(docdir)/manual.tmp
endif

doc/manual/redirects.js

@@ -1,330 +1,421 @@
// Redirects from old DocBook manual.
var redirects = {
"#part-advanced-topics": "advanced-topics/advanced-topics.html",
"#chap-tuning-cores-and-jobs": "advanced-topics/cores-vs-jobs.html",
"#chap-diff-hook": "advanced-topics/diff-hook.html",
"#check-dirs-are-unregistered": "advanced-topics/diff-hook.html#check-dirs-are-unregistered",
"#chap-distributed-builds": "advanced-topics/distributed-builds.html",
"#chap-post-build-hook": "advanced-topics/post-build-hook.html",
"#chap-post-build-hook-caveats": "advanced-topics/post-build-hook.html#implementation-caveats",
"#part-command-ref": "command-ref/command-ref.html",
"#conf-allow-import-from-derivation": "command-ref/conf-file.html#conf-allow-import-from-derivation",
"#conf-allow-new-privileges": "command-ref/conf-file.html#conf-allow-new-privileges",
"#conf-allowed-uris": "command-ref/conf-file.html#conf-allowed-uris",
"#conf-allowed-users": "command-ref/conf-file.html#conf-allowed-users",
"#conf-auto-optimise-store": "command-ref/conf-file.html#conf-auto-optimise-store",
"#conf-binary-cache-public-keys": "command-ref/conf-file.html#conf-binary-cache-public-keys",
"#conf-binary-caches": "command-ref/conf-file.html#conf-binary-caches",
"#conf-build-compress-log": "command-ref/conf-file.html#conf-build-compress-log",
"#conf-build-cores": "command-ref/conf-file.html#conf-build-cores",
"#conf-build-extra-chroot-dirs": "command-ref/conf-file.html#conf-build-extra-chroot-dirs",
"#conf-build-extra-sandbox-paths": "command-ref/conf-file.html#conf-build-extra-sandbox-paths",
"#conf-build-fallback": "command-ref/conf-file.html#conf-build-fallback",
"#conf-build-max-jobs": "command-ref/conf-file.html#conf-build-max-jobs",
"#conf-build-max-log-size": "command-ref/conf-file.html#conf-build-max-log-size",
"#conf-build-max-silent-time": "command-ref/conf-file.html#conf-build-max-silent-time",
"#conf-build-repeat": "command-ref/conf-file.html#conf-build-repeat",
"#conf-build-timeout": "command-ref/conf-file.html#conf-build-timeout",
"#conf-build-use-chroot": "command-ref/conf-file.html#conf-build-use-chroot",
"#conf-build-use-sandbox": "command-ref/conf-file.html#conf-build-use-sandbox",
"#conf-build-use-substitutes": "command-ref/conf-file.html#conf-build-use-substitutes",
"#conf-build-users-group": "command-ref/conf-file.html#conf-build-users-group",
"#conf-builders": "command-ref/conf-file.html#conf-builders",
"#conf-builders-use-substitutes": "command-ref/conf-file.html#conf-builders-use-substitutes",
"#conf-compress-build-log": "command-ref/conf-file.html#conf-compress-build-log",
"#conf-connect-timeout": "command-ref/conf-file.html#conf-connect-timeout",
"#conf-cores": "command-ref/conf-file.html#conf-cores",
"#conf-diff-hook": "command-ref/conf-file.html#conf-diff-hook",
"#conf-enforce-determinism": "command-ref/conf-file.html#conf-enforce-determinism",
"#conf-env-keep-derivations": "command-ref/conf-file.html#conf-env-keep-derivations",
"#conf-extra-binary-caches": "command-ref/conf-file.html#conf-extra-binary-caches",
"#conf-extra-platforms": "command-ref/conf-file.html#conf-extra-platforms",
"#conf-extra-sandbox-paths": "command-ref/conf-file.html#conf-extra-sandbox-paths",
"#conf-extra-substituters": "command-ref/conf-file.html#conf-extra-substituters",
"#conf-fallback": "command-ref/conf-file.html#conf-fallback",
"#conf-fsync-metadata": "command-ref/conf-file.html#conf-fsync-metadata",
"#conf-gc-keep-derivations": "command-ref/conf-file.html#conf-gc-keep-derivations",
"#conf-gc-keep-outputs": "command-ref/conf-file.html#conf-gc-keep-outputs",
"#conf-hashed-mirrors": "command-ref/conf-file.html#conf-hashed-mirrors",
"#conf-http-connections": "command-ref/conf-file.html#conf-http-connections",
"#conf-keep-build-log": "command-ref/conf-file.html#conf-keep-build-log",
"#conf-keep-derivations": "command-ref/conf-file.html#conf-keep-derivations",
"#conf-keep-env-derivations": "command-ref/conf-file.html#conf-keep-env-derivations",
"#conf-keep-outputs": "command-ref/conf-file.html#conf-keep-outputs",
"#conf-max-build-log-size": "command-ref/conf-file.html#conf-max-build-log-size",
"#conf-max-free": "command-ref/conf-file.html#conf-max-free",
"#conf-max-jobs": "command-ref/conf-file.html#conf-max-jobs",
"#conf-max-silent-time": "command-ref/conf-file.html#conf-max-silent-time",
"#conf-min-free": "command-ref/conf-file.html#conf-min-free",
"#conf-narinfo-cache-negative-ttl": "command-ref/conf-file.html#conf-narinfo-cache-negative-ttl",
"#conf-narinfo-cache-positive-ttl": "command-ref/conf-file.html#conf-narinfo-cache-positive-ttl",
"#conf-netrc-file": "command-ref/conf-file.html#conf-netrc-file",
"#conf-plugin-files": "command-ref/conf-file.html#conf-plugin-files",
"#conf-post-build-hook": "command-ref/conf-file.html#conf-post-build-hook",
"#conf-pre-build-hook": "command-ref/conf-file.html#conf-pre-build-hook",
"#conf-repeat": "command-ref/conf-file.html#conf-repeat",
"#conf-require-sigs": "command-ref/conf-file.html#conf-require-sigs",
"#conf-restrict-eval": "command-ref/conf-file.html#conf-restrict-eval",
"#conf-run-diff-hook": "command-ref/conf-file.html#conf-run-diff-hook",
"#conf-sandbox": "command-ref/conf-file.html#conf-sandbox",
"#conf-sandbox-dev-shm-size": "command-ref/conf-file.html#conf-sandbox-dev-shm-size",
"#conf-sandbox-paths": "command-ref/conf-file.html#conf-sandbox-paths",
"#conf-secret-key-files": "command-ref/conf-file.html#conf-secret-key-files",
"#conf-show-trace": "command-ref/conf-file.html#conf-show-trace",
"#conf-stalled-download-timeout": "command-ref/conf-file.html#conf-stalled-download-timeout",
"#conf-substitute": "command-ref/conf-file.html#conf-substitute",
"#conf-substituters": "command-ref/conf-file.html#conf-substituters",
"#conf-system": "command-ref/conf-file.html#conf-system",
"#conf-system-features": "command-ref/conf-file.html#conf-system-features",
"#conf-tarball-ttl": "command-ref/conf-file.html#conf-tarball-ttl",
"#conf-timeout": "command-ref/conf-file.html#conf-timeout",
"#conf-trace-function-calls": "command-ref/conf-file.html#conf-trace-function-calls",
"#conf-trusted-binary-caches": "command-ref/conf-file.html#conf-trusted-binary-caches",
"#conf-trusted-public-keys": "command-ref/conf-file.html#conf-trusted-public-keys",
"#conf-trusted-substituters": "command-ref/conf-file.html#conf-trusted-substituters",
"#conf-trusted-users": "command-ref/conf-file.html#conf-trusted-users",
"#extra-sandbox-paths": "command-ref/conf-file.html#extra-sandbox-paths",
"#sec-conf-file": "command-ref/conf-file.html",
"#env-NIX_PATH": "command-ref/env-common.html#env-NIX_PATH",
"#env-common": "command-ref/env-common.html",
"#envar-remote": "command-ref/env-common.html#env-NIX_REMOTE",
"#sec-common-env": "command-ref/env-common.html",
"#ch-files": "command-ref/files.html",
"#ch-main-commands": "command-ref/main-commands.html",
"#opt-out-link": "command-ref/nix-build.html#opt-out-link",
"#sec-nix-build": "command-ref/nix-build.html",
"#sec-nix-channel": "command-ref/nix-channel.html",
"#sec-nix-collect-garbage": "command-ref/nix-collect-garbage.html",
"#sec-nix-copy-closure": "command-ref/nix-copy-closure.html",
"#sec-nix-daemon": "command-ref/nix-daemon.html",
"#refsec-nix-env-install-examples": "command-ref/nix-env.html#examples",
"#rsec-nix-env-install": "command-ref/nix-env.html#operation---install",
"#rsec-nix-env-set": "command-ref/nix-env.html#operation---set",
"#rsec-nix-env-set-flag": "command-ref/nix-env.html#operation---set-flag",
"#rsec-nix-env-upgrade": "command-ref/nix-env.html#operation---upgrade",
"#sec-nix-env": "command-ref/nix-env.html",
"#ssec-version-comparisons": "command-ref/nix-env.html#versions",
"#sec-nix-hash": "command-ref/nix-hash.html",
"#sec-nix-instantiate": "command-ref/nix-instantiate.html",
"#sec-nix-prefetch-url": "command-ref/nix-prefetch-url.html",
"#sec-nix-shell": "command-ref/nix-shell.html",
"#ssec-nix-shell-shebang": "command-ref/nix-shell.html#use-as-a--interpreter",
"#nixref-queries": "command-ref/nix-store.html#queries",
"#opt-add-root": "command-ref/nix-store.html#opt-add-root",
"#refsec-nix-store-dump": "command-ref/nix-store.html#operation---dump",
"#refsec-nix-store-export": "command-ref/nix-store.html#operation---export",
"#refsec-nix-store-import": "command-ref/nix-store.html#operation---import",
"#refsec-nix-store-query": "command-ref/nix-store.html#operation---query",
"#refsec-nix-store-verify": "command-ref/nix-store.html#operation---verify",
"#rsec-nix-store-gc": "command-ref/nix-store.html#operation---gc",
"#rsec-nix-store-generate-binary-cache-key": "command-ref/nix-store.html#operation---generate-binary-cache-key",
"#rsec-nix-store-realise": "command-ref/nix-store.html#operation---realise",
"#rsec-nix-store-serve": "command-ref/nix-store.html#operation---serve",
"#sec-nix-store": "command-ref/nix-store.html",
"#opt-I": "command-ref/opt-common.html#opt-I",
"#opt-attr": "command-ref/opt-common.html#opt-attr",
"#opt-common": "command-ref/opt-common.html",
"#opt-cores": "command-ref/opt-common.html#opt-cores",
"#opt-log-format": "command-ref/opt-common.html#opt-log-format",
"#opt-max-jobs": "command-ref/opt-common.html#opt-max-jobs",
"#opt-max-silent-time": "command-ref/opt-common.html#opt-max-silent-time",
"#opt-timeout": "command-ref/opt-common.html#opt-timeout",
"#sec-common-options": "command-ref/opt-common.html",
"#ch-utilities": "command-ref/utilities.html",
"#chap-hacking": "contributing/hacking.html",
"#adv-attr-allowSubstitutes": "language/advanced-attributes.html#adv-attr-allowSubstitutes",
"#adv-attr-allowedReferences": "language/advanced-attributes.html#adv-attr-allowedReferences",
"#adv-attr-allowedRequisites": "language/advanced-attributes.html#adv-attr-allowedRequisites",
"#adv-attr-disallowedReferences": "language/advanced-attributes.html#adv-attr-disallowedReferences",
"#adv-attr-disallowedRequisites": "language/advanced-attributes.html#adv-attr-disallowedRequisites",
"#adv-attr-exportReferencesGraph": "language/advanced-attributes.html#adv-attr-exportReferencesGraph",
"#adv-attr-impureEnvVars": "language/advanced-attributes.html#adv-attr-impureEnvVars",
"#adv-attr-outputHash": "language/advanced-attributes.html#adv-attr-outputHash",
"#adv-attr-outputHashAlgo": "language/advanced-attributes.html#adv-attr-outputHashAlgo",
"#adv-attr-outputHashMode": "language/advanced-attributes.html#adv-attr-outputHashMode",
"#adv-attr-passAsFile": "language/advanced-attributes.html#adv-attr-passAsFile",
"#adv-attr-preferLocalBuild": "language/advanced-attributes.html#adv-attr-preferLocalBuild",
"#fixed-output-drvs": "language/advanced-attributes.html#adv-attr-outputHash",
"#sec-advanced-attributes": "language/advanced-attributes.html",
"#builtin-abort": "language/builtins.html#builtins-abort",
"#builtin-add": "language/builtins.html#builtins-add",
"#builtin-all": "language/builtins.html#builtins-all",
"#builtin-any": "language/builtins.html#builtins-any",
"#builtin-attrNames": "language/builtins.html#builtins-attrNames",
"#builtin-attrValues": "language/builtins.html#builtins-attrValues",
"#builtin-baseNameOf": "language/builtins.html#builtins-baseNameOf",
"#builtin-bitAnd": "language/builtins.html#builtins-bitAnd",
"#builtin-bitOr": "language/builtins.html#builtins-bitOr",
"#builtin-bitXor": "language/builtins.html#builtins-bitXor",
"#builtin-builtins": "language/builtins.html#builtins-builtins",
"#builtin-compareVersions": "language/builtins.html#builtins-compareVersions",
"#builtin-concatLists": "language/builtins.html#builtins-concatLists",
"#builtin-concatStringsSep": "language/builtins.html#builtins-concatStringsSep",
"#builtin-currentSystem": "language/builtins.html#builtins-currentSystem",
"#builtin-deepSeq": "language/builtins.html#builtins-deepSeq",
"#builtin-derivation": "language/builtins.html#builtins-derivation",
"#builtin-dirOf": "language/builtins.html#builtins-dirOf",
"#builtin-div": "language/builtins.html#builtins-div",
"#builtin-elem": "language/builtins.html#builtins-elem",
"#builtin-elemAt": "language/builtins.html#builtins-elemAt",
"#builtin-fetchGit": "language/builtins.html#builtins-fetchGit",
"#builtin-fetchTarball": "language/builtins.html#builtins-fetchTarball",
"#builtin-fetchurl": "language/builtins.html#builtins-fetchurl",
"#builtin-filterSource": "language/builtins.html#builtins-filterSource",
"#builtin-foldl-prime": "language/builtins.html#builtins-foldl-prime",
"#builtin-fromJSON": "language/builtins.html#builtins-fromJSON",
"#builtin-functionArgs": "language/builtins.html#builtins-functionArgs",
"#builtin-genList": "language/builtins.html#builtins-genList",
"#builtin-getAttr": "language/builtins.html#builtins-getAttr",
"#builtin-getEnv": "language/builtins.html#builtins-getEnv",
"#builtin-hasAttr": "language/builtins.html#builtins-hasAttr",
"#builtin-hashFile": "language/builtins.html#builtins-hashFile",
"#builtin-hashString": "language/builtins.html#builtins-hashString",
"#builtin-head": "language/builtins.html#builtins-head",
"#builtin-import": "language/builtins.html#builtins-import",
"#builtin-intersectAttrs": "language/builtins.html#builtins-intersectAttrs",
"#builtin-isAttrs": "language/builtins.html#builtins-isAttrs",
"#builtin-isBool": "language/builtins.html#builtins-isBool",
"#builtin-isFloat": "language/builtins.html#builtins-isFloat",
"#builtin-isFunction": "language/builtins.html#builtins-isFunction",
"#builtin-isInt": "language/builtins.html#builtins-isInt",
"#builtin-isList": "language/builtins.html#builtins-isList",
"#builtin-isNull": "language/builtins.html#builtins-isNull",
"#builtin-isString": "language/builtins.html#builtins-isString",
"#builtin-length": "language/builtins.html#builtins-length",
"#builtin-lessThan": "language/builtins.html#builtins-lessThan",
"#builtin-listToAttrs": "language/builtins.html#builtins-listToAttrs",
"#builtin-map": "language/builtins.html#builtins-map",
"#builtin-match": "language/builtins.html#builtins-match",
"#builtin-mul": "language/builtins.html#builtins-mul",
"#builtin-parseDrvName": "language/builtins.html#builtins-parseDrvName",
"#builtin-path": "language/builtins.html#builtins-path",
"#builtin-pathExists": "language/builtins.html#builtins-pathExists",
"#builtin-placeholder": "language/builtins.html#builtins-placeholder",
"#builtin-readDir": "language/builtins.html#builtins-readDir",
"#builtin-readFile": "language/builtins.html#builtins-readFile",
"#builtin-removeAttrs": "language/builtins.html#builtins-removeAttrs",
"#builtin-replaceStrings": "language/builtins.html#builtins-replaceStrings",
"#builtin-seq": "language/builtins.html#builtins-seq",
"#builtin-sort": "language/builtins.html#builtins-sort",
"#builtin-split": "language/builtins.html#builtins-split",
"#builtin-splitVersion": "language/builtins.html#builtins-splitVersion",
"#builtin-stringLength": "language/builtins.html#builtins-stringLength",
"#builtin-sub": "language/builtins.html#builtins-sub",
"#builtin-substring": "language/builtins.html#builtins-substring",
"#builtin-tail": "language/builtins.html#builtins-tail",
"#builtin-throw": "language/builtins.html#builtins-throw",
"#builtin-toFile": "language/builtins.html#builtins-toFile",
"#builtin-toJSON": "language/builtins.html#builtins-toJSON",
"#builtin-toPath": "language/builtins.html#builtins-toPath",
"#builtin-toString": "language/builtins.html#builtins-toString",
"#builtin-toXML": "language/builtins.html#builtins-toXML",
"#builtin-trace": "language/builtins.html#builtins-trace",
"#builtin-tryEval": "language/builtins.html#builtins-tryEval",
"#builtin-typeOf": "language/builtins.html#builtins-typeOf",
"#ssec-builtins": "language/builtins.html",
"#attr-system": "language/derivations.html#attr-system",
"#ssec-derivation": "language/derivations.html",
"#ch-expression-language": "language/index.html",
"#sec-constructs": "language/constructs.html",
"#sect-let-language": "language/constructs.html#let-language",
"#ss-functions": "language/constructs.html#functions",
"#sec-language-operators": "language/operators.html",
"#table-operators": "language/operators.html",
"#ssec-values": "language/values.html",
"#gloss-closure": "glossary.html#gloss-closure",
"#gloss-derivation": "glossary.html#gloss-derivation",
"#gloss-deriver": "glossary.html#gloss-deriver",
"#gloss-nar": "glossary.html#gloss-nar",
"#gloss-output-path": "glossary.html#gloss-output-path",
"#gloss-profile": "glossary.html#gloss-profile",
"#gloss-reachable": "glossary.html#gloss-reachable",
"#gloss-reference": "glossary.html#gloss-reference",
"#gloss-substitute": "glossary.html#gloss-substitute",
"#gloss-user-env": "glossary.html#gloss-user-env",
"#gloss-validity": "glossary.html#gloss-validity",
"#part-glossary": "glossary.html",
"#sec-building-source": "installation/building-source.html",
"#ch-env-variables": "installation/env-variables.html",
"#sec-installer-proxy-settings": "installation/env-variables.html#proxy-environment-variables",
"#sec-nix-ssl-cert-file": "installation/env-variables.html#nix_ssl_cert_file",
"#sec-nix-ssl-cert-file-with-nix-daemon-and-macos": "installation/env-variables.html#nix_ssl_cert_file-with-macos-and-the-nix-daemon",
"#chap-installation": "installation/installation.html",
"#ch-installing-binary": "installation/installing-binary.html",
"#sect-macos-installation": "installation/installing-binary.html#macos-installation",
"#sect-macos-installation-change-store-prefix": "installation/installing-binary.html#macos-installation",
"#sect-macos-installation-encrypted-volume": "installation/installing-binary.html#macos-installation",
"#sect-macos-installation-recommended-notes": "installation/installing-binary.html#macos-installation",
"#sect-macos-installation-symlink": "installation/installing-binary.html#macos-installation",
"#sect-multi-user-installation": "installation/installing-binary.html#multi-user-installation",
"#sect-nix-install-binary-tarball": "installation/installing-binary.html#installing-from-a-binary-tarball",
"#sect-nix-install-pinned-version-url": "installation/installing-binary.html#installing-a-pinned-nix-version-from-a-url",
"#sect-single-user-installation": "installation/installing-binary.html#single-user-installation",
"#ch-installing-source": "installation/installing-source.html",
"#ssec-multi-user": "installation/multi-user.html",
"#ch-nix-security": "installation/nix-security.html",
"#sec-obtaining-source": "installation/obtaining-source.html",
"#sec-prerequisites-source": "installation/prerequisites-source.html",
"#sec-single-user": "installation/single-user.html",
"#ch-supported-platforms": "installation/supported-platforms.html",
"#ch-upgrading-nix": "installation/upgrading.html",
"#ch-about-nix": "introduction.html",
"#chap-introduction": "introduction.html",
"#ch-basic-package-mgmt": "package-management/basic-package-mgmt.html",
"#ssec-binary-cache-substituter": "package-management/binary-cache-substituter.html",
"#sec-channels": "package-management/channels.html",
"#ssec-copy-closure": "package-management/copy-closure.html",
"#sec-garbage-collection": "package-management/garbage-collection.html",
"#ssec-gc-roots": "package-management/garbage-collector-roots.html",
"#chap-package-management": "package-management/package-management.html",
"#sec-profiles": "package-management/profiles.html",
"#ssec-s3-substituter": "package-management/s3-substituter.html",
"#ssec-s3-substituter-anonymous-reads": "package-management/s3-substituter.html#anonymous-reads-to-your-s3-compatible-binary-cache",
"#ssec-s3-substituter-authenticated-reads": "package-management/s3-substituter.html#authenticated-reads-to-your-s3-binary-cache",
"#ssec-s3-substituter-authenticated-writes": "package-management/s3-substituter.html#authenticated-writes-to-your-s3-compatible-binary-cache",
"#sec-sharing-packages": "package-management/sharing-packages.html",
"#ssec-ssh-substituter": "package-management/ssh-substituter.html",
"#chap-quick-start": "quick-start.html",
"#sec-relnotes": "release-notes/release-notes.html",
"#ch-relnotes-0.10.1": "release-notes/rl-0.10.1.html",
"#ch-relnotes-0.10": "release-notes/rl-0.10.html",
"#ssec-relnotes-0.11": "release-notes/rl-0.11.html",
"#ssec-relnotes-0.12": "release-notes/rl-0.12.html",
"#ssec-relnotes-0.13": "release-notes/rl-0.13.html",
"#ssec-relnotes-0.14": "release-notes/rl-0.14.html",
"#ssec-relnotes-0.15": "release-notes/rl-0.15.html",
"#ssec-relnotes-0.16": "release-notes/rl-0.16.html",
"#ch-relnotes-0.5": "release-notes/rl-0.5.html",
"#ch-relnotes-0.6": "release-notes/rl-0.6.html",
"#ch-relnotes-0.7": "release-notes/rl-0.7.html",
"#ch-relnotes-0.8.1": "release-notes/rl-0.8.1.html",
"#ch-relnotes-0.8": "release-notes/rl-0.8.html",
"#ch-relnotes-0.9.1": "release-notes/rl-0.9.1.html",
"#ch-relnotes-0.9.2": "release-notes/rl-0.9.2.html",
"#ch-relnotes-0.9": "release-notes/rl-0.9.html",
"#ssec-relnotes-1.0": "release-notes/rl-1.0.html",
"#ssec-relnotes-1.1": "release-notes/rl-1.1.html",
"#ssec-relnotes-1.10": "release-notes/rl-1.10.html",
"#ssec-relnotes-1.11.10": "release-notes/rl-1.11.10.html",
"#ssec-relnotes-1.11": "release-notes/rl-1.11.html",
"#ssec-relnotes-1.2": "release-notes/rl-1.2.html",
"#ssec-relnotes-1.3": "release-notes/rl-1.3.html",
"#ssec-relnotes-1.4": "release-notes/rl-1.4.html",
"#ssec-relnotes-1.5.1": "release-notes/rl-1.5.1.html",
"#ssec-relnotes-1.5.2": "release-notes/rl-1.5.2.html",
"#ssec-relnotes-1.5": "release-notes/rl-1.5.html",
"#ssec-relnotes-1.6.1": "release-notes/rl-1.6.1.html",
"#ssec-relnotes-1.6.0": "release-notes/rl-1.6.html",
"#ssec-relnotes-1.7": "release-notes/rl-1.7.html",
"#ssec-relnotes-1.8": "release-notes/rl-1.8.html",
"#ssec-relnotes-1.9": "release-notes/rl-1.9.html",
"#ssec-relnotes-2.0": "release-notes/rl-2.0.html",
"#ssec-relnotes-2.1": "release-notes/rl-2.1.html",
"#ssec-relnotes-2.2": "release-notes/rl-2.2.html",
"#ssec-relnotes-2.3": "release-notes/rl-2.3.html"
// redirect rules for anchors ensure backwards compatibility of URLs.
// this must be done on the client side, as web servers do not see the anchor part of the URL.
// redirections are declared as follows:
// each entry has as its key a path matching the requested URL path, relative to the mdBook document root.
//
// IMPORTANT: it must specify the full path with file name and suffix
//
// each entry is itself a set of key-value pairs, where
// - keys are anchors on the matched path.
// - values are redirection targets relative to the current path.
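// Illustrative sketch, not part of this commit: one way a table of this shape could
// be consumed on page load. The function name and the pageRelativeToRoot parameter
// are assumptions; the actual logic elsewhere in redirects.js may differ.
function applyRedirects(table, pageRelativeToRoot) {
  const anchor = document.location.hash.slice(1); // requested anchor, without "#"
  const target = (table[pageRelativeToRoot] || {})[anchor];
  if (target !== undefined) {
    // targets are relative to the current path, so replace() resolves them directly
    document.location.replace(target);
  }
}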
const redirects = {
"index.html": {
"part-advanced-topics": "advanced-topics/advanced-topics.html",
"chap-tuning-cores-and-jobs": "advanced-topics/cores-vs-jobs.html",
"chap-diff-hook": "advanced-topics/diff-hook.html",
"check-dirs-are-unregistered": "advanced-topics/diff-hook.html#check-dirs-are-unregistered",
"chap-distributed-builds": "advanced-topics/distributed-builds.html",
"chap-post-build-hook": "advanced-topics/post-build-hook.html",
"chap-post-build-hook-caveats": "advanced-topics/post-build-hook.html#implementation-caveats",
"part-command-ref": "command-ref/command-ref.html",
"conf-allow-import-from-derivation": "command-ref/conf-file.html#conf-allow-import-from-derivation",
"conf-allow-new-privileges": "command-ref/conf-file.html#conf-allow-new-privileges",
"conf-allowed-uris": "command-ref/conf-file.html#conf-allowed-uris",
"conf-allowed-users": "command-ref/conf-file.html#conf-allowed-users",
"conf-auto-optimise-store": "command-ref/conf-file.html#conf-auto-optimise-store",
"conf-binary-cache-public-keys": "command-ref/conf-file.html#conf-binary-cache-public-keys",
"conf-binary-caches": "command-ref/conf-file.html#conf-binary-caches",
"conf-build-compress-log": "command-ref/conf-file.html#conf-build-compress-log",
"conf-build-cores": "command-ref/conf-file.html#conf-build-cores",
"conf-build-extra-chroot-dirs": "command-ref/conf-file.html#conf-build-extra-chroot-dirs",
"conf-build-extra-sandbox-paths": "command-ref/conf-file.html#conf-build-extra-sandbox-paths",
"conf-build-fallback": "command-ref/conf-file.html#conf-build-fallback",
"conf-build-max-jobs": "command-ref/conf-file.html#conf-build-max-jobs",
"conf-build-max-log-size": "command-ref/conf-file.html#conf-build-max-log-size",
"conf-build-max-silent-time": "command-ref/conf-file.html#conf-build-max-silent-time",
"conf-build-timeout": "command-ref/conf-file.html#conf-build-timeout",
"conf-build-use-chroot": "command-ref/conf-file.html#conf-build-use-chroot",
"conf-build-use-sandbox": "command-ref/conf-file.html#conf-build-use-sandbox",
"conf-build-use-substitutes": "command-ref/conf-file.html#conf-build-use-substitutes",
"conf-build-users-group": "command-ref/conf-file.html#conf-build-users-group",
"conf-builders": "command-ref/conf-file.html#conf-builders",
"conf-builders-use-substitutes": "command-ref/conf-file.html#conf-builders-use-substitutes",
"conf-compress-build-log": "command-ref/conf-file.html#conf-compress-build-log",
"conf-connect-timeout": "command-ref/conf-file.html#conf-connect-timeout",
"conf-cores": "command-ref/conf-file.html#conf-cores",
"conf-diff-hook": "command-ref/conf-file.html#conf-diff-hook",
"conf-env-keep-derivations": "command-ref/conf-file.html#conf-env-keep-derivations",
"conf-extra-binary-caches": "command-ref/conf-file.html#conf-extra-binary-caches",
"conf-extra-platforms": "command-ref/conf-file.html#conf-extra-platforms",
"conf-extra-sandbox-paths": "command-ref/conf-file.html#conf-extra-sandbox-paths",
"conf-extra-substituters": "command-ref/conf-file.html#conf-extra-substituters",
"conf-fallback": "command-ref/conf-file.html#conf-fallback",
"conf-fsync-metadata": "command-ref/conf-file.html#conf-fsync-metadata",
"conf-gc-keep-derivations": "command-ref/conf-file.html#conf-gc-keep-derivations",
"conf-gc-keep-outputs": "command-ref/conf-file.html#conf-gc-keep-outputs",
"conf-hashed-mirrors": "command-ref/conf-file.html#conf-hashed-mirrors",
"conf-http-connections": "command-ref/conf-file.html#conf-http-connections",
"conf-keep-build-log": "command-ref/conf-file.html#conf-keep-build-log",
"conf-keep-derivations": "command-ref/conf-file.html#conf-keep-derivations",
"conf-keep-env-derivations": "command-ref/conf-file.html#conf-keep-env-derivations",
"conf-keep-outputs": "command-ref/conf-file.html#conf-keep-outputs",
"conf-max-build-log-size": "command-ref/conf-file.html#conf-max-build-log-size",
"conf-max-free": "command-ref/conf-file.html#conf-max-free",
"conf-max-jobs": "command-ref/conf-file.html#conf-max-jobs",
"conf-max-silent-time": "command-ref/conf-file.html#conf-max-silent-time",
"conf-min-free": "command-ref/conf-file.html#conf-min-free",
"conf-narinfo-cache-negative-ttl": "command-ref/conf-file.html#conf-narinfo-cache-negative-ttl",
"conf-narinfo-cache-positive-ttl": "command-ref/conf-file.html#conf-narinfo-cache-positive-ttl",
"conf-netrc-file": "command-ref/conf-file.html#conf-netrc-file",
"conf-plugin-files": "command-ref/conf-file.html#conf-plugin-files",
"conf-post-build-hook": "command-ref/conf-file.html#conf-post-build-hook",
"conf-pre-build-hook": "command-ref/conf-file.html#conf-pre-build-hook",
"conf-require-sigs": "command-ref/conf-file.html#conf-require-sigs",
"conf-restrict-eval": "command-ref/conf-file.html#conf-restrict-eval",
"conf-run-diff-hook": "command-ref/conf-file.html#conf-run-diff-hook",
"conf-sandbox": "command-ref/conf-file.html#conf-sandbox",
"conf-sandbox-dev-shm-size": "command-ref/conf-file.html#conf-sandbox-dev-shm-size",
"conf-sandbox-paths": "command-ref/conf-file.html#conf-sandbox-paths",
"conf-secret-key-files": "command-ref/conf-file.html#conf-secret-key-files",
"conf-show-trace": "command-ref/conf-file.html#conf-show-trace",
"conf-stalled-download-timeout": "command-ref/conf-file.html#conf-stalled-download-timeout",
"conf-substitute": "command-ref/conf-file.html#conf-substitute",
"conf-substituters": "command-ref/conf-file.html#conf-substituters",
"conf-system": "command-ref/conf-file.html#conf-system",
"conf-system-features": "command-ref/conf-file.html#conf-system-features",
"conf-tarball-ttl": "command-ref/conf-file.html#conf-tarball-ttl",
"conf-timeout": "command-ref/conf-file.html#conf-timeout",
"conf-trace-function-calls": "command-ref/conf-file.html#conf-trace-function-calls",
"conf-trusted-binary-caches": "command-ref/conf-file.html#conf-trusted-binary-caches",
"conf-trusted-public-keys": "command-ref/conf-file.html#conf-trusted-public-keys",
"conf-trusted-substituters": "command-ref/conf-file.html#conf-trusted-substituters",
"conf-trusted-users": "command-ref/conf-file.html#conf-trusted-users",
"extra-sandbox-paths": "command-ref/conf-file.html#extra-sandbox-paths",
"sec-conf-file": "command-ref/conf-file.html",
"env-NIX_PATH": "command-ref/env-common.html#env-NIX_PATH",
"env-common": "command-ref/env-common.html",
"envar-remote": "command-ref/env-common.html#env-NIX_REMOTE",
"sec-common-env": "command-ref/env-common.html",
"ch-files": "command-ref/files.html",
"ch-main-commands": "command-ref/main-commands.html",
"opt-out-link": "command-ref/nix-build.html#opt-out-link",
"sec-nix-build": "command-ref/nix-build.html",
"sec-nix-channel": "command-ref/nix-channel.html",
"sec-nix-collect-garbage": "command-ref/nix-collect-garbage.html",
"sec-nix-copy-closure": "command-ref/nix-copy-closure.html",
"sec-nix-daemon": "command-ref/nix-daemon.html",
"refsec-nix-env-install-examples": "command-ref/nix-env.html#examples",
"rsec-nix-env-install": "command-ref/nix-env.html#operation---install",
"rsec-nix-env-set": "command-ref/nix-env.html#operation---set",
"rsec-nix-env-set-flag": "command-ref/nix-env.html#operation---set-flag",
"rsec-nix-env-upgrade": "command-ref/nix-env.html#operation---upgrade",
"sec-nix-env": "command-ref/nix-env.html",
"ssec-version-comparisons": "command-ref/nix-env.html#versions",
"sec-nix-hash": "command-ref/nix-hash.html",
"sec-nix-instantiate": "command-ref/nix-instantiate.html",
"sec-nix-prefetch-url": "command-ref/nix-prefetch-url.html",
"sec-nix-shell": "command-ref/nix-shell.html",
"ssec-nix-shell-shebang": "command-ref/nix-shell.html#use-as-a--interpreter",
"nixref-queries": "command-ref/nix-store.html#queries",
"opt-add-root": "command-ref/nix-store.html#opt-add-root",
"refsec-nix-store-dump": "command-ref/nix-store.html#operation---dump",
"refsec-nix-store-export": "command-ref/nix-store.html#operation---export",
"refsec-nix-store-import": "command-ref/nix-store.html#operation---import",
"refsec-nix-store-query": "command-ref/nix-store.html#operation---query",
"refsec-nix-store-verify": "command-ref/nix-store.html#operation---verify",
"rsec-nix-store-gc": "command-ref/nix-store.html#operation---gc",
"rsec-nix-store-generate-binary-cache-key": "command-ref/nix-store.html#operation---generate-binary-cache-key",
"rsec-nix-store-realise": "command-ref/nix-store.html#operation---realise",
"rsec-nix-store-serve": "command-ref/nix-store.html#operation---serve",
"sec-nix-store": "command-ref/nix-store.html",
"opt-I": "command-ref/opt-common.html#opt-I",
"opt-attr": "command-ref/opt-common.html#opt-attr",
"opt-common": "command-ref/opt-common.html",
"opt-cores": "command-ref/opt-common.html#opt-cores",
"opt-log-format": "command-ref/opt-common.html#opt-log-format",
"opt-max-jobs": "command-ref/opt-common.html#opt-max-jobs",
"opt-max-silent-time": "command-ref/opt-common.html#opt-max-silent-time",
"opt-timeout": "command-ref/opt-common.html#opt-timeout",
"sec-common-options": "command-ref/opt-common.html",
"ch-utilities": "command-ref/utilities.html",
"chap-hacking": "contributing/hacking.html",
"adv-attr-allowSubstitutes": "language/advanced-attributes.html#adv-attr-allowSubstitutes",
"adv-attr-allowedReferences": "language/advanced-attributes.html#adv-attr-allowedReferences",
"adv-attr-allowedRequisites": "language/advanced-attributes.html#adv-attr-allowedRequisites",
"adv-attr-disallowedReferences": "language/advanced-attributes.html#adv-attr-disallowedReferences",
"adv-attr-disallowedRequisites": "language/advanced-attributes.html#adv-attr-disallowedRequisites",
"adv-attr-exportReferencesGraph": "language/advanced-attributes.html#adv-attr-exportReferencesGraph",
"adv-attr-impureEnvVars": "language/advanced-attributes.html#adv-attr-impureEnvVars",
"adv-attr-outputHash": "language/advanced-attributes.html#adv-attr-outputHash",
"adv-attr-outputHashAlgo": "language/advanced-attributes.html#adv-attr-outputHashAlgo",
"adv-attr-outputHashMode": "language/advanced-attributes.html#adv-attr-outputHashMode",
"adv-attr-passAsFile": "language/advanced-attributes.html#adv-attr-passAsFile",
"adv-attr-preferLocalBuild": "language/advanced-attributes.html#adv-attr-preferLocalBuild",
"fixed-output-drvs": "language/advanced-attributes.html#adv-attr-outputHash",
"sec-advanced-attributes": "language/advanced-attributes.html",
"builtin-abort": "language/builtins.html#builtins-abort",
"builtin-add": "language/builtins.html#builtins-add",
"builtin-all": "language/builtins.html#builtins-all",
"builtin-any": "language/builtins.html#builtins-any",
"builtin-attrNames": "language/builtins.html#builtins-attrNames",
"builtin-attrValues": "language/builtins.html#builtins-attrValues",
"builtin-baseNameOf": "language/builtins.html#builtins-baseNameOf",
"builtin-bitAnd": "language/builtins.html#builtins-bitAnd",
"builtin-bitOr": "language/builtins.html#builtins-bitOr",
"builtin-bitXor": "language/builtins.html#builtins-bitXor",
"builtin-builtins": "language/builtins.html#builtins-builtins",
"builtin-compareVersions": "language/builtins.html#builtins-compareVersions",
"builtin-concatLists": "language/builtins.html#builtins-concatLists",
"builtin-concatStringsSep": "language/builtins.html#builtins-concatStringsSep",
"builtin-currentSystem": "language/builtins.html#builtins-currentSystem",
"builtin-deepSeq": "language/builtins.html#builtins-deepSeq",
"builtin-derivation": "language/builtins.html#builtins-derivation",
"builtin-dirOf": "language/builtins.html#builtins-dirOf",
"builtin-div": "language/builtins.html#builtins-div",
"builtin-elem": "language/builtins.html#builtins-elem",
"builtin-elemAt": "language/builtins.html#builtins-elemAt",
"builtin-fetchGit": "language/builtins.html#builtins-fetchGit",
"builtin-fetchTarball": "language/builtins.html#builtins-fetchTarball",
"builtin-fetchurl": "language/builtins.html#builtins-fetchurl",
"builtin-filterSource": "language/builtins.html#builtins-filterSource",
"builtin-foldl-prime": "language/builtins.html#builtins-foldl-prime",
"builtin-fromJSON": "language/builtins.html#builtins-fromJSON",
"builtin-functionArgs": "language/builtins.html#builtins-functionArgs",
"builtin-genList": "language/builtins.html#builtins-genList",
"builtin-getAttr": "language/builtins.html#builtins-getAttr",
"builtin-getEnv": "language/builtins.html#builtins-getEnv",
"builtin-hasAttr": "language/builtins.html#builtins-hasAttr",
"builtin-hashFile": "language/builtins.html#builtins-hashFile",
"builtin-hashString": "language/builtins.html#builtins-hashString",
"builtin-head": "language/builtins.html#builtins-head",
"builtin-import": "language/builtins.html#builtins-import",
"builtin-intersectAttrs": "language/builtins.html#builtins-intersectAttrs",
"builtin-isAttrs": "language/builtins.html#builtins-isAttrs",
"builtin-isBool": "language/builtins.html#builtins-isBool",
"builtin-isFloat": "language/builtins.html#builtins-isFloat",
"builtin-isFunction": "language/builtins.html#builtins-isFunction",
"builtin-isInt": "language/builtins.html#builtins-isInt",
"builtin-isList": "language/builtins.html#builtins-isList",
"builtin-isNull": "language/builtins.html#builtins-isNull",
"builtin-isString": "language/builtins.html#builtins-isString",
"builtin-length": "language/builtins.html#builtins-length",
"builtin-lessThan": "language/builtins.html#builtins-lessThan",
"builtin-listToAttrs": "language/builtins.html#builtins-listToAttrs",
"builtin-map": "language/builtins.html#builtins-map",
"builtin-match": "language/builtins.html#builtins-match",
"builtin-mul": "language/builtins.html#builtins-mul",
"builtin-parseDrvName": "language/builtins.html#builtins-parseDrvName",
"builtin-path": "language/builtins.html#builtins-path",
"builtin-pathExists": "language/builtins.html#builtins-pathExists",
"builtin-placeholder": "language/builtins.html#builtins-placeholder",
"builtin-readDir": "language/builtins.html#builtins-readDir",
"builtin-readFile": "language/builtins.html#builtins-readFile",
"builtin-removeAttrs": "language/builtins.html#builtins-removeAttrs",
"builtin-replaceStrings": "language/builtins.html#builtins-replaceStrings",
"builtin-seq": "language/builtins.html#builtins-seq",
"builtin-sort": "language/builtins.html#builtins-sort",
"builtin-split": "language/builtins.html#builtins-split",
"builtin-splitVersion": "language/builtins.html#builtins-splitVersion",
"builtin-stringLength": "language/builtins.html#builtins-stringLength",
"builtin-sub": "language/builtins.html#builtins-sub",
"builtin-substring": "language/builtins.html#builtins-substring",
"builtin-tail": "language/builtins.html#builtins-tail",
"builtin-throw": "language/builtins.html#builtins-throw",
"builtin-toFile": "language/builtins.html#builtins-toFile",
"builtin-toJSON": "language/builtins.html#builtins-toJSON",
"builtin-toPath": "language/builtins.html#builtins-toPath",
"builtin-toString": "language/builtins.html#builtins-toString",
"builtin-toXML": "language/builtins.html#builtins-toXML",
"builtin-trace": "language/builtins.html#builtins-trace",
"builtin-tryEval": "language/builtins.html#builtins-tryEval",
"builtin-typeOf": "language/builtins.html#builtins-typeOf",
"ssec-builtins": "language/builtins.html",
"attr-system": "language/derivations.html#attr-system",
"ssec-derivation": "language/derivations.html",
"ch-expression-language": "language/index.html",
"sec-constructs": "language/constructs.html",
"sect-let-language": "language/constructs.html#let-language",
"ss-functions": "language/constructs.html#functions",
"sec-language-operators": "language/operators.html",
"table-operators": "language/operators.html",
"ssec-values": "language/values.html",
"gloss-closure": "glossary.html#gloss-closure",
"gloss-derivation": "glossary.html#gloss-derivation",
"gloss-deriver": "glossary.html#gloss-deriver",
"gloss-nar": "glossary.html#gloss-nar",
"gloss-output-path": "glossary.html#gloss-output-path",
"gloss-profile": "glossary.html#gloss-profile",
"gloss-reachable": "glossary.html#gloss-reachable",
"gloss-reference": "glossary.html#gloss-reference",
"gloss-substitute": "glossary.html#gloss-substitute",
"gloss-user-env": "glossary.html#gloss-user-env",
"gloss-validity": "glossary.html#gloss-validity",
"part-glossary": "glossary.html",
"sec-building-source": "installation/building-source.html",
"ch-env-variables": "installation/env-variables.html",
"sec-installer-proxy-settings": "installation/env-variables.html#proxy-environment-variables",
"sec-nix-ssl-cert-file": "installation/env-variables.html#nix_ssl_cert_file",
"sec-nix-ssl-cert-file-with-nix-daemon-and-macos": "installation/env-variables.html#nix_ssl_cert_file-with-macos-and-the-nix-daemon",
"chap-installation": "installation/installation.html",
"ch-installing-binary": "installation/installing-binary.html",
"sect-macos-installation": "installation/installing-binary.html#macos-installation",
"sect-macos-installation-change-store-prefix": "installation/installing-binary.html#macos-installation",
"sect-macos-installation-encrypted-volume": "installation/installing-binary.html#macos-installation",
"sect-macos-installation-recommended-notes": "installation/installing-binary.html#macos-installation",
"sect-macos-installation-symlink": "installation/installing-binary.html#macos-installation",
"sect-multi-user-installation": "installation/installing-binary.html#multi-user-installation",
"sect-nix-install-binary-tarball": "installation/installing-binary.html#installing-from-a-binary-tarball",
"sect-nix-install-pinned-version-url": "installation/installing-binary.html#installing-a-pinned-nix-version-from-a-url",
"sect-single-user-installation": "installation/installing-binary.html#single-user-installation",
"ch-installing-source": "installation/installing-source.html",
"ssec-multi-user": "installation/multi-user.html",
"ch-nix-security": "installation/nix-security.html",
"sec-obtaining-source": "installation/obtaining-source.html",
"sec-prerequisites-source": "installation/prerequisites-source.html",
"sec-single-user": "installation/single-user.html",
"ch-supported-platforms": "installation/supported-platforms.html",
"ch-upgrading-nix": "installation/upgrading.html",
"ch-about-nix": "introduction.html",
"chap-introduction": "introduction.html",
"ch-basic-package-mgmt": "package-management/basic-package-mgmt.html",
"ssec-binary-cache-substituter": "package-management/binary-cache-substituter.html",
"sec-channels": "package-management/channels.html",
"ssec-copy-closure": "package-management/copy-closure.html",
"sec-garbage-collection": "package-management/garbage-collection.html",
"ssec-gc-roots": "package-management/garbage-collector-roots.html",
"chap-package-management": "package-management/package-management.html",
"sec-profiles": "package-management/profiles.html",
"ssec-s3-substituter": "package-management/s3-substituter.html",
"ssec-s3-substituter-anonymous-reads": "package-management/s3-substituter.html#anonymous-reads-to-your-s3-compatible-binary-cache",
"ssec-s3-substituter-authenticated-reads": "package-management/s3-substituter.html#authenticated-reads-to-your-s3-binary-cache",
"ssec-s3-substituter-authenticated-writes": "package-management/s3-substituter.html#authenticated-writes-to-your-s3-compatible-binary-cache",
"sec-sharing-packages": "package-management/sharing-packages.html",
"ssec-ssh-substituter": "package-management/ssh-substituter.html",
"chap-quick-start": "quick-start.html",
"sec-relnotes": "release-notes/release-notes.html",
"ch-relnotes-0.10.1": "release-notes/rl-0.10.1.html",
"ch-relnotes-0.10": "release-notes/rl-0.10.html",
"ssec-relnotes-0.11": "release-notes/rl-0.11.html",
"ssec-relnotes-0.12": "release-notes/rl-0.12.html",
"ssec-relnotes-0.13": "release-notes/rl-0.13.html",
"ssec-relnotes-0.14": "release-notes/rl-0.14.html",
"ssec-relnotes-0.15": "release-notes/rl-0.15.html",
"ssec-relnotes-0.16": "release-notes/rl-0.16.html",
"ch-relnotes-0.5": "release-notes/rl-0.5.html",
"ch-relnotes-0.6": "release-notes/rl-0.6.html",
"ch-relnotes-0.7": "release-notes/rl-0.7.html",
"ch-relnotes-0.8.1": "release-notes/rl-0.8.1.html",
"ch-relnotes-0.8": "release-notes/rl-0.8.html",
"ch-relnotes-0.9.1": "release-notes/rl-0.9.1.html",
"ch-relnotes-0.9.2": "release-notes/rl-0.9.2.html",
"ch-relnotes-0.9": "release-notes/rl-0.9.html",
"ssec-relnotes-1.0": "release-notes/rl-1.0.html",
"ssec-relnotes-1.1": "release-notes/rl-1.1.html",
"ssec-relnotes-1.10": "release-notes/rl-1.10.html",
"ssec-relnotes-1.11.10": "release-notes/rl-1.11.10.html",
"ssec-relnotes-1.11": "release-notes/rl-1.11.html",
"ssec-relnotes-1.2": "release-notes/rl-1.2.html",
"ssec-relnotes-1.3": "release-notes/rl-1.3.html",
"ssec-relnotes-1.4": "release-notes/rl-1.4.html",
"ssec-relnotes-1.5.1": "release-notes/rl-1.5.1.html",
"ssec-relnotes-1.5.2": "release-notes/rl-1.5.2.html",
"ssec-relnotes-1.5": "release-notes/rl-1.5.html",
"ssec-relnotes-1.6.1": "release-notes/rl-1.6.1.html",
"ssec-relnotes-1.6.0": "release-notes/rl-1.6.html",
"ssec-relnotes-1.7": "release-notes/rl-1.7.html",
"ssec-relnotes-1.8": "release-notes/rl-1.8.html",
"ssec-relnotes-1.9": "release-notes/rl-1.9.html",
"ssec-relnotes-2.0": "release-notes/rl-2.0.html",
"ssec-relnotes-2.1": "release-notes/rl-2.1.html",
"ssec-relnotes-2.2": "release-notes/rl-2.2.html",
"ssec-relnotes-2.3": "release-notes/rl-2.3.html"
},
"language/values.html": {
"simple-values": "#primitives",
"lists": "#list",
"strings": "#string",
"lists": "#list",
"attribute-sets": "#attribute-set"
}
};
var isRoot = (document.location.pathname.endsWith('/') || document.location.pathname.endsWith('/index.html')) && path_to_root === '';
if (isRoot && redirects[document.location.hash]) {
document.location.href = path_to_root + redirects[document.location.hash];
// the following code matches the current page's URL against the set of redirects.
//
// it is written to minimize the latency between page load and redirect.
// therefore we avoid function calls, copying data, and unnecessary loops.
// IMPORTANT: we use stateful array operations and their order matters!
//
// matching URLs is more involved than it should be:
//
// 1. `document.location.pathname` can have an arbitrary prefix.
//
// 2. `path_to_root` is set by mdBook. it consists only of `../`s and
// determines the depth of `<path>` relative to the prefix:
//
// `document.location.pathname`
// |------------------------------|
// /<prefix>/<path>/[<file>[.html]][#<anchor>]
// |----|
// `path_to_root` has same number of path segments
//
// source: https://phaiax.github.io/mdBook/format/theme/index-hbs.html#data
//
// 3. the following paths are equivalent:
//
// /foo/bar/
// /foo/bar/index.html
// /foo/bar/index
//
// 4. the following paths are also equivalent:
//
// /foo/bar/baz
// /foo/bar/baz.html
//
let segments = document.location.pathname.split('/');
let file = segments.pop();
// normalize file name
if (file === '') { file = "index.html"; }
else if (!file.endsWith('.html')) { file = file + '.html'; }
segments.push(file);
// use `path_to_root` to discern prefix from path.
const depth = path_to_root.split('/').length;
// remove segments containing prefix. the following works because
// 1. the original `document.location.pathname` is absolute,
// hence first element of `segments` is always empty.
// 2. last element of splitting `path_to_root` is also always empty.
// 3. last element of `segments` is the file name.
//
// visual example:
//
// '/foo/bar/baz.html'.split('/') -> [ '', 'foo', 'bar', 'baz.html' ]
// '../'.split('/') -> [ '..', '' ]
//
// the following operations will then result in
//
// path = 'bar/baz.html'
//
segments.splice(0, segments.length - depth);
const path = segments.join('/');
// anchor starts with the hash character (`#`),
// but our redirect declarations don't, so we strip it.
// example:
// document.location.hash -> '#foo'
// document.location.hash.substring(1) -> 'foo'
const anchor = document.location.hash.substring(1);
const redirect = redirects[path];
if (redirect) {
const target = redirect[anchor];
if (target) {
document.location.href = target;
}
}

View file

@ -29,6 +29,7 @@
- [Nix Language](language/index.md)
- [Data Types](language/values.md)
- [Language Constructs](language/constructs.md)
- [String interpolation](language/string-interpolation.md)
- [Operators](language/operators.md)
- [Derivations](language/derivations.md)
- [Advanced Attributes](language/advanced-attributes.md)
@ -59,20 +60,15 @@
@manpages@
- [Files](command-ref/files.md)
- [nix.conf](command-ref/conf-file.md)
<!--
- [Architecture](architecture/architecture.md)
- [Store](architecture/store/store.md)
- [Closure](architecture/store/store/closure.md)
- [Build system terminology](architecture/store/store/build-system-terminology.md)
- [Store Path](architecture/store/path.md)
- [File System Object](architecture/store/fso.md)
-->
- [Glossary](glossary.md)
- [Contributing](contributing/contributing.md)
- [Hacking](contributing/hacking.md)
- [CLI guideline](contributing/cli-guideline.md)
- [Release Notes](release-notes/release-notes.md)
- [Release X.Y (202?-??-??)](release-notes/rl-next.md)
- [Release 2.13 (2023-01-17)](release-notes/rl-2.13.md)
- [Release 2.12 (2022-12-06)](release-notes/rl-2.12.md)
- [Release 2.11 (2022-08-25)](release-notes/rl-2.11.md)
- [Release 2.10 (2022-07-11)](release-notes/rl-2.10.md)
- [Release 2.9 (2022-05-30)](release-notes/rl-2.9.md)

View file

@ -121,37 +121,3 @@ error:
are not valid, so checking is not possible
Run the build without `--check`, and then try with `--check` again.
# Automatic and Optionally Enforced Determinism Verification
Automatically verify every build at build time by executing the build
multiple times.
Setting `repeat` and `enforce-determinism` in your `nix.conf` permits
the automated verification of every build Nix performs.
The following configuration will run each build three times, and will
require the build to be deterministic:
enforce-determinism = true
repeat = 2
Setting `enforce-determinism` to false as in the following
configuration will run the build multiple times, execute the build
hook, but will allow the build to succeed even if it does not build
reproducibly:
enforce-determinism = false
repeat = 1
An example output of this configuration:
```console
$ nix-build ./test.nix -A unstable
this derivation will be built:
/nix/store/ch6llwpr2h8c3jmnf3f2ghkhx59aa97f-unstable.drv
building '/nix/store/ch6llwpr2h8c3jmnf3f2ghkhx59aa97f-unstable.drv' (round 1/2)...
building '/nix/store/ch6llwpr2h8c3jmnf3f2ghkhx59aa97f-unstable.drv' (round 2/2)...
output '/nix/store/6xg356v9gl03hpbbg8gws77n19qanh02-unstable' of '/nix/store/ch6llwpr2h8c3jmnf3f2ghkhx59aa97f-unstable.drv' differs from '/nix/store/6xg356v9gl03hpbbg8gws77n19qanh02-unstable.check' from previous round
/nix/store/6xg356v9gl03hpbbg8gws77n19qanh02-unstable
```

View file

@ -33,12 +33,17 @@ distribute the public key for verifying the authenticity of the paths.
example-nix-cache-1:1/cKDz3QCCOmwcztD2eV6Coggp6rqc9DGjWv7C0G+rM=
```
Then, add the public key and the cache URL to your `nix.conf`'s
`trusted-public-keys` and `substituters` options:
Then update [`nix.conf`](../command-ref/conf-file.md) on any machine that will access the cache.
Add the cache URL to [`substituters`](../command-ref/conf-file.md#conf-substituters) and the public key to [`trusted-public-keys`](../command-ref/conf-file.md#conf-trusted-public-keys):
substituters = https://cache.nixos.org/ s3://example-nix-cache
trusted-public-keys = cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY= example-nix-cache-1:1/cKDz3QCCOmwcztD2eV6Coggp6rqc9DGjWv7C0G+rM=
Machines that build for the cache must sign derivations using the private key.
On those machines, add the path to the key file to the [`secret-key-files`](../command-ref/conf-file.md#conf-secret-key-files) field in their [`nix.conf`](../command-ref/conf-file.md):
secret-key-files = /etc/nix/key.private
We will restart the Nix daemon in a later step.
# Implementing the build hook
@ -52,14 +57,12 @@ set -eu
set -f # disable globbing
export IFS=' '
echo "Signing paths" $OUT_PATHS
nix store sign --key-file /etc/nix/key.private $OUT_PATHS
echo "Uploading paths" $OUT_PATHS
exec nix copy --to 's3://example-nix-cache' $OUT_PATHS
exec nix copy --to "s3://example-nix-cache" $OUT_PATHS
```
> **Note**
>
>
> The `$OUT_PATHS` variable is a space-separated list of Nix store
> paths. In this case, we expect and want the shell to perform word
> splitting to make each output path its own argument to `nix

View file

@ -1,79 +1,115 @@
# Architecture
*(This chapter is unstable and a work in progress. Incoming links may rot.)*
This chapter describes how Nix works.
It should help users understand why Nix behaves as it does, and it should help developers understand how to modify Nix and how to write similar tools.
## Overview
Nix consists of [hierarchical layers][layer-architecture].
Nix consists of [hierarchical layers].
[hierarchical layers]: https://en.m.wikipedia.org/wiki/Multitier_architecture#Layers
The following [concept map] shows its main components (rectangles), the objects they operate on (rounded rectangles), and their interactions (connecting phrases):
[concept map]: https://en.m.wikipedia.org/wiki/Concept_map
```
+-----------------------------------------------------------------+
| Nix |
| [ command line interface ]------, |
| | | |
| evaluates | |
| | manages |
| V | |
| [ configuration language ] | |
| | | |
| +-----------------------------|-------------------V-----------+ |
| | store evaluates to | |
| | | | |
| | referenced by V builds | |
| | [ build input ] ---> [ build plan ] ---> [ build result ] | |
| | | |
| +-------------------------------------------------------------+ |
+-----------------------------------------------------------------+
   .----------------.
   | Nix expression |----------.
   '----------------'          |
           |               passed to
           |                   |
+----------|-------------------|--------------------------------+
| Nix      |                   V                                |
|          |       +-------------------------+                  |
|          |       | command line interface  |------.           |
|          |       +-------------------------+      |           |
|          |                   |                    |           |
|     evaluated by           calls               manages        |
|          |                   |                    |           |
|          |                   V                    |           |
|          |         +--------------------+         |           |
|          '-------->| language evaluator |         |           |
|                    +--------------------+         |           |
|                              |                    |           |
|                          produces                 |           |
|                              |                    V           |
| +----------------------------|------------------------------+ |
| | store                      |                              | |
| |          referenced by     V       builds                 | |
| | .-------------.      .------------.      .--------------. | |
| | | build input |----->| build plan |----->| build result | | |
| | '-------------'      '------------'      '--------------' | |
| +-------------------------------------------------|---------+ |
+---------------------------------------------------|-----------+
                                                    |
                                              represented as
                                                    |
                                                    V
                                            .---------------.
                                            | file          |
                                            '---------------'
```
At the top is the [command line interface](../command-ref/command-ref.md), translating from invocations of Nix executables to interactions with the underlying layers.
At the top is the [command line interface](../command-ref/command-ref.md) that drives the underlying layers.
Below that is the [Nix expression language](../expressions/expression-language.md), a [purely functional][purely-functional-programming] configuration language.
It is used to compose expressions which ultimately evaluate to self-contained *build plans*, used to derive *build results* from referenced *build inputs*.
The [Nix language](../language/index.md) evaluator transforms Nix expressions into self-contained *build plans*, which are used to derive *build results* from referenced *build inputs*.
The command line and Nix language are what users interact with most.
The command line interface and Nix expressions are what users deal with most.
> **Note**
> The Nix language itself does not have a notion of *packages* or *configurations*.
> As far as we are concerned here, the inputs and results of a build plan are just data.
Underlying these is the [Nix store](./store/store.md), a mechanism to keep track of build plans, data, and references between them.
It can also execute build plans to produce new data.
Underlying the command line interface and the Nix language evaluator is the [Nix store](../glossary.md#gloss-store), a mechanism to keep track of build plans, data, and references between them.
It can also execute build plans to produce new data, which are made available to the operating system as files.
A build plan is a series of *build tasks*.
Each build task has a special build input which is used as *build instructions*.
A build plan itself is a series of *build tasks*, together with their build inputs.
> **Important**
> A build task in Nix is called [derivation](../glossary.md#gloss-derivation).
Each build task has a special build input executed as *build instructions* in order to perform the build.
The result of a build task can be input to another build task.
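As a minimal sketch of what creating such a build task can look like in practice (the store path shown is a placeholder, and the builder is assumed to exist at `/bin/sh`), a derivation can be produced directly from a Nix expression:
```console
$ nix-instantiate --expr 'derivation {
    name = "example";
    system = builtins.currentSystem;
    builder = "/bin/sh";
    args = [ "-c" "echo hello > $out" ];
  }'
/nix/store/<hash>-example.drv
```
The resulting `.drv` file is the build task as recorded in the store; realising it with `nix-store --realise` runs the build instructions and produces the build result.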
The following [data flow diagram] shows a build plan for illustration.
Build inputs used as instructions to a build task are marked accordingly:
[data flow diagram]: https://en.m.wikipedia.org/wiki/Data-flow_diagram
```
+-----------------------------------------------------------------------------------------+
| store |
| ................................................. |
| : build plan : |
| : : |
| [ build input ]-----instructions-, : |
| : | : |
| : v : |
| [ build input ]----------->[ build task ]--instructions-, : |
| : | : |
| : | : |
| : v : |
| : [ build task ]----->[ build result ] |
| [ build input ]-----instructions-, ^ : |
| : | | : |
| : v | : |
| [ build input ]----------->[ build task ]---------------' : |
| : ^ : |
| : | : |
| [ build input ]------------------' : |
| : : |
| : : |
| :...............................................: |
| |
+-----------------------------------------------------------------------------------------+
+--------------------------------------------------------------------+
| build plan                                                         |
|                                                                    |
|   .-------------.                                                  |
|   | build input |---------.                                        |
|   '-------------'         |                                        |
|                      instructions                                  |
|                           |                                        |
|                           v                                        |
|   .-------------.    .----------.                                  |
|   | build input |-->( build task )-----.                           |
|   '-------------'    '----------'      |                           |
|                                   instructions                     |
|                                        |                           |
|                                        v                           |
|   .-------------.                .----------.     .--------------. |
|   | build input |---------.     ( build task )--->| build result | |
|   '-------------'         |      '----------'     '--------------' |
|                      instructions      ^                           |
|                           |            |                           |
|                           v            |                           |
|   .-------------.    .----------.      |                           |
|   | build input |-->( build task )-----'                           |
|   '-------------'    '----------'                                  |
|                           ^                                        |
|                           |                                        |
|                           |                                        |
|   .-------------.         |                                        |
|   | build input |---------'                                        |
|   '-------------'                                                  |
|                                                                    |
+--------------------------------------------------------------------+
```
[layer-architecture]: https://en.m.wikipedia.org/wiki/Multitier_architecture#Layers
[purely-functional-programming]: https://en.m.wikipedia.org/wiki/Purely_functional_programming

View file

@ -1,69 +0,0 @@
# File System Object
The Nix store uses a simple file system model for the data it holds in [store objects](store.md#store-object).
Every file system object is one of the following:
- File: an executable flag, and arbitrary data for contents
- Directory: mapping of names to child file system objects
- [Symbolic link][symlink]: may point anywhere.
We call a store object's outermost file system object the *root*.
data FileSystemObject
= File { isExecutable :: Bool, contents :: Bytes }
| Directory { entries :: Map FileName FileSystemObject }
| SymLink { target :: Path }
Examples:
- a directory with contents
/nix/store/<hash>-hello-2.10
├── bin
│   └── hello
└── share
├── info
│   └── hello.info
└── man
└── man1
└── hello.1.gz
- a directory with relative symlink and other contents
/nix/store/<hash>-go-1.16.9
├── bin -> share/go/bin
├── nix-support/
└── share/
- a directory with absolute symlink
/nix/store/d3k...-nodejs
└── nix_node -> /nix/store/f20...-nodejs-10.24.
A bare file or symlink can be a root file system object.
Examples:
/nix/store/<hash>-hello-2.10.tar.gz
/nix/store/4j5...-pkg-config-wrapper-0.29.2-doc -> /nix/store/i99...-pkg-config-0.29.2-doc
Symlinks pointing outside of their own root or to a store object without a matching reference are allowed, but might not function as intended.
Examples:
- an arbitrarily symlinked file may change or not exist at all
/nix/store/<hash>-foo
└── foo -> /home/foo
- if a symlink to a store path was not automatically created by Nix, it may be invalid or get invalidated when the store object is deleted
/nix/store/<hash>-bar
└── bar -> /nix/store/abc...-foo
Nix file system objects do not support [hard links][hardlink]:
each file system object which is not the root has exactly one parent and one name.
However, as store objects are immutable, an underlying file system can use hard links for optimization.
[symlink]: https://en.m.wikipedia.org/wiki/Symbolic_link
[hardlink]: https://en.m.wikipedia.org/wiki/Hard_link

View file

@ -1,105 +0,0 @@
# Store Path
Nix implements [references](store.md#reference) to [store objects](store.md#store-object) as *store paths*.
Store paths are pairs of
- a 20-byte [digest](#digest) for identification
- a symbolic name for people to read.
Example:
- digest: `b6gvzjyb2pg0kjfwrjmg1vfhh54ad73z`
- name: `firefox-33.1`
It is rendered to a file system path as the concatenation of
- [store directory](#store-directory)
- path-separator (`/`)
- [digest](#digest) rendered in a custom variant of [base-32](https://en.m.wikipedia.org/wiki/Base32) (20 arbitrary bytes become 32 ASCII characters)
- hyphen (`-`)
- name
Example:
/nix/store/b6gvzjyb2pg0kjfwrjmg1vfhh54ad73z-firefox-33.1
|--------| |------------------------------| |----------|
store directory         digest                  name
## Store Directory
Every [store](./store.md) has a store directory.
If the store has a [file system representation](./store.md#files-and-processes), this directory contains the store's [file system objects](#file-system-object), which can be addressed by [store paths](#store-path).
This means a store path is not just derived from the referenced store object itself, but depends on the store the store object is in.
> **Note**
> The store directory defaults to `/nix/store`, but is in principle arbitrary.
It is important which store a given store object belongs to:
Files in the store object can contain store paths, and processes may read these paths.
Nix can only guarantee [referential integrity](store/closure.md) if store paths do not cross store boundaries.
Therefore one can only copy store objects to a different store if
- the source and target stores' directories match
or
- the store object in question has no references, that is, contains no store paths.
One cannot copy a store object to a store with a different store directory.
Instead, it has to be rebuilt, together with all its dependencies.
It is in general not enough to replace the store directory string in file contents, as this may render executables unusable by invalidating their internal offsets or checksums.
# Digest
In a [store path](#store-path), the [digest][digest] is the output of a [cryptographic hash function][hash] of either all *inputs* involved in building the referenced store object or its actual *contents*.
Store objects are therefore said to be either [input-addressed](#input-addressing) or [content-addressed](#content-addressing).
> **Historical Note**
> The 20 byte restriction is because originally digests were [SHA-1][sha-1] hashes.
> Nix now uses [SHA-256][sha-256], and longer hashes are still reduced to 20 bytes for compatibility.
[digest]: https://en.m.wiktionary.org/wiki/digest#Noun
[hash]: https://en.m.wikipedia.org/wiki/Cryptographic_hash_function
[sha-1]: https://en.m.wikipedia.org/wiki/SHA-1
[sha-256]: https://en.m.wikipedia.org/wiki/SHA-256
### Reference scanning
When a new store object is built, Nix scans its file contents for store paths to construct its set of references.
The special format of a store path's [digest](#digest) allows reliably detecting it among arbitrary data.
Nix uses the [closure](store.md#closure) of build inputs to derive the list of allowed store paths, to avoid false positives.
This way, scanning files captures run time dependencies without the user having to declare them explicitly.
Doing it at build time and persisting references in the store object avoids repeating this time-consuming operation.
> **Note**
> In practice, it is sometimes still necessary for users to declare certain dependencies explicitly, if they are to be preserved in the build result's closure.
> This depends on the specifics of the software to build and run.
>
> For example, Java programs are compressed after compilation, which obfuscates any store paths they may refer to and prevents Nix from automatically detecting them.
## Input Addressing
Input addressing means that the digest derives from how the store object was produced, namely its build inputs and build plan.
To compute the hash of a store object one needs a deterministic serialisation, i.e., a binary string representation which only changes if the store object changes.
Nix has a custom serialisation format called Nix Archive (NAR).
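For illustration (the store path is a placeholder), the NAR serialisation of a store object, and its hash, can be produced with the stable command line tools:
```console
$ nix-store --dump /nix/store/<hash>-hello-2.10 > hello.nar   # serialise to NAR
$ nix-hash --type sha256 --flat hello.nar                     # hash of that serialisation
$ nix-hash --type sha256 /nix/store/<hash>-hello-2.10         # same hash, computed directly
```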
Store object references of this sort can *not* be validated from the content of the store object.
Rather, a cryptographic signature has to be used to indicate that someone is vouching for the store object really being produced from a build plan with that digest.
## Content Addressing
Content addressing means that the digest derives from the store object's contents, namely its file system objects and references.
If one knows content addressing was used, one can recalculate the reference and thus verify the store object.
Content addressing is currently only used for the special cases of source files and "fixed-output derivations", where the contents of a store object are known in advance.
Content addressing of build results is still an [experimental feature subject to some restrictions](https://github.com/tweag/rfcs/blob/cas-rfc/rfcs/0062-content-addressed-paths.md).
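As an illustration (output shown with a placeholder digest), adding a source file to the store uses content addressing, so the same contents always map to the same store path:
```console
$ nix-store --add ./example.txt
/nix/store/<hash>-example.txt
```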

View file

@ -1,151 +0,0 @@
# Store
A Nix store is a collection of *store objects* with references between them.
It supports operations to manipulate that collection.
The following concept map is a graphical outline of this chapter.
Arrows indicate suggested reading order.
```
,--------------[ store ]----------------,
| | |
v v v
[ store object ] [ closure ]--, [ operations ]
| | | | | |
v | | v v |
[ files and processes ] | | [ garbage collection ] |
/ \ | | |
v v | v v
[ file system object ] [ store path ] | [ derivation ]--->[ building ]
| ^ | | |
v | v v |
[ digest ]----' [ reference scanning ]<------------'
/ \
v v
[ input addressing ] [ content addressing ]
```
## Store Object
A store object can hold
- arbitrary *data*
- *references* to other store objects.
Store objects can be build inputs, build results, or build tasks.
Store objects are [immutable][immutable-object]: once created, they do not change until they are deleted.
## Reference
A store object reference is an [opaque][opaque-data-type], [unique identifier][unique-identifier]:
The only way to obtain references is by adding or building store objects.
A reference will always point to exactly one store object.
## Operations
A Nix store can *add*, *retrieve*, and *delete* store objects.
[ data ]
|
V
[ store ] ---> add ----> [ store' ]
|
V
[ reference ]
<!-- -->
[ reference ]
|
V
[ store ] ---> get
|
V
[ store object ]
<!-- -->
[ reference ]
|
V
[ store ] --> delete --> [ store' ]
It can *perform builds*, that is, create new store objects by transforming build inputs into build outputs, using instructions from the build tasks.
[ reference ]
|
V
[ store ] --> build --(maybe)--> [ store' ]
|
V
[ reference ]
As it keeps track of references, it can [garbage-collect][garbage-collection] unused store objects.
[ store ] --> collect garbage --> [ store' ]
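As a rough sketch (paths are placeholders, and the mapping is not exhaustive), these abstract operations surface in the classic command line interface as follows:
```console
$ nix-store --add ./data.txt                          # add
$ nix-store --realise /nix/store/<hash>-example.drv   # build
$ nix-store --delete /nix/store/<hash>-example        # delete (refused while still referenced)
$ nix-store --gc                                      # garbage collection
```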
## Files and Processes
Nix maps between its store model and the [Unix paradigm][unix-paradigm] of [files and processes][file-descriptor], by encoding immutable store objects and opaque identifiers as file system primitives: files and directories, and paths.
That allows processes to resolve references contained in files and thus access the contents of store objects.
Store objects are therefore implemented as the pair of
- a [file system object](fso.md) for data
- a set of [store paths](path.md) for references.
[unix-paradigm]: https://en.m.wikipedia.org/wiki/Everything_is_a_file
[file-descriptor]: https://en.m.wikipedia.org/wiki/File_descriptor
The following diagram shows a radical simplification of how Nix interacts with the operating system:
It uses files as build inputs, and build outputs are files again.
On the operating system, files can be run as processes, which in turn operate on files.
A build function also amounts to an operating system process (not depicted).
```
+-----------------------------------------------------------------+
| Nix |
| [ command line interface ]------, |
| | | |
| evaluates | |
| | manages |
| V | |
| [ configuration language ] | |
| | | |
| +-----------------------------|-------------------V-----------+ |
| | store evaluates to | |
| | | | |
| | referenced by V builds | |
| | [ build input ] ---> [ build plan ] ---> [ build result ] | |
| | ^ | | |
| +---------|----------------------------------------|----------+ |
+-----------|----------------------------------------|------------+
| |
file system object store path
| |
+-----------|----------------------------------------|------------+
| operating system +------------+ | |
| '------------ | | <-----------' |
| | file | |
| ,-- | | <-, |
| | +------------+ | |
| execute as | | read, write, execute |
| | +------------+ | |
| '-> | process | --' |
| +------------+ |
+-----------------------------------------------------------------+
```
There exist different types of stores, which all follow this model.
Examples:
- store on the local file system
- remote store accessible via SSH
- binary cache store accessible via HTTP
To make store objects accessible to processes, stores ultimately have to expose store objects through the file system.
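For illustration (the SSH host is hypothetical), the same commands can be pointed at different store types with the `--store` option:
```console
$ nix store ping --store daemon
$ nix store ping --store ssh://user@remote.example.org
$ nix store ping --store https://cache.nixos.org
```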

View file

@ -1,32 +0,0 @@
# A [Rosetta stone][rosetta-stone] for build system terminology
The Nix store's design is comparable to other build systems.
Usage of terms is, for historic reasons, not entirely consistent within the Nix ecosystem, and still subject to slow change.
The following translation table points out similarities and equivalent terms, to help clarify their meaning and inform consistent use in the future.
| generic build system | Nix | [Bazel][bazel] | [Build Systems à la Carte][bsalc] | programming language |
| -------------------------------- | ---------------- | -------------------------------------------------------------------- | --------------------------------- | ------------------------ |
| data (build input, build result) | store object | [artifact][bazel-artifact] | value | value |
| build instructions | builder | ([depends on action type][bazel-actions]) | function | function |
| build task | derivation | [action][bazel-action] | `Task` | [thunk][thunk] |
| build plan | derivation graph | [action graph][bazel-action-graph], [build graph][bazel-build-graph] | `Tasks` | [call graph][call-graph] |
| build | build | build | application of `Build` | evaluation |
| persistence layer | store | [action cache][bazel-action-cache] | `Store` | heap |
All of these systems share features of [declarative programming][declarative-programming] languages, a key insight first put forward by Eelco Dolstra et al. in [Imposing a Memory Management Discipline on Software Deployment][immdsd] (2004), elaborated in his PhD thesis [The Purely Functional Software Deployment Model][phd-thesis] (2006), and further refined by Andrey Mokhov et al. in [Build Systems à la Carte][bsalc] (2018).
[rosetta-stone]: https://en.m.wikipedia.org/wiki/Rosetta_Stone
[bazel]: https://bazel.build/start/bazel-intro
[bazel-artifact]: https://bazel.build/reference/glossary#artifact
[bazel-actions]: https://docs.bazel.build/versions/main/skylark/lib/actions.html
[bazel-action]: https://bazel.build/reference/glossary#action
[bazel-action-graph]: https://bazel.build/reference/glossary#action-graph
[bazel-build-graph]: https://bazel.build/reference/glossary#build-graph
[bazel-action-cache]: https://bazel.build/reference/glossary#action-cache
[thunk]: https://en.m.wikipedia.org/wiki/Thunk
[call-graph]: https://en.m.wikipedia.org/wiki/Call_graph
[declarative-programming]: https://en.m.wikipedia.org/wiki/Declarative_programming
[immdsd]: https://edolstra.github.io/pubs/immdsd-icse2004-final.pdf
[phd-thesis]: https://edolstra.github.io/pubs/phd-thesis.pdf
[bsalc]: https://www.microsoft.com/en-us/research/uploads/prod/2018/03/build-systems.pdf

View file

@ -1,29 +0,0 @@
# Closure
Nix stores ensure [referential integrity][referential-integrity]: for each store object in the store, all the store objects it references must also be in the store.
The set of all store objects reachable by following references from a given initial set of store objects is called a *closure*.
Adding, building, copying and deleting store objects must be done in a way that preserves referential integrity:
- A newly added store object cannot have references, unless it is a build task.
- Build results must only refer to store objects in the closure of the build inputs.
Building a store object will add appropriate references, according to the build task.
- Store objects being copied must refer to objects already in the destination store.
Recursive copying must either proceed in dependency order or be atomic.
- We can only safely delete store objects which are not reachable from any reference still in use.
<!-- more details in section on garbage collection, link to it once it exists -->
[referential-integrity]: https://en.m.wikipedia.org/wiki/Referential_integrity
[garbage-collection]: https://en.m.wikipedia.org/wiki/Garbage_collection_(computer_science)
[immutable-object]: https://en.m.wikipedia.org/wiki/Immutable_object
[opaque-data-type]: https://en.m.wikipedia.org/wiki/Opaque_data_type
[unique-identifier]: https://en.m.wikipedia.org/wiki/Unique_identifier

View file

@ -7,42 +7,11 @@ Most Nix commands interpret the following environment variables:
`nix-shell`. It can have the values `pure` or `impure`.
- [`NIX_PATH`]{#env-NIX_PATH}\
A colon-separated list of directories used to look up Nix
expressions enclosed in angle brackets (i.e., `<path>`). For
instance, the value
/home/eelco/Dev:/etc/nixos
will cause Nix to look for paths relative to `/home/eelco/Dev` and
`/etc/nixos`, in this order. It is also possible to match paths
against a prefix. For example, the value
nixpkgs=/home/eelco/Dev/nixpkgs-branch:/etc/nixos
will cause Nix to search for `<nixpkgs/path>` in
`/home/eelco/Dev/nixpkgs-branch/path` and `/etc/nixos/nixpkgs/path`.
If a path in the Nix search path starts with `http://` or
`https://`, it is interpreted as the URL of a tarball that will be
downloaded and unpacked to a temporary location. The tarball must
consist of a single top-level directory. For example, setting
`NIX_PATH` to
nixpkgs=https://github.com/NixOS/nixpkgs/archive/master.tar.gz
tells Nix to download and use the current contents of the
`master` branch in the `nixpkgs` repository.
The URLs of the tarballs from the official nixos.org channels (see
[the manual for `nix-channel`](nix-channel.md)) can be abbreviated
as `channel:<channel-name>`. For instance, the following two
values of `NIX_PATH` are equivalent:
nixpkgs=channel:nixos-21.05
nixpkgs=https://nixos.org/channels/nixos-21.05/nixexprs.tar.xz
The Nix search path can also be extended using the `-I` option to
many Nix commands, which takes precedence over `NIX_PATH`.
A colon-separated list of directories used to look up the location of Nix
expressions using [paths](../language/values.md#type-path)
enclosed in angle brackets (i.e., `<path>`),
e.g. `/home/eelco/Dev:/etc/nixos`. It can be extended using the
[`-I` option](./opt-common.md#opt-I).
- [`NIX_IGNORE_SYMLINK_STORE`]{#env-NIX_IGNORE_SYMLINK_STORE}\
Normally, the Nix store directory (typically `/nix/store`) is not

View file

@ -37,10 +37,12 @@ directory containing at least a file named `default.nix`.
`nix-build` is essentially a wrapper around
[`nix-instantiate`](nix-instantiate.md) (to translate a high-level Nix
expression to a low-level store derivation) and [`nix-store
expression to a low-level [store derivation]) and [`nix-store
--realise`](nix-store.md#operation---realise) (to build the store
derivation).
[store derivation]: ../glossary.md#gloss-store-derivation
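For illustration, assuming a `default.nix` with a `hello` attribute, the following invocations are roughly equivalent, apart from the `result` symlink that `nix-build` creates:
```console
$ nix-build ./default.nix -A hello
$ nix-store --realise $(nix-instantiate ./default.nix -A hello)
```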
> **Warning**
>
> The result of the build is automatically registered as a root of the
@ -53,16 +55,18 @@ All options not listed here are passed to `nix-store
--realise`, except for `--arg` and `--attr` / `-A` which are passed to
`nix-instantiate`.
- [`--no-out-link`]{#opt-no-out-link}\
- <span id="opt-no-out-link">[`--no-out-link`](#opt-no-out-link)</span>
Do not create a symlink to the output path. Note that as a result
the output does not become a root of the garbage collector, and so
might be deleted by `nix-store
--gc`.
might be deleted by `nix-store --gc`.
- <span id="opt-dry-run">[`--dry-run`](#opt-dry-run)</span>
- [`--dry-run`]{#opt-dry-run}\
Show what store paths would be built or downloaded.
- [`--out-link`]{#opt-out-link} / `-o` *outlink*\
- <span id="opt-out-link">[`--out-link`](#opt-out-link)</span> / `-o` *outlink*
Change the name of the symlink to the output path created from
`result` to *outlink*.

View file

@ -30,8 +30,8 @@ Since `nix-copy-closure` calls `ssh`, you may be asked to type in the
appropriate password or passphrase. In fact, you may be asked _twice_
because `nix-copy-closure` currently connects twice to the remote
machine, first to get the set of paths missing on the target machine,
and second to send the dump of those paths. If this bothers you, use
`ssh-agent`.
and second to send the dump of those paths. When using public key
authentication, you can avoid typing the passphrase with `ssh-agent`.
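For example (the remote host is hypothetical, and the store path a placeholder), copying the closure of a store path to another machine looks like this:
```console
$ nix-copy-closure --to user@remote.example.org /nix/store/<hash>-hello-2.10
```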
# Options
@ -47,7 +47,9 @@ and second to send the dump of those paths. If this bothers you, use
Enable compression of the SSH connection.
- `--include-outputs`\
Also copy the outputs of store derivations included in the closure.
Also copy the outputs of [store derivation]s included in the closure.
[store derivation]: ../glossary.md#gloss-store-derivation
- `--use-substitutes` / `-s`\
Attempt to download missing paths on the target machine using Nix's

View file

@ -8,6 +8,6 @@
# Description
The Nix daemon is necessary in multi-user Nix installations. It performs
build actions and other operations on the Nix store on behalf of
The Nix daemon is necessary in multi-user Nix installations. It runs
build tasks and other operations on the Nix store on behalf of
unprivileged users.

View file

@ -205,10 +205,12 @@ a number of possible ways:
unambiguous way, which is necessary if there are multiple
derivations with the same name.
- If *args* are store derivations, then these are
- If *args* are [store derivation]s, then these are
[realised](nix-store.md#operation---realise), and the resulting output paths
are installed.
[store derivation]: ../glossary.md#gloss-store-derivation
- If *args* are store paths that are not store derivations, then these
are [realised](nix-store.md#operation---realise) and installed.
@ -280,7 +282,7 @@ To copy the store path with symbolic name `gcc` from another profile:
$ nix-env -i --from-profile /nix/var/nix/profiles/foo gcc
```
To install a specific store derivation (typically created by
To install a specific [store derivation] (typically created by
`nix-instantiate`):
```console
@ -665,7 +667,7 @@ derivation is shown unless `--no-name` is specified.
Print the `system` attribute of the derivation.
- `--drv-path`\
Print the path of the store derivation.
Print the path of the [store derivation].
- `--out-path`\
Print the output path of the derivation.

View file

@ -17,13 +17,14 @@
# Description
The command `nix-instantiate` generates [store
derivations](../glossary.md) from (high-level) Nix expressions. It
evaluates the Nix expressions in each of *files* (which defaults to
The command `nix-instantiate` produces [store derivation]s from (high-level) Nix expressions.
It evaluates the Nix expressions in each of *files* (which defaults to
*./default.nix*). Each top-level expression should evaluate to a
derivation, a list of derivations, or a set of derivations. The paths
of the resulting store derivations are printed on standard output.
[store derivation]: ../glossary.md#gloss-store-derivation
If *files* is the character `-`, then a Nix expression will be read from
standard input.
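For example (store path shown as a placeholder, and assuming `<nixpkgs>` is available on the Nix search path):
```console
$ echo 'with import <nixpkgs> {}; hello' | nix-instantiate -
/nix/store/<hash>-hello-2.10.drv
```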
@ -79,8 +80,7 @@ standard input.
# Examples
Instantiating store derivations from a Nix expression, and building them
using `nix-store`:
Instantiate [store derivation]s from a Nix expression, and build them using `nix-store`:
```console
$ nix-instantiate test.nix (instantiate)

View file

@ -22,7 +22,8 @@ This section lists the options that are common to all operations. These
options are allowed for every subcommand, though they may not always
have an effect.
- [`--add-root`]{#opt-add-root} *path*\
- <span id="opt-add-root">[`--add-root`](#opt-add-root)</span> *path*
Causes the result of a realisation (`--realise` and
`--force-realise`) to be registered as a root of the garbage
collector. *path* will be created as a symlink to the resulting
@ -65,13 +66,13 @@ The operation `--realise` essentially “builds” the specified store
paths. Realisation is a somewhat overloaded term:
- If the store path is a *derivation*, realisation ensures that the
output paths of the derivation are [valid](../glossary.md) (i.e.,
output paths of the derivation are [valid] (i.e.,
the output path and its closure exist in the file system). This
can be done in several ways. First, it is possible that the
outputs are already valid, in which case we are done
immediately. Otherwise, there may be [substitutes](../glossary.md)
immediately. Otherwise, there may be [substitutes]
that produce the outputs (e.g., by downloading them). Finally, the
outputs can be produced by performing the build action described
outputs can be produced by running the build task described
by the derivation.
- If the store path is not a derivation, realisation ensures that the
@ -81,6 +82,9 @@ paths. Realisation is a somewhat overloaded term:
produced through substitutes. If there are no (successful)
substitutes, realisation fails.
[valid]: ../glossary.md#gloss-validity
[substitutes]: ../glossary.md#gloss-substitute
The output path of each derivation is printed on standard output. (For
non-derivation arguments, the argument itself is printed.)
@ -104,10 +108,6 @@ The following flags are available:
previous build, the new output path is left in
`/nix/store/name.check`.
See also the `build-repeat` configuration option, which repeats a
derivation a number of times and prevents its outputs from being
registered as “valid” in the Nix store unless they are identical.
Special exit codes:
- `100`\
@ -140,8 +140,10 @@ or.
## Examples
This operation is typically used to build store derivations produced by
[`nix-instantiate`](nix-instantiate.md):
This operation is typically used to build [store derivation]s produced by
[`nix-instantiate`](./nix-instantiate.md):
[store derivation]: ../glossary.md#gloss-store-derivation
```console
$ nix-store -r $(nix-instantiate ./test.nix)
@ -156,6 +158,12 @@ To test whether a previously-built derivation is deterministic:
$ nix-build '<nixpkgs>' -A hello --check -K
```
Use [`--read-log`](#operation---read-log) to show the stderr and stdout of a build:
```console
$ nix-store --read-log $(nix-instantiate ./test.nix)
```
# Operation `--serve`
## Synopsis
@ -290,8 +298,8 @@ error: cannot delete path `/nix/store/zq0h41l75vlb4z45kzgjjmsjxvcv1qk7-mesa-6.4'
## Description
The operation `--query` displays various bits of information about the
store paths . The queries are described below. At most one query can be
The operation `--query` displays information about [store path]s.
The queries are described below. At most one query can be
specified. The default query is `--outputs`.
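For illustration, assuming a `./test.nix` as in the examples above, two common queries are the immediate references of a [store derivation] and the run-time closure of its output:
```console
$ nix-store --query --references $(nix-instantiate ./test.nix)
$ nix-store --query --requisites $(nix-build ./test.nix)
```
The first prints the immediate dependencies recorded in the derivation; the second prints everything needed to run the built output.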
The paths *paths* may also be symlinks from outside of the Nix store, to
@ -301,7 +309,7 @@ symlink.
## Common query options
- `--use-output`; `-u`\
For each argument to the query that is a store derivation, apply the
For each argument to the query that is a [store derivation], apply the
query to the output path of the derivation instead.
- `--force-realise`; `-f`\
@ -311,17 +319,17 @@ symlink.
## Queries
- `--outputs`\
Prints out the [output paths](../glossary.md) of the store
Prints out the [output path]s of the store
derivations *paths*. These are the paths that will be produced when
the derivation is built.
- `--requisites`; `-R`\
Prints out the [closure](../glossary.md) of the store path *paths*.
Prints out the [closure] of the given *paths*.
This query has one option:
- `--include-outputs`
Also include the existing output paths of store derivations,
Also include the existing output paths of [store derivation]s,
and their closures.
This query can be used to implement various kinds of deployment. A
@ -333,10 +341,12 @@ symlink.
derivation and specifying the option `--include-outputs`.
- `--references`\
Prints the set of [references](../glossary.md) of the store paths
Prints the set of [reference]s of the store paths
*paths*, that is, their immediate dependencies. (For *all*
dependencies, use `--requisites`.)
[reference]: ../glossary.md#gloss-reference
- `--referrers`\
Prints the set of *referrers* of the store paths *paths*, that is,
the store paths currently existing in the Nix store that refer to
@ -351,11 +361,13 @@ symlink.
in the Nix store that are dependent on *paths*.
- `--deriver`; `-d`\
Prints the [deriver](../glossary.md) of the store paths *paths*. If
Prints the [deriver] of the store paths *paths*. If
the path has no deriver (e.g., if it is a source file), or if the
deriver is not known (e.g., in the case of a binary-only
deployment), the string `unknown-deriver` is printed.
[deriver]: ../glossary.md#gloss-deriver
- `--graph`\
Prints the references graph of the store paths *paths* in the format
of the `dot` tool of AT\&T's [Graphviz
@ -375,12 +387,12 @@ symlink.
Prints the references graph of the store paths *paths* in the
[GraphML](http://graphml.graphdrawing.org/) file format. This can be
used to visualise dependency graphs. To obtain a build-time
dependency graph, apply this to a store derivation. To obtain a
dependency graph, apply this to a [store derivation]. To obtain a
runtime dependency graph, apply it to an output path.
- `--binding` *name*; `-b` *name*\
Prints the value of the attribute *name* (i.e., environment
variable) of the store derivations *paths*. It is an error for a
variable) of the [store derivation]s *paths*. It is an error for a
derivation to not have the specified attribute.
- `--hash`\

View file

@ -42,7 +42,7 @@ $ nix develop
```
To get a shell with a different compilation environment (e.g. stdenv,
gccStdenv, clangStdenv, clang11Stdenv):
gccStdenv, clangStdenv, clang11Stdenv, ccacheStdenv):
```console
$ nix-shell -A devShells.x86_64-linux.clang11StdenvPackages
@ -54,6 +54,9 @@ or if you have a flake-enabled nix:
$ nix develop .#clang11StdenvPackages
```
Note: you can use `ccacheStdenv` to drastically improve rebuild
time. By default, ccache keeps artifacts in `~/.cache/ccache/`.
To build Nix itself in this shell:
```console
@ -83,23 +86,93 @@ by:
$ nix develop
```
## Testing
Nix comes with three different flavors of tests: unit, functional and integration.
## Running tests
### Unit-tests
The unit-tests for each Nix library (`libexpr`, `libstore`, etc..) are defined
under `src/{library_name}/tests` using the
[googletest](https://google.github.io/googletest/) framework.
[googletest](https://google.github.io/googletest/) and
[rapidcheck](https://github.com/emil-e/rapidcheck) frameworks.
You can run the whole testsuite with `make check`, or the tests for a specific component with `make libfoo-tests_RUN`. Finer-grained filtering is also possible using the [--gtest_filter](https://google.github.io/googletest/advanced.html#running-a-subset-of-the-tests) command-line option.
### Functional tests
The functional tests reside under the `tests` directory and are listed in `tests/local.mk`.
The whole testsuite can be run with `make install && make installcheck`.
Individual tests can be run with `make tests/{testName}.sh.test`.
Each test is a bash script.
The whole test suite can be run with:
```shell-session
$ make install && make installcheck
ran test tests/foo.sh... [PASS]
ran test tests/bar.sh... [PASS]
...
```
Individual tests can be run with `make`:
```shell-session
$ make tests/${testName}.sh.test
ran test tests/${testName}.sh... [PASS]
```
or without `make`:
```shell-session
$ ./mk/run-test.sh tests/${testName}.sh
ran test tests/${testName}.sh... [PASS]
```
To see the complete output, one can also run:
```shell-session
$ ./mk/debug-test.sh tests/${testName}.sh
+ foo
output from foo
+ bar
output from bar
...
```
The test script will then be traced with `set -x` and the output displayed as it happens, regardless of whether the test succeeds or fails.
#### Debugging failing functional tests
When a functional test fails, it usually does so somewhere in the middle of the script.
To figure out what's wrong, it is convenient to run the test regularly up to the failing `nix` command, and then run that command with a debugger like GDB.
For example, if the script looks like:
```bash
foo
nix blah blub
bar
```
edit it like so:
```diff
foo
-nix blah blub
+gdb --args nix blah blub
bar
```
Then, running the test with `./mk/debug-test.sh` will drop you into GDB once the script reaches that point:
```shell-session
$ ./mk/debug-test.sh tests/${testName}.sh
...
+ gdb --args nix blah blub
GNU gdb (GDB) 12.1
...
(gdb)
```
One can debug the Nix invocation in all the usual ways.
For example, enter `run` to start the Nix invocation.
### Integration tests
@ -108,3 +181,105 @@ These tests include everything that needs to interact with external services or
Because these tests are expensive and require more than what the standard github-actions setup provides, they only run on the master branch (on <https://hydra.nixos.org/jobset/nix/master>).
You can run them manually with `nix build .#hydraJobs.tests.{testName}` or `nix-build -A hydraJobs.tests.{testName}`
### Installer tests
After a one-time setup, the Nix repository's GitHub Actions continuous integration (CI) workflow can test the installer each time you push to a branch.
Creating a Cachix cache for your installer tests and adding its authorization token to GitHub enables [two installer-specific jobs in the CI workflow](https://github.com/NixOS/nix/blob/88a45d6149c0e304f6eb2efcc2d7a4d0d569f8af/.github/workflows/ci.yml#L50-L91):
- The `installer` job generates installers for the platforms below and uploads them to your Cachix cache:
- `x86_64-linux`
- `armv6l-linux`
- `armv7l-linux`
- `x86_64-darwin`
- The `installer_test` job (which runs on `ubuntu-latest` and `macos-latest`) will try to install Nix with the cached installer and run a trivial Nix command.
#### One-time setup
1. Have a GitHub account with a fork of the [Nix repository](https://github.com/NixOS/nix).
2. At cachix.org:
- Create or log in to an account.
- Create a Cachix cache using the format `<github-username>-nix-install-tests`.
- Navigate to the new cache > Settings > Auth Tokens.
- Generate a new Cachix auth token and copy the generated value.
3. At github.com:
- Navigate to your Nix fork > Settings > Secrets > Actions > New repository secret.
- Name the secret `CACHIX_AUTH_TOKEN`.
- Paste the copied value of the Cachix cache auth token.
#### Using the CI-generated installer for manual testing
After the CI run completes, you can check the output to extract the installer URL:
1. Click into the detailed view of the CI run.
2. Click into any `installer_test` run (the URL you're here to extract will be the same in all of them).
3. Click into the `Run cachix/install-nix-action@v...` step and click the detail triangle next to the first log line (it will also be `Run cachix/install-nix-action@v...`).
4. Copy the value of `install_url`.
5. To generate an install command, plug this `install_url` and your GitHub username into this template:
```console
sh <(curl -L <install_url>) --tarball-url-prefix https://<github-username>-nix-install-tests.cachix.org/serve
```
<!-- #### Manually generating test installers
There's obviously a manual way to do this, and it's still the only way for
platforms that lack GA runners.
I did do this back in Fall 2020 (before the GA approach encouraged here). I'll
sketch what I recall in case it encourages someone to fill in detail, but: I
didn't know what I was doing at the time and had to fumble/ask around a lot--
so I don't want to uphold any of it as "right". It may have been dumb or
the _hard_ way from the getgo. Fundamentals may have changed since.
Here's the build command I used to do this on and for x86_64-darwin:
nix build --out-link /tmp/foo ".#checks.x86_64-darwin.binaryTarball"
I used the stable out-link to make it easier to script the next steps:
link=$(readlink /tmp/foo)
cp $link/*-darwin.tar.xz ~/somewheres
I've lost the last steps and am just going from memory:
From here, I think I had to extract and modify the `install` script to point
it at this tarball (which I scped to my own site, but it might make more sense
to just share them locally). I extracted this script once and then just
search/replaced in it for each new build.
The installer now supports a `--tarball-url-prefix` flag which _may_ have
solved this need?
-->
### Checking links in the manual
The build checks for broken internal links.
This happens late in the process, so `nix build` is not suitable for iterating.
To build the manual incrementally, run:
```console
make html -j $NIX_BUILD_CORES
```
In order to reflect changes to the [Makefile], clear all generated files before re-building:
[Makefile]: https://github.com/NixOS/nix/blob/master/doc/manual/local.mk
```console
rm $(git ls-files doc/manual/ -o | grep -F '.md') && rmdir doc/manual/src/command-ref/new-cli && make html -j $NIX_BUILD_CORES
```
[`mdbook-linkcheck`] does not implement checking [URI fragments] yet.
[`mdbook-linkcheck`]: https://github.com/Michael-F-Bryan/mdbook-linkcheck
[URI fragments]: https://en.m.wikipedia.org/wiki/URI_fragment
#### `@docroot@` variable
`@docroot@` provides a base path for links that occur in reusable snippets or other documentation that doesn't have a base path of its own.
If a broken link occurs in a snippet that was inserted into multiple generated files in different directories, use `@docroot@` to reference the `doc/manual/src` directory.
If the `@docroot@` literal appears in an error message from the `mdbook-linkcheck` tool, the `@docroot@` replacement needs to be applied to the generated source file that mentions it.
See existing `@docroot@` logic in the [Makefile].
Regular markdown files used for the manual have a base path of their own and they can use relative paths instead of `@docroot@`.
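As a sketch (the snippet and file names here are made up for illustration), a reusable snippet links via `@docroot@`, while a regular manual page can use a relative path:
```md
<!-- in a snippet that is included into several generated pages -->
See [`nix-store`](@docroot@/command-ref/nix-store.md) for details.
<!-- in a regular page such as doc/manual/src/package-management/basic-package-mgmt.md -->
See [`nix-store`](../command-ref/nix-store.md) for details.
```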

View file

@ -1,26 +1,104 @@
# Glossary
- [derivation]{#gloss-derivation}\
A description of a build action. The result of a derivation is a
A description of a build task. The result of a derivation is a
store object. Derivations are typically specified in Nix expressions
using the [`derivation` primitive](language/derivations.md). These are
using the [`derivation` primitive](./language/derivations.md). These are
translated into low-level *store derivations* (implicitly by
`nix-env` and `nix-build`, or explicitly by `nix-instantiate`).
[derivation]: #gloss-derivation
- [store derivation]{#gloss-store-derivation}\
A [derivation] represented as a `.drv` file in the [store].
It has a [store path], like any [store object].
Example: `/nix/store/g946hcz4c8mdvq2g8vxx42z51qb71rvp-git-2.38.1.drv`
See [`nix show-derivation`](./command-ref/new-cli/nix3-show-derivation.md) (experimental) for displaying the contents of store derivations.
[store derivation]: #gloss-store-derivation
- [realise]{#gloss-realise}, realisation\
Ensure a [store path] is [valid][validity].
This means either running the `builder` executable as specified in the corresponding [derivation] or fetching a pre-built [store object] from a [substituter].
See [`nix-build`](./command-ref/nix-build.md) and [`nix-store --realise`](./command-ref/nix-store.md#operation---realise).
See [`nix build`](./command-ref/new-cli/nix3-build.md) (experimental).
[realise]: #gloss-realise
- [content-addressed derivation]{#gloss-content-addressed-derivation}\
A derivation which has the
[`__contentAddressed`](./language/advanced-attributes.md#adv-attr-__contentAddressed)
attribute set to `true`.
- [fixed-output derivation]{#gloss-fixed-output-derivation}\
A derivation which includes the
[`outputHash`](./language/advanced-attributes.md#adv-attr-outputHash) attribute.
- [store]{#gloss-store}\
The location in the file system where store objects live. Typically
`/nix/store`.
From the perspective of the location where Nix is
invoked, the Nix store can be referred to
as a "_local_" or a "_remote_" one:
+ A *local store* exists on the filesystem of
the machine where Nix is invoked. You can use other
local stores by passing the `--store` flag to the
`nix` command. Local stores can be used for building derivations.
+ A *remote store* exists anywhere other than the
local filesystem. One example is the `/nix/store`
directory on another machine, accessed via `ssh` or
served by the `nix-serve` Perl script.
[store]: #gloss-store
- [chroot store]{#gloss-chroot-store}\
A local store whose canonical path is anything other than `/nix/store`.
- [binary cache]{#gloss-binary-cache}\
A *binary cache* is a Nix store which uses a different format: its
metadata and signatures are kept in `.narinfo` files rather than in a
Nix database. This different format simplifies serving store objects
over the network, but cannot host builds. Examples of binary caches
include S3 buckets and the [NixOS binary
cache](https://cache.nixos.org).
- [store path]{#gloss-store-path}\
The location in the file system of a store object, i.e., an
The location of a [store object] in the file system, i.e., an
immediate child of the Nix store directory.
Example: `/nix/store/a040m110amc4h71lds2jmr8qrkj2jhxd-git-2.38.1`
[store path]: #gloss-store-path
- [store object]{#gloss-store-object}\
A file that is an immediate child of the Nix store directory. These
can be regular files, but also entire directory trees. Store objects
can be sources (objects copied from outside of the store),
derivation outputs (objects produced by running a build action), or
derivations (files describing a build action).
derivation outputs (objects produced by running a build task), or
derivations (files describing a build task).
[store object]: #gloss-store-object
- [input-addressed store object]{#gloss-input-addressed-store-object}\
A store object produced by building a
non-[content-addressed](#gloss-content-addressed-derivation),
non-[fixed-output](#gloss-fixed-output-derivation)
derivation.
- [output-addressed store object]{#gloss-output-addressed-store-object}\
A store object whose store path hashes its content. This
includes derivations, the outputs of
[content-addressed derivations](#gloss-content-addressed-derivation),
and the outputs of
[fixed-output derivations](#gloss-fixed-output-derivation).
- [substitute]{#gloss-substitute}\
A substitute is a command invocation stored in the Nix database that
@ -29,6 +107,13 @@
store object by downloading a pre-built version of the store object
from some server.
- [substituter]{#gloss-substituter}\
A *substituter* is an additional store from which Nix will
copy store objects it doesn't have. For details, see the
[`substituters` option](./command-ref/conf-file.md#conf-substituters).
[substituter]: #gloss-substituter
- [purity]{#gloss-purity}\
The assumption that equal Nix derivations when run always produce
the same output. This cannot be guaranteed in general (e.g., a
@ -71,23 +156,31 @@
to path `Q`, then `Q` is in the closure of `P`. Further, if `Q`
references `R` then `R` is also in the closure of `P`.
[closure]: #gloss-closure
- [output path]{#gloss-output-path}\
A store path produced by a derivation.
A [store path] produced by a [derivation].
[output path]: #gloss-output-path
- [deriver]{#gloss-deriver}\
The deriver of an *output path* is the store
derivation that built it.
The [store derivation] that produced an [output path].
- [validity]{#gloss-validity}\
A store path is considered *valid* if it exists in the file system,
is listed in the Nix database as being valid, and if all paths in
its closure are also valid.
A store path is valid if all [store object]s in its [closure] can be read from the [store].
For a local store, this means:
- The store path leads to an existing [store object] in that [store].
- The store path is listed in the Nix database as being valid.
- All paths in the store path's [closure] are valid.
[validity]: #gloss-validity
- [user environment]{#gloss-user-env}\
An automatically generated store object that consists of a set of
symlinks to “active” applications, i.e., other store paths. These
are generated automatically by
[`nix-env`](command-ref/nix-env.md). See *profiles*.
[`nix-env`](./command-ref/nix-env.md). See *profiles*.
- [profile]{#gloss-profile}\
A symlink to the current *user environment* of a user, e.g.,
@ -98,7 +191,18 @@
store. It can contain regular files, directories and symbolic
links. NARs are generated and unpacked using `nix-store --dump`
and `nix-store --restore`.
- [`∅`]{#gloss-emtpy-set}\
The empty set symbol. In the context of profile history, this denotes a package is not present in a particular version of the profile.
- [`ε`]{#gloss-epsilon}\
The epsilon symbol. In the context of a package, this means the version is empty. More precisely, the derivation does not have a version attribute.
- [string interpolation]{#gloss-string-interpolation}\
Expanding expressions enclosed in `${ }` within a [string], [path], or [attribute name].
See [String interpolation](./language/string-interpolation.md) for details.
[string]: ./language/values.md#type-string
[path]: ./language/values.md#type-path
[attribute name]: ./language/values.md#attribute-set

View file

@ -88,19 +88,51 @@ extension. The installer will also create `/etc/profile.d/nix.sh`.
### Linux
```console
sudo rm -rf /etc/profile/nix.sh /etc/nix /nix ~root/.nix-profile ~root/.nix-defexpr ~root/.nix-channels ~/.nix-profile ~/.nix-defexpr ~/.nix-channels
If you are on Linux with systemd:
# If you are on Linux with systemd, you will need to run:
sudo systemctl stop nix-daemon.socket
sudo systemctl stop nix-daemon.service
sudo systemctl disable nix-daemon.socket
sudo systemctl disable nix-daemon.service
sudo systemctl daemon-reload
1. Remove the Nix daemon service:
```console
sudo systemctl stop nix-daemon.service
sudo systemctl disable nix-daemon.socket nix-daemon.service
sudo systemctl daemon-reload
```
1. Remove systemd service files:
```console
sudo rm /etc/systemd/system/nix-daemon.service /etc/systemd/system/nix-daemon.socket
```
1. The installer script uses systemd-tmpfiles to create the socket directory.
You may also want to remove the configuration for that:
```console
sudo rm /etc/tmpfiles.d/nix-daemon.conf
```
Remove files created by Nix:
```console
sudo rm -rf /nix /etc/nix /etc/profile/nix.sh ~root/.nix-profile ~root/.nix-defexpr ~root/.nix-channels ~/.nix-profile ~/.nix-defexpr ~/.nix-channels
```
There may also be references to Nix in `/etc/profile`, `/etc/bashrc`,
and `/etc/zshrc` which you may remove.
Remove build users and their group:
```console
for i in $(seq 1 32); do
sudo userdel nixbld$i
done
sudo groupdel nixbld
```
There may also be references to Nix in
- `/etc/profile`
- `/etc/bashrc`
- `/etc/zshrc`
which you may remove.
### macOS

View file

@ -104,7 +104,7 @@ a currently running program.
Packages are built from _Nix expressions_, which is a simple
functional language. A Nix expression describes everything that goes
into a package build action (a “derivation”): other packages, sources,
into a package build task (a “derivation”): other packages, sources,
the build script, environment variables for the build script, etc.
Nix tries very hard to ensure that Nix expressions are
_deterministic_: building a Nix expression twice should yield the same

View file

@ -1,7 +1,7 @@
# Derivations
The most important built-in function is `derivation`, which is used to
describe a single derivation (a build action). It takes as input a set,
describe a single derivation (a build task). It takes as input a set,
the attributes of which specify the inputs of the build.
- There must be an attribute named [`system`]{#attr-system} whose value must be a

View file

@ -31,3 +31,551 @@ The Nix language is
Type errors are only detected when expressions are evaluated.
# Overview
This is an incomplete overview of language features, by example.
<table>
<tr>
<th>
Example
</th>
<th>
Description
</th>
</tr>
<tr>
<td>
*Basic values*
</td>
<td>
</td>
</tr>
<tr>
<td>
`"hello world"`
</td>
<td>
A string
</td>
</tr>
<tr>
<td>
```
''
multi
line
string
''
```
</td>
<td>
A multi-line string. Strips common prefixed whitespace. Evaluates to `"multi\n line\n string"`.
</td>
</tr>
<tr>
<td>
`"hello ${ { a = "world" }.a }"`
`"1 2 ${toString 3}"`
`"${pkgs.bash}/bin/sh"`
</td>
<td>
String interpolation (expands to `"hello world"`, `"1 2 3"`, `"/nix/store/<hash>-bash-<version>/bin/sh"`)
</td>
</tr>
<tr>
<td>
`true`, `false`
</td>
<td>
Booleans
</td>
</tr>
<tr>
<td>
`null`
</td>
<td>
Null value
</td>
</tr>
<tr>
<td>
`123`
</td>
<td>
An integer
</td>
</tr>
<tr>
<td>
`3.141`
</td>
<td>
A floating point number
</td>
</tr>
<tr>
<td>
`/etc`
</td>
<td>
An absolute path
</td>
</tr>
<tr>
<td>
`./foo.png`
</td>
<td>
A path relative to the file containing this Nix expression
</td>
</tr>
<tr>
<td>
`~/.config`
</td>
<td>
A home path. Evaluates to `"<user's home directory>/.config"`.
</td>
</tr>
<tr>
<td>
`<nixpkgs>`
</td>
<td>
Search path for Nix files. Value determined by [`$NIX_PATH` environment variable](../command-ref/env-common.md#env-NIX_PATH).
</td>
</tr>
<tr>
<td>
*Compound values*
</td>
<td>
</td>
</tr>
<tr>
<td>
`{ x = 1; y = 2; }`
</td>
<td>
A set with attributes named `x` and `y`
</td>
</tr>
<tr>
<td>
`{ foo.bar = 1; }`
</td>
<td>
A nested set, equivalent to `{ foo = { bar = 1; }; }`
</td>
</tr>
<tr>
<td>
`rec { x = "foo"; y = x + "bar"; }`
</td>
<td>
A recursive set, equivalent to `{ x = "foo"; y = "foobar"; }`
</td>
</tr>
<tr>
<td>
`[ "foo" "bar" "baz" ]`
`[ 1 2 3 ]`
`[ (f 1) { a = 1; b = 2; } [ "c" ] ]`
</td>
<td>
Lists with three elements.
</td>
</tr>
<tr>
<td>
*Operators*
</td>
<td>
</td>
</tr>
<tr>
<td>
`"foo" + "bar"`
</td>
<td>
String concatenation
</td>
</tr>
<tr>
<td>
`1 + 2`
</td>
<td>
Integer addition
</td>
</tr>
<tr>
<td>
`"foo" == "f" + "oo"`
</td>
<td>
Equality test (evaluates to `true`)
</td>
</tr>
<tr>
<td>
`"foo" != "bar"`
</td>
<td>
Inequality test (evaluates to `true`)
</td>
</tr>
<tr>
<td>
`!true`
</td>
<td>
Boolean negation
</td>
</tr>
<tr>
<td>
`{ x = 1; y = 2; }.x`
</td>
<td>
Attribute selection (evaluates to `1`)
</td>
</tr>
<tr>
<td>
`{ x = 1; y = 2; }.z or 3`
</td>
<td>
Attribute selection with default (evaluates to `3`)
</td>
</tr>
<tr>
<td>
`{ x = 1; y = 2; } // { z = 3; }`
</td>
<td>
Merge two sets (attributes in the right-hand set taking precedence)
</td>
</tr>
<tr>
<td>
*Control structures*
</td>
<td>
</td>
</tr>
<tr>
<td>
`if 1 + 1 == 2 then "yes!" else "no!"`
</td>
<td>
Conditional expression
</td>
</tr>
<tr>
<td>
`assert 1 + 1 == 2; "yes!"`
</td>
<td>
Assertion check (evaluates to `"yes!"`).
</td>
</tr>
<tr>
<td>
`let x = "foo"; y = "bar"; in x + y`
</td>
<td>
Variable definition
</td>
</tr>
<tr>
<td>
`with builtins; head [ 1 2 3 ]`
</td>
<td>
Add all attributes from the given set to the scope (evaluates to `1`)
</td>
</tr>
<tr>
<td>
*Functions (lambdas)*
</td>
<td>
</td>
</tr>
<tr>
<td>
`x: x + 1`
</td>
<td>
A function that expects an integer and returns it increased by 1
</td>
</tr>
<tr>
<td>
`x: y: x + y`
</td>
<td>
Curried function, equivalent to `x: (y: x + y)`. Can be used like a function that takes two arguments and returns their sum.
</td>
</tr>
<tr>
<td>
`(x: x + 1) 100`
</td>
<td>
A function call (evaluates to 101)
</td>
</tr>
<tr>
<td>
`let inc = x: x + 1; in inc (inc (inc 100))`
</td>
<td>
A function bound to a variable and subsequently called by name (evaluates to 103)
</td>
</tr>
<tr>
<td>
`{ x, y }: x + y`
</td>
<td>
A function that expects a set with required attributes `x` and `y` and concatenates them
</td>
</tr>
<tr>
<td>
`{ x, y ? "bar" }: x + y`
</td>
<td>
A function that expects a set with required attribute `x` and optional `y`, using `"bar"` as default value for `y`
</td>
</tr>
<tr>
<td>
`{ x, y, ... }: x + y`
</td>
<td>
A function that expects a set with required attributes `x` and `y` and ignores any other attributes
</td>
</tr>
<tr>
<td>
`{ x, y } @ args: x + y`
`args @ { x, y }: x + y`
</td>
<td>
A function that expects a set with required attributes `x` and `y`, and binds the whole set to `args`
</td>
</tr>
<tr>
<td>
*Built-in functions*
</td>
<td>
</td>
</tr>
<tr>
<td>
`import ./foo.nix`
</td>
<td>
Load and return Nix expression in given file
</td>
</tr>
<tr>
<td>
`map (x: x + x) [ 1 2 3 ]`
</td>
<td>
Apply a function to every element of a list (evaluates to `[ 2 4 6 ]`)
</td>
</tr>
</table>
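A small example combining several of the features above (the names `greet`, `names`, `Alice`, and `Bob` are arbitrary):
```nix
let
  greet = name: "Hello, ${name}!";
  names = [ "Alice" "Bob" ];
in
  map greet names    # evaluates to [ "Hello, Alice!" "Hello, Bob!" ]
```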

View file

@ -1,28 +1,167 @@
# Operators
The table below lists the operators in the Nix language, in
order of precedence (from strongest to weakest binding).
| Name | Syntax | Associativity | Precedence |
|----------------------------------------|--------------------------------------------|---------------|------------|
| [Attribute selection] | *attrset* `.` *attrpath* \[ `or` *expr* \] | none | 1 |
| Function application | *func* *expr* | left | 2 |
| [Arithmetic negation][arithmetic] | `-` *number* | none | 3 |
| [Has attribute] | *attrset* `?` *attrpath* | none | 4 |
| List concatenation | *list* `++` *list* | right | 5 |
| [Multiplication][arithmetic] | *number* `*` *number* | left | 6 |
| [Division][arithmetic] | *number* `/` *number* | left | 6 |
| [Subtraction][arithmetic] | *number* `-` *number* | left | 7 |
| [Addition][arithmetic] | *number* `+` *number* | left | 7 |
| [String concatenation] | *string* `+` *string* | left | 7 |
| [Path concatenation] | *path* `+` *path* | left | 7 |
| [Path and string concatenation] | *path* `+` *string* | left | 7 |
| [String and path concatenation] | *string* `+` *path* | left | 7 |
| Logical negation (`NOT`) | `!` *bool* | none | 8 |
| [Update] | *attrset* `//` *attrset* | right | 9 |
| [Less than][Comparison] | *expr* `<` *expr* | none | 10 |
| [Less than or equal to][Comparison] | *expr* `<=` *expr* | none | 10 |
| [Greater than][Comparison] | *expr* `>` *expr* | none | 10 |
| [Greater than or equal to][Comparison] | *expr* `>=` *expr* | none | 10 |
| [Equality] | *expr* `==` *expr* | none | 11 |
| Inequality | *expr* `!=` *expr* | none | 11 |
| Logical conjunction (`AND`) | *bool* `&&` *bool* | left | 12 |
| Logical disjunction (`OR`) | *bool* `\|\|` *bool* | left | 13 |
| [Logical implication] | *bool* `->` *bool* | none | 14 |
[string]: ./values.md#type-string
[path]: ./values.md#type-path
[number]: ./values.md#type-number
[list]: ./values.md#list
[attribute set]: ./values.md#attribute-set
## Attribute selection
Select the attribute denoted by attribute path *attrpath* from [attribute set] *attrset*.
If the attribute doesn't exist, return *expr* if provided, otherwise abort evaluation.
<!-- FIXME: the following should go into its own language syntax section, but that needs more work to fit in well -->
An attribute path is a dot-separated list of attribute names.
An attribute name can be an identifier or a string.
> *attrpath* = *name* [ `.` *name* ]...
> *name* = *identifier* | *string*
> *identifier* ~ `[a-zA-Z_][a-zA-Z0-9_'-]*`
[Attribute selection]: #attribute-selection
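For example, selecting a nested attribute, with and without a default:
```nix
[
  ({ a.b = 1; }.a.b)              # evaluates to 1
  ({ a.b = 1; }.a.c or "absent")  # evaluates to "absent"
]
```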
## Has attribute
> *attrset* `?` *attrpath*
Test whether [attribute set] *attrset* contains the attribute denoted by *attrpath*.
The result is a [Boolean] value.
[Boolean]: ./values.md#type-boolean
[Has attribute]: #has-attribute
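For example, testing for top-level and nested attributes:
```nix
[
  ({ a.b = null; } ? a)    # true
  ({ a.b = null; } ? a.b)  # true
  ({ a.b = null; } ? a.c)  # false
]
```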
## Arithmetic
Numbers are type-compatible:
Pure integer operations will always return integers, whereas any operation involving at least one floating point number returns a floating point number.
See also [Comparison] and [Equality].
The `+` operator is overloaded to also work on strings and paths.
[arithmetic]: #arithmetic
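For example:
```nix
[
  (1 + 2)       # 3
  (1 + 2.0)     # a floating point result, since one operand is a float
  (10 - 2 * 3)  # 4, since multiplication binds more strongly than subtraction
]
```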
## String concatenation
> *string* `+` *string*
Concatenate two [string]s and merge their string contexts.
[String concatenation]: #string-concatenation
## Path concatenation
> *path* `+` *path*
Concatenate two [path]s.
The result is a path.
[Path concatenation]: #path-concatenation
## Path and string concatenation
> *path* `+` *string*
Concatenate *[path]* with *[string]*.
The result is a path.
> **Note**
>
> The string must not have a string context that refers to a [store path].
[Path and string concatenation]: #path-and-string-concatenation
## String and path concatenation
> *string* `+` *path*
Concatenate *[string]* with *[path]*.
The result is a string.
> **Important**
>
> The file or directory at *path* must exist and is copied to the [store].
> The path appears in the result as the corresponding [store path].
[store path]: ../glossary.md#gloss-store-path
[store]: ../glossary.md#gloss-store
[String and path concatenation]: #string-and-path-concatenation
## Update
> *attrset1* `//` *attrset2*
Update [attribute set] *attrset1* with names and values from *attrset2*.
The returned attribute set will contain all the attributes of *attrset1* and *attrset2*.
If an attribute name is present in both, the attribute value from the latter is taken.
[Update]: #update
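For example:
```nix
{ a = 1; b = 2; } // { b = 3; c = 4; }    # evaluates to { a = 1; b = 3; c = 4; }
```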
## Comparison
Comparison is
- [arithmetic] for [number]s
- lexicographic for [string]s and [path]s
- item-wise lexicographic for [list]s:
elements at the same index in both lists are compared according to their type and skipped if they are equal.
All comparison operators are implemented in terms of `<`, and the following equivalencies hold:
| comparison | implementation |
|--------------|-----------------------|
| *a* `<=` *b* | `! (` *b* `<` *a* `)` |
| *a* `>` *b* | *b* `<` *a* |
| *a* `>=` *b* | `! (` *a* `<` *b* `)` |
[Comparison]: #comparison
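For example, following the rules above:
```nix
[
  (1 < 2.5)            # true, arithmetic comparison
  ("abc" < "abd")      # true, lexicographic comparison
  ([ 1 2 ] < [ 1 3 ])  # true, item-wise lexicographic comparison
]
```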
## Equality
- [Attribute sets][attribute set] and [list]s are compared recursively, and therefore are fully evaluated.
- Comparison of [function]s always returns `false`.
- Numbers are type-compatible, see [arithmetic] operators.
- Floating point numbers only differ up to a limited precision.
[function]: ./constructs.md#functions
[Equality]: #equality
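For example:
```nix
[
  (1 == 1.0)                          # true, numbers are type-compatible
  ({ a = [ 1 ]; } == { a = [ 1 ]; })  # true, compared recursively
  ((x: x) == (x: x))                  # false, functions never compare equal
]
```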
## Logical implication
Equivalent to `!`*b1* `||` *b2*.
[Logical implication]: #logical-implication
| Name | Syntax | Associativity | Description | Precedence |
| ------------------------ | ----------------------------------- | ------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------- |
| Select | *e* `.` *attrpath* \[ `or` *def* \] | none | Select attribute denoted by the attribute path *attrpath* from set *e*. (An attribute path is a dot-separated list of attribute names.) If the attribute doesnt exist, return *def* if provided, otherwise abort evaluation. | 1 |
| Application | *e1* *e2* | left | Call function *e1* with argument *e2*. | 2 |
| Arithmetic Negation | `-` *e* | none | Arithmetic negation. | 3 |
| Has Attribute | *e* `?` *attrpath* | none | Test whether set *e* contains the attribute denoted by *attrpath*; return `true` or `false`. | 4 |
| List Concatenation | *e1* `++` *e2* | right | List concatenation. | 5 |
| Multiplication | *e1* `*` *e2*, | left | Arithmetic multiplication. | 6 |
| Division | *e1* `/` *e2* | left | Arithmetic division. | 6 |
| Addition | *e1* `+` *e2* | left | Arithmetic addition. | 7 |
| Subtraction | *e1* `-` *e2* | left | Arithmetic subtraction. | 7 |
| String Concatenation | *string1* `+` *string2* | left | String concatenation. | 7 |
| Not | `!` *e* | none | Boolean negation. | 8 |
| Update | *e1* `//` *e2* | right | Return a set consisting of the attributes in *e1* and *e2* (with the latter taking precedence over the former in case of equally named attributes). | 9 |
| Less Than | *e1* `<` *e2*, | none | Arithmetic/lexicographic comparison. | 10 |
| Less Than or Equal To | *e1* `<=` *e2* | none | Arithmetic/lexicographic comparison. | 10 |
| Greater Than | *e1* `>` *e2* | none | Arithmetic/lexicographic comparison. | 10 |
| Greater Than or Equal To | *e1* `>=` *e2* | none | Arithmetic/lexicographic comparison. | 10 |
| Equality | *e1* `==` *e2* | none | Equality. | 11 |
| Inequality | *e1* `!=` *e2* | none | Inequality. | 11 |
| Logical AND | *e1* `&&` *e2* | left | Logical AND. | 12 |
| Logical OR | *e1* <code>&#124;&#124;</code> *e2* | left | Logical OR. | 13 |
| Logical Implication | *e1* `->` *e2* | none | Logical implication (equivalent to <code>!e1 &#124;&#124; e2</code>). | 14 |

View file

@ -0,0 +1,82 @@
# String interpolation
String interpolation is a language feature where a [string], [path], or [attribute name] can contain expressions enclosed in `${ }` (dollar-sign with curly brackets).
Such a string is an *interpolated string*, and an expression inside is an *interpolated expression*.
Interpolated expressions must evaluate to one of the following:
- a [string]
- a [path]
- a [derivation]
[string]: ./values.md#type-string
[path]: ./values.md#type-path
[attribute name]: ./values.md#attribute-set
[derivation]: ../glossary.md#gloss-derivation
## Examples
### String
Rather than writing
```nix
"--with-freetype2-library=" + freetype + "/lib"
```
(where `freetype` is a [derivation]), you can instead write
```nix
"--with-freetype2-library=${freetype}/lib"
```
The latter is automatically translated to the former.
A more complicated example (from the Nix expression for [Qt](http://www.trolltech.com/products/qt)):
```nix
configureFlags = "
-system-zlib -system-libpng -system-libjpeg
${if openglSupport then "-dlopen-opengl
-L${mesa}/lib -I${mesa}/include
-L${libXmu}/lib -I${libXmu}/include" else ""}
${if threadSupport then "-thread" else "-no-thread"}
";
```
Note that Nix expressions and strings can be arbitrarily nested;
in this case the outer string contains various interpolated expressions that themselves contain strings (e.g., `"-thread"`), some of which in turn contain interpolated expressions (e.g., `${mesa}`).
### Path
Rather than writing
```nix
./. + "/" + foo + "-" + bar + ".nix"
```
or
```nix
./. + "/${foo}-${bar}.nix"
```
you can instead write
```nix
./${foo}-${bar}.nix
```
### Attribute name
Attribute names can be created dynamically with string interpolation:
```nix
let name = "foo"; in
{
${name} = "bar";
}
```
{ foo = "bar"; }

View file

@ -13,41 +13,9 @@
returns and tabs can be written as `\n`, `\r` and `\t`,
respectively.
You can include the result of an expression into a string by
enclosing it in `${...}`, a feature known as *antiquotation*. The
enclosed expression must evaluate to something that can be coerced
into a string (meaning that it must be a string, a path, or a
derivation). For instance, rather than writing
You can include the results of other expressions into a string by enclosing them in `${ }`, a feature known as [string interpolation].
```nix
"--with-freetype2-library=" + freetype + "/lib"
```
(where `freetype` is a derivation), you can instead write the more
natural
```nix
"--with-freetype2-library=${freetype}/lib"
```
The latter is automatically translated to the former. A more
complicated example (from the Nix expression for
[Qt](http://www.trolltech.com/products/qt)):
```nix
configureFlags = "
-system-zlib -system-libpng -system-libjpeg
${if openglSupport then "-dlopen-opengl
-L${mesa}/lib -I${mesa}/include
-L${libXmu}/lib -I${libXmu}/include" else ""}
${if threadSupport then "-thread" else "-no-thread"}
";
```
Note that Nix expressions and strings can be arbitrarily nested; in
this case the outer string contains various antiquotations that
themselves contain strings (e.g., `"-thread"`), some of which in
turn contain expressions (e.g., `${mesa}`).
[string interpolation]: ./string-interpolation.md
The second way to write string literals is as an *indented string*,
which is enclosed between pairs of *double single-quotes*, like so:
@ -75,7 +43,7 @@
Note that the whitespace and newline following the opening `''` is
ignored if there is no non-whitespace text on the initial line.
Antiquotation (`${expr}`) is supported in indented strings.
Indented strings support [string interpolation].
Since `${` and `''` have special meaning in indented strings, you
need a way to quote them. `$` can be escaped by prefixing it with
@ -117,9 +85,10 @@
Numbers, which can be *integers* (like `123`) or *floating point*
(like `123.43` or `.27e13`).
Numbers are type-compatible: pure integer operations will always
return integers, whereas any operation involving at least one
floating point number will have a floating point number as a result.
See [arithmetic] and [comparison] operators for semantics.
[arithmetic]: ./operators.md#arithmetic
[comparison]: ./operators.md#comparison
- <a id="type-path" href="#type-path">Path</a>
@ -143,12 +112,23 @@
environment variable `NIX_PATH` will be searched for the given file
or directory name.
Antiquotation is supported in any paths except those in angle brackets.
`./${foo}-${bar}.nix` is a more convenient way of writing
`./. + "/" + foo + "-" + bar + ".nix"` or `./. + "/${foo}-${bar}.nix"`. At
least one slash must appear *before* any antiquotations for this to be
recognized as a path. `a.${foo}/b.${bar}` is a syntactically valid division
operation. `./a.${foo}/b.${bar}` is a path.
When an [interpolated string][string interpolation] evaluates to a path, the path is first copied into the Nix store and the resulting string is the [store path] of the newly created [store object].
[store path]: ../glossary.md#gloss-store-path
[store object]: ../glossary.md#gloss-store-object
For instance, evaluating `"${./foo.txt}"` will cause `foo.txt` in the current directory to be copied into the Nix store and result in the string `"/nix/store/<hash>-foo.txt"`.
Note that the Nix language assumes that all input files will remain _unchanged_ while evaluating a Nix expression.
For example, assume you used a file path in an interpolated string during a `nix repl` session.
Later in the same session, after having changed the file contents, evaluating the interpolated string with the file path again might not return a new store path, since Nix might not re-read the file contents.
Paths themselves, except those in angle brackets (`< >`), support [string interpolation].
At least one slash (`/`) must appear *before* any interpolated expression for the result to be recognized as a path.
`a.${foo}/b.${bar}` is a syntactically valid division operation.
`./a.${foo}/b.${bar}` is a path.
- <a id="type-boolean" href="#type-boolean">Boolean</a>
@ -221,23 +201,33 @@ will evaluate to `"Xyzzy"` because there is no `c` attribute in the set.
You can use arbitrary double-quoted strings as attribute names:
```nix
{ "foo ${bar}" = 123; "nix-1.0" = 456; }."foo ${bar}"
{ "$!@#?" = 123; }."$!@#?"
```
This will evaluate to `123` (Assuming `bar` is antiquotable). In the
case where an attribute name is just a single antiquotation, the quotes
can be dropped:
```nix
{ foo = 123; }.${bar} or 456
let bar = "bar";
{ "foo ${bar}" = 123; }."foo ${bar}"
```
This will evaluate to `123` if `bar` evaluates to `"foo"` when coerced
to a string and `456` otherwise (again assuming `bar` is antiquotable).
Both will evaluate to `123`.
Attribute names support [string interpolation]:
```nix
let bar = "foo"; in
{ foo = 123; }.${bar}
```
```nix
let bar = "foo"; in
{ ${bar} = 123; }.foo
```
Both will evaluate to `123`.
In the special case where an attribute name inside of a set declaration
evaluates to `null` (which is normally an error, as `null` is not
antiquotable), that attribute is simply not added to the set:
evaluates to `null` (which is normally an error, as `null` cannot be coerced to
a string), that attribute is simply not added to the set:
```nix
{ ${if foo then "bar" else null} = true; }

View file

@ -32,13 +32,13 @@ which should print something like:
Priority: 30
On the client side, you can tell Nix to use your binary cache using
`--option extra-binary-caches`, e.g.:
`--substituters`, e.g.:
```console
$ nix-env -iA nixpkgs.firefox --option extra-binary-caches http://avalon:8080/
$ nix-env -iA nixpkgs.firefox --substituters http://avalon:8080/
```
The option `extra-binary-caches` tells Nix to use this binary cache in
The option `substituters` tells Nix to use this binary cache in
addition to your default caches, such as <https://cache.nixos.org>.
Thus, for any path in the closure of Firefox, Nix will first check if
the path is available on the server `avalon` or other binary caches.
@ -47,4 +47,4 @@ If not, it will fall back to building from source.
You can also tell Nix to always use your binary cache by adding a line
to the `nix.conf` configuration file like this:
binary-caches = http://avalon:8080/ https://cache.nixos.org/
substituters = http://avalon:8080/ https://cache.nixos.org/

View file

@ -0,0 +1,43 @@
# Release 2.12 (2022-12-06)
* On Linux, Nix can now run builds in a user namespace where they run
as root (UID 0) and have 65,536 UIDs available.
<!-- FIXME: move this to its own section about system features -->
This is primarily useful for running containers such as `systemd-nspawn`
inside a Nix build. For an example, see [`tests/systemd-nspawn/nix`][nspawn].
[nspawn]: https://github.com/NixOS/nix/blob/67bcb99700a0da1395fa063d7c6586740b304598/tests/systemd-nspawn.nix.
A build can enable this by setting the derivation attribute:
```
requiredSystemFeatures = [ "uid-range" ];
```
The `uid-range` [system feature] requires the [`auto-allocate-uids`]
setting to be enabled.
[system feature]: ../command-ref/conf-file.md#conf-system-features
* Nix can now automatically pick UIDs for builds, removing the need to
create `nixbld*` user accounts. See [`auto-allocate-uids`].
[`auto-allocate-uids`]: ../command-ref/conf-file.md#conf-auto-allocate-uids
* On Linux, Nix has experimental support for running builds inside a
cgroup. See
[`use-cgroups`](../command-ref/conf-file.md#conf-use-cgroups).
* `<nix/fetchurl.nix>` now accepts an additional argument `impure` which
defaults to `false`. If it is set to `true`, the `hash` and `sha256`
arguments will be ignored and the resulting derivation will have
`__impure` set to `true`, making it an impure derivation.
* If `builtins.readFile` is called on a file with context, then only
the parts of the context that appear in the content of the file are
retained. This avoids a lot of spurious errors where strings end up
having a context just because they are read from a store path
([#7260](https://github.com/NixOS/nix/pull/7260)).
* `nix build --json` now prints some statistics about top-level
derivations, such as CPU statistics when cgroups are enabled.

View file

@ -0,0 +1,44 @@
# Release 2.13 (2023-01-17)
* The `repeat` and `enforce-determinism` options have been removed
since they had been broken under many circumstances for a long time.
* You can now use [flake references] in the [old command line interface], e.g.
[flake references]: ../command-ref/new-cli/nix3-flake.md#flake-references
[old command line interface]: ../command-ref/main-commands.md
```shell-session
# nix-build flake:nixpkgs -A hello
# nix-build -I nixpkgs=flake:github:NixOS/nixpkgs/nixos-22.05 \
'<nixpkgs>' -A hello
# NIX_PATH=nixpkgs=flake:nixpkgs nix-build '<nixpkgs>' -A hello
```
* Instead of "antiquotation", the more common term [string interpolation](../language/string-interpolation.md) is now used consistently.
Historical release notes were not changed.
* Error traces have been reworked to provide detailed explanations and more
accurate error locations. A short excerpt of the trace is now shown by
default when an error occurs.
* Allow explicitly selecting outputs in a store derivation installable, just like we can do with other sorts of installables.
For example,
```shell-session
# nix build /nix/store/gzaflydcr6sb3567hap9q6srzx8ggdgg-glibc-2.33-78.drv^dev
```
now works just as
```shell-session
# nix build nixpkgs#glibc^dev
```
does already.
* On Linux, `nix develop` now sets the
[*personality*](https://man7.org/linux/man-pages/man2/personality.2.html)
for the development shell in the same way as the actual build of the
derivation. This makes shells for `i686-linux` derivations work
correctly on `x86_64-linux`.
* You can now disable the global flake registry by setting the `flake-registry`
configuration option to an empty string. The same can be achieved at runtime with
`--flake-registry ""`.

View file

@ -1,2 +1,10 @@
# Release X.Y (202?-??-??)
* A new function `builtins.readFileType` is available. It is similar to
`builtins.readDir` but acts on a single file or directory.
* The `builtins.readDir` function has been optimized when encountering not-yet-known
file types from POSIX's `readdir`. In such cases the type of each file was previously
discovered eagerly by making additional syscalls. This change makes these lookups
lazy, so they are only performed if the type attribute is actually used.
This optimization affects a minority of filesystems and operating systems.

View file

@ -5,6 +5,32 @@ rec {
concatStrings = concatStringsSep "";
replaceStringsRec = from: to: string:
# recursively replace occurrences of `from` with `to` within `string`
# example:
# replaceStringRec "--" "-" "hello-----world"
# => "hello-world"
let
replaced = replaceStrings [ from ] [ to ] string;
in
if replaced == string then string else replaceStringsRec from to replaced;
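# collapse any run of consecutive blank lines into a single blank line
# example:
# squash "a\n\n\n\nb"
# => "a\n\nb"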
squash = replaceStringsRec "\n\n\n" "\n\n";
trim = string:
# trim trailing spaces and squash non-leading spaces
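# example:
# trim "  hello   world   "
# => "  hello world"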
let
trimLine = line:
let
# separate leading spaces from the rest
parts = split "(^ *)" line;
spaces = head (elemAt parts 1);
rest = elemAt parts 2;
# drop trailing spaces
body = head (split " *$" rest);
in spaces + replaceStringsRec " " " " body;
in concatStringsSep "\n" (map trimLine (splitLines string));
# FIXME: O(n^2)
unique = foldl' (acc: e: if elem e acc then acc else acc ++ [ e ]) [];

View file

@ -33,9 +33,20 @@ let
root = {
uid = 0;
shell = "/bin/bash";
shell = "${pkgs.bashInteractive}/bin/bash";
home = "/root";
gid = 0;
groups = [ "root" ];
description = "System administrator";
};
nobody = {
uid = 65534;
shell = "${pkgs.shadow}/bin/nologin";
home = "/var/empty";
gid = 65534;
groups = [ "nobody" ];
description = "Unprivileged account (don't use!)";
};
} // lib.listToAttrs (
@ -57,6 +68,7 @@ let
groups = {
root.gid = 0;
nixbld.gid = 30000;
nobody.gid = 65534;
};
userToPasswd = (

View file

@ -18,16 +18,16 @@
},
"nixpkgs": {
"locked": {
"lastModified": 1657693803,
"narHash": "sha256-G++2CJ9u0E7NNTAi9n5G8TdDmGJXcIjkJ3NF8cetQB8=",
"lastModified": 1670461440,
"narHash": "sha256-jy1LB8HOMKGJEGXgzFRLDU1CBGL0/LlkolgnqIsF0D8=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "365e1b3a859281cf11b94f87231adeabbdd878a2",
"rev": "04a75b2eecc0acf6239acf9dd04485ff8d14f425",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixos-22.05-small",
"ref": "nixos-22.11-small",
"repo": "nixpkgs",
"type": "github"
}

View file

@ -1,7 +1,7 @@
{
description = "The purely functional package manager";
inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-22.05-small";
inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-22.11-small";
inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2";
inputs.lowdown-src = { url = "github:kristapsdz/lowdown"; flake = false; };
@ -9,21 +9,21 @@
let
version = builtins.readFile ./.version + versionSuffix;
officialRelease = false;
version = nixpkgs.lib.fileContents ./.version + versionSuffix;
versionSuffix =
if officialRelease
then ""
else "pre${builtins.substring 0 8 (self.lastModifiedDate or self.lastModified or "19700101")}_${self.shortRev or "dirty"}";
officialRelease = false;
linux64BitSystems = [ "x86_64-linux" "aarch64-linux" ];
linuxSystems = linux64BitSystems ++ [ "i686-linux" ];
systems = linuxSystems ++ [ "x86_64-darwin" "aarch64-darwin" ];
crossSystems = [ "armv6l-linux" "armv7l-linux" ];
stdenvs = [ "gccStdenv" "clangStdenv" "clang11Stdenv" "stdenv" "libcxxStdenv" ];
stdenvs = [ "gccStdenv" "clangStdenv" "clang11Stdenv" "stdenv" "libcxxStdenv" "ccacheStdenv" ];
forAllSystems = f: nixpkgs.lib.genAttrs systems (system: f system);
forAllSystemsAndStdenvs = f: forAllSystems (system:
@ -82,7 +82,9 @@
});
configureFlags =
lib.optionals stdenv.isLinux [
[
"CXXFLAGS=-I${lib.getDev rapidcheck}/extras/gtest/include"
] ++ lib.optionals stdenv.isLinux [
"--with-boost=${boost}/lib"
"--with-sandbox-shell=${sh}/bin/busybox"
]
@ -96,6 +98,7 @@
buildPackages.flex
(lib.getBin buildPackages.lowdown-nix)
buildPackages.mdbook
buildPackages.mdbook-linkcheck
buildPackages.autoconf-archive
buildPackages.autoreconfHook
buildPackages.pkg-config
@ -108,13 +111,14 @@
++ lib.optionals stdenv.hostPlatform.isLinux [(buildPackages.util-linuxMinimal or buildPackages.utillinuxMinimal)];
buildDeps =
[ (curl.override { patchNetrcRegression = true; })
[ curl
bzip2 xz brotli editline
openssl sqlite
libarchive
boost
lowdown-nix
gtest
rapidcheck
]
++ lib.optionals stdenv.isLinux [libseccomp]
++ lib.optional (stdenv.isLinux || stdenv.isDarwin) libsodium
@ -133,7 +137,8 @@
patches = (o.patches or []) ++ [
./boehmgc-coroutine-sp-fallback.diff
];
}))
})
)
nlohmann_json
];
};
@ -260,6 +265,7 @@
echo "file binary-dist $fn" >> $out/nix-support/hydra-build-products
tar cvfJ $fn \
--owner=0 --group=0 --mode=u+rw,uga+r \
--mtime='1970-01-01' \
--absolute-names \
--hard-dereference \
--transform "s,$TMPDIR/install,$dir/install," \
@ -363,7 +369,7 @@
buildInputs =
[ nix
(curl.override { patchNetrcRegression = true; })
curl
bzip2
xz
pkgs.perl
@ -419,6 +425,8 @@
buildCross = nixpkgs.lib.genAttrs crossSystems (crossSystem:
nixpkgs.lib.genAttrs ["x86_64-linux"] (system: self.packages.${system}."nix-${crossSystem}"));
buildNoGc = nixpkgs.lib.genAttrs systems (system: self.packages.${system}.nix.overrideAttrs (a: { configureFlags = (a.configureFlags or []) ++ ["--enable-gc=no"];}));
# Perl bindings for various platforms.
perlBindings = nixpkgs.lib.genAttrs systems (system: self.packages.${system}.nix.perl-bindings);
@ -457,6 +465,10 @@
src = self;
configureFlags = [
"CXXFLAGS=-I${lib.getDev pkgs.rapidcheck}/extras/gtest/include"
];
enableParallelBuilding = true;
nativeBuildInputs = nativeBuildDeps;
@ -505,6 +517,12 @@
overlay = self.overlays.default;
});
tests.containers = (import ./tests/containers.nix rec {
system = "x86_64-linux";
inherit nixpkgs;
overlay = self.overlays.default;
});
tests.setuid = nixpkgs.lib.genAttrs
["i686-linux" "x86_64-linux"]
(system:
@ -526,6 +544,12 @@
mkdir $out
'';
tests.nixpkgsLibTests =
nixpkgs.lib.genAttrs systems (system:
import (nixpkgs + "/lib/tests/release.nix")
{ pkgs = nixpkgsFor.${system}; }
);
metrics.nixpkgs = import "${nixpkgs-regression}/pkgs/top-level/metrics.nix" {
pkgs = nixpkgsFor.x86_64-linux;
nixpkgs = nixpkgs-regression;
@ -545,12 +569,18 @@
# againstLatestStable = testNixVersions pkgs pkgs.nix pkgs.nixStable;
} "touch $out");
installerTests = import ./tests/installer {
binaryTarballs = self.hydraJobs.binaryTarball;
inherit nixpkgsFor;
};
};
checks = forAllSystems (system: {
binaryTarball = self.hydraJobs.binaryTarball.${system};
perlBindings = self.hydraJobs.perlBindings.${system};
installTests = self.hydraJobs.installTests.${system};
nixpkgsLibTests = self.hydraJobs.tests.nixpkgsLibTests.${system};
} // (nixpkgs.lib.optionalAttrs (builtins.elem system linux64BitSystems)) {
dockerImage = self.hydraJobs.dockerImage.${system};
});
@ -632,6 +662,7 @@
inherit system crossSystem;
overlays = [ self.overlays.default ];
};
inherit (nixpkgsCross) lib;
in with commonDeps { pkgs = nixpkgsCross; }; nixpkgsCross.stdenv.mkDerivation {
name = "nix-${version}";
@ -644,7 +675,11 @@
nativeBuildInputs = nativeBuildDeps;
buildInputs = buildDeps ++ propagatedDeps;
configureFlags = [ "--sysconfdir=/etc" "--disable-doc-gen" ];
configureFlags = [
"CXXFLAGS=-I${lib.getDev nixpkgsCross.rapidcheck}/extras/gtest/include"
"--sysconfdir=/etc"
"--disable-doc-gen"
];
enableParallelBuilding = true;

107
maintainers/README.md Normal file
View file

@ -0,0 +1,107 @@
# Nix maintainers team
## Motivation
The goal of the team is to help other people to contribute to Nix.
## Members
- Eelco Dolstra (@edolstra) Team lead
- Théophane Hufschmitt (@thufschmitt)
- Valentin Gagarin (@fricklerhandwerk)
- Thomas Bereknyei (@tomberek)
- Robert Hensing (@roberth)
## Meeting protocol
The team meets twice a week:
- Discussion meeting: [Fridays 13:00-14:00 CET](https://calendar.google.com/calendar/event?eid=MHNtOGVuNWtrZXNpZHR2bW1sM3QyN2ZjaGNfMjAyMjExMjVUMTIwMDAwWiBiOW81MmZvYnFqYWs4b3E4bGZraGczdDBxZ0Bn)
1. Triage issues and pull requests from the _No Status_ column (30 min)
2. Discuss issues and pull requests from the _To discuss_ column (30 min)
- Work meeting: [Mondays 13:00-15:00 CET](https://calendar.google.com/calendar/event?eid=NTM1MG1wNGJnOGpmOTZhYms3bTB1bnY5cWxfMjAyMjExMjFUMTIwMDAwWiBiOW81MmZvYnFqYWs4b3E4bGZraGczdDBxZ0Bn)
1. Code review on pull requests from _In review_.
2. Other chores and tasks.
Meeting notes are collected on a [collaborative scratchpad](https://pad.lassul.us/Cv7FpYx-Ri-4VjUykQOLAw), and published on Discourse under the [Nix category](https://discourse.nixos.org/c/dev/nix/50).
## Project board protocol
The team uses a [GitHub project board](https://github.com/orgs/NixOS/projects/19/views/1) for tracking its work.
Issues on the board progress through the following states:
- No Status
During the discussion meeting, the team triages new items.
To be considered, issues and pull requests must have a high-level description to provide the whole team with the necessary context at a glance.
On every meeting, at least one item from each of the following categories is inspected:
1. [critical](https://github.com/NixOS/nix/labels/critical)
2. [security](https://github.com/NixOS/nix/labels/security)
3. [regression](https://github.com/NixOS/nix/labels/regression)
4. [bug](https://github.com/NixOS/nix/issues?q=is%3Aopen+label%3Abug+sort%3Areactions-%2B1-desc)
- [oldest pull requests](https://github.com/NixOS/nix/pulls?q=is%3Apr+is%3Aopen+sort%3Acreated-asc)
- [most popular pull requests](https://github.com/NixOS/nix/pulls?q=is%3Apr+is%3Aopen+sort%3Areactions-%2B1-desc)
- [oldest issues](https://github.com/NixOS/nix/issues?q=is%3Aissue+is%3Aopen+sort%3Acreated-asc)
- [most popular issues](https://github.com/NixOS/nix/issues?q=is%3Aissue+is%3Aopen+sort%3Areactions-%2B1-desc)
Team members can also add pull requests or issues they would like the whole team to consider.
If there is disagreement on the general idea behind an issue or pull request, it is moved to _To discuss_, otherwise to _In review_.
- To discuss
Pull requests and issues that are deemed important and controversial are discussed by the team during discussion meetings.
This may be where the merit of the change itself or the implementation strategy is contested by a team member.
As a general guideline, the order of items is determined as follows:
- Prioritise pull requests over issues
Contributors who took the time to implement concrete change proposals should not wait indefinitely.
- Prioritise fixing bugs over documentation, improvements or new features
The team values stability and accessibility higher than raw functionality.
- Interleave issues and PRs
This way issues without attempts at a solution get a chance to get addressed.
- In review
Pull requests in this column are reviewed together during work meetings.
This is both for spreading implementation knowledge and for establishing common values in code reviews.
When the overall direction is agreed upon, even when further changes are required, the pull request is assigned to one team member.
- Assigned for merging
One team member is assigned to each of these pull requests.
They will communicate with the authors, and make the final approval once all remaining issues are addressed.
If more substantive issues arise, the assignee can move the pull request back to _To discuss_ to involve the team again.
The process is illustrated in the following diagram:
```mermaid
flowchart TD
discuss[To discuss]
review[To review]
New --> |Disagreement on idea| discuss
New & discuss --> |Consensus on idea| review
review --> |Consensus on implementation| Assigned
Assigned --> |Implementation issues arise| review
Assigned --> |Remaining issues fixed| Merged
```

View file

@ -115,10 +115,6 @@ sub downloadFile {
write_file("$tmpFile.sha256", $sha256_actual);
if (! -e "$tmpFile.asc") {
system("gpg2 --detach-sign --armor $tmpFile") == 0 or die "unable to sign $tmpFile\n";
}
return $sha256_expected;
}
@ -194,7 +190,7 @@ for my $fn (glob "$tmpDir/*") {
my $configuration = ();
$configuration->{content_type} = "application/octet-stream";
if ($fn =~ /.sha256|.asc|install/) {
if ($fn =~ /.sha256|install/) {
# Text files
$configuration->{content_type} = "text/plain";
}

View file

@ -28,7 +28,7 @@
<key>SoftResourceLimits</key>
<dict>
<key>NumberOfFiles</key>
<integer>4096</integer>
<integer>1048576</integer>
</dict>
</dict>
</plist>

View file

@ -9,7 +9,7 @@ ConditionPathIsReadWrite=@localstatedir@/nix/daemon-socket
[Service]
ExecStart=@@bindir@/nix-daemon nix-daemon --daemon
KillMode=process
LimitNOFILE=4096
LimitNOFILE=1048576
[Install]
WantedBy=multi-user.target

View file

@ -10,14 +10,15 @@ function _nix() {
local -a suggestions
declare -a suggestions
for suggestion in ${res:1}; do
# FIXME: This doesn't work properly if the suggestion word contains a `:`
# itself
suggestions+="${suggestion/ /:}"
suggestions+=("${suggestion%% *}")
done
local -a args
if [[ "$tpe" == filenames ]]; then
compadd -f
args+=('-f')
elif [[ "$tpe" == attrs ]]; then
args+=('-S' '')
fi
_describe 'nix' suggestions
compadd -J nix "${args[@]}" -a suggestions
}
_nix "$@"

11
mk/common-test.sh Normal file
View file

@ -0,0 +1,11 @@
TESTS_ENVIRONMENT=("TEST_NAME=${test%.*}" 'NIX_REMOTE=')
: ${BASH:=/usr/bin/env bash}
init_test () {
cd tests && env "${TESTS_ENVIRONMENT[@]}" $BASH -e init.sh 2>/dev/null > /dev/null
}
run_test_proper () {
cd $(dirname $test) && env "${TESTS_ENVIRONMENT[@]}" $BASH -e $(basename $test)
}

11
mk/debug-test.sh Executable file
View file

@ -0,0 +1,11 @@
#!/usr/bin/env bash
set -eu
test=$1
dir="$(dirname "${BASH_SOURCE[0]}")"
source "$dir/common-test.sh"
(init_test)
run_test_proper

View file

@ -1,4 +1,4 @@
#!/bin/sh
#!/usr/bin/env bash
set -u
@ -7,7 +7,12 @@ green=""
yellow=""
normal=""
post_run_msg="ran test $1..."
test=$1
dir="$(dirname "${BASH_SOURCE[0]}")"
source "$dir/common-test.sh"
post_run_msg="ran test $test..."
if [ -t 1 ]; then
red=""
green=""
@ -16,12 +21,12 @@ if [ -t 1 ]; then
fi
run_test () {
(cd tests && env ${TESTS_ENVIRONMENT} init.sh 2>/dev/null > /dev/null)
log="$(cd $(dirname $1) && env ${TESTS_ENVIRONMENT} $(basename $1) 2>&1)"
(init_test 2>/dev/null > /dev/null)
log="$(run_test_proper 2>&1)"
status=$?
}
run_test "$1"
run_test
# Hack: Retry the test if it fails with “unexpected EOF reading a line” as these
# appear randomly without anyone knowing why.
@ -32,7 +37,7 @@ if [[ $status -ne 0 && $status -ne 99 && \
]]; then
echo "$post_run_msg [${yellow}FAIL$normal] (possibly flaky, so will be retried)"
echo "$log" | sed 's/^/ /'
run_test "$1"
run_test
fi
if [ $status -eq 0 ]; then

View file

@ -8,7 +8,11 @@ define run-install-test
.PHONY: $1.test
$1.test: $1 $(test-deps)
@env TEST_NAME=$(basename $1) TESTS_ENVIRONMENT="$(tests-environment)" mk/run_test.sh $1 < /dev/null
@env BASH=$(bash) $(bash) mk/run-test.sh $1 < /dev/null
.PHONY: $1.test-debug
$1.test-debug: $1 $(test-deps)
@env BASH=$(bash) $(bash) mk/debug-test.sh $1 < /dev/null
endef

View file

@ -37,6 +37,19 @@ readonly PROFILE_TARGETS=("/etc/bashrc" "/etc/profile.d/nix.sh" "/etc/zshrc" "/e
readonly PROFILE_BACKUP_SUFFIX=".backup-before-nix"
readonly PROFILE_NIX_FILE="$NIX_ROOT/var/nix/profiles/default/etc/profile.d/nix-daemon.sh"
# Fish has different syntax than zsh/bash, treat it separate
readonly PROFILE_FISH_SUFFIX="conf.d/nix.fish"
readonly PROFILE_FISH_PREFIXES=(
# each of these are common values of $__fish_sysconf_dir,
# under which Fish will look for a file named
# $PROFILE_FISH_SUFFIX.
"/etc/fish" # standard
"/usr/local/etc/fish" # their installer .pkg for macOS
"/opt/homebrew/etc/fish" # homebrew
"/opt/local/etc/fish" # macports
)
readonly PROFILE_NIX_FILE_FISH="$NIX_ROOT/var/nix/profiles/default/etc/profile.d/nix-daemon.fish"
readonly NIX_INSTALLED_NIX="@nix@"
readonly NIX_INSTALLED_CACERT="@cacert@"
#readonly NIX_INSTALLED_NIX="/nix/store/j8dbv5w6jl34caywh2ygdy88knx1mdf7-nix-2.3.6"
@ -45,7 +58,7 @@ readonly EXTRACTED_NIX_PATH="$(dirname "$0")"
readonly ROOT_HOME=~root
if [ -t 0 ]; then
if [ -t 0 ] && [ -z "${NIX_INSTALLER_YES:-}" ]; then
readonly IS_HEADLESS='no'
else
readonly IS_HEADLESS='yes'
@ -84,13 +97,10 @@ is_os_darwin() {
}
contact_us() {
echo "You can open an issue at https://github.com/nixos/nix/issues"
echo "You can open an issue at"
echo "https://github.com/NixOS/nix/issues/new?labels=installer&template=installer.md"
echo ""
echo "Or feel free to contact the team:"
echo " - Matrix: #nix:nixos.org"
echo " - IRC: in #nixos on irc.libera.chat"
echo " - twitter: @nixos_org"
echo " - forum: https://discourse.nixos.org"
echo "Or get in touch with the community: https://nixos.org/community"
}
get_help() {
echo "We'd love to help if you need it."
@ -362,7 +372,7 @@ finish_fail() {
finish_cleanup
failure <<EOF
Jeeze, something went wrong. If you can take all the output and open
Oh no, something went wrong. If you can take all the output and open
an issue, we'd love to fix the problem so nobody else has this issue.
:(
@ -565,7 +575,7 @@ EOF
# to extract _just_ the user's note, instead it is prefixed with
# some plist junk. This was causing the user note to always be set,
# even if there was no reason for it.
if ! poly_user_note_get "$username" | grep -q "Nix build user $coreid"; then
if poly_user_note_get "$username" | grep -q "Nix build user $coreid"; then
row " Note" "Nix build user $coreid"
else
poly_user_note_set "$username" "Nix build user $coreid"
@ -810,7 +820,7 @@ EOF
fi
_sudo "to load data for the first time in to the Nix Database" \
"$NIX_INSTALLED_NIX/bin/nix-store" --load-db < ./.reginfo
HOME="$ROOT_HOME" "$NIX_INSTALLED_NIX/bin/nix-store" --load-db < ./.reginfo
echo " Just finished getting the nix database ready."
)
@ -828,6 +838,19 @@ fi
EOF
}
# Fish has differing syntax
fish_source_lines() {
cat <<EOF
# Nix
if test -e '$PROFILE_NIX_FILE_FISH'
. '$PROFILE_NIX_FILE_FISH'
end
# End Nix
EOF
}
configure_shell_profile() {
task "Setting up shell profiles: ${PROFILE_TARGETS[*]}"
for profile_target in "${PROFILE_TARGETS[@]}"; do
@ -849,6 +872,27 @@ configure_shell_profile() {
tee -a "$profile_target"
fi
done
task "Setting up shell profiles for Fish with with ${PROFILE_FISH_SUFFIX} inside ${PROFILE_FISH_PREFIXES[*]}"
for fish_prefix in "${PROFILE_FISH_PREFIXES[@]}"; do
if [ ! -d "$fish_prefix" ]; then
# this specific prefix (ie: /etc/fish) is very likely to exist
# if Fish is installed with this sysconfdir.
continue
fi
profile_target="${fish_prefix}/${PROFILE_FISH_SUFFIX}"
conf_dir=$(dirname "$profile_target")
if [ ! -d "$conf_dir" ]; then
_sudo "create $conf_dir for our Fish hook" \
mkdir "$conf_dir"
fi
fish_source_lines \
| _sudo "write nix-daemon settings to $profile_target" \
tee "$profile_target"
done
# TODO: should we suggest '. $PROFILE_NIX_FILE'? It would get them on
# their way less disruptively, but a counter-argument is that they won't
# immediately notice if something didn't get set up right?

View file

@ -71,6 +71,8 @@ while [ $# -gt 0 ]; do
# # intentional tail space
# ACTIONS="${ACTIONS}uninstall "
# ;;
--yes)
export NIX_INSTALLER_YES=1;;
--no-channel-add)
export NIX_INSTALLER_NO_CHANNEL_ADD=1;;
--daemon-user-count)
@ -90,7 +92,7 @@ while [ $# -gt 0 ]; do
shift;;
*)
{
echo "Nix Installer [--daemon|--no-daemon] [--daemon-user-count INT] [--no-channel-add] [--no-modify-profile] [--nix-extra-conf-file FILE]"
echo "Nix Installer [--daemon|--no-daemon] [--daemon-user-count INT] [--yes] [--no-channel-add] [--no-modify-profile] [--nix-extra-conf-file FILE]"
echo "Choose installation method."
echo ""
@ -104,6 +106,8 @@ while [ $# -gt 0 ]; do
echo " trivial to uninstall."
echo " (default)"
echo ""
echo " --yes: Run the script non-interactively, accepting all prompts."
echo ""
echo " --no-channel-add: Don't add any channels. nixpkgs-unstable is installed by default."
echo ""
echo " --no-modify-profile: Don't modify the user profile to automatically load nix."
@ -209,31 +213,50 @@ if [ -z "$NIX_INSTALLER_NO_CHANNEL_ADD" ]; then
fi
added=
p=$HOME/.nix-profile/etc/profile.d/nix.sh
p=
p_sh=$HOME/.nix-profile/etc/profile.d/nix.sh
p_fish=$HOME/.nix-profile/etc/profile.d/nix.fish
if [ -z "$NIX_INSTALLER_NO_MODIFY_PROFILE" ]; then
# Make the shell source nix.sh during login.
for i in .bash_profile .bash_login .profile; do
fn="$HOME/$i"
if [ -w "$fn" ]; then
if ! grep -q "$p" "$fn"; then
if ! grep -q "$p_sh" "$fn"; then
echo "modifying $fn..." >&2
printf '\nif [ -e %s ]; then . %s; fi # added by Nix installer\n' "$p" "$p" >> "$fn"
printf '\nif [ -e %s ]; then . %s; fi # added by Nix installer\n' "$p_sh" "$p_sh" >> "$fn"
fi
added=1
p=${p_sh}
break
fi
done
for i in .zshenv .zshrc; do
fn="$HOME/$i"
if [ -w "$fn" ]; then
if ! grep -q "$p" "$fn"; then
if ! grep -q "$p_sh" "$fn"; then
echo "modifying $fn..." >&2
printf '\nif [ -e %s ]; then . %s; fi # added by Nix installer\n' "$p" "$p" >> "$fn"
printf '\nif [ -e %s ]; then . %s; fi # added by Nix installer\n' "$p_sh" "$p_sh" >> "$fn"
fi
added=1
p=${p_sh}
break
fi
done
if [ -d "$HOME/.config/fish" ]; then
fishdir=$HOME/.config/fish/conf.d
if [ ! -d "$fishdir" ]; then
mkdir -p "$fishdir"
fi
fn="$fishdir/nix.fish"
echo "placing $fn..." >&2
printf '\nif test -e %s; . %s; end # added by Nix installer\n' "$p_fish" "$p_fish" > "$fn"
added=1
p=${p_fish}
fi
else
p=${p_sh}
fi
if [ -z "$added" ]; then

View file

@ -24,12 +24,17 @@ $1
EOF
}
escape_systemd_env() {
temp_var="${1//\'/\\\'}"
echo "${temp_var//\%/%%}"
}
# Gather all non-empty proxy environment variables into a string
create_systemd_proxy_env() {
vars="http_proxy https_proxy ftp_proxy no_proxy HTTP_PROXY HTTPS_PROXY FTP_PROXY NO_PROXY"
for v in $vars; do
if [ "x${!v:-}" != "x" ]; then
echo "Environment=${v}=${!v}"
echo "Environment=${v}=$(escape_systemd_env ${!v})"
fi
done
}
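A rough sketch of what these helpers emit, assuming for illustration that only `http_proxy` is set and that its (made-up) value contains the characters that need escaping:
```
http_proxy="http://proxy.example:3128/%7Euser's"   # hypothetical value
escape_systemd_env "$http_proxy"
# -> http://proxy.example:3128/%%7Euser\'s
create_systemd_proxy_env
# -> Environment=http_proxy=http://proxy.example:3128/%%7Euser\'s
```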

View file

@ -40,12 +40,12 @@ case "$(uname -s).$(uname -m)" in
path=@tarballPath_aarch64-linux@
system=aarch64-linux
;;
Linux.armv6l_linux)
Linux.armv6l)
hash=@tarballHash_armv6l-linux@
path=@tarballPath_armv6l-linux@
system=armv6l-linux
;;
Linux.armv7l_linux)
Linux.armv7l)
hash=@tarballHash_armv7l-linux@
path=@tarballPath_armv7l-linux@
system=armv7l-linux

View file

@ -6,6 +6,8 @@ noinst-scripts += $(nix_noinst_scripts)
profiledir = $(sysconfdir)/profile.d
$(eval $(call install-file-as, $(d)/nix-profile.sh, $(profiledir)/nix.sh, 0644))
$(eval $(call install-file-as, $(d)/nix-profile.fish, $(profiledir)/nix.fish, 0644))
$(eval $(call install-file-as, $(d)/nix-profile-daemon.sh, $(profiledir)/nix-daemon.sh, 0644))
$(eval $(call install-file-as, $(d)/nix-profile-daemon.fish, $(profiledir)/nix-daemon.fish, 0644))
clean-files += $(nix_noinst_scripts)

View file

@ -0,0 +1,49 @@
function add_path --argument-names new_path
if type -q fish_add_path
# fish 3.2.0 or newer
fish_add_path --prepend --global $new_path
else
# older versions of fish
if not contains $new_path $fish_user_paths
set --global fish_user_paths $new_path $fish_user_paths
end
end
end
# Only execute this file once per shell.
if test -n "$__ETC_PROFILE_NIX_SOURCED"
exit
end
set __ETC_PROFILE_NIX_SOURCED 1
set --export NIX_PROFILES "@localstatedir@/nix/profiles/default $HOME/.nix-profile"
# Set $NIX_SSL_CERT_FILE so that Nixpkgs applications like curl work.
if test -n "$NIX_SSH_CERT_FILE"
: # Allow users to override the NIX_SSL_CERT_FILE
else if test -e /etc/ssl/certs/ca-certificates.crt # NixOS, Ubuntu, Debian, Gentoo, Arch
set --export NIX_SSL_CERT_FILE /etc/ssl/certs/ca-certificates.crt
else if test -e /etc/ssl/ca-bundle.pem # openSUSE Tumbleweed
set --export NIX_SSL_CERT_FILE /etc/ssl/ca-bundle.pem
else if test -e /etc/ssl/certs/ca-bundle.crt # Old NixOS
set --export NIX_SSL_CERT_FILE /etc/ssl/certs/ca-bundle.crt
else if test -e /etc/pki/tls/certs/ca-bundle.crt # Fedora, CentOS
set --export NIX_SSL_CERT_FILE /etc/pki/tls/certs/ca-bundle.crt
else if test -e "$NIX_LINK/etc/ssl/certs/ca-bundle.crt" # fall back to cacert in Nix profile
set --export NIX_SSL_CERT_FILE "$NIX_LINK/etc/ssl/certs/ca-bundle.crt"
else if test -e "$NIX_LINK/etc/ca-bundle.crt" # old cacert in Nix profile
set --export NIX_SSL_CERT_FILE "$NIX_LINK/etc/ca-bundle.crt"
else
# Fall back to what is in the nix profiles, favouring whatever is defined last.
for i in $NIX_PROFILES
if test -e "$i/etc/ssl/certs/ca-bundle.crt"
set --export NIX_SSL_CERT_FILE "$i/etc/ssl/certs/ca-bundle.crt"
end
end
end
add_path "@localstatedir@/nix/profiles/default/bin"
add_path "$HOME/.nix-profile/bin"
functions -e add_path

View file

@ -0,0 +1,51 @@
function add_path --argument-names new_path
if type -q fish_add_path
# fish 3.2.0 or newer
fish_add_path --prepend --global $new_path
else
# older versions of fish
if not contains $new_path $fish_user_paths
set --global fish_user_paths $new_path $fish_user_paths
end
end
end
if test -n "$HOME" && test -n "$USER"
# Set up the per-user profile.
set NIX_LINK $HOME/.nix-profile
# Set up environment.
# This part should be kept in sync with nixpkgs:nixos/modules/programs/environment.nix
set --export NIX_PROFILES "@localstatedir@/nix/profiles/default $HOME/.nix-profile"
# Set $NIX_SSL_CERT_FILE so that Nixpkgs applications like curl work.
if test -n "$NIX_SSH_CERT_FILE"
: # Allow users to override the NIX_SSL_CERT_FILE
else if test -e /etc/ssl/certs/ca-certificates.crt # NixOS, Ubuntu, Debian, Gentoo, Arch
set --export NIX_SSL_CERT_FILE /etc/ssl/certs/ca-certificates.crt
else if test -e /etc/ssl/ca-bundle.pem # openSUSE Tumbleweed
set --export NIX_SSL_CERT_FILE /etc/ssl/ca-bundle.pem
else if test -e /etc/ssl/certs/ca-bundle.crt # Old NixOS
set --export NIX_SSL_CERT_FILE /etc/ssl/certs/ca-bundle.crt
else if test -e /etc/pki/tls/certs/ca-bundle.crt # Fedora, CentOS
set --export NIX_SSL_CERT_FILE /etc/pki/tls/certs/ca-bundle.crt
else if test -e "$NIX_LINK/etc/ssl/certs/ca-bundle.crt" # fall back to cacert in Nix profile
set --export NIX_SSL_CERT_FILE "$NIX_LINK/etc/ssl/certs/ca-bundle.crt"
else if test -e "$NIX_LINK/etc/ca-bundle.crt" # old cacert in Nix profile
set --export NIX_SSL_CERT_FILE "$NIX_LINK/etc/ca-bundle.crt"
end
# Only use MANPATH if it is already set. In general `man` will simply
# pick up `.nix-profile/share/man` because it is close to `.nix-profile/bin`,
# which is in the $PATH. For more info, run `manpath -d`.
if set --query MANPATH
set --export --prepend --path MANPATH "$NIX_LINK/share/man"
end
add_path "$NIX_LINK/bin"
set --erase NIX_LINK
end
functions -e add_path

View file

@ -1,7 +1,6 @@
if [ -n "$HOME" ] && [ -n "$USER" ]; then
# Set up the per-user profile.
# This part should be kept in sync with nixpkgs:nixos/modules/programs/shell.nix
NIX_LINK=$HOME/.nix-profile

View file

@ -186,12 +186,12 @@ static int main_build_remote(int argc, char * * argv)
// build the hint template.
std::string errorText =
"Failed to find a machine for remote build!\n"
"derivation: %s\nrequired (system, features): (%s, %s)";
"derivation: %s\nrequired (system, features): (%s, [%s])";
errorText += "\n%s available machines:";
errorText += "\n(systems, maxjobs, supportedFeatures, mandatoryFeatures)";
for (unsigned int i = 0; i < machines.size(); ++i)
errorText += "\n(%s, %s, %s, %s)";
errorText += "\n([%s], %s, [%s], [%s])";
// add the template values.
std::string drvstr;

View file

@ -88,7 +88,8 @@ EvalCommand::EvalCommand()
{
addFlag({
.longName = "debugger",
.description = "start an interactive environment if evaluation fails",
.description = "Start an interactive environment if evaluation fails.",
.category = MixEvalArgs::category,
.handler = {&startReplOnEvalErrors, true},
});
}
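A quick sketch of the flag in use; the expression is an arbitrary failing example:
```
# Drop into the interactive debugger when evaluation fails
nix eval --debugger --expr 'let x = { }; in x.missing'
```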
@ -225,7 +226,7 @@ MixProfile::MixProfile()
{
addFlag({
.longName = "profile",
.description = "The profile to update.",
.description = "The profile to operate on.",
.labels = {"path"},
.handler = {&profile},
.completer = completePath

View file

@ -13,8 +13,6 @@ namespace nix {
MixEvalArgs::MixEvalArgs()
{
auto category = "Common evaluation options";
addFlag({
.longName = "arg",
.description = "Pass the value *expr* as the argument *name* to Nix functions.",
@ -34,7 +32,77 @@ MixEvalArgs::MixEvalArgs()
addFlag({
.longName = "include",
.shortName = 'I',
.description = "Add *path* to the list of locations used to look up `<...>` file names.",
.description = R"(
Add *path* to the Nix search path. The Nix search path is
initialized from the colon-separated [`NIX_PATH`](@docroot@/command-ref/env-common.md#env-NIX_PATH) environment
variable, and is used to look up the location of Nix expressions using [paths](@docroot@/language/values.md#type-path) enclosed in angle
brackets (i.e., `<nixpkgs>`).
For instance, passing
```
-I /home/eelco/Dev
-I /etc/nixos
```
will cause Nix to look for paths relative to `/home/eelco/Dev` and
`/etc/nixos`, in that order. This is equivalent to setting the
`NIX_PATH` environment variable to
```
/home/eelco/Dev:/etc/nixos
```
It is also possible to match paths against a prefix. For example,
passing
```
-I nixpkgs=/home/eelco/Dev/nixpkgs-branch
-I /etc/nixos
```
will cause Nix to search for `<nixpkgs/path>` in
`/home/eelco/Dev/nixpkgs-branch/path` and `/etc/nixos/nixpkgs/path`.
If a path in the Nix search path starts with `http://` or `https://`,
it is interpreted as the URL of a tarball that will be downloaded and
unpacked to a temporary location. The tarball must consist of a single
top-level directory. For example, passing
```
-I nixpkgs=https://github.com/NixOS/nixpkgs/archive/master.tar.gz
```
tells Nix to download and use the current contents of the `master`
branch in the `nixpkgs` repository.
The URLs of the tarballs from the official `nixos.org` channels
(see [the manual page for `nix-channel`](../nix-channel.md)) can be
abbreviated as `channel:<channel-name>`. For instance, the
following two flags are equivalent:
```
-I nixpkgs=channel:nixos-21.05
-I nixpkgs=https://nixos.org/channels/nixos-21.05/nixexprs.tar.xz
```
You can also fetch source trees using [flake URLs](./nix3-flake.md#url-like-syntax) and add them to the
search path. For instance,
```
-I nixpkgs=flake:nixpkgs
```
specifies that the prefix `nixpkgs` shall refer to the source tree
downloaded from the `nixpkgs` entry in the flake registry. Similarly,
```
-I nixpkgs=flake:github:NixOS/nixpkgs/nixos-22.05
```
makes `<nixpkgs>` refer to a particular branch of the
`NixOS/nixpkgs` repository on GitHub.
)",
.category = category,
.labels = {"path"},
.handler = {[&](std::string s) { searchPath.push_back(s); }}
@ -91,14 +159,25 @@ Bindings * MixEvalArgs::getAutoArgs(EvalState & state)
Path lookupFileArg(EvalState & state, std::string_view s)
{
if (isUri(s)) {
return state.store->toRealPath(
fetchers::downloadTarball(
state.store, resolveUri(s), "source", false).first.storePath);
} else if (s.size() > 2 && s.at(0) == '<' && s.at(s.size() - 1) == '>') {
if (EvalSettings::isPseudoUrl(s)) {
auto storePath = fetchers::downloadTarball(
state.store, EvalSettings::resolvePseudoUrl(s), "source", false).first.storePath;
return state.store->toRealPath(storePath);
}
else if (hasPrefix(s, "flake:")) {
settings.requireExperimentalFeature(Xp::Flakes);
auto flakeRef = parseFlakeRef(std::string(s.substr(6)), {}, true, false);
auto storePath = flakeRef.resolve(state.store).fetchTree(state.store).first.storePath;
return state.store->toRealPath(storePath);
}
else if (s.size() > 2 && s.at(0) == '<' && s.at(s.size() - 1) == '>') {
Path p(s.substr(1, s.size() - 2));
return state.findFile(p);
} else
}
else
return absPath(std::string(s));
}
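To ground the search-path documentation and the lookup logic above: the `channel:` abbreviation, its expanded URL, and the new `flake:` form can all feed `<nixpkgs>` via `-I` (channel name and attribute are illustrative):
```
nix-instantiate --eval '<nixpkgs>' -A lib.version -I nixpkgs=channel:nixos-21.05
nix-instantiate --eval '<nixpkgs>' -A lib.version -I nixpkgs=https://nixos.org/channels/nixos-21.05/nixexprs.tar.xz
nix-instantiate --eval '<nixpkgs>' -A lib.version -I nixpkgs=flake:nixpkgs   # needs the 'flakes' experimental feature
```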

View file

@ -10,6 +10,8 @@ class Bindings;
struct MixEvalArgs : virtual Args
{
static constexpr auto category = "Common evaluation options";
MixEvalArgs();
Bindings * getAutoArgs(EvalState & state);

View file

@ -1,5 +1,6 @@
#include "globals.hh"
#include "installables.hh"
#include "outputs-spec.hh"
#include "util.hh"
#include "command.hh"
#include "attr-path.hh"
@ -168,7 +169,7 @@ SourceExprCommand::SourceExprCommand(bool supportReadOnlyMode)
addFlag({
.longName = "derivation",
.description = "Operate on the store derivation rather than its outputs.",
.description = "Operate on the [store derivation](../../glossary.md#gloss-store-derivation) rather than its outputs.",
.category = installablesCategory,
.handler = {&operateOn, OperateOn::Derivation},
});
@ -207,55 +208,59 @@ Strings SourceExprCommand::getDefaultFlakeAttrPathPrefixes()
void SourceExprCommand::completeInstallable(std::string_view prefix)
{
if (file) {
completionType = ctAttrs;
try {
if (file) {
completionType = ctAttrs;
evalSettings.pureEval = false;
auto state = getEvalState();
Expr *e = state->parseExprFromFile(
resolveExprPath(state->checkSourcePath(lookupFileArg(*state, *file)))
);
evalSettings.pureEval = false;
auto state = getEvalState();
Expr *e = state->parseExprFromFile(
resolveExprPath(state->checkSourcePath(lookupFileArg(*state, *file)))
);
Value root;
state->eval(e, root);
Value root;
state->eval(e, root);
auto autoArgs = getAutoArgs(*state);
auto autoArgs = getAutoArgs(*state);
std::string prefix_ = std::string(prefix);
auto sep = prefix_.rfind('.');
std::string searchWord;
if (sep != std::string::npos) {
searchWord = prefix_.substr(sep + 1, std::string::npos);
prefix_ = prefix_.substr(0, sep);
} else {
searchWord = prefix_;
prefix_ = "";
}
std::string prefix_ = std::string(prefix);
auto sep = prefix_.rfind('.');
std::string searchWord;
if (sep != std::string::npos) {
searchWord = prefix_.substr(sep + 1, std::string::npos);
prefix_ = prefix_.substr(0, sep);
} else {
searchWord = prefix_;
prefix_ = "";
}
auto [v, pos] = findAlongAttrPath(*state, prefix_, *autoArgs, root);
Value &v1(*v);
state->forceValue(v1, pos);
Value v2;
state->autoCallFunction(*autoArgs, v1, v2);
auto [v, pos] = findAlongAttrPath(*state, prefix_, *autoArgs, root);
Value &v1(*v);
state->forceValue(v1, pos);
Value v2;
state->autoCallFunction(*autoArgs, v1, v2);
if (v2.type() == nAttrs) {
for (auto & i : *v2.attrs) {
std::string name = state->symbols[i.name];
if (name.find(searchWord) == 0) {
if (prefix_ == "")
completions->add(name);
else
completions->add(prefix_ + "." + name);
if (v2.type() == nAttrs) {
for (auto & i : *v2.attrs) {
std::string name = state->symbols[i.name];
if (name.find(searchWord) == 0) {
if (prefix_ == "")
completions->add(name);
else
completions->add(prefix_ + "." + name);
}
}
}
} else {
completeFlakeRefWithFragment(
getEvalState(),
lockFlags,
getDefaultFlakeAttrPathPrefixes(),
getDefaultFlakeAttrPaths(),
prefix);
}
} else {
completeFlakeRefWithFragment(
getEvalState(),
lockFlags,
getDefaultFlakeAttrPathPrefixes(),
getDefaultFlakeAttrPaths(),
prefix);
} catch (EvalError&) {
// Don't want eval errors to mess up the completion engine, so let's just swallow them
}
}
@ -354,7 +359,7 @@ void completeFlakeRef(ref<Store> store, std::string_view prefix)
}
}
DerivedPath Installable::toDerivedPath()
DerivedPathWithInfo Installable::toDerivedPath()
{
auto buildables = toDerivedPaths();
if (buildables.size() != 1)
@ -395,93 +400,53 @@ static StorePath getDeriver(
struct InstallableStorePath : Installable
{
ref<Store> store;
StorePath storePath;
DerivedPath req;
InstallableStorePath(ref<Store> store, StorePath && storePath)
: store(store), storePath(std::move(storePath)) { }
InstallableStorePath(ref<Store> store, DerivedPath && req)
: store(store), req(std::move(req))
{ }
std::string what() const override { return store->printStorePath(storePath); }
DerivedPaths toDerivedPaths() override
std::string what() const override
{
if (storePath.isDerivation()) {
auto drv = store->readDerivation(storePath);
return {
DerivedPath::Built {
.drvPath = storePath,
.outputs = drv.outputNames(),
}
};
} else {
return {
DerivedPath::Opaque {
.path = storePath,
}
};
}
return req.to_string(*store);
}
StorePathSet toDrvPaths(ref<Store> store) override
DerivedPathsWithInfo toDerivedPaths() override
{
if (storePath.isDerivation()) {
return {storePath};
} else {
return {getDeriver(store, *this, storePath)};
}
return {{.path = req, .info = {} }};
}
std::optional<StorePath> getStorePath() override
{
return storePath;
return std::visit(overloaded {
[&](const DerivedPath::Built & bfd) {
return bfd.drvPath;
},
[&](const DerivedPath::Opaque & bo) {
return bo.path;
},
}, req.raw());
}
};
DerivedPaths InstallableValue::toDerivedPaths()
{
DerivedPaths res;
std::map<StorePath, std::set<std::string>> drvsToOutputs;
RealisedPath::Set drvsToCopy;
// Group by derivation, helps with .all in particular
for (auto & drv : toDerivations()) {
for (auto & outputName : drv.outputsToInstall)
drvsToOutputs[drv.drvPath].insert(outputName);
drvsToCopy.insert(drv.drvPath);
}
for (auto & i : drvsToOutputs)
res.push_back(DerivedPath::Built { i.first, i.second });
return res;
}
StorePathSet InstallableValue::toDrvPaths(ref<Store> store)
{
StorePathSet res;
for (auto & drv : toDerivations())
res.insert(drv.drvPath);
return res;
}
struct InstallableAttrPath : InstallableValue
{
SourceExprCommand & cmd;
RootValue v;
std::string attrPath;
OutputsSpec outputsSpec;
ExtendedOutputsSpec extendedOutputsSpec;
InstallableAttrPath(
ref<EvalState> state,
SourceExprCommand & cmd,
Value * v,
const std::string & attrPath,
OutputsSpec outputsSpec)
ExtendedOutputsSpec extendedOutputsSpec)
: InstallableValue(state)
, cmd(cmd)
, v(allocRootValue(v))
, attrPath(attrPath)
, outputsSpec(std::move(outputsSpec))
, extendedOutputsSpec(std::move(extendedOutputsSpec))
{ }
std::string what() const override { return attrPath; }
@ -493,40 +458,54 @@ struct InstallableAttrPath : InstallableValue
return {vRes, pos};
}
virtual std::vector<InstallableValue::DerivationInfo> toDerivations() override;
};
DerivedPathsWithInfo toDerivedPaths() override
{
auto v = toValue(*state).first;
std::vector<InstallableValue::DerivationInfo> InstallableAttrPath::toDerivations()
{
auto v = toValue(*state).first;
Bindings & autoArgs = *cmd.getAutoArgs(*state);
Bindings & autoArgs = *cmd.getAutoArgs(*state);
DrvInfos drvInfos;
getDerivations(*state, *v, "", autoArgs, drvInfos, false);
DrvInfos drvInfos;
getDerivations(*state, *v, "", autoArgs, drvInfos, false);
// Backward compatibility hack: group results by drvPath. This
// helps keep .all output together.
std::map<StorePath, OutputsSpec> byDrvPath;
std::vector<DerivationInfo> res;
for (auto & drvInfo : drvInfos) {
auto drvPath = drvInfo.queryDrvPath();
if (!drvPath)
throw Error("'%s' is not a derivation", what());
for (auto & drvInfo : drvInfos) {
auto drvPath = drvInfo.queryDrvPath();
if (!drvPath)
throw Error("'%s' is not a derivation", what());
std::set<std::string> outputsToInstall;
auto newOutputs = std::visit(overloaded {
[&](const ExtendedOutputsSpec::Default & d) -> OutputsSpec {
std::set<std::string> outputsToInstall;
for (auto & output : drvInfo.queryOutputs(false, true))
outputsToInstall.insert(output.first);
return OutputsSpec::Names { std::move(outputsToInstall) };
},
[&](const ExtendedOutputsSpec::Explicit & e) -> OutputsSpec {
return e;
},
}, extendedOutputsSpec.raw());
if (auto outputNames = std::get_if<OutputNames>(&outputsSpec))
outputsToInstall = *outputNames;
else
for (auto & output : drvInfo.queryOutputs(false, std::get_if<DefaultOutputs>(&outputsSpec)))
outputsToInstall.insert(output.first);
auto [iter, didInsert] = byDrvPath.emplace(*drvPath, newOutputs);
res.push_back(DerivationInfo {
.drvPath = *drvPath,
.outputsToInstall = std::move(outputsToInstall)
});
if (!didInsert)
iter->second = iter->second.union_(newOutputs);
}
DerivedPathsWithInfo res;
for (auto & [drvPath, outputs] : byDrvPath)
res.push_back({
.path = DerivedPath::Built {
.drvPath = drvPath,
.outputs = outputs,
},
});
return res;
}
return res;
}
};
std::vector<std::string> InstallableFlake::getActualAttrPaths()
{
@ -575,7 +554,7 @@ ref<eval_cache::EvalCache> openEvalCache(
auto vFlake = state.allocValue();
flake::callFlake(state, *lockedFlake, *vFlake);
state.forceAttrs(*vFlake, noPos);
state.forceAttrs(*vFlake, noPos, "while parsing cached flake data");
auto aOutputs = vFlake->attrs->get(state.symbols.create("outputs"));
assert(aOutputs);
@ -599,7 +578,7 @@ InstallableFlake::InstallableFlake(
ref<EvalState> state,
FlakeRef && flakeRef,
std::string_view fragment,
OutputsSpec outputsSpec,
ExtendedOutputsSpec extendedOutputsSpec,
Strings attrPaths,
Strings prefixes,
const flake::LockFlags & lockFlags)
@ -607,14 +586,14 @@ InstallableFlake::InstallableFlake(
flakeRef(flakeRef),
attrPaths(fragment == "" ? attrPaths : Strings{(std::string) fragment}),
prefixes(fragment == "" ? Strings{} : prefixes),
outputsSpec(std::move(outputsSpec)),
extendedOutputsSpec(std::move(extendedOutputsSpec)),
lockFlags(lockFlags)
{
if (cmd && cmd->getAutoArgs(*state)->size())
throw UsageError("'--arg' and '--argstr' are incompatible with flakes");
}
std::tuple<std::string, FlakeRef, InstallableValue::DerivationInfo> InstallableFlake::toDerivation()
DerivedPathsWithInfo InstallableFlake::toDerivedPaths()
{
Activity act(*logger, lvlTalkative, actUnknown, fmt("evaluating derivation '%s'", what()));
@ -622,56 +601,84 @@ std::tuple<std::string, FlakeRef, InstallableValue::DerivationInfo> InstallableF
auto attrPath = attr->getAttrPathStr();
if (!attr->isDerivation())
throw Error("flake output attribute '%s' is not a derivation", attrPath);
if (!attr->isDerivation()) {
// FIXME: use eval cache?
auto v = attr->forceValue();
if (v.type() == nPath) {
PathSet context;
auto storePath = state->copyPathToStore(context, Path(v.path));
return {{
.path = DerivedPath::Opaque {
.path = std::move(storePath),
}
}};
}
else if (v.type() == nString) {
PathSet context;
auto s = state->forceString(v, context, noPos, fmt("while evaluating the flake output attribute '%s'", attrPath));
auto storePath = state->store->maybeParseStorePath(s);
if (storePath && context.count(std::string(s))) {
return {{
.path = DerivedPath::Opaque {
.path = std::move(*storePath),
}
}};
} else
throw Error("flake output attribute '%s' evaluates to the string '%s' which is not a store path", attrPath, s);
}
else
throw Error("flake output attribute '%s' is not a derivation or path", attrPath);
}
auto drvPath = attr->forceDerivation();
std::set<std::string> outputsToInstall;
std::optional<NixInt> priority;
if (auto aOutputSpecified = attr->maybeGetAttr(state->sOutputSpecified)) {
if (aOutputSpecified->getBool()) {
if (auto aOutputName = attr->maybeGetAttr("outputName"))
outputsToInstall = { aOutputName->getString() };
}
}
else if (auto aMeta = attr->maybeGetAttr(state->sMeta)) {
if (auto aOutputsToInstall = aMeta->maybeGetAttr("outputsToInstall"))
for (auto & s : aOutputsToInstall->getListOfStrings())
outputsToInstall.insert(s);
if (attr->maybeGetAttr(state->sOutputSpecified)) {
} else if (auto aMeta = attr->maybeGetAttr(state->sMeta)) {
if (auto aPriority = aMeta->maybeGetAttr("priority"))
priority = aPriority->getInt();
}
if (outputsToInstall.empty() || std::get_if<AllOutputs>(&outputsSpec)) {
outputsToInstall.clear();
if (auto aOutputs = attr->maybeGetAttr(state->sOutputs))
for (auto & s : aOutputs->getListOfStrings())
outputsToInstall.insert(s);
}
return {{
.path = DerivedPath::Built {
.drvPath = std::move(drvPath),
.outputs = std::visit(overloaded {
[&](const ExtendedOutputsSpec::Default & d) -> OutputsSpec {
std::set<std::string> outputsToInstall;
if (auto aOutputSpecified = attr->maybeGetAttr(state->sOutputSpecified)) {
if (aOutputSpecified->getBool()) {
if (auto aOutputName = attr->maybeGetAttr("outputName"))
outputsToInstall = { aOutputName->getString() };
}
} else if (auto aMeta = attr->maybeGetAttr(state->sMeta)) {
if (auto aOutputsToInstall = aMeta->maybeGetAttr("outputsToInstall"))
for (auto & s : aOutputsToInstall->getListOfStrings())
outputsToInstall.insert(s);
}
if (outputsToInstall.empty())
outputsToInstall.insert("out");
if (outputsToInstall.empty())
outputsToInstall.insert("out");
if (auto outputNames = std::get_if<OutputNames>(&outputsSpec))
outputsToInstall = *outputNames;
auto drvInfo = DerivationInfo {
.drvPath = std::move(drvPath),
.outputsToInstall = std::move(outputsToInstall),
.priority = priority,
};
return {attrPath, getLockedFlake()->flake.lockedRef, std::move(drvInfo)};
}
std::vector<InstallableValue::DerivationInfo> InstallableFlake::toDerivations()
{
std::vector<DerivationInfo> res;
res.push_back(std::get<2>(toDerivation()));
return res;
return OutputsSpec::Names { std::move(outputsToInstall) };
},
[&](const ExtendedOutputsSpec::Explicit & e) -> OutputsSpec {
return e;
},
}, extendedOutputsSpec.raw()),
},
.info = {
.priority = priority,
.originalRef = flakeRef,
.resolvedRef = getLockedFlake()->flake.lockedRef,
.attrPath = attrPath,
.extendedOutputsSpec = extendedOutputsSpec,
}
}};
}
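On the CLI, the `ExtendedOutputsSpec` cases handled here correspond roughly to the following (flake, attribute, and output names are stand-ins):
```
nix build 'nixpkgs#hello'              # Default: meta.outputsToInstall, falling back to 'out'
nix build 'nixpkgs#openssl^bin,dev'    # Explicit: exactly the named outputs
nix build 'nixpkgs#openssl^*'          # Explicit: all outputs
```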
std::pair<Value *, PosIdx> InstallableFlake::toValue(EvalState & state)
@ -777,7 +784,8 @@ std::vector<std::shared_ptr<Installable>> SourceExprCommand::parseInstallables(
if (file == "-") {
auto e = state->parseStdin();
state->eval(e, *vFile);
} else if (file)
}
else if (file)
state->evalFile(lookupFileArg(*state, *file), *vFile);
else {
auto e = state->parseExprFromString(*expr, absPath("."));
@ -785,12 +793,12 @@ std::vector<std::shared_ptr<Installable>> SourceExprCommand::parseInstallables(
}
for (auto & s : ss) {
auto [prefix, outputsSpec] = parseOutputsSpec(s);
auto [prefix, extendedOutputsSpec] = ExtendedOutputsSpec::parse(s);
result.push_back(
std::make_shared<InstallableAttrPath>(
state, *this, vFile,
prefix == "." ? "" : prefix,
outputsSpec));
prefix == "." ? "" : std::string { prefix },
extendedOutputsSpec));
}
} else {
@ -798,9 +806,46 @@ std::vector<std::shared_ptr<Installable>> SourceExprCommand::parseInstallables(
for (auto & s : ss) {
std::exception_ptr ex;
if (s.find('/') != std::string::npos) {
auto [prefix_, extendedOutputsSpec_] = ExtendedOutputsSpec::parse(s);
// To avoid clang's pedantry
auto prefix = std::move(prefix_);
auto extendedOutputsSpec = std::move(extendedOutputsSpec_);
auto found = prefix.find('/');
if (found != std::string::npos) {
try {
result.push_back(std::make_shared<InstallableStorePath>(store, store->followLinksToStorePath(s)));
auto derivedPath = std::visit(overloaded {
// If the user did not use ^, we treat the output more liberally.
[&](const ExtendedOutputsSpec::Default &) -> DerivedPath {
// First, we accept a symlink chain or an actual store path.
auto storePath = store->followLinksToStorePath(prefix);
// Second, we see if the store path ends in `.drv` to decide what sort
// of derived path they want.
//
// This handling predates the `^` syntax. The `^*` in
// `/nix/store/hash-foo.drv^*` unambiguously means "do the
// `DerivedPath::Built` case", so plain `/nix/store/hash-foo.drv` could
// also unambiguously mean "do the DerivedPath::Opaque` case".
//
// Issue #7261 tracks reconsidering this `.drv` dispatching.
return storePath.isDerivation()
? (DerivedPath) DerivedPath::Built {
.drvPath = std::move(storePath),
.outputs = OutputsSpec::All {},
}
: (DerivedPath) DerivedPath::Opaque {
.path = std::move(storePath),
};
},
// If the user did use ^, we just do exactly what is written.
[&](const ExtendedOutputsSpec::Explicit & outputSpec) -> DerivedPath {
return DerivedPath::Built {
.drvPath = store->parseStorePath(prefix),
.outputs = outputSpec,
};
},
}, extendedOutputsSpec.raw());
result.push_back(std::make_shared<InstallableStorePath>(store, std::move(derivedPath)));
continue;
} catch (BadStorePath &) {
} catch (...) {
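Concretely, the dispatch described in the comments above distinguishes these spellings (store paths are placeholders):
```
nix build /nix/store/<hash>-hello              # output path      -> DerivedPath::Opaque
nix build /nix/store/<hash>-hello.drv          # bare .drv        -> build all of its outputs
nix build '/nix/store/<hash>-hello.drv^out'    # explicit outputs -> build exactly those
nix build '/nix/store/<hash>-hello.drv^*'      # explicit '*'     -> all outputs, explicitly
```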
@ -810,13 +855,13 @@ std::vector<std::shared_ptr<Installable>> SourceExprCommand::parseInstallables(
}
try {
auto [flakeRef, fragment, outputsSpec] = parseFlakeRefWithFragmentAndOutputsSpec(s, absPath("."));
auto [flakeRef, fragment] = parseFlakeRefWithFragment(std::string { prefix }, absPath("."));
result.push_back(std::make_shared<InstallableFlake>(
this,
getEvalState(),
std::move(flakeRef),
fragment,
outputsSpec,
extendedOutputsSpec,
getDefaultFlakeAttrPaths(),
getDefaultFlakeAttrPathPrefixes(),
lockFlags));
@ -840,20 +885,20 @@ std::shared_ptr<Installable> SourceExprCommand::parseInstallable(
return installables.front();
}
BuiltPaths Installable::build(
std::vector<BuiltPathWithResult> Installable::build(
ref<Store> evalStore,
ref<Store> store,
Realise mode,
const std::vector<std::shared_ptr<Installable>> & installables,
BuildMode bMode)
{
BuiltPaths res;
for (auto & [_, builtPath] : build2(evalStore, store, mode, installables, bMode))
res.push_back(builtPath);
std::vector<BuiltPathWithResult> res;
for (auto & [_, builtPathWithResult] : build2(evalStore, store, mode, installables, bMode))
res.push_back(builtPathWithResult);
return res;
}
std::vector<std::pair<std::shared_ptr<Installable>, BuiltPath>> Installable::build2(
std::vector<std::pair<std::shared_ptr<Installable>, BuiltPathWithResult>> Installable::build2(
ref<Store> evalStore,
ref<Store> store,
Realise mode,
@ -863,17 +908,23 @@ std::vector<std::pair<std::shared_ptr<Installable>, BuiltPath>> Installable::bui
if (mode == Realise::Nothing)
settings.readOnlyMode = true;
struct Aux
{
ExtraPathInfo info;
std::shared_ptr<Installable> installable;
};
std::vector<DerivedPath> pathsToBuild;
std::map<DerivedPath, std::vector<std::shared_ptr<Installable>>> backmap;
std::map<DerivedPath, std::vector<Aux>> backmap;
for (auto & i : installables) {
for (auto b : i->toDerivedPaths()) {
pathsToBuild.push_back(b);
backmap[b].push_back(i);
pathsToBuild.push_back(b.path);
backmap[b.path].push_back({.info = b.info, .installable = i});
}
}
std::vector<std::pair<std::shared_ptr<Installable>, BuiltPath>> res;
std::vector<std::pair<std::shared_ptr<Installable>, BuiltPathWithResult>> res;
switch (mode) {
@ -882,42 +933,18 @@ std::vector<std::pair<std::shared_ptr<Installable>, BuiltPath>> Installable::bui
printMissing(store, pathsToBuild, lvlError);
for (auto & path : pathsToBuild) {
for (auto & installable : backmap[path]) {
for (auto & aux : backmap[path]) {
std::visit(overloaded {
[&](const DerivedPath::Built & bfd) {
OutputPathMap outputs;
auto drv = evalStore->readDerivation(bfd.drvPath);
auto outputHashes = staticOutputHashes(*evalStore, drv); // FIXME: expensive
auto drvOutputs = drv.outputsAndOptPaths(*store);
for (auto & output : bfd.outputs) {
auto outputHash = get(outputHashes, output);
if (!outputHash)
throw Error(
"the derivation '%s' doesn't have an output named '%s'",
store->printStorePath(bfd.drvPath), output);
if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations)) {
DrvOutput outputId { *outputHash, output };
auto realisation = store->queryRealisation(outputId);
if (!realisation)
throw Error(
"cannot operate on an output of the "
"unbuilt derivation '%s'",
outputId.to_string());
outputs.insert_or_assign(output, realisation->outPath);
} else {
// If ca-derivations isn't enabled, assume that
// the output path is statically known.
auto drvOutput = get(drvOutputs, output);
assert(drvOutput);
assert(drvOutput->second);
outputs.insert_or_assign(
output, *drvOutput->second);
}
}
res.push_back({installable, BuiltPath::Built { bfd.drvPath, outputs }});
auto outputs = resolveDerivedPath(*store, bfd, &*evalStore);
res.push_back({aux.installable, {
.path = BuiltPath::Built { bfd.drvPath, outputs },
.info = aux.info}});
},
[&](const DerivedPath::Opaque & bo) {
res.push_back({installable, BuiltPath::Opaque { bo.path }});
res.push_back({aux.installable, {
.path = BuiltPath::Opaque { bo.path },
.info = aux.info}});
},
}, path.raw());
}
@ -927,22 +954,28 @@ std::vector<std::pair<std::shared_ptr<Installable>, BuiltPath>> Installable::bui
case Realise::Outputs: {
if (settings.printMissing)
printMissing(store, pathsToBuild, lvlInfo);
printMissing(store, pathsToBuild, lvlInfo);
for (auto & buildResult : store->buildPathsWithResults(pathsToBuild, bMode, evalStore)) {
if (!buildResult.success())
buildResult.rethrow();
for (auto & installable : backmap[buildResult.path]) {
for (auto & aux : backmap[buildResult.path]) {
std::visit(overloaded {
[&](const DerivedPath::Built & bfd) {
std::map<std::string, StorePath> outputs;
for (auto & path : buildResult.builtOutputs)
outputs.emplace(path.first.outputName, path.second.outPath);
res.push_back({installable, BuiltPath::Built { bfd.drvPath, outputs }});
res.push_back({aux.installable, {
.path = BuiltPath::Built { bfd.drvPath, outputs },
.info = aux.info,
.result = buildResult}});
},
[&](const DerivedPath::Opaque & bo) {
res.push_back({installable, BuiltPath::Opaque { bo.path }});
res.push_back({aux.installable, {
.path = BuiltPath::Opaque { bo.path },
.info = aux.info,
.result = buildResult}});
},
}, buildResult.path.raw());
}
@ -965,9 +998,12 @@ BuiltPaths Installable::toBuiltPaths(
OperateOn operateOn,
const std::vector<std::shared_ptr<Installable>> & installables)
{
if (operateOn == OperateOn::Output)
return Installable::build(evalStore, store, mode, installables);
else {
if (operateOn == OperateOn::Output) {
BuiltPaths res;
for (auto & p : Installable::build(evalStore, store, mode, installables))
res.push_back(p.path);
return res;
} else {
if (mode == Realise::Nothing)
settings.readOnlyMode = true;
@ -1024,7 +1060,7 @@ StorePathSet Installable::toDerivations(
[&](const DerivedPath::Built & bfd) {
drvPaths.insert(bfd.drvPath);
},
}, b.raw());
}, b.path.raw());
return drvPaths;
}

View file

@ -2,11 +2,12 @@
#include "util.hh"
#include "path.hh"
#include "path-with-outputs.hh"
#include "outputs-spec.hh"
#include "derived-path.hh"
#include "eval.hh"
#include "store-api.hh"
#include "flake/flake.hh"
#include "build-result.hh"
#include <optional>
@ -19,7 +20,7 @@ namespace eval_cache { class EvalCache; class AttrCursor; }
struct App
{
std::vector<StorePathWithOutputs> context;
std::vector<DerivedPath> context;
Path program;
// FIXME: add args, sandbox settings, metadata, ...
};
@ -51,20 +52,42 @@ enum class OperateOn {
Derivation
};
struct ExtraPathInfo
{
std::optional<NixInt> priority;
std::optional<FlakeRef> originalRef;
std::optional<FlakeRef> resolvedRef;
std::optional<std::string> attrPath;
// FIXME: merge with DerivedPath's 'outputs' field?
std::optional<ExtendedOutputsSpec> extendedOutputsSpec;
};
/* A derived path with any additional info that commands might
need from the derivation. */
struct DerivedPathWithInfo
{
DerivedPath path;
ExtraPathInfo info;
};
struct BuiltPathWithResult
{
BuiltPath path;
ExtraPathInfo info;
std::optional<BuildResult> result;
};
typedef std::vector<DerivedPathWithInfo> DerivedPathsWithInfo;
struct Installable
{
virtual ~Installable() { }
virtual std::string what() const = 0;
virtual DerivedPaths toDerivedPaths() = 0;
virtual DerivedPathsWithInfo toDerivedPaths() = 0;
virtual StorePathSet toDrvPaths(ref<Store> store)
{
throw Error("'%s' cannot be converted to a derivation path", what());
}
DerivedPath toDerivedPath();
DerivedPathWithInfo toDerivedPath();
UnresolvedApp toApp(EvalState & state);
@ -91,14 +114,14 @@ struct Installable
return FlakeRef::fromAttrs({{"type","indirect"}, {"id", "nixpkgs"}});
}
static BuiltPaths build(
static std::vector<BuiltPathWithResult> build(
ref<Store> evalStore,
ref<Store> store,
Realise mode,
const std::vector<std::shared_ptr<Installable>> & installables,
BuildMode bMode = bmNormal);
static std::vector<std::pair<std::shared_ptr<Installable>, BuiltPath>> build2(
static std::vector<std::pair<std::shared_ptr<Installable>, BuiltPathWithResult>> build2(
ref<Store> evalStore,
ref<Store> store,
Realise mode,
@ -139,19 +162,6 @@ struct InstallableValue : Installable
ref<EvalState> state;
InstallableValue(ref<EvalState> state) : state(state) {}
struct DerivationInfo
{
StorePath drvPath;
std::set<std::string> outputsToInstall;
std::optional<NixInt> priority;
};
virtual std::vector<DerivationInfo> toDerivations() = 0;
DerivedPaths toDerivedPaths() override;
StorePathSet toDrvPaths(ref<Store> store) override;
};
struct InstallableFlake : InstallableValue
@ -159,7 +169,7 @@ struct InstallableFlake : InstallableValue
FlakeRef flakeRef;
Strings attrPaths;
Strings prefixes;
OutputsSpec outputsSpec;
ExtendedOutputsSpec extendedOutputsSpec;
const flake::LockFlags & lockFlags;
mutable std::shared_ptr<flake::LockedFlake> _lockedFlake;
@ -168,7 +178,7 @@ struct InstallableFlake : InstallableValue
ref<EvalState> state,
FlakeRef && flakeRef,
std::string_view fragment,
OutputsSpec outputsSpec,
ExtendedOutputsSpec extendedOutputsSpec,
Strings attrPaths,
Strings prefixes,
const flake::LockFlags & lockFlags);
@ -179,9 +189,7 @@ struct InstallableFlake : InstallableValue
Value * getFlakeOutputs(EvalState & state, const flake::LockedFlake & lockedFlake);
std::tuple<std::string, FlakeRef, DerivationInfo> toDerivation();
std::vector<DerivationInfo> toDerivations() override;
DerivedPathsWithInfo toDerivedPaths() override;
std::pair<Value *, PosIdx> toValue(EvalState & state) override;

View file

@ -8,7 +8,7 @@ libcmd_SOURCES := $(wildcard $(d)/*.cc)
libcmd_CXXFLAGS += -I src/libutil -I src/libstore -I src/libexpr -I src/libmain -I src/libfetchers -I src/nix
libcmd_LDFLAGS = $(EDITLINE_LIBS) -llowdown -pthread
libcmd_LDFLAGS = $(EDITLINE_LIBS) $(LOWDOWN_LIBS) -pthread
libcmd_LIBS = libstore libutil libexpr libmain libfetchers

View file

@ -215,17 +215,15 @@ static std::ostream & showDebugTrace(std::ostream & out, const PosTable & positi
out << dt.hint.str() << "\n";
// prefer direct pos, but if noPos then try the expr.
auto pos = *dt.pos
? *dt.pos
: positions[dt.expr.getPos() ? dt.expr.getPos() : noPos];
auto pos = dt.pos
? dt.pos
: static_cast<std::shared_ptr<AbstractPos>>(positions[dt.expr.getPos() ? dt.expr.getPos() : noPos]);
if (pos) {
printAtPos(pos, out);
auto loc = getCodeLines(pos);
if (loc.has_value()) {
out << pos;
if (auto loc = pos->getCodeLines()) {
out << "\n";
printCodeLines(out, "", pos, *loc);
printCodeLines(out, "", *pos, *loc);
out << "\n";
}
}
@ -242,7 +240,11 @@ void NixRepl::mainLoop()
// Allow nix-repl specific settings in .inputrc
rl_readline_name = "nix-repl";
createDirs(dirOf(historyFile));
try {
createDirs(dirOf(historyFile));
} catch (SysError & e) {
logWarning(e.info());
}
#ifndef READLINE
el_hist_size = 1000;
#endif
@ -266,6 +268,7 @@ void NixRepl::mainLoop()
// ctrl-D should exit the debugger.
state->debugStop = false;
state->debugQuit = true;
logger->cout("");
break;
}
try {
@ -380,6 +383,10 @@ StringSet NixRepl::completePrefix(const std::string & prefix)
i++;
}
} else {
/* Temporarily disable the debugger, to avoid re-entering readline. */
auto debug_repl = state->debugRepl;
state->debugRepl = nullptr;
Finally restoreDebug([&]() { state->debugRepl = debug_repl; });
try {
/* This is an expression that should evaluate to an
attribute set. Evaluate it to get the names of the
@ -390,7 +397,7 @@ StringSet NixRepl::completePrefix(const std::string & prefix)
Expr * e = parseString(expr);
Value v;
e->eval(*state, *env, v);
state->forceAttrs(v, noPos);
state->forceAttrs(v, noPos, "while evaluating an attrset for the purpose of completion (this error should not be displayed; file an issue?)");
for (auto & i : *v.attrs) {
std::string_view name = state->symbols[i.name];
@ -580,15 +587,17 @@ bool NixRepl::processLine(std::string line)
Value v;
evalString(arg, v);
const auto [file, line] = [&] () -> std::pair<std::string, uint32_t> {
const auto [path, line] = [&] () -> std::pair<Path, uint32_t> {
if (v.type() == nPath || v.type() == nString) {
PathSet context;
auto filename = state->coerceToString(noPos, v, context).toOwned();
state->symbols.create(filename);
return {filename, 0};
auto path = state->coerceToPath(noPos, v, context, "while evaluating the filename to edit");
return {path, 0};
} else if (v.isLambda()) {
auto pos = state->positions[v.lambda.fun->pos];
return {pos.file, pos.line};
if (auto path = std::get_if<Path>(&pos.origin))
return {*path, pos.line};
else
throw Error("'%s' cannot be shown in an editor", pos);
} else {
// assume it's a derivation
return findPackageFilename(*state, v, arg);
@ -596,7 +605,7 @@ bool NixRepl::processLine(std::string line)
}();
// Open in EDITOR
auto args = editorFor(file, line);
auto args = editorFor(path, line);
auto editor = args.front();
args.pop_front();
@ -632,7 +641,12 @@ bool NixRepl::processLine(std::string line)
Path drvPathRaw = state->store->printStorePath(drvPath);
if (command == ":b" || command == ":bl") {
state->store->buildPaths({DerivedPath::Built{drvPath}});
state->store->buildPaths({
DerivedPath::Built {
.drvPath = drvPath,
.outputs = OutputsSpec::All { },
},
});
auto drv = state->store->readDerivation(drvPath);
logger->cout("\nThis derivation produced the following outputs:");
for (auto & [outputName, outputPath] : state->store->queryDerivationOutputMap(drvPath)) {
@ -778,7 +792,7 @@ void NixRepl::loadFlake(const std::string & flakeRefS)
flake::LockFlags {
.updateLockFile = false,
.useRegistries = !evalSettings.pureEval,
.allowMutable = !evalSettings.pureEval,
.allowUnlocked = !evalSettings.pureEval,
}),
v);
addAttrsToScope(v);
@ -825,7 +839,7 @@ void NixRepl::loadFiles()
void NixRepl::addAttrsToScope(Value & attrs)
{
state->forceAttrs(attrs, [&]() { return attrs.determinePos(noPos); });
state->forceAttrs(attrs, [&]() { return attrs.determinePos(noPos); }, "while evaluating an attribute set to be merged in the global scope");
if (displ + attrs.attrs->size() >= envSize)
throw Error("environment full; cannot add more variables");
@ -930,7 +944,7 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int m
Bindings::iterator i = v.attrs->find(state->sDrvPath);
PathSet context;
if (i != v.attrs->end())
str << state->store->printStorePath(state->coerceToStorePath(i->pos, *i->value, context));
str << state->store->printStorePath(state->coerceToStorePath(i->pos, *i->value, context, "while evaluating the drvPath of a derivation"));
else
str << "???";
str << "»";
@ -1046,7 +1060,7 @@ struct CmdRepl : InstallablesCommand
evalSettings.pureEval = false;
}
void prepare()
void prepare() override
{
if (!settings.isExperimentalFeatureEnabled(Xp::ReplFlake) && !(file) && this->_installables.size() >= 1) {
warn("future versions of Nix will require using `--file` to load a file");

View file

@ -118,7 +118,7 @@ std::pair<std::string, uint32_t> findPackageFilename(EvalState & state, Value &
// FIXME: is it possible to extract the Pos object instead of doing this
// toString + parsing?
auto pos = state.forceString(*v2);
auto pos = state.forceString(*v2, noPos, "while evaluating the 'meta.position' attribute of a derivation");
auto colon = pos.rfind(':');
if (colon == std::string::npos)

View file

@ -300,7 +300,7 @@ struct AttrDb
NixStringContext context;
if (!queryAttribute.isNull(3))
for (auto & s : tokenizeString<std::vector<std::string>>(queryAttribute.getStr(3), ";"))
context.push_back(decodeContext(cfg, s));
context.push_back(NixStringContextElem::parse(cfg, s));
return {{rowId, string_t{queryAttribute.getStr(2), context}}};
}
case AttrType::Bool:
@ -385,7 +385,7 @@ Value & AttrCursor::getValue()
if (!_value) {
if (parent) {
auto & vParent = parent->first->getValue();
root->state.forceAttrs(vParent, noPos);
root->state.forceAttrs(vParent, noPos, "while searching for an attribute");
auto attr = vParent.attrs->get(parent->second);
if (!attr)
throw Error("attribute '%s' is unexpectedly missing", getAttrPathStr());
@ -571,14 +571,14 @@ std::string AttrCursor::getString()
debug("using cached string attribute '%s'", getAttrPathStr());
return s->first;
} else
root->state.debugThrowLastTrace(TypeError("'%s' is not a string", getAttrPathStr()));
root->state.error("'%s' is not a string", getAttrPathStr()).debugThrow<TypeError>();
}
}
auto & v = forceValue();
if (v.type() != nString && v.type() != nPath)
root->state.debugThrowLastTrace(TypeError("'%s' is not a string but %s", getAttrPathStr(), showType(v.type())));
root->state.error("'%s' is not a string but %s", getAttrPathStr()).debugThrow<TypeError>();
return v.type() == nString ? v.string.s : v.path;
}
@ -592,7 +592,18 @@ string_t AttrCursor::getStringWithContext()
if (auto s = std::get_if<string_t>(&cachedValue->second)) {
bool valid = true;
for (auto & c : s->second) {
if (!root->state.store->isValidPath(c.first)) {
const StorePath & path = std::visit(overloaded {
[&](const NixStringContextElem::DrvDeep & d) -> const StorePath & {
return d.drvPath;
},
[&](const NixStringContextElem::Built & b) -> const StorePath & {
return b.drvPath;
},
[&](const NixStringContextElem::Opaque & o) -> const StorePath & {
return o.path;
},
}, c.raw());
if (!root->state.store->isValidPath(path)) {
valid = false;
break;
}
@ -602,7 +613,7 @@ string_t AttrCursor::getStringWithContext()
return *s;
}
} else
root->state.debugThrowLastTrace(TypeError("'%s' is not a string", getAttrPathStr()));
root->state.error("'%s' is not a string", getAttrPathStr()).debugThrow<TypeError>();
}
}
@ -613,7 +624,7 @@ string_t AttrCursor::getStringWithContext()
else if (v.type() == nPath)
return {v.path, {}};
else
root->state.debugThrowLastTrace(TypeError("'%s' is not a string but %s", getAttrPathStr(), showType(v.type())));
root->state.error("'%s' is not a string but %s", getAttrPathStr()).debugThrow<TypeError>();
}
bool AttrCursor::getBool()
@ -626,14 +637,14 @@ bool AttrCursor::getBool()
debug("using cached Boolean attribute '%s'", getAttrPathStr());
return *b;
} else
root->state.debugThrowLastTrace(TypeError("'%s' is not a Boolean", getAttrPathStr()));
root->state.error("'%s' is not a Boolean", getAttrPathStr()).debugThrow<TypeError>();
}
}
auto & v = forceValue();
if (v.type() != nBool)
root->state.debugThrowLastTrace(TypeError("'%s' is not a Boolean", getAttrPathStr()));
root->state.error("'%s' is not a Boolean", getAttrPathStr()).debugThrow<TypeError>();
return v.boolean;
}
@ -645,17 +656,17 @@ NixInt AttrCursor::getInt()
cachedValue = root->db->getAttr(getKey());
if (cachedValue && !std::get_if<placeholder_t>(&cachedValue->second)) {
if (auto i = std::get_if<int_t>(&cachedValue->second)) {
debug("using cached Integer attribute '%s'", getAttrPathStr());
debug("using cached integer attribute '%s'", getAttrPathStr());
return i->x;
} else
throw TypeError("'%s' is not an Integer", getAttrPathStr());
throw TypeError("'%s' is not an integer", getAttrPathStr());
}
}
auto & v = forceValue();
if (v.type() != nInt)
throw TypeError("'%s' is not an Integer", getAttrPathStr());
throw TypeError("'%s' is not an integer", getAttrPathStr());
return v.integer;
}
@ -685,7 +696,7 @@ std::vector<std::string> AttrCursor::getListOfStrings()
std::vector<std::string> res;
for (auto & elem : v.listItems())
res.push_back(std::string(root->state.forceStringNoCtx(*elem)));
res.push_back(std::string(root->state.forceStringNoCtx(*elem, noPos, "while evaluating an attribute for caching")));
if (root->db)
cachedValue = {root->db->setListOfStrings(getKey(), res), res};
@ -703,14 +714,14 @@ std::vector<Symbol> AttrCursor::getAttrs()
debug("using cached attrset attribute '%s'", getAttrPathStr());
return *attrs;
} else
root->state.debugThrowLastTrace(TypeError("'%s' is not an attribute set", getAttrPathStr()));
root->state.error("'%s' is not an attribute set", getAttrPathStr()).debugThrow<TypeError>();
}
}
auto & v = forceValue();
if (v.type() != nAttrs)
root->state.debugThrowLastTrace(TypeError("'%s' is not an attribute set", getAttrPathStr()));
root->state.error("'%s' is not an attribute set", getAttrPathStr()).debugThrow<TypeError>();
std::vector<Symbol> attrs;
for (auto & attr : *getValue().attrs)

View file

@ -103,33 +103,36 @@ void EvalState::forceValue(Value & v, Callable getPos)
else if (v.isApp())
callFunction(*v.app.left, *v.app.right, v, noPos);
else if (v.isBlackhole())
throwEvalError(getPos(), "infinite recursion encountered");
error("infinite recursion encountered").atPos(getPos()).template debugThrow<EvalError>();
}
[[gnu::always_inline]]
inline void EvalState::forceAttrs(Value & v, const PosIdx pos)
inline void EvalState::forceAttrs(Value & v, const PosIdx pos, std::string_view errorCtx)
{
forceAttrs(v, [&]() { return pos; });
forceAttrs(v, [&]() { return pos; }, errorCtx);
}
template <typename Callable>
[[gnu::always_inline]]
inline void EvalState::forceAttrs(Value & v, Callable getPos)
inline void EvalState::forceAttrs(Value & v, Callable getPos, std::string_view errorCtx)
{
forceValue(v, getPos);
if (v.type() != nAttrs)
throwTypeError(getPos(), "value is %1% while a set was expected", v);
forceValue(v, noPos);
if (v.type() != nAttrs) {
PosIdx pos = getPos();
error("value is %1% while a set was expected", showType(v)).withTrace(pos, errorCtx).debugThrow<TypeError>();
}
}
[[gnu::always_inline]]
inline void EvalState::forceList(Value & v, const PosIdx pos)
inline void EvalState::forceList(Value & v, const PosIdx pos, std::string_view errorCtx)
{
forceValue(v, pos);
if (!v.isList())
throwTypeError(pos, "value is %1% while a list was expected", v);
forceValue(v, noPos);
if (!v.isList()) {
error("value is %1% while a list was expected", showType(v)).withTrace(pos, errorCtx).debugThrow<TypeError>();
}
}

File diff suppressed because it is too large

View file

@ -60,7 +60,6 @@ void copyContext(const Value & v, PathSet & context);
typedef std::map<Path, StorePath> SrcToStore;
std::ostream & printValue(const EvalState & state, std::ostream & str, const Value & v);
std::string printValue(const EvalState & state, const Value & v);
std::ostream & operator << (std::ostream & os, const ValueType t);
@ -78,7 +77,7 @@ struct RegexCache;
std::shared_ptr<RegexCache> makeRegexCache();
struct DebugTrace {
std::optional<ErrPos> pos;
std::shared_ptr<AbstractPos> pos;
const Expr & expr;
const Env & env;
hintformat hint;
@ -87,6 +86,43 @@ struct DebugTrace {
void debugError(Error * e, Env & env, Expr & expr);
class ErrorBuilder
{
private:
EvalState & state;
ErrorInfo info;
ErrorBuilder(EvalState & s, ErrorInfo && i): state(s), info(i) { }
public:
template<typename... Args>
[[nodiscard, gnu::noinline]]
static ErrorBuilder * create(EvalState & s, const Args & ... args)
{
return new ErrorBuilder(s, ErrorInfo { .msg = hintfmt(args...) });
}
[[nodiscard, gnu::noinline]]
ErrorBuilder & atPos(PosIdx pos);
[[nodiscard, gnu::noinline]]
ErrorBuilder & withTrace(PosIdx pos, const std::string_view text);
[[nodiscard, gnu::noinline]]
ErrorBuilder & withFrameTrace(PosIdx pos, const std::string_view text);
[[nodiscard, gnu::noinline]]
ErrorBuilder & withSuggestions(Suggestions & s);
[[nodiscard, gnu::noinline]]
ErrorBuilder & withFrame(const Env & e, const Expr & ex);
template<class ErrorType>
[[gnu::noinline, gnu::noreturn]]
void debugThrow();
};
class EvalState : public std::enable_shared_from_this<EvalState>
{
public:
@ -146,29 +182,38 @@ public:
template<class E>
[[gnu::noinline, gnu::noreturn]]
void debugThrow(E && error, const Env & env, const Expr & expr)
void debugThrowLastTrace(E && error)
{
if (debugRepl)
runDebugRepl(&error, env, expr);
throw std::move(error);
debugThrow(error, nullptr, nullptr);
}
template<class E>
[[gnu::noinline, gnu::noreturn]]
void debugThrowLastTrace(E && e)
void debugThrow(E && error, const Env * env, const Expr * expr)
{
// Call this in the situation where Expr and Env are inaccessible.
// The debugger will start in the last context that's in the
// DebugTrace stack.
if (debugRepl && !debugTraces.empty()) {
const DebugTrace & last = debugTraces.front();
runDebugRepl(&e, last.env, last.expr);
if (debugRepl && ((env && expr) || !debugTraces.empty())) {
if (!env || !expr) {
const DebugTrace & last = debugTraces.front();
env = &last.env;
expr = &last.expr;
}
runDebugRepl(&error, *env, *expr);
}
throw std::move(e);
throw std::move(error);
}
// This is dangerous, but it is consistent with the idea that error creation and
// throwing should not allocate on the stack of hot functions.
// As long as errors are immediately thrown, it works.
ErrorBuilder * errorBuilder;
template<typename... Args>
[[nodiscard, gnu::noinline]]
ErrorBuilder & error(const Args & ... args) {
errorBuilder = ErrorBuilder::create(*this, args...);
return *errorBuilder;
}
private:
SrcToStore srcToStore;
@ -283,8 +328,8 @@ public:
/* Evaluation the expression, then verify that it has the expected
type. */
inline bool evalBool(Env & env, Expr * e);
inline bool evalBool(Env & env, Expr * e, const PosIdx pos);
inline void evalAttrs(Env & env, Expr * e, Value & v);
inline bool evalBool(Env & env, Expr * e, const PosIdx pos, std::string_view errorCtx);
inline void evalAttrs(Env & env, Expr * e, Value & v, const PosIdx pos, std::string_view errorCtx);
/* If `v' is a thunk, enter it and overwrite `v' with the result
of the evaluation of the thunk. If `v' is a delayed function
@ -300,89 +345,25 @@ public:
void forceValueDeep(Value & v);
/* Force `v', and then verify that it has the expected type. */
NixInt forceInt(Value & v, const PosIdx pos);
NixFloat forceFloat(Value & v, const PosIdx pos);
bool forceBool(Value & v, const PosIdx pos);
NixInt forceInt(Value & v, const PosIdx pos, std::string_view errorCtx);
NixFloat forceFloat(Value & v, const PosIdx pos, std::string_view errorCtx);
bool forceBool(Value & v, const PosIdx pos, std::string_view errorCtx);
void forceAttrs(Value & v, const PosIdx pos);
void forceAttrs(Value & v, const PosIdx pos, std::string_view errorCtx);
template <typename Callable>
inline void forceAttrs(Value & v, Callable getPos);
inline void forceAttrs(Value & v, Callable getPos, std::string_view errorCtx);
inline void forceList(Value & v, const PosIdx pos);
void forceFunction(Value & v, const PosIdx pos); // either lambda or primop
std::string_view forceString(Value & v, const PosIdx pos = noPos);
std::string_view forceString(Value & v, PathSet & context, const PosIdx pos = noPos);
std::string_view forceStringNoCtx(Value & v, const PosIdx pos = noPos);
[[gnu::noinline, gnu::noreturn]]
void throwEvalError(const PosIdx pos, const char * s);
[[gnu::noinline, gnu::noreturn]]
void throwEvalError(const PosIdx pos, const char * s,
Env & env, Expr & expr);
[[gnu::noinline, gnu::noreturn]]
void throwEvalError(const char * s, const std::string & s2);
[[gnu::noinline, gnu::noreturn]]
void throwEvalError(const PosIdx pos, const char * s, const std::string & s2);
[[gnu::noinline, gnu::noreturn]]
void throwEvalError(const char * s, const std::string & s2,
Env & env, Expr & expr);
[[gnu::noinline, gnu::noreturn]]
void throwEvalError(const PosIdx pos, const char * s, const std::string & s2,
Env & env, Expr & expr);
[[gnu::noinline, gnu::noreturn]]
void throwEvalError(const char * s, const std::string & s2, const std::string & s3,
Env & env, Expr & expr);
[[gnu::noinline, gnu::noreturn]]
void throwEvalError(const PosIdx pos, const char * s, const std::string & s2, const std::string & s3,
Env & env, Expr & expr);
[[gnu::noinline, gnu::noreturn]]
void throwEvalError(const PosIdx pos, const char * s, const std::string & s2, const std::string & s3);
[[gnu::noinline, gnu::noreturn]]
void throwEvalError(const char * s, const std::string & s2, const std::string & s3);
[[gnu::noinline, gnu::noreturn]]
void throwEvalError(const PosIdx pos, const Suggestions & suggestions, const char * s, const std::string & s2,
Env & env, Expr & expr);
[[gnu::noinline, gnu::noreturn]]
void throwEvalError(const PosIdx p1, const char * s, const Symbol sym, const PosIdx p2,
Env & env, Expr & expr);
[[gnu::noinline, gnu::noreturn]]
void throwTypeError(const PosIdx pos, const char * s, const Value & v);
[[gnu::noinline, gnu::noreturn]]
void throwTypeError(const PosIdx pos, const char * s, const Value & v,
Env & env, Expr & expr);
[[gnu::noinline, gnu::noreturn]]
void throwTypeError(const PosIdx pos, const char * s);
[[gnu::noinline, gnu::noreturn]]
void throwTypeError(const PosIdx pos, const char * s,
Env & env, Expr & expr);
[[gnu::noinline, gnu::noreturn]]
void throwTypeError(const PosIdx pos, const char * s, const ExprLambda & fun, const Symbol s2,
Env & env, Expr & expr);
[[gnu::noinline, gnu::noreturn]]
void throwTypeError(const PosIdx pos, const Suggestions & suggestions, const char * s, const ExprLambda & fun, const Symbol s2,
Env & env, Expr & expr);
[[gnu::noinline, gnu::noreturn]]
void throwTypeError(const char * s, const Value & v,
Env & env, Expr & expr);
[[gnu::noinline, gnu::noreturn]]
void throwAssertionError(const PosIdx pos, const char * s, const std::string & s1,
Env & env, Expr & expr);
[[gnu::noinline, gnu::noreturn]]
void throwUndefinedVarError(const PosIdx pos, const char * s, const std::string & s1,
Env & env, Expr & expr);
[[gnu::noinline, gnu::noreturn]]
void throwMissingArgumentError(const PosIdx pos, const char * s, const std::string & s1,
Env & env, Expr & expr);
inline void forceList(Value & v, const PosIdx pos, std::string_view errorCtx);
void forceFunction(Value & v, const PosIdx pos, std::string_view errorCtx); // either lambda or primop
std::string_view forceString(Value & v, const PosIdx pos, std::string_view errorCtx);
std::string_view forceString(Value & v, PathSet & context, const PosIdx pos, std::string_view errorCtx);
std::string_view forceStringNoCtx(Value & v, const PosIdx pos, std::string_view errorCtx);
[[gnu::noinline]]
void addErrorTrace(Error & e, const char * s, const std::string & s2) const;
[[gnu::noinline]]
void addErrorTrace(Error & e, const PosIdx pos, const char * s, const std::string & s2) const;
void addErrorTrace(Error & e, const PosIdx pos, const char * s, const std::string & s2, bool frame = false) const;
public:
/* Return true iff the value `v' denotes a derivation (i.e. a
@ -397,18 +378,19 @@ public:
booleans and lists to a string. If `copyToStore' is set,
referenced paths are copied to the Nix store as a side effect. */
BackedStringView coerceToString(const PosIdx pos, Value & v, PathSet & context,
std::string_view errorCtx,
bool coerceMore = false, bool copyToStore = true,
bool canonicalizePath = true);
std::string copyPathToStore(PathSet & context, const Path & path);
StorePath copyPathToStore(PathSet & context, const Path & path);
/* Path coercion. Converts strings, paths and derivations to a
path. The result is guaranteed to be a canonicalised, absolute
path. Nothing is copied to the store. */
Path coerceToPath(const PosIdx pos, Value & v, PathSet & context);
Path coerceToPath(const PosIdx pos, Value & v, PathSet & context, std::string_view errorCtx);
/* Like coerceToPath, but the result must be a store path. */
StorePath coerceToStorePath(const PosIdx pos, Value & v, PathSet & context);
StorePath coerceToStorePath(const PosIdx pos, Value & v, PathSet & context, std::string_view errorCtx);
public:
@ -457,14 +439,18 @@ private:
friend struct ExprAttrs;
friend struct ExprLet;
Expr * parse(char * text, size_t length, FileOrigin origin, const PathView path,
const PathView basePath, std::shared_ptr<StaticEnv> & staticEnv);
Expr * parse(
char * text,
size_t length,
Pos::Origin origin,
Path basePath,
std::shared_ptr<StaticEnv> & staticEnv);
public:
/* Do a deep equality test between two values. That is, list
elements and attributes are compared recursively. */
bool eqValues(Value & v1, Value & v2);
bool eqValues(Value & v1, Value & v2, const PosIdx pos, std::string_view errorCtx);
bool isFunctor(Value & fun);
@ -499,7 +485,7 @@ public:
void mkThunk_(Value & v, Expr * expr);
void mkPos(Value & v, PosIdx pos);
void concatLists(Value & v, size_t nrLists, Value * * lists, const PosIdx pos);
void concatLists(Value & v, size_t nrLists, Value * * lists, const PosIdx pos, std::string_view errorCtx);
/* Print statistics. */
void printStats();
@ -568,10 +554,6 @@ struct DebugTraceStacker {
std::string_view showType(ValueType type);
std::string showType(const Value & v);
/* Decode a context string !<name>!<path> into a pair <path,
name>. */
NixStringContextElem decodeContext(const Store & store, std::string_view s);
/* If `path' refers to a directory, then append "/default.nix". */
Path resolveExprPath(Path path);
@ -590,6 +572,10 @@ struct EvalSettings : Config
static Strings getDefaultNixPath();
static bool isPseudoUrl(std::string_view s);
static std::string resolvePseudoUrl(std::string_view url);
Setting<bool> enableNativeCode{this, false, "allow-unsafe-native-code-during-evaluation",
"Whether builtin functions that allow executing native code should be enabled."};
@ -662,6 +648,13 @@ extern EvalSettings evalSettings;
static const std::string corepkgsPrefix{"/__corepkgs__/"};
template<class ErrorType>
void ErrorBuilder::debugThrow()
{
// NOTE: We always use the -LastTrace version as we push the new trace in withFrame()
state.debugThrowLastTrace(ErrorType(info));
}
}
#include "eval-inline.hh"

View file

@ -12,13 +12,13 @@
, executable ? false
, unpack ? false
, name ? baseNameOf (toString url)
, impure ? false
}:
derivation {
derivation ({
builder = "builtin:fetchurl";
# New-style output content requirements.
inherit outputHashAlgo outputHash;
outputHashMode = if unpack || executable then "recursive" else "flat";
inherit name url executable unpack;
@ -38,4 +38,6 @@ derivation {
# To make "nix-prefetch-url" work.
urls = [ url ];
}
} // (if impure
then { __impure = true; }
else { inherit outputHashAlgo outputHash; }))

View file

@ -43,7 +43,7 @@ let
outputs = flake.outputs (inputs // { self = result; });
result = outputs // sourceInfo // { inherit inputs; inherit outputs; inherit sourceInfo; };
result = outputs // sourceInfo // { inherit inputs; inherit outputs; inherit sourceInfo; _type = "flake"; };
in
if node.flake or true then
assert builtins.isFunction flake.outputs;

View file

@ -56,7 +56,7 @@ void ConfigFile::apply()
auto tlname = get(trustedList, name);
if (auto saved = tlname ? get(*tlname, valueS) : nullptr) {
trusted = *saved;
warn("Using saved setting for '%s = %s' from ~/.local/share/nix/trusted-settings.json.", name,valueS);
printInfo("Using saved setting for '%s = %s' from ~/.local/share/nix/trusted-settings.json.", name, valueS);
} else {
// FIXME: filter ANSI escapes, newlines, \r, etc.
if (std::tolower(logger->ask(fmt("do you want to allow configuration setting '%s' to be set to '" ANSI_RED "%s" ANSI_NORMAL "' (y/N)?", name, valueS)).value_or('n')) == 'y') {
@ -68,7 +68,7 @@ void ConfigFile::apply()
}
}
if (!trusted) {
warn("ignoring untrusted flake configuration setting '%s'", name);
warn("ignoring untrusted flake configuration setting '%s'.\nPass '%s' to trust it", name, "--accept-flake-config");
continue;
}
}
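
Editor's note: the ConfigFile::apply() hunk above consults previously saved decisions from ~/.local/share/nix/trusted-settings.json before prompting. The sketch below models that lookup, assuming a {setting name -> {value -> trusted}} JSON layout as suggested by the code; the path handling and error handling are illustrative, not the real implementation.

    #include <fstream>
    #include <map>
    #include <optional>
    #include <string>
    #include <nlohmann/json.hpp>

    // Assumed on-disk shape: { "<setting>": { "<value>": true|false } }
    using TrustedList = std::map<std::string, std::map<std::string, bool>>;

    TrustedList readTrustedList(const std::string & path)
    {
        std::ifstream in(path);
        if (!in) return {};            // no file yet: no saved decisions
        return nlohmann::json::parse(in).get<TrustedList>();
    }

    // Returns the saved decision for (name, value), if any.
    std::optional<bool> lookupTrust(const TrustedList & list,
        const std::string & name, const std::string & value)
    {
        auto i = list.find(name);
        if (i == list.end()) return std::nullopt;
        auto j = i->second.find(value);
        if (j == i->second.end()) return std::nullopt;
        return j->second;
    }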

View file

@ -143,7 +143,7 @@ static FlakeInput parseFlakeInput(EvalState & state,
} catch (Error & e) {
e.addTrace(
state.positions[attr.pos],
hintfmt("in flake attribute '%s'", state.symbols[attr.name]));
hintfmt("while evaluating flake attribute '%s'", state.symbols[attr.name]));
throw;
}
}
@ -152,7 +152,7 @@ static FlakeInput parseFlakeInput(EvalState & state,
try {
input.ref = FlakeRef::fromAttrs(attrs);
} catch (Error & e) {
e.addTrace(state.positions[pos], hintfmt("in flake input"));
e.addTrace(state.positions[pos], hintfmt("while evaluating flake input"));
throw;
}
else {
@ -220,7 +220,7 @@ static Flake getFlake(
Value vInfo;
state.evalFile(flakeFile, vInfo, true); // FIXME: symlink attack
expectType(state, nAttrs, vInfo, state.positions.add({flakeFile, foFile}, 0, 0));
expectType(state, nAttrs, vInfo, state.positions.add({flakeFile}, 1, 1));
if (auto description = vInfo.attrs->get(state.sDescription)) {
expectType(state, nString, *description->value, description->pos);
@ -259,28 +259,28 @@ static Flake getFlake(
if (setting.value->type() == nString)
flake.config.settings.emplace(
state.symbols[setting.name],
std::string(state.forceStringNoCtx(*setting.value, setting.pos)));
std::string(state.forceStringNoCtx(*setting.value, setting.pos, "")));
else if (setting.value->type() == nPath) {
PathSet emptyContext = {};
flake.config.settings.emplace(
state.symbols[setting.name],
state.coerceToString(setting.pos, *setting.value, emptyContext, false, true, true) .toOwned());
state.coerceToString(setting.pos, *setting.value, emptyContext, "", false, true, true) .toOwned());
}
else if (setting.value->type() == nInt)
flake.config.settings.emplace(
state.symbols[setting.name],
state.forceInt(*setting.value, setting.pos));
state.forceInt(*setting.value, setting.pos, ""));
else if (setting.value->type() == nBool)
flake.config.settings.emplace(
state.symbols[setting.name],
Explicit<bool> { state.forceBool(*setting.value, setting.pos) });
Explicit<bool> { state.forceBool(*setting.value, setting.pos, "") });
else if (setting.value->type() == nList) {
std::vector<std::string> ss;
for (auto elem : setting.value->listItems()) {
if (elem->type() != nString)
throw TypeError("list element in flake configuration setting '%s' is %s while a string is expected",
state.symbols[setting.name], showType(*setting.value));
ss.emplace_back(state.forceStringNoCtx(*elem, setting.pos));
ss.emplace_back(state.forceStringNoCtx(*elem, setting.pos, ""));
}
flake.config.settings.emplace(state.symbols[setting.name], ss);
}
@ -353,7 +353,7 @@ LockedFlake lockFlake(
std::function<void(
const FlakeInputs & flakeInputs,
std::shared_ptr<Node> node,
ref<Node> node,
const InputPath & inputPathPrefix,
std::shared_ptr<const Node> oldNode,
const InputPath & lockRootPath,
@ -362,9 +362,15 @@ LockedFlake lockFlake(
computeLocks;
computeLocks = [&](
/* The inputs of this node, either from flake.nix or
flake.lock. */
const FlakeInputs & flakeInputs,
std::shared_ptr<Node> node,
/* The node whose locks are to be updated.*/
ref<Node> node,
/* The path to this node in the lock file graph. */
const InputPath & inputPathPrefix,
/* The old node, if any, from which locks can be
copied. */
std::shared_ptr<const Node> oldNode,
const InputPath & lockRootPath,
const Path & parentPath,
@ -452,7 +458,7 @@ LockedFlake lockFlake(
/* Copy the input from the old lock since its flakeref
didn't change and there is no override from a
higher level flake. */
auto childNode = std::make_shared<LockedNode>(
auto childNode = make_ref<LockedNode>(
oldLock->lockedRef, oldLock->originalRef, oldLock->isFlake);
node->inputs.insert_or_assign(id, childNode);
@ -481,14 +487,14 @@ LockedFlake lockFlake(
.isFlake = (*lockedNode)->isFlake,
});
} else if (auto follows = std::get_if<1>(&i.second)) {
if (! trustLock) {
if (!trustLock) {
// It is possible that the flake has changed,
// so we must confirm all the follows that are in the lockfile are also in the flake.
// so we must confirm all the follows that are in the lock file are also in the flake.
auto overridePath(inputPath);
overridePath.push_back(i.first);
auto o = overrides.find(overridePath);
// If the override disappeared, we have to refetch the flake,
// since some of the inputs may not be present in the lockfile.
// since some of the inputs may not be present in the lock file.
if (o == overrides.end()) {
mustRefetch = true;
// There's no point populating the rest of the fake inputs,
@ -521,8 +527,8 @@ LockedFlake lockFlake(
this input. */
debug("creating new input '%s'", inputPathS);
if (!lockFlags.allowMutable && !input.ref->input.isLocked())
throw Error("cannot update flake input '%s' in pure mode", inputPathS);
if (!lockFlags.allowUnlocked && !input.ref->input.isLocked())
throw Error("cannot update unlocked flake input '%s' in pure mode", inputPathS);
/* Note: in case of an --override-input, we use
the *original* ref (input2.ref) for the
@ -544,7 +550,7 @@ LockedFlake lockFlake(
auto inputFlake = getFlake(state, localRef, useRegistries, flakeCache, inputPath);
auto childNode = std::make_shared<LockedNode>(inputFlake.lockedRef, ref);
auto childNode = make_ref<LockedNode>(inputFlake.lockedRef, ref);
node->inputs.insert_or_assign(id, childNode);
@ -564,15 +570,19 @@ LockedFlake lockFlake(
oldLock
? std::dynamic_pointer_cast<const Node>(oldLock)
: LockFile::read(
inputFlake.sourceInfo->actualPath + "/" + inputFlake.lockedRef.subdir + "/flake.lock").root,
oldLock ? lockRootPath : inputPath, localPath, false);
inputFlake.sourceInfo->actualPath + "/" + inputFlake.lockedRef.subdir + "/flake.lock").root.get_ptr(),
oldLock ? lockRootPath : inputPath,
localPath,
false);
}
else {
auto [sourceInfo, resolvedRef, lockedRef] = fetchOrSubstituteTree(
state, *input.ref, useRegistries, flakeCache);
node->inputs.insert_or_assign(id,
std::make_shared<LockedNode>(lockedRef, ref, false));
auto childNode = make_ref<LockedNode>(lockedRef, ref, false);
node->inputs.insert_or_assign(id, childNode);
}
}
@ -587,8 +597,13 @@ LockedFlake lockFlake(
auto parentPath = canonPath(flake.sourceInfo->actualPath + "/" + flake.lockedRef.subdir, true);
computeLocks(
flake.inputs, newLockFile.root, {},
lockFlags.recreateLockFile ? nullptr : oldLockFile.root, {}, parentPath, false);
flake.inputs,
newLockFile.root,
{},
lockFlags.recreateLockFile ? nullptr : oldLockFile.root.get_ptr(),
{},
parentPath,
false);
for (auto & i : lockFlags.inputOverrides)
if (!overridesUsed.count(i.first))
@ -611,9 +626,9 @@ LockedFlake lockFlake(
if (lockFlags.writeLockFile) {
if (auto sourcePath = topRef.input.getSourcePath()) {
if (!newLockFile.isImmutable()) {
if (auto unlockedInput = newLockFile.isUnlocked()) {
if (fetchSettings.warnDirty)
warn("will not write lock file of flake '%s' because it has a mutable input", topRef);
warn("will not write lock file of flake '%s' because it has an unlocked input ('%s')", topRef, *unlockedInput);
} else {
if (!lockFlags.updateLockFile)
throw Error("flake '%s' requires lock file changes but they're not allowed due to '--no-update-lock-file'", topRef);
@ -726,7 +741,7 @@ void callFlake(EvalState & state,
static void prim_getFlake(EvalState & state, const PosIdx pos, Value * * args, Value & v)
{
std::string flakeRefS(state.forceStringNoCtx(*args[0], pos));
std::string flakeRefS(state.forceStringNoCtx(*args[0], pos, "while evaluating the argument passed to builtins.getFlake"));
auto flakeRef = parseFlakeRef(flakeRefS, {}, true);
if (evalSettings.pureEval && !flakeRef.input.isLocked())
throw Error("cannot call 'getFlake' on unlocked flake reference '%s', at %s (use --impure to override)", flakeRefS, state.positions[pos]);
@ -737,7 +752,7 @@ static void prim_getFlake(EvalState & state, const PosIdx pos, Value * * args, V
.updateLockFile = false,
.writeLockFile = false,
.useRegistries = !evalSettings.pureEval && fetchSettings.useRegistries,
.allowMutable = !evalSettings.pureEval,
.allowUnlocked = !evalSettings.pureEval,
}),
v);
}
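
Editor's note: many hunks above swap std::shared_ptr<Node> for nix's ref<T>, a shared pointer that can never be null, falling back to .get_ptr() where a nullable handle is still needed. A stripped-down sketch of that wrapper is below; the real one lives in libutil and has more conversions, so this is not its exact interface.

    #include <memory>
    #include <stdexcept>
    #include <utility>

    // Minimal non-null shared pointer in the spirit of nix::ref<T>.
    template<typename T>
    class ref
    {
        std::shared_ptr<T> p;
    public:
        explicit ref(std::shared_ptr<T> p) : p(std::move(p))
        {
            if (!this->p) throw std::invalid_argument("ref<T> constructed from a null pointer");
        }
        T & operator *() const { return *p; }
        T * operator ->() const { return p.get(); }
        std::shared_ptr<T> get_ptr() const { return p; }  // nullable escape hatch, as in root.get_ptr()
    };

    template<typename T, typename... Args>
    ref<T> make_ref(Args && ... args)
    {
        return ref<T>(std::make_shared<T>(std::forward<Args>(args)...));
    }

Using ref<Node> for lock file nodes turns "might be null" checks into a compile-time guarantee, which is why computeLocks and LockFile::root change type above.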

View file

@ -108,11 +108,11 @@ struct LockFlags
bool applyNixConfig = false;
/* Whether mutable flake references (i.e. those without a Git
/* Whether unlocked flake references (i.e. those without a Git
revision or similar) without a corresponding lock are
allowed. Mutable flake references with a lock are always
allowed. Unlocked flake references with a lock are always
allowed. */
bool allowMutable = true;
bool allowUnlocked = true;
/* Whether to commit changes to flake.lock. */
bool commitLockFile = false;

View file

@ -238,15 +238,15 @@ std::pair<fetchers::Tree, FlakeRef> FlakeRef::fetchTree(ref<Store> store) const
return {std::move(tree), FlakeRef(std::move(lockedInput), subdir)};
}
std::tuple<FlakeRef, std::string, OutputsSpec> parseFlakeRefWithFragmentAndOutputsSpec(
std::tuple<FlakeRef, std::string, ExtendedOutputsSpec> parseFlakeRefWithFragmentAndExtendedOutputsSpec(
const std::string & url,
const std::optional<Path> & baseDir,
bool allowMissing,
bool isFlake)
{
auto [prefix, outputsSpec] = parseOutputsSpec(url);
auto [flakeRef, fragment] = parseFlakeRefWithFragment(prefix, baseDir, allowMissing, isFlake);
return {std::move(flakeRef), fragment, outputsSpec};
auto [prefix, extendedOutputsSpec] = ExtendedOutputsSpec::parse(url);
auto [flakeRef, fragment] = parseFlakeRefWithFragment(std::string { prefix }, baseDir, allowMissing, isFlake);
return {std::move(flakeRef), fragment, extendedOutputsSpec};
}
}
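
Editor's note: parseFlakeRefWithFragmentAndExtendedOutputsSpec above first splits the installable URL into a prefix and an extended outputs spec, then parses the flake reference from the prefix. A rough, string-based illustration of that split is below, assuming the '^'-separated syntax (e.g. "nixpkgs#hello^out,dev" or "^*"); it is not the real ExtendedOutputsSpec::parse.

    #include <optional>
    #include <set>
    #include <string>
    #include <string_view>
    #include <utility>
    #include <variant>

    struct AllOutputs {};                                  // "^*"
    using OutputsSelection = std::variant<AllOutputs, std::set<std::string>>;

    // Split "nixpkgs#hello^out,dev" into ("nixpkgs#hello", {"out", "dev"}).
    std::pair<std::string_view, std::optional<OutputsSelection>>
    splitOutputsSpec(std::string_view s)
    {
        auto caret = s.rfind('^');
        if (caret == std::string_view::npos) return {s, std::nullopt};
        auto prefix = s.substr(0, caret);
        auto spec = s.substr(caret + 1);
        if (spec == "*") return {prefix, OutputsSelection{AllOutputs{}}};
        std::set<std::string> outputs;
        while (!spec.empty()) {
            auto comma = spec.find(',');
            outputs.insert(std::string(spec.substr(0, comma)));
            if (comma == std::string_view::npos) break;
            spec.remove_prefix(comma + 1);
        }
        return {prefix, OutputsSelection{std::move(outputs)}};
    }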

View file

@ -3,7 +3,7 @@
#include "types.hh"
#include "hash.hh"
#include "fetchers.hh"
#include "path-with-outputs.hh"
#include "outputs-spec.hh"
#include <variant>
@ -35,7 +35,7 @@ typedef std::string FlakeId;
struct FlakeRef
{
/* fetcher-specific representation of the input, sufficient to
/* Fetcher-specific representation of the input, sufficient to
perform the fetch operation. */
fetchers::Input input;
@ -80,7 +80,7 @@ std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
std::optional<std::pair<FlakeRef, std::string>> maybeParseFlakeRefWithFragment(
const std::string & url, const std::optional<Path> & baseDir = {});
std::tuple<FlakeRef, std::string, OutputsSpec> parseFlakeRefWithFragmentAndOutputsSpec(
std::tuple<FlakeRef, std::string, ExtendedOutputsSpec> parseFlakeRefWithFragmentAndExtendedOutputsSpec(
const std::string & url,
const std::optional<Path> & baseDir = {},
bool allowMissing = false,

View file

@ -31,12 +31,12 @@ FlakeRef getFlakeRef(
}
LockedNode::LockedNode(const nlohmann::json & json)
: lockedRef(getFlakeRef(json, "locked", "info"))
: lockedRef(getFlakeRef(json, "locked", "info")) // FIXME: remove "info"
, originalRef(getFlakeRef(json, "original", nullptr))
, isFlake(json.find("flake") != json.end() ? (bool) json["flake"] : true)
{
if (!lockedRef.input.isLocked())
throw Error("lockfile contains mutable lock '%s'",
throw Error("lock file contains mutable lock '%s'",
fetchers::attrsToJSON(lockedRef.input.toAttrs()));
}
@ -49,15 +49,15 @@ std::shared_ptr<Node> LockFile::findInput(const InputPath & path)
{
auto pos = root;
if (!pos) return {};
for (auto & elem : path) {
if (auto i = get(pos->inputs, elem)) {
if (auto node = std::get_if<0>(&*i))
pos = *node;
else if (auto follows = std::get_if<1>(&*i)) {
pos = findInput(*follows);
if (!pos) return {};
if (auto p = findInput(*follows))
pos = ref(p);
else
return {};
}
} else
return {};
@ -72,7 +72,7 @@ LockFile::LockFile(const nlohmann::json & json, const Path & path)
if (version < 5 || version > 7)
throw Error("lock file '%s' has unsupported version %d", path, version);
std::unordered_map<std::string, std::shared_ptr<Node>> nodeMap;
std::map<std::string, ref<Node>> nodeMap;
std::function<void(Node & node, const nlohmann::json & jsonNode)> getInputs;
@ -93,12 +93,12 @@ LockFile::LockFile(const nlohmann::json & json, const Path & path)
auto jsonNode2 = nodes.find(inputKey);
if (jsonNode2 == nodes.end())
throw Error("lock file references missing node '%s'", inputKey);
auto input = std::make_shared<LockedNode>(*jsonNode2);
auto input = make_ref<LockedNode>(*jsonNode2);
k = nodeMap.insert_or_assign(inputKey, input).first;
getInputs(*input, *jsonNode2);
}
if (auto child = std::dynamic_pointer_cast<LockedNode>(k->second))
node.inputs.insert_or_assign(i.key(), child);
if (auto child = k->second.dynamic_pointer_cast<LockedNode>())
node.inputs.insert_or_assign(i.key(), ref(child));
else
// FIXME: replace by follows node
throw Error("lock file contains cycle to root node");
@ -122,9 +122,9 @@ nlohmann::json LockFile::toJSON() const
std::unordered_map<std::shared_ptr<const Node>, std::string> nodeKeys;
std::unordered_set<std::string> keys;
std::function<std::string(const std::string & key, std::shared_ptr<const Node> node)> dumpNode;
std::function<std::string(const std::string & key, ref<const Node> node)> dumpNode;
dumpNode = [&](std::string key, std::shared_ptr<const Node> node) -> std::string
dumpNode = [&](std::string key, ref<const Node> node) -> std::string
{
auto k = nodeKeys.find(node);
if (k != nodeKeys.end())
@ -159,10 +159,11 @@ nlohmann::json LockFile::toJSON() const
n["inputs"] = std::move(inputs);
}
if (auto lockedNode = std::dynamic_pointer_cast<const LockedNode>(node)) {
if (auto lockedNode = node.dynamic_pointer_cast<const LockedNode>()) {
n["original"] = fetchers::attrsToJSON(lockedNode->originalRef.toAttrs());
n["locked"] = fetchers::attrsToJSON(lockedNode->lockedRef.toAttrs());
if (!lockedNode->isFlake) n["flake"] = false;
if (!lockedNode->isFlake)
n["flake"] = false;
}
nodes[key] = std::move(n);
@ -201,13 +202,13 @@ void LockFile::write(const Path & path) const
writeFile(path, fmt("%s\n", *this));
}
bool LockFile::isImmutable() const
std::optional<FlakeRef> LockFile::isUnlocked() const
{
std::unordered_set<std::shared_ptr<const Node>> nodes;
std::set<ref<const Node>> nodes;
std::function<void(std::shared_ptr<const Node> node)> visit;
std::function<void(ref<const Node> node)> visit;
visit = [&](std::shared_ptr<const Node> node)
visit = [&](ref<const Node> node)
{
if (!nodes.insert(node).second) return;
for (auto & i : node->inputs)
@ -219,11 +220,12 @@ bool LockFile::isImmutable() const
for (auto & i : nodes) {
if (i == root) continue;
auto lockedNode = std::dynamic_pointer_cast<const LockedNode>(i);
if (lockedNode && !lockedNode->lockedRef.input.isLocked()) return false;
auto node = i.dynamic_pointer_cast<const LockedNode>();
if (node && !node->lockedRef.input.isLocked())
return node->lockedRef;
}
return true;
return {};
}
bool LockFile::operator ==(const LockFile & other) const
@ -247,12 +249,12 @@ InputPath parseInputPath(std::string_view s)
std::map<InputPath, Node::Edge> LockFile::getAllInputs() const
{
std::unordered_set<std::shared_ptr<Node>> done;
std::set<ref<Node>> done;
std::map<InputPath, Node::Edge> res;
std::function<void(const InputPath & prefix, std::shared_ptr<Node> node)> recurse;
std::function<void(const InputPath & prefix, ref<Node> node)> recurse;
recurse = [&](const InputPath & prefix, std::shared_ptr<Node> node)
recurse = [&](const InputPath & prefix, ref<Node> node)
{
if (!done.insert(node).second) return;
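
Editor's note: in the lock file graph above an edge is either a locked child node or a "follows" path that redirects to another input, and findInput resolves such paths by restarting from the root whenever it hits a follows edge. A self-contained sketch of that lookup with simplified types (std::shared_ptr instead of nix::ref, plain strings instead of flake references):

    #include <map>
    #include <memory>
    #include <string>
    #include <variant>
    #include <vector>

    using InputPath = std::vector<std::string>;

    struct Node
    {
        // An edge either points directly at a child node or "follows" the node
        // reached by another input path from the root.
        using Edge = std::variant<std::shared_ptr<Node>, InputPath>;
        std::map<std::string, Edge> inputs;
    };

    // Resolve an input path such as {"nixpkgs", "flake-utils"} against the graph,
    // returning nullptr when a segment is missing.
    std::shared_ptr<Node> findInput(const std::shared_ptr<Node> & root, const InputPath & path)
    {
        auto pos = root;
        for (auto & elem : path) {
            if (!pos) return nullptr;
            auto i = pos->inputs.find(elem);
            if (i == pos->inputs.end()) return nullptr;
            if (auto node = std::get_if<std::shared_ptr<Node>>(&i->second))
                pos = *node;
            else
                pos = findInput(root, std::get<InputPath>(i->second)); // follows edge: restart from the root
        }
        return pos;
    }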

View file

@ -20,7 +20,7 @@ struct LockedNode;
type LockedNode. */
struct Node : std::enable_shared_from_this<Node>
{
typedef std::variant<std::shared_ptr<LockedNode>, InputPath> Edge;
typedef std::variant<ref<LockedNode>, InputPath> Edge;
std::map<FlakeId, Edge> inputs;
@ -47,11 +47,13 @@ struct LockedNode : Node
struct LockFile
{
std::shared_ptr<Node> root = std::make_shared<Node>();
ref<Node> root = make_ref<Node>();
LockFile() {};
LockFile(const nlohmann::json & json, const Path & path);
typedef std::map<ref<const Node>, std::string> KeyMap;
nlohmann::json toJSON() const;
std::string to_string() const;
@ -60,7 +62,8 @@ struct LockFile
void write(const Path & path) const;
bool isImmutable() const;
/* Check whether this lock file has any unlocked inputs. */
std::optional<FlakeRef> isUnlocked() const;
bool operator ==(const LockFile & other) const;

View file

@ -51,7 +51,7 @@ std::string DrvInfo::queryName() const
if (name == "" && attrs) {
auto i = attrs->find(state->sName);
if (i == attrs->end()) throw TypeError("derivation name missing");
name = state->forceStringNoCtx(*i->value);
name = state->forceStringNoCtx(*i->value, noPos, "while evaluating the 'name' attribute of a derivation");
}
return name;
}
@ -61,7 +61,7 @@ std::string DrvInfo::querySystem() const
{
if (system == "" && attrs) {
auto i = attrs->find(state->sSystem);
system = i == attrs->end() ? "unknown" : state->forceStringNoCtx(*i->value, i->pos);
system = i == attrs->end() ? "unknown" : state->forceStringNoCtx(*i->value, i->pos, "while evaluating the 'system' attribute of a derivation");
}
return system;
}
@ -75,7 +75,7 @@ std::optional<StorePath> DrvInfo::queryDrvPath() const
if (i == attrs->end())
drvPath = {std::nullopt};
else
drvPath = {state->coerceToStorePath(i->pos, *i->value, context)};
drvPath = {state->coerceToStorePath(i->pos, *i->value, context, "while evaluating the 'drvPath' attribute of a derivation")};
}
return drvPath.value_or(std::nullopt);
}
@ -95,7 +95,7 @@ StorePath DrvInfo::queryOutPath() const
Bindings::iterator i = attrs->find(state->sOutPath);
PathSet context;
if (i != attrs->end())
outPath = state->coerceToStorePath(i->pos, *i->value, context);
outPath = state->coerceToStorePath(i->pos, *i->value, context, "while evaluating the output path of a derivation");
}
if (!outPath)
throw UnimplementedError("CA derivations are not yet supported");
@ -109,23 +109,23 @@ DrvInfo::Outputs DrvInfo::queryOutputs(bool withPaths, bool onlyOutputsToInstall
/* Get the outputs list. */
Bindings::iterator i;
if (attrs && (i = attrs->find(state->sOutputs)) != attrs->end()) {
state->forceList(*i->value, i->pos);
state->forceList(*i->value, i->pos, "while evaluating the 'outputs' attribute of a derivation");
/* For each output... */
for (auto elem : i->value->listItems()) {
std::string output(state->forceStringNoCtx(*elem, i->pos));
std::string output(state->forceStringNoCtx(*elem, i->pos, "while evaluating the name of an output of a derivation"));
if (withPaths) {
/* Evaluate the corresponding set. */
Bindings::iterator out = attrs->find(state->symbols.create(output));
if (out == attrs->end()) continue; // FIXME: throw error?
state->forceAttrs(*out->value, i->pos);
state->forceAttrs(*out->value, i->pos, "while evaluating an output of a derivation");
/* And evaluate its outPath attribute. */
Bindings::iterator outPath = out->value->attrs->find(state->sOutPath);
if (outPath == out->value->attrs->end()) continue; // FIXME: throw error?
PathSet context;
outputs.emplace(output, state->coerceToStorePath(outPath->pos, *outPath->value, context));
outputs.emplace(output, state->coerceToStorePath(outPath->pos, *outPath->value, context, "while evaluating an output path of a derivation"));
} else
outputs.emplace(output, std::nullopt);
}
@ -137,7 +137,7 @@ DrvInfo::Outputs DrvInfo::queryOutputs(bool withPaths, bool onlyOutputsToInstall
return outputs;
Bindings::iterator i;
if (attrs && (i = attrs->find(state->sOutputSpecified)) != attrs->end() && state->forceBool(*i->value, i->pos)) {
if (attrs && (i = attrs->find(state->sOutputSpecified)) != attrs->end() && state->forceBool(*i->value, i->pos, "while evaluating the 'outputSpecified' attribute of a derivation")) {
Outputs result;
auto out = outputs.find(queryOutputName());
if (out == outputs.end())
@ -150,7 +150,7 @@ DrvInfo::Outputs DrvInfo::queryOutputs(bool withPaths, bool onlyOutputsToInstall
/* Check for `meta.outputsToInstall` and return `outputs` reduced to that. */
const Value * outTI = queryMeta("outputsToInstall");
if (!outTI) return outputs;
const auto errMsg = Error("this derivation has bad 'meta.outputsToInstall'");
auto errMsg = Error("this derivation has bad 'meta.outputsToInstall'");
/* ^ this shows during `nix-env -i` right under the bad derivation */
if (!outTI->isList()) throw errMsg;
Outputs result;
@ -169,7 +169,7 @@ std::string DrvInfo::queryOutputName() const
{
if (outputName == "" && attrs) {
Bindings::iterator i = attrs->find(state->sOutputName);
outputName = i != attrs->end() ? state->forceStringNoCtx(*i->value) : "";
outputName = i != attrs->end() ? state->forceStringNoCtx(*i->value, noPos, "while evaluating the output name of a derivation") : "";
}
return outputName;
}
@ -181,7 +181,7 @@ Bindings * DrvInfo::getMeta()
if (!attrs) return 0;
Bindings::iterator a = attrs->find(state->sMeta);
if (a == attrs->end()) return 0;
state->forceAttrs(*a->value, a->pos);
state->forceAttrs(*a->value, a->pos, "while evaluating the 'meta' attribute of a derivation");
meta = a->value->attrs;
return meta;
}
@ -382,7 +382,7 @@ static void getDerivations(EvalState & state, Value & vIn,
`recurseForDerivations = true' attribute. */
if (i->value->type() == nAttrs) {
Bindings::iterator j = i->value->attrs->find(state.sRecurseForDerivations);
if (j != i->value->attrs->end() && state.forceBool(*j->value, j->pos))
if (j != i->value->attrs->end() && state.forceBool(*j->value, j->pos, "while evaluating the attribute `recurseForDerivations`"))
getDerivations(state, *i->value, pathPrefix2, autoArgs, drvs, done, ignoreAssertionFailures);
}
}

View file

@ -6,6 +6,7 @@ libexpr_DIR := $(d)
libexpr_SOURCES := \
$(wildcard $(d)/*.cc) \
$(wildcard $(d)/value/*.cc) \
$(wildcard $(d)/primops/*.cc) \
$(wildcard $(d)/flake/*.cc) \
$(d)/lexer-tab.cc \
@ -37,6 +38,8 @@ clean-files += $(d)/parser-tab.cc $(d)/parser-tab.hh $(d)/lexer-tab.cc $(d)/lexe
$(eval $(call install-file-in, $(d)/nix-expr.pc, $(libdir)/pkgconfig, 0644))
$(foreach i, $(wildcard src/libexpr/value/*.hh), \
$(eval $(call install-file-in, $(i), $(includedir)/nix/value, 0644)))
$(foreach i, $(wildcard src/libexpr/flake/*.hh), \
$(eval $(call install-file-in, $(i), $(includedir)/nix/flake, 0644)))

View file

@ -8,6 +8,58 @@
namespace nix {
struct PosAdapter : AbstractPos
{
Pos::Origin origin;
PosAdapter(Pos::Origin origin)
: origin(std::move(origin))
{
}
std::optional<std::string> getSource() const override
{
return std::visit(overloaded {
[](const Pos::none_tag &) -> std::optional<std::string> {
return std::nullopt;
},
[](const Pos::Stdin & s) -> std::optional<std::string> {
// Get rid of the null terminators added by the parser.
return std::string(s.source->c_str());
},
[](const Pos::String & s) -> std::optional<std::string> {
// Get rid of the null terminators added by the parser.
return std::string(s.source->c_str());
},
[](const Path & path) -> std::optional<std::string> {
try {
return readFile(path);
} catch (Error &) {
return std::nullopt;
}
}
}, origin);
}
void print(std::ostream & out) const override
{
std::visit(overloaded {
[&](const Pos::none_tag &) { out << "«none»"; },
[&](const Pos::Stdin &) { out << "«stdin»"; },
[&](const Pos::String & s) { out << "«string»"; },
[&](const Path & path) { out << path; }
}, origin);
}
};
Pos::operator std::shared_ptr<AbstractPos>() const
{
auto pos = std::make_shared<PosAdapter>(origin);
pos->line = line;
pos->column = column;
return pos;
}
/* Displaying abstract syntax trees. */
static void showString(std::ostream & str, std::string_view s)
@ -248,24 +300,10 @@ void ExprPos::show(const SymbolTable & symbols, std::ostream & str) const
std::ostream & operator << (std::ostream & str, const Pos & pos)
{
if (!pos)
if (auto pos2 = (std::shared_ptr<AbstractPos>) pos) {
str << *pos2;
} else
str << "undefined position";
else
{
auto f = format(ANSI_BOLD "%1%" ANSI_NORMAL ":%2%:%3%");
switch (pos.origin) {
case foFile:
f % (const std::string &) pos.file;
break;
case foStdin:
case foString:
f % "(string)";
break;
default:
throw Error("unhandled Pos origin!");
}
str << (f % pos.line % pos.column).str();
}
return str;
}
@ -289,7 +327,6 @@ std::string showAttrPath(const SymbolTable & symbols, const AttrPath & attrPath)
}
/* Computing levels/displacements for variables. */
void Expr::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env)
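
Editor's note: PosAdapter above dispatches on the Pos::Origin variant with std::visit and an "overloaded" helper. For reference, that helper is the standard C++17 idiom sketched below; the real definition is in libutil, and none_tag/Origin here are simplified stand-ins.

    #include <iostream>
    #include <string>
    #include <variant>

    // The classic "overloaded lambdas" visitor idiom used with std::visit.
    template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
    template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;  // deduction guide (implicit in C++20)

    // Illustrative stand-in for Pos::Origin.
    struct none_tag {};
    using Origin = std::variant<none_tag, std::string /* file path */>;

    void printOrigin(std::ostream & out, const Origin & origin)
    {
        std::visit(overloaded {
            [&](const none_tag &) { out << "«none»"; },
            [&](const std::string & path) { out << path; },
        }, origin);
    }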

View file

@ -8,7 +8,6 @@
#include "error.hh"
#include "chunked-vector.hh"
namespace nix {
@ -23,15 +22,22 @@ MakeError(MissingArgumentError, EvalError);
MakeError(RestrictedPathError, Error);
/* Position objects. */
struct Pos
{
std::string file;
FileOrigin origin;
uint32_t line;
uint32_t column;
struct none_tag { };
struct Stdin { ref<std::string> source; };
struct String { ref<std::string> source; };
typedef std::variant<none_tag, Stdin, String, Path> Origin;
Origin origin;
explicit operator bool() const { return line > 0; }
operator std::shared_ptr<AbstractPos>() const;
};
class PosIdx {
@ -47,7 +53,11 @@ public:
explicit operator bool() const { return id > 0; }
bool operator<(const PosIdx other) const { return id < other.id; }
bool operator <(const PosIdx other) const { return id < other.id; }
bool operator ==(const PosIdx other) const { return id == other.id; }
bool operator !=(const PosIdx other) const { return id != other.id; }
};
class PosTable
@ -61,13 +71,13 @@ public:
// current origins.back() can be reused or not.
mutable uint32_t idx = std::numeric_limits<uint32_t>::max();
explicit Origin(uint32_t idx): idx(idx), file{}, origin{} {}
// Used for searching in PosTable::[].
explicit Origin(uint32_t idx): idx(idx), origin{Pos::none_tag()} {}
public:
const std::string file;
const FileOrigin origin;
const Pos::Origin origin;
Origin(std::string file, FileOrigin origin): file(std::move(file)), origin(origin) {}
Origin(Pos::Origin origin): origin(origin) {}
};
struct Offset {
@ -107,7 +117,7 @@ public:
[] (const auto & a, const auto & b) { return a.idx < b.idx; });
const auto origin = *std::prev(pastOrigin);
const auto offset = offsets[idx];
return {origin.file, origin.origin, offset.line, offset.column};
return {offset.line, offset.column, origin.origin};
}
};
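
Editor's note: PosTable above interns every source position behind a 32-bit PosIdx; offsets store (line, column) and each origin records where its index range begins, so operator[] can map an index back to a full Pos. A much-simplified sketch of the scheme (no none-sentinel, linear scan instead of upper_bound, names are illustrative):

    #include <cstdint>
    #include <string>
    #include <vector>

    // Positions interned as small integers: the table stores (line, column)
    // offsets plus, per origin, the first index that origin occupies.
    class PosTable
    {
        struct Origin { uint32_t firstIdx; std::string name; };
        struct Offset { uint32_t line, column; };
        std::vector<Origin> origins;
        std::vector<Offset> offsets;
    public:
        struct Pos { std::string origin; uint32_t line, column; };

        // Precondition: addOrigin() is called before add() for that origin.
        void addOrigin(std::string name) { origins.push_back({(uint32_t) offsets.size(), std::move(name)}); }

        uint32_t add(uint32_t line, uint32_t column)
        {
            offsets.push_back({line, column});
            return (uint32_t) offsets.size() - 1;   // the PosIdx
        }

        Pos operator[](uint32_t idx) const
        {
            // Find the last origin whose first index is <= idx.
            const Origin * o = &origins.front();
            for (auto & candidate : origins)
                if (candidate.firstIdx <= idx) o = &candidate;
            return {o->name, offsets[idx].line, offsets[idx].column};
        }
    };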

View file

@ -34,11 +34,6 @@ namespace nix {
Path basePath;
PosTable::Origin origin;
std::optional<ErrorInfo> error;
ParseData(EvalState & state, PosTable::Origin origin)
: state(state)
, symbols(state.symbols)
, origin(std::move(origin))
{ };
};
struct ParserFormals {
@ -405,21 +400,21 @@ expr_op
| '-' expr_op %prec NEGATE { $$ = new ExprCall(CUR_POS, new ExprVar(data->symbols.create("__sub")), {new ExprInt(0), $2}); }
| expr_op EQ expr_op { $$ = new ExprOpEq($1, $3); }
| expr_op NEQ expr_op { $$ = new ExprOpNEq($1, $3); }
| expr_op '<' expr_op { $$ = new ExprCall(CUR_POS, new ExprVar(data->symbols.create("__lessThan")), {$1, $3}); }
| expr_op LEQ expr_op { $$ = new ExprOpNot(new ExprCall(CUR_POS, new ExprVar(data->symbols.create("__lessThan")), {$3, $1})); }
| expr_op '>' expr_op { $$ = new ExprCall(CUR_POS, new ExprVar(data->symbols.create("__lessThan")), {$3, $1}); }
| expr_op GEQ expr_op { $$ = new ExprOpNot(new ExprCall(CUR_POS, new ExprVar(data->symbols.create("__lessThan")), {$1, $3})); }
| expr_op AND expr_op { $$ = new ExprOpAnd(CUR_POS, $1, $3); }
| expr_op OR expr_op { $$ = new ExprOpOr(CUR_POS, $1, $3); }
| expr_op IMPL expr_op { $$ = new ExprOpImpl(CUR_POS, $1, $3); }
| expr_op UPDATE expr_op { $$ = new ExprOpUpdate(CUR_POS, $1, $3); }
| expr_op '<' expr_op { $$ = new ExprCall(makeCurPos(@2, data), new ExprVar(data->symbols.create("__lessThan")), {$1, $3}); }
| expr_op LEQ expr_op { $$ = new ExprOpNot(new ExprCall(makeCurPos(@2, data), new ExprVar(data->symbols.create("__lessThan")), {$3, $1})); }
| expr_op '>' expr_op { $$ = new ExprCall(makeCurPos(@2, data), new ExprVar(data->symbols.create("__lessThan")), {$3, $1}); }
| expr_op GEQ expr_op { $$ = new ExprOpNot(new ExprCall(makeCurPos(@2, data), new ExprVar(data->symbols.create("__lessThan")), {$1, $3})); }
| expr_op AND expr_op { $$ = new ExprOpAnd(makeCurPos(@2, data), $1, $3); }
| expr_op OR expr_op { $$ = new ExprOpOr(makeCurPos(@2, data), $1, $3); }
| expr_op IMPL expr_op { $$ = new ExprOpImpl(makeCurPos(@2, data), $1, $3); }
| expr_op UPDATE expr_op { $$ = new ExprOpUpdate(makeCurPos(@2, data), $1, $3); }
| expr_op '?' attrpath { $$ = new ExprOpHasAttr($1, *$3); }
| expr_op '+' expr_op
{ $$ = new ExprConcatStrings(CUR_POS, false, new std::vector<std::pair<PosIdx, Expr *>>({{makeCurPos(@1, data), $1}, {makeCurPos(@3, data), $3}})); }
| expr_op '-' expr_op { $$ = new ExprCall(CUR_POS, new ExprVar(data->symbols.create("__sub")), {$1, $3}); }
| expr_op '*' expr_op { $$ = new ExprCall(CUR_POS, new ExprVar(data->symbols.create("__mul")), {$1, $3}); }
| expr_op '/' expr_op { $$ = new ExprCall(CUR_POS, new ExprVar(data->symbols.create("__div")), {$1, $3}); }
| expr_op CONCAT expr_op { $$ = new ExprOpConcatLists(CUR_POS, $1, $3); }
{ $$ = new ExprConcatStrings(makeCurPos(@2, data), false, new std::vector<std::pair<PosIdx, Expr *> >({{makeCurPos(@1, data), $1}, {makeCurPos(@3, data), $3}})); }
| expr_op '-' expr_op { $$ = new ExprCall(makeCurPos(@2, data), new ExprVar(data->symbols.create("__sub")), {$1, $3}); }
| expr_op '*' expr_op { $$ = new ExprCall(makeCurPos(@2, data), new ExprVar(data->symbols.create("__mul")), {$1, $3}); }
| expr_op '/' expr_op { $$ = new ExprCall(makeCurPos(@2, data), new ExprVar(data->symbols.create("__div")), {$1, $3}); }
| expr_op CONCAT expr_op { $$ = new ExprOpConcatLists(makeCurPos(@2, data), $1, $3); }
| expr_app
;
@ -643,29 +638,26 @@ formal
#include "filetransfer.hh"
#include "fetchers.hh"
#include "store-api.hh"
#include "flake/flake.hh"
namespace nix {
Expr * EvalState::parse(char * text, size_t length, FileOrigin origin,
const PathView path, const PathView basePath, std::shared_ptr<StaticEnv> & staticEnv)
Expr * EvalState::parse(
char * text,
size_t length,
Pos::Origin origin,
Path basePath,
std::shared_ptr<StaticEnv> & staticEnv)
{
yyscan_t scanner;
std::string file;
switch (origin) {
case foFile:
file = path;
break;
case foStdin:
case foString:
file = text;
break;
default:
assert(false);
}
ParseData data(*this, {file, origin});
data.basePath = basePath;
ParseData data {
.state = *this,
.symbols = symbols,
.basePath = std::move(basePath),
.origin = {origin},
};
yylex_init(&scanner);
yy_scan_buffer(text, length, scanner);
@ -717,14 +709,15 @@ Expr * EvalState::parseExprFromFile(const Path & path, std::shared_ptr<StaticEnv
auto buffer = readFile(path);
// readFile should have left some extra space for terminators
buffer.append("\0\0", 2);
return parse(buffer.data(), buffer.size(), foFile, path, dirOf(path), staticEnv);
return parse(buffer.data(), buffer.size(), path, dirOf(path), staticEnv);
}
Expr * EvalState::parseExprFromString(std::string s, const Path & basePath, std::shared_ptr<StaticEnv> & staticEnv)
Expr * EvalState::parseExprFromString(std::string s_, const Path & basePath, std::shared_ptr<StaticEnv> & staticEnv)
{
s.append("\0\0", 2);
return parse(s.data(), s.size(), foString, "", basePath, staticEnv);
auto s = make_ref<std::string>(std::move(s_));
s->append("\0\0", 2);
return parse(s->data(), s->size(), Pos::String{.source = s}, basePath, staticEnv);
}
@ -740,7 +733,8 @@ Expr * EvalState::parseStdin()
auto buffer = drainFD(0);
// drainFD should have left some extra space for terminators
buffer.append("\0\0", 2);
return parse(buffer.data(), buffer.size(), foStdin, "", absPath("."), staticBaseEnv);
auto s = make_ref<std::string>(std::move(buffer));
return parse(s->data(), s->size(), Pos::Stdin{.source = s}, absPath("."), staticBaseEnv);
}
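
Editor's note: parseExprFromString and parseStdin above now move the source text into a ref<std::string> that is handed both to the scanner (with the two NUL terminators yy_scan_buffer expects) and to Pos::String/Pos::Stdin, so later diagnostics can quote the original text. A small ownership sketch under those assumptions; scanBuffer is a placeholder, not the real flex entry point.

    #include <cstddef>
    #include <memory>
    #include <string>
    #include <utility>
    #include <variant>

    // Simplified Pos::Origin: diagnostics keep a shared handle on the source text.
    struct StringOrigin { std::shared_ptr<std::string> source; };
    using Origin = std::variant<std::monostate, StringOrigin>;

    struct ParseResult { Origin origin; /* ... AST would go here ... */ };

    // Placeholder for the generated scanner: like yy_scan_buffer, it only
    // requires that the buffer ends in two NUL bytes.
    void scanBuffer(char * data, std::size_t length) { (void) data; (void) length; }

    ParseResult parseExprFromString(std::string s_)
    {
        auto s = std::make_shared<std::string>(std::move(s_));
        s->append("\0\0", 2);                     // terminators required by the scanner
        scanBuffer(s->data(), s->size());
        // The same shared string ends up in every position produced while parsing,
        // so it stays alive for as long as any error position refers to it.
        return ParseResult{StringOrigin{s}};
    }
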
@ -788,13 +782,13 @@ Path EvalState::findFile(SearchPath & searchPath, const std::string_view path, c
if (hasPrefix(path, "nix/"))
return concatStrings(corepkgsPrefix, path.substr(4));
debugThrowLastTrace(ThrownError({
debugThrow(ThrownError({
.msg = hintfmt(evalSettings.pureEval
? "cannot look up '<%s>' in pure evaluation mode (use '--impure' to override)"
: "file '%s' was not found in the Nix search path (add it using $NIX_PATH or -I)",
path),
.errPos = positions[pos]
}));
}), 0, 0);
}
@ -805,17 +799,28 @@ std::pair<bool, std::string> EvalState::resolveSearchPathElem(const SearchPathEl
std::pair<bool, std::string> res;
if (isUri(elem.second)) {
if (EvalSettings::isPseudoUrl(elem.second)) {
try {
res = { true, store->toRealPath(fetchers::downloadTarball(
store, resolveUri(elem.second), "source", false).first.storePath) };
auto storePath = fetchers::downloadTarball(
store, EvalSettings::resolvePseudoUrl(elem.second), "source", false).first.storePath;
res = { true, store->toRealPath(storePath) };
} catch (FileTransferError & e) {
logWarning({
.msg = hintfmt("Nix search path entry '%1%' cannot be downloaded, ignoring", elem.second)
});
res = { false, "" };
}
} else {
}
else if (hasPrefix(elem.second, "flake:")) {
settings.requireExperimentalFeature(Xp::Flakes);
auto flakeRef = parseFlakeRef(elem.second.substr(6), {}, true, false);
debug("fetching flake search path element '%s''", elem.second);
auto storePath = flakeRef.resolve(store).fetchTree(store).first.storePath;
res = { true, store->toRealPath(storePath) };
}
else {
auto path = absPath(elem.second);
if (pathExists(path))
res = { true, path };
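
Editor's note: resolveSearchPathElem above now distinguishes three kinds of search path entries: pseudo-URLs downloaded as tarballs, "flake:" references resolved through the flake machinery (behind the Xp::Flakes feature), and plain local paths. The dispatch is sketched below; the fetch helpers and the pseudo-URL test are placeholders, not actual Nix APIs.

    #include <optional>
    #include <string>
    #include <string_view>

    static bool hasPrefix(std::string_view s, std::string_view prefix)
    {
        return s.substr(0, prefix.size()) == prefix;
    }

    // Placeholders standing in for downloadTarball / FlakeRef::resolve().fetchTree().
    std::optional<std::string> fetchTarball(std::string_view url) { (void) url; return std::nullopt; }
    std::optional<std::string> fetchFlake(std::string_view flakeRef) { (void) flakeRef; return std::nullopt; }
    bool pathExists(const std::string & path) { (void) path; return false; }

    // Returns the resolved local path for a search path entry, or nullopt when
    // it cannot be resolved (the {false, ""} case above).
    std::optional<std::string> resolveSearchPathElem(const std::string & elem)
    {
        if (hasPrefix(elem, "channel:") || hasPrefix(elem, "https://"))  // simplified stand-in for isPseudoUrl
            return fetchTarball(elem);
        if (hasPrefix(elem, "flake:"))
            return fetchFlake(std::string_view(elem).substr(6));
        if (pathExists(elem))
            return elem;
        return std::nullopt;
    }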

File diff suppressed because it is too large

View file

@ -8,7 +8,7 @@ namespace nix {
static void prim_unsafeDiscardStringContext(EvalState & state, const PosIdx pos, Value * * args, Value & v)
{
PathSet context;
auto s = state.coerceToString(pos, *args[0], context);
auto s = state.coerceToString(pos, *args[0], context, "while evaluating the argument passed to builtins.unsafeDiscardStringContext");
v.mkString(*s);
}
@ -18,7 +18,7 @@ static RegisterPrimOp primop_unsafeDiscardStringContext("__unsafeDiscardStringCo
static void prim_hasContext(EvalState & state, const PosIdx pos, Value * * args, Value & v)
{
PathSet context;
state.forceString(*args[0], context, pos);
state.forceString(*args[0], context, pos, "while evaluating the argument passed to builtins.hasContext");
v.mkBool(!context.empty());
}
@ -34,11 +34,18 @@ static RegisterPrimOp primop_hasContext("__hasContext", 1, prim_hasContext);
static void prim_unsafeDiscardOutputDependency(EvalState & state, const PosIdx pos, Value * * args, Value & v)
{
PathSet context;
auto s = state.coerceToString(pos, *args[0], context);
auto s = state.coerceToString(pos, *args[0], context, "while evaluating the argument passed to builtins.unsafeDiscardOutputDependency");
PathSet context2;
for (auto & p : context)
context2.insert(p.at(0) == '=' ? std::string(p, 1) : p);
for (auto && p : context) {
auto c = NixStringContextElem::parse(*state.store, p);
if (auto * ptr = std::get_if<NixStringContextElem::DrvDeep>(&c)) {
context2.emplace(state.store->printStorePath(ptr->drvPath));
} else {
/* Can reuse original item */
context2.emplace(std::move(p));
}
}
v.mkString(*s, context2);
}
@ -73,35 +80,21 @@ static void prim_getContext(EvalState & state, const PosIdx pos, Value * * args,
Strings outputs;
};
PathSet context;
state.forceString(*args[0], context, pos);
auto contextInfos = std::map<Path, ContextInfo>();
state.forceString(*args[0], context, pos, "while evaluating the argument passed to builtins.getContext");
auto contextInfos = std::map<StorePath, ContextInfo>();
for (const auto & p : context) {
Path drv;
std::string output;
const Path * path = &p;
if (p.at(0) == '=') {
drv = std::string(p, 1);
path = &drv;
} else if (p.at(0) == '!') {
NixStringContextElem ctx = decodeContext(*state.store, p);
drv = state.store->printStorePath(ctx.first);
output = ctx.second;
path = &drv;
}
auto isPath = drv.empty();
auto isAllOutputs = (!drv.empty()) && output.empty();
auto iter = contextInfos.find(*path);
if (iter == contextInfos.end()) {
contextInfos.emplace(*path, ContextInfo{isPath, isAllOutputs, output.empty() ? Strings{} : Strings{std::move(output)}});
} else {
if (isPath)
iter->second.path = true;
else if (isAllOutputs)
iter->second.allOutputs = true;
else
iter->second.outputs.emplace_back(std::move(output));
}
NixStringContextElem ctx = NixStringContextElem::parse(*state.store, p);
std::visit(overloaded {
[&](NixStringContextElem::DrvDeep & d) {
contextInfos[d.drvPath].allOutputs = true;
},
[&](NixStringContextElem::Built & b) {
contextInfos[b.drvPath].outputs.emplace_back(std::move(b.output));
},
[&](NixStringContextElem::Opaque & o) {
contextInfos[o.path].path = true;
},
}, ctx.raw());
}
auto attrs = state.buildBindings(contextInfos.size());
@ -120,7 +113,7 @@ static void prim_getContext(EvalState & state, const PosIdx pos, Value * * args,
for (const auto & [i, output] : enumerate(info.second.outputs))
(outputsVal.listElems()[i] = state.allocValue())->mkString(output);
}
attrs.alloc(info.first).mkAttrs(infoAttrs);
attrs.alloc(state.store->printStorePath(info.first)).mkAttrs(infoAttrs);
}
v.mkAttrs(attrs);
@ -137,9 +130,9 @@ static RegisterPrimOp primop_getContext("__getContext", 1, prim_getContext);
static void prim_appendContext(EvalState & state, const PosIdx pos, Value * * args, Value & v)
{
PathSet context;
auto orig = state.forceString(*args[0], context, pos);
auto orig = state.forceString(*args[0], context, noPos, "while evaluating the first argument passed to builtins.appendContext");
state.forceAttrs(*args[1], pos);
state.forceAttrs(*args[1], pos, "while evaluating the second argument passed to builtins.appendContext");
auto sPath = state.symbols.create("path");
auto sAllOutputs = state.symbols.create("allOutputs");
@ -147,24 +140,24 @@ static void prim_appendContext(EvalState & state, const PosIdx pos, Value * * ar
const auto & name = state.symbols[i.name];
if (!state.store->isStorePath(name))
throw EvalError({
.msg = hintfmt("Context key '%s' is not a store path", name),
.msg = hintfmt("context key '%s' is not a store path", name),
.errPos = state.positions[i.pos]
});
if (!settings.readOnlyMode)
state.store->ensurePath(state.store->parseStorePath(name));
state.forceAttrs(*i.value, i.pos);
state.forceAttrs(*i.value, i.pos, "while evaluating the value of a string context");
auto iter = i.value->attrs->find(sPath);
if (iter != i.value->attrs->end()) {
if (state.forceBool(*iter->value, iter->pos))
if (state.forceBool(*iter->value, iter->pos, "while evaluating the `path` attribute of a string context"))
context.emplace(name);
}
iter = i.value->attrs->find(sAllOutputs);
if (iter != i.value->attrs->end()) {
if (state.forceBool(*iter->value, iter->pos)) {
if (state.forceBool(*iter->value, iter->pos, "while evaluating the `allOutputs` attribute of a string context")) {
if (!isDerivation(name)) {
throw EvalError({
.msg = hintfmt("Tried to add all-outputs context of %s, which is not a derivation, to a string", name),
.msg = hintfmt("tried to add all-outputs context of %s, which is not a derivation, to a string", name),
.errPos = state.positions[i.pos]
});
}
@ -174,15 +167,15 @@ static void prim_appendContext(EvalState & state, const PosIdx pos, Value * * ar
iter = i.value->attrs->find(state.sOutputs);
if (iter != i.value->attrs->end()) {
state.forceList(*iter->value, iter->pos);
state.forceList(*iter->value, iter->pos, "while evaluating the `outputs` attribute of a string context");
if (iter->value->listSize() && !isDerivation(name)) {
throw EvalError({
.msg = hintfmt("Tried to add derivation output context of %s, which is not a derivation, to a string", name),
.msg = hintfmt("tried to add derivation output context of %s, which is not a derivation, to a string", name),
.errPos = state.positions[i.pos]
});
}
for (auto elem : iter->value->listItems()) {
auto outputName = state.forceStringNoCtx(*elem, iter->pos);
auto outputName = state.forceStringNoCtx(*elem, iter->pos, "while evaluating an output name within a string context");
context.insert(concatStrings("!", outputName, "!", name));
}
}
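
Editor's note: prim_getContext and prim_unsafeDiscardOutputDependency above now go through NixStringContextElem::parse instead of inspecting the first character of each raw context string. The legacy encodings visible in the old code are: a plain store path (opaque), "=<drvPath>" (all outputs of a derivation, DrvDeep), and "!<output>!<drvPath>" (a single built output). A simplified, string-based parser for those encodings; the real API works with StorePath, not std::string.

    #include <stdexcept>
    #include <string>
    #include <string_view>
    #include <variant>

    // Simplified, string-based model of NixStringContextElem.
    struct Opaque  { std::string path; };                        // plain store path
    struct DrvDeep { std::string drvPath; };                     // "=<drvPath>": all outputs
    struct Built   { std::string drvPath; std::string output; }; // "!<output>!<drvPath>"
    using ContextElem = std::variant<Opaque, DrvDeep, Built>;

    ContextElem parseContextElem(std::string_view s)
    {
        if (s.empty()) throw std::invalid_argument("empty string context element");
        if (s[0] == '=')
            return DrvDeep{std::string(s.substr(1))};
        if (s[0] == '!') {
            auto sep = s.find('!', 1);
            if (sep == std::string_view::npos)
                throw std::invalid_argument("malformed context element: " + std::string(s));
            return Built{std::string(s.substr(sep + 1)), std::string(s.substr(1, sep - 1))};
        }
        return Opaque{std::string(s)};
    }

The std::visit(overloaded { ... }) call in prim_getContext above then handles each alternative exactly once, instead of re-deriving the case from the first character at every use site.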

View file

@ -7,7 +7,7 @@ namespace nix {
static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * args, Value & v)
{
state.forceAttrs(*args[0], pos);
state.forceAttrs(*args[0], pos, "while evaluating the argument passed to builtins.fetchClosure");
std::optional<std::string> fromStoreUrl;
std::optional<StorePath> fromPath;
@ -19,7 +19,8 @@ static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * arg
if (attrName == "fromPath") {
PathSet context;
fromPath = state.coerceToStorePath(attr.pos, *attr.value, context);
fromPath = state.coerceToStorePath(attr.pos, *attr.value, context,
"while evaluating the 'fromPath' attribute passed to builtins.fetchClosure");
}
else if (attrName == "toPath") {
@ -27,12 +28,14 @@ static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * arg
toCA = true;
if (attr.value->type() != nString || attr.value->string.s != std::string("")) {
PathSet context;
toPath = state.coerceToStorePath(attr.pos, *attr.value, context);
toPath = state.coerceToStorePath(attr.pos, *attr.value, context,
"while evaluating the 'toPath' attribute passed to builtins.fetchClosure");
}
}
else if (attrName == "fromStore")
fromStoreUrl = state.forceStringNoCtx(*attr.value, attr.pos);
fromStoreUrl = state.forceStringNoCtx(*attr.value, attr.pos,
"while evaluating the 'fromStore' attribute passed to builtins.fetchClosure");
else
throw Error({

View file

@ -19,23 +19,23 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a
if (args[0]->type() == nAttrs) {
state.forceAttrs(*args[0], pos);
for (auto & attr : *args[0]->attrs) {
std::string_view n(state.symbols[attr.name]);
if (n == "url")
url = state.coerceToString(attr.pos, *attr.value, context, false, false).toOwned();
url = state.coerceToString(attr.pos, *attr.value, context,
"while evaluating the `url` attribute passed to builtins.fetchMercurial",
false, false).toOwned();
else if (n == "rev") {
// Ugly: unlike fetchGit, here the "rev" attribute can
// be both a revision or a branch/tag name.
auto value = state.forceStringNoCtx(*attr.value, attr.pos);
auto value = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `rev` attribute passed to builtins.fetchMercurial");
if (std::regex_match(value.begin(), value.end(), revRegex))
rev = Hash::parseAny(value, htSHA1);
else
ref = value;
}
else if (n == "name")
name = state.forceStringNoCtx(*attr.value, attr.pos);
name = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `name` attribute passed to builtins.fetchMercurial");
else
throw EvalError({
.msg = hintfmt("unsupported argument '%s' to 'fetchMercurial'", state.symbols[attr.name]),
@ -50,7 +50,9 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a
});
} else
url = state.coerceToString(pos, *args[0], context, false, false).toOwned();
url = state.coerceToString(pos, *args[0], context,
"while evaluating the first argument passed to builtins.fetchMercurial",
false, false).toOwned();
// FIXME: git externals probably can be used to bypass the URI
// whitelist. Ah well.

Some files were not shown because too many files have changed in this diff