forked from lix-project/lix

commit 3f9f6ae127
Merge remote-tracking branch 'origin/master' into coerce-string

.github/ISSUE_TEMPLATE/feature_request.md (vendored, 2 changes)

@@ -2,7 +2,7 @@
 name: Feature request
 about: Suggest an idea for this project
 title: ''
-labels: improvement
+labels: feature
 assignees: ''
 
 ---
 

.github/ISSUE_TEMPLATE/missing_documentation.md (vendored, new file, 28 lines)

@@ -0,0 +1,28 @@
+---
+name: Missing or incorrect documentation
+about: Help us improve the reference manual
+title: ''
+labels: documentation
+assignees: ''
+
+---
+
+## Problem
+
+<!-- describe your problem -->
+
+## Checklist
+
+<!-- make sure this issue is not redundant or obsolete -->
+
+- [ ] checked [latest Nix manual] \([source])
+- [ ] checked [open documentation issues and pull requests] for possible duplicates
+
+[latest Nix manual]: https://nixos.org/manual/nix/unstable/
+[source]: https://github.com/NixOS/nix/tree/master/doc/manual/src
+[open documentation issues and pull requests]: https://github.com/NixOS/nix/labels/documentation
+
+## Proposal
+
+<!-- propose a solution -->

.github/workflows/ci.yml (vendored, 9 changes)

@@ -81,7 +81,14 @@ jobs:
       with:
         install_url: '${{needs.installer.outputs.installerURL}}'
         install_options: "--tarball-url-prefix https://${{ env.CACHIX_NAME }}.cachix.org/serve"
-    - run: nix-instantiate -E 'builtins.currentTime' --eval
+    - run: sudo apt install fish zsh
+      if: matrix.os == 'ubuntu-latest'
+    - run: brew install fish
+      if: matrix.os == 'macos-latest'
+    - run: exec bash -c "nix-instantiate -E 'builtins.currentTime' --eval"
+    - run: exec sh -c "nix-instantiate -E 'builtins.currentTime' --eval"
+    - run: exec zsh -c "nix-instantiate -E 'builtins.currentTime' --eval"
+    - run: exec fish -c "nix-instantiate -E 'builtins.currentTime' --eval"
 
   docker_push_image:
     needs: [check_secrets, tests]

.gitignore (vendored, 2 changes)

@@ -27,6 +27,8 @@ perl/Makefile.config
 # /scripts/
 /scripts/nix-profile.sh
 /scripts/nix-profile-daemon.sh
+/scripts/nix-profile.fish
+/scripts/nix-profile-daemon.fish
 
 # /src/libexpr/
 /src/libexpr/lexer-tab.cc

doc/manual/generate-manpage.nix

@@ -1,97 +1,114 @@
-{ command }:
+{ toplevel }:
 
 with builtins;
 with import ./utils.nix;
 
 let
 
-  showCommand =
-    { command, def, filename }:
-    ''
-      **Warning**: This program is **experimental** and its interface is subject to change.
-    ''
-    + "# Name\n\n"
-    + "`${command}` - ${def.description}\n\n"
-    + "# Synopsis\n\n"
-    + showSynopsis { inherit command; args = def.args; }
-    + (if def.commands or {} != {}
-      then
+  showCommand = { command, details, filename, toplevel }:
     let
-      categories = sort (x: y: x.id < y.id) (unique (map (cmd: cmd.category) (attrValues def.commands)));
-      listCommands = cmds:
-        concatStrings (map (name:
-          "* "
-          + "[`${command} ${name}`](./${appendName filename name}.md)"
-          + " - ${cmds.${name}.description}\n")
-          (attrNames cmds));
-      in
-      "where *subcommand* is one of the following:\n\n"
-      # FIXME: group by category
-      + (if length categories > 1
-        then
-          concatStrings (map
-            (cat:
-              "**${toString cat.description}:**\n\n"
-              + listCommands (filterAttrs (n: v: v.category == cat) def.commands)
-              + "\n"
-            ) categories)
-          + "\n"
-        else
-          listCommands def.commands
-          + "\n")
-      else "")
-    + (if def ? doc
-      then def.doc + "\n\n"
-      else "")
-    + (let s = showOptions def.flags; in
-      if s != ""
-      then "# Options\n\n${s}"
-      else "")
-    ;
+      result = ''
+        > **Warning** \
+        > This program is **experimental** and its interface is subject to change.
+
+        # Name
+
+        `${command}` - ${details.description}
+
+        # Synopsis
+
+        ${showSynopsis command details.args}
+
+        ${maybeSubcommands}
+
+        ${maybeDocumentation}
+
+        ${maybeOptions}
+      '';
+      showSynopsis = command: args:
+        let
+          showArgument = arg: "*${arg.label}*" + (if arg ? arity then "" else "...");
+          arguments = concatStringsSep " " (map showArgument args);
+        in ''
+         `${command}` [*option*...] ${arguments}
+        '';
+      maybeSubcommands = if details ? commands && details.commands != {}
+        then ''
+          where *subcommand* is one of the following:
+
+          ${subcommands}
+        ''
+        else "";
+      subcommands = if length categories > 1
+        then listCategories
+        else listSubcommands details.commands;
+      categories = sort (x: y: x.id < y.id) (unique (map (cmd: cmd.category) (attrValues details.commands)));
+      listCategories = concatStrings (map showCategory categories);
+      showCategory = cat: ''
+        **${toString cat.description}:**
+
+        ${listSubcommands (filterAttrs (n: v: v.category == cat) details.commands)}
+      '';
+      listSubcommands = cmds: concatStrings (attrValues (mapAttrs showSubcommand cmds));
+      showSubcommand = name: subcmd: ''
+        * [`${command} ${name}`](./${appendName filename name}.md) - ${subcmd.description}
+      '';
+      maybeDocumentation = if details ? doc then details.doc else "";
+      maybeOptions = if details.flags == {} then "" else ''
+        # Options
+
+        ${showOptions details.flags toplevel.flags}
+      '';
+      showOptions = options: commonOptions:
+        let
+          allOptions = options // commonOptions;
+          showCategory = cat: ''
+            ${if cat != "" then "**${cat}:**" else ""}
+
+            ${listOptions (filterAttrs (n: v: v.category == cat) allOptions)}
+          '';
+          listOptions = opts: concatStringsSep "\n" (attrValues (mapAttrs showOption opts));
+          showOption = name: option:
+            let
+              shortName = if option ? shortName then "/ `-${option.shortName}`" else "";
+              labels = if option ? labels then (concatStringsSep " " (map (s: "*${s}*") option.labels)) else "";
+            in trim ''
+              - `--${name}` ${shortName} ${labels}
+
+                ${option.description}
+            '';
+          categories = sort builtins.lessThan (unique (map (cmd: cmd.category) (attrValues allOptions)));
+        in concatStrings (map showCategory categories);
+    in squash result;
 
   appendName = filename: name: (if filename == "nix" then "nix3" else filename) + "-" + name;
 
-  showOptions = flags:
+  processCommand = { command, details, filename, toplevel }:
     let
-      categories = sort builtins.lessThan (unique (map (cmd: cmd.category) (attrValues flags)));
-    in
-      concatStrings (map
-        (cat:
-          (if cat != ""
-            then "**${cat}:**\n\n"
-            else "")
-          + concatStrings
-            (map (longName:
-              let
-                flag = flags.${longName};
-              in
-                " - `--${longName}`"
-                + (if flag ? shortName then " / `-${flag.shortName}`" else "")
-                + (if flag ? labels then " " + (concatStringsSep " " (map (s: "*${s}*") flag.labels)) else "")
-                + " \n"
-                + " " + flag.description + "\n\n"
-            ) (attrNames (filterAttrs (n: v: v.category == cat) flags))))
-        categories);
+      cmd = {
+        inherit command;
+        name = filename + ".md";
+        value = showCommand { inherit command details filename toplevel; };
+      };
+      subcommand = subCmd: processCommand {
+        command = command + " " + subCmd;
+        details = details.commands.${subCmd};
+        filename = appendName filename subCmd;
+        inherit toplevel;
+      };
+    in [ cmd ] ++ concatMap subcommand (attrNames details.commands or {});
 
-  showSynopsis =
-    { command, args }:
-    "`${command}` [*option*...] ${concatStringsSep " "
-      (map (arg: "*${arg.label}*" + (if arg ? arity then "" else "...")) args)}\n\n";
+  parsedToplevel = builtins.fromJSON toplevel;
+
+  manpages = processCommand {
+    command = "nix";
+    details = parsedToplevel;
+    filename = "nix";
+    toplevel = parsedToplevel;
+  };
 
-  processCommand = { command, def, filename }:
-    [ { name = filename + ".md"; value = showCommand { inherit command def filename; }; inherit command; } ]
-    ++ concatMap
-      (name: processCommand {
-        filename = appendName filename name;
-        command = command + " " + name;
-        def = def.commands.${name};
-      })
-      (attrNames def.commands or {});
+  tableOfContents = let
+    showEntry = page:
+      " - [${page.command}](command-ref/new-cli/${page.name})";
+    in concatStringsSep "\n" (map showEntry manpages) + "\n";
 
-in
-
-let
-  manpages = processCommand { filename = "nix"; command = "nix"; def = builtins.fromJSON command; };
-  summary = concatStrings (map (manpage: " - [${manpage.command}](command-ref/new-cli/${manpage.name})\n") manpages);
-in
-(listToAttrs manpages) // { "SUMMARY.md" = summary; }
+in (listToAttrs manpages) // { "SUMMARY.md" = tableOfContents; }
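
Editor's note: a minimal, hypothetical sketch of how the reworked generator above can be exercised, assuming evaluation from inside doc/manual/ and a hand-written stand-in for the JSON that the build normally reads from nix.json; the names and values below are illustrative only and are not part of this commit.

```nix
let
  # toy stand-in for the CLI dump; the real build passes `builtins.readFile` on nix.json
  toy = builtins.toJSON {
    description = "a toy command";
    args = [ ];
    flags = { };
    commands = { };
  };
  pages = import ./generate-manpage.nix { toplevel = toy; };
in builtins.attrNames pages   # => [ "SUMMARY.md" "nix.md" ]
```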

doc/manual/local.mk

@@ -50,7 +50,7 @@ $(d)/src/SUMMARY.md: $(d)/src/SUMMARY.md.in $(d)/src/command-ref/new-cli
 
 $(d)/src/command-ref/new-cli: $(d)/nix.json $(d)/generate-manpage.nix $(bindir)/nix
 	@rm -rf $@
-	$(trace-gen) $(nix-eval) --write-to $@ --expr 'import doc/manual/generate-manpage.nix { command = builtins.readFile $<; }'
+	$(trace-gen) $(nix-eval) --write-to $@ --expr 'import doc/manual/generate-manpage.nix { toplevel = builtins.readFile $<; }'
 
 $(d)/src/command-ref/conf-file.md: $(d)/conf-file.json $(d)/generate-options.nix $(d)/src/command-ref/conf-file-prefix.md $(bindir)/nix
 	@cat doc/manual/src/command-ref/conf-file-prefix.md > $@.tmp

doc/manual/src/SUMMARY.md.in

@@ -59,14 +59,6 @@
 @manpages@
 - [Files](command-ref/files.md)
   - [nix.conf](command-ref/conf-file.md)
-<!--
-- [Architecture](architecture/architecture.md)
-  - [Store](architecture/store/store.md)
-    - [Closure](architecture/store/store/closure.md)
-    - [Build system terminology](architecture/store/store/build-system-terminology.md)
-  - [Store Path](architecture/store/path.md)
-  - [File System Object](architecture/store/fso.md)
--->
 - [Glossary](glossary.md)
 - [Contributing](contributing/contributing.md)
   - [Hacking](contributing/hacking.md)

doc/manual/src/architecture/architecture.md (deleted)

@@ -1,79 +0,0 @@
-# Architecture
-
-*(This chapter is unstable and a work in progress. Incoming links may rot.)*
-
-This chapter describes how Nix works.
-It should help users understand why Nix behaves as it does, and it should help developers understand how to modify Nix and how to write similar tools.
-
-## Overview
-
-Nix consists of [hierarchical layers][layer-architecture].
-
-```
-+-----------------------------------------------------------------+
-|                               Nix                               |
-|                  [ command line interface ]-------,             |
-|                               |                   |             |
-|                           evaluates               |             |
-|                               |                manages          |
-|                               V                   |             |
-|                  [ configuration language ]       |             |
-|                               |                   |             |
-| +-----------------------------|-------------------V-----------+ |
-| | store                  evaluates to                         | |
-| |                             |                               | |
-| |             referenced by   V       builds                  | |
-| | [ build input ] ---> [ build plan ] ---> [ build result ]   | |
-| |                                                             | |
-| +-------------------------------------------------------------+ |
-+-----------------------------------------------------------------+
-```
-
-At the top is the [command line interface](../command-ref/command-ref.md), translating from invocations of Nix executables to interactions with the underlying layers.
-
-Below that is the [Nix expression language](../expressions/expression-language.md), a [purely functional][purely-functional-programming] configuration language.
-It is used to compose expressions which ultimately evaluate to self-contained *build plans*, used to derive *build results* from referenced *build inputs*.
-
-The command line and Nix language are what users interact with most.
-
-> **Note**
-> The Nix language itself does not have a notion of *packages* or *configurations*.
-> As far as we are concerned here, the inputs and results of a build plan are just data.
-
-Underlying these is the [Nix store](./store/store.md), a mechanism to keep track of build plans, data, and references between them.
-It can also execute build plans to produce new data.
-
-A build plan is a series of *build tasks*.
-Each build task has a special build input which is used as *build instructions*.
-The result of a build task can be input to another build task.
-
-```
-  +---------------------------------------------------------------------------------------+
-  | store                                                                                  |
-  |   ...............................................                                      |
-  |   :  build plan                                  :                                     |
-  |   :                                              :                                     |
-  |   :   [ build input ]-----instructions-,         :                                     |
-  |   :                                    |         :                                     |
-  |   :                                    v         :                                     |
-  |   :   [ build input ]----------->[ build task ]--instructions-,                        |
-  |   :                                              :            |                        |
-  |   :                                              :            |                        |
-  |   :                                              :            v                        |
-  |   :                                              :   [ build task ]----->[ build result ]
-  |   :   [ build input ]-----instructions-,         :            ^                        |
-  |   :                                    |         :            |                        |
-  |   :                                    v         :            |                        |
-  |   :   [ build input ]----------->[ build task ]---------------'                        |
-  |   :                                    ^         :                                     |
-  |   :                                    |         :                                     |
-  |   :   [ build input ]------------------'         :                                     |
-  |   :                                              :                                     |
-  |   :..............................................:                                     |
-  |                                                                                        |
-  +---------------------------------------------------------------------------------------+
-```
-
-[layer-architecture]: https://en.m.wikipedia.org/wiki/Multitier_architecture#Layers
-[purely-functional-programming]: https://en.m.wikipedia.org/wiki/Purely_functional_programming

doc/manual/src/architecture/store/fso.md (deleted)

@@ -1,69 +0,0 @@
-# File System Object
-
-The Nix store uses a simple file system model for the data it holds in [store objects](store.md#store-object).
-
-Every file system object is one of the following:
-
- - File: an executable flag, and arbitrary data for contents
- - Directory: mapping of names to child file system objects
- - [Symbolic link][symlink]: may point anywhere.
-
-We call a store object's outermost file system object the *root*.
-
-    data FileSystemObject
-      = File { isExecutable :: Bool, contents :: Bytes }
-      | Directory { entries :: Map FileName FileSystemObject }
-      | SymLink { target :: Path }
-
-Examples:
-
-- a directory with contents
-
-      /nix/store/<hash>-hello-2.10
-      ├── bin
-      │   └── hello
-      └── share
-          ├── info
-          │   └── hello.info
-          └── man
-              └── man1
-                  └── hello.1.gz
-
-- a directory with relative symlink and other contents
-
-      /nix/store/<hash>-go-1.16.9
-      ├── bin -> share/go/bin
-      ├── nix-support/
-      └── share/
-
-- a directory with absolute symlink
-
-      /nix/store/d3k...-nodejs
-      └── nix_node -> /nix/store/f20...-nodejs-10.24.
-
-A bare file or symlink can be a root file system object.
-Examples:
-
-      /nix/store/<hash>-hello-2.10.tar.gz
-
-      /nix/store/4j5...-pkg-config-wrapper-0.29.2-doc -> /nix/store/i99...-pkg-config-0.29.2-doc
-
-Symlinks pointing outside of their own root or to a store object without a matching reference are allowed, but might not function as intended.
-Examples:
-
-- an arbitrarily symlinked file may change or not exist at all
-
-      /nix/store/<hash>-foo
-      └── foo -> /home/foo
-
-- if a symlink to a store path was not automatically created by Nix, it may be invalid or get invalidated when the store object is deleted
-
-      /nix/store/<hash>-bar
-      └── bar -> /nix/store/abc...-foo
-
-Nix file system objects do not support [hard links][hardlink]:
-each file system object which is not the root has exactly one parent and one name.
-However, as store objects are immutable, an underlying file system can use hard links for optimization.
-
-[symlink]: https://en.m.wikipedia.org/wiki/Symbolic_link
-[hardlink]: https://en.m.wikipedia.org/wiki/Hard_link

doc/manual/src/architecture/store/path.md (deleted)

@@ -1,105 +0,0 @@
-# Store Path
-
-Nix implements [references](store.md#reference) to [store objects](store.md#store-object) as *store paths*.
-
-Store paths are pairs of
-
-- a 20-byte [digest](#digest) for identification
-- a symbolic name for people to read.
-
-Example:
-
-- digest: `b6gvzjyb2pg0kjfwrjmg1vfhh54ad73z`
-- name: `firefox-33.1`
-
-It is rendered to a file system path as the concatenation of
-
-- [store directory](#store-directory)
-- path-separator (`/`)
-- [digest](#digest) rendered in a custom variant of [base-32](https://en.m.wikipedia.org/wiki/Base32) (20 arbitrary bytes become 32 ASCII characters)
-- hyphen (`-`)
-- name
-
-Example:
-
-      /nix/store/b6gvzjyb2pg0kjfwrjmg1vfhh54ad73z-firefox-33.1
-      |--------| |------------------------------| |----------|
-    store directory            digest                 name
-
-## Store Directory
-
-Every [store](./store.md) has a store directory.
-
-If the store has a [file system representation](./store.md#files-and-processes), this directory contains the store’s [file system objects](#file-system-object), which can be addressed by [store paths](#store-path).
-
-This means a store path is not just derived from the referenced store object itself, but depends on the store the store object is in.
-
-> **Note**
-> The store directory defaults to `/nix/store`, but is in principle arbitrary.
-
-It is important which store a given store object belongs to:
-Files in the store object can contain store paths, and processes may read these paths.
-Nix can only guarantee [referential integrity](store/closure.md) if store paths do not cross store boundaries.
-
-Therefore one can only copy store objects to a different store if
-
-- the source and target stores' directories match
-
-  or
-
-- the store object in question has no references, that is, contains no store paths.
-
-One cannot copy a store object to a store with a different store directory.
-Instead, it has to be rebuilt, together with all its dependencies.
-It is in general not enough to replace the store directory string in file contents, as this may render executables unusable by invalidating their internal offsets or checksums.
-
-# Digest
-
-In a [store path](#store-path), the [digest][digest] is the output of a [cryptographic hash function][hash] of either all *inputs* involved in building the referenced store object or its actual *contents*.
-
-Store objects are therefore said to be either [input-addressed](#input-addressing) or [content-addressed](#content-addressing).
-
-> **Historical Note**
-> The 20 byte restriction is because originally digests were [SHA-1][sha-1] hashes.
-> Nix now uses [SHA-256][sha-256], and longer hashes are still reduced to 20 bytes for compatibility.
-
-[digest]: https://en.m.wiktionary.org/wiki/digest#Noun
-[hash]: https://en.m.wikipedia.org/wiki/Cryptographic_hash_function
-[sha-1]: https://en.m.wikipedia.org/wiki/SHA-1
-[sha-256]: https://en.m.wikipedia.org/wiki/SHA-256
-
-### Reference scanning
-
-When a new store object is built, Nix scans its file contents for store paths to construct its set of references.
-
-The special format of a store path's [digest](#digest) allows reliably detecting it among arbitrary data.
-Nix uses the [closure](store.md#closure) of build inputs to derive the list of allowed store paths, to avoid false positives.
-
-This way, scanning files captures run time dependencies without the user having to declare them explicitly.
-Doing it at build time and persisting references in the store object avoids repeating this time-consuming operation.
-
-> **Note**
-> In practice, it is sometimes still necessary for users to declare certain dependencies explicitly, if they are to be preserved in the build result's closure.
-> This depends on the specifics of the software to build and run.
->
-> For example, Java programs are compressed after compilation, which obfuscates any store paths they may refer to and prevents Nix from automatically detecting them.
-
-## Input Addressing
-
-Input addressing means that the digest derives from how the store object was produced, namely its build inputs and build plan.
-
-To compute the hash of a store object one needs a deterministic serialisation, i.e., a binary string representation which only changes if the store object changes.
-
-Nix has a custom serialisation format called Nix Archive (NAR)
-
-Store object references of this sort can *not* be validated from the content of the store object.
-Rather, a cryptographic signature has to be used to indicate that someone is vouching for the store object really being produced from a build plan with that digest.
-
-## Content Addressing
-
-Content addressing means that the digest derives from the store object's contents, namely its file system objects and references.
-If one knows content addressing was used, one can recalculate the reference and thus verify the store object.
-
-Content addressing is currently only used for the special cases of source files and "fixed-output derivations", where the contents of a store object are known in advance.
-Content addressing of build results is still an [experimental feature subject to some restrictions](https://github.com/tweag/rfcs/blob/cas-rfc/rfcs/0062-content-addressed-paths.md).

doc/manual/src/architecture/store/store.md (deleted)

@@ -1,151 +0,0 @@
-# Store
-
-A Nix store is a collection of *store objects* with references between them.
-It supports operations to manipulate that collection.
-
-The following concept map is a graphical outline of this chapter.
-Arrows indicate suggested reading order.
-
-```
-              ,--------------[ store ]----------------,
-              |                  |                    |
-              v                  v                    v
-      [ store object ]     [ closure ]--,        [ operations ]
-              |             |    |      |         |      |
-              v             |    |      v         v      |
-  [ files and processes ]   |    |   [ garbage collection ]
-         /         \        |    |                       |
-        v           v       |    v                       v
-[ file system object ] [ store path ]   [ derivation ]--->[ building ]
-                         |    ^     |        |               |
-                         v    |     v        v               |
-                     [ digest ]----'  [ reference scanning ]<'
-                        /  \
-                       v    v
-     [ input addressing ]  [ content addressing ]
-```
-
-## Store Object
-
-A store object can hold
-
-- arbitrary *data*
-- *references* to other store objects.
-
-Store objects can be build inputs, build results, or build tasks.
-
-Store objects are [immutable][immutable-object]: once created, they do not change until they are deleted.
-
-## Reference
-
-A store object reference is an [opaque][opaque-data-type], [unique identifier][unique-identifier]:
-The only way to obtain references is by adding or building store objects.
-A reference will always point to exactly one store object.
-
-## Operations
-
-A Nix store can *add*, *retrieve*, and *delete* store objects.
-
-            [ data ]
-                |
-                V
-    [ store ] ---> add ----> [ store' ]
-                |
-                V
-          [ reference ]
-
-<!-- -->
-
-    [ reference ]
-          |
-          V
-    [ store ] ---> get
-          |
-          V
-    [ store object ]
-
-<!-- -->
-
-    [ reference ]
-          |
-          V
-    [ store ] --> delete --> [ store' ]
-
-It can *perform builds*, that is, create new store objects by transforming build inputs into build outputs, using instructions from the build tasks.
-
-    [ reference ]
-          |
-          V
-    [ store ] --> build --(maybe)--> [ store' ]
-          |
-          V
-    [ reference ]
-
-As it keeps track of references, it can [garbage-collect][garbage-collection] unused store objects.
-
-    [ store ] --> collect garbage --> [ store' ]
-
-## Files and Processes
-
-Nix maps between its store model and the [Unix paradigm][unix-paradigm] of [files and processes][file-descriptor], by encoding immutable store objects and opaque identifiers as file system primitives: files and directories, and paths.
-That allows processes to resolve references contained in files and thus access the contents of store objects.
-
-Store objects are therefore implemented as the pair of
-
-- a [file system object](fso.md) for data
-- a set of [store paths](path.md) for references.
-
-[unix-paradigm]: https://en.m.wikipedia.org/wiki/Everything_is_a_file
-[file-descriptor]: https://en.m.wikipedia.org/wiki/File_descriptor
-
-The following diagram shows a radical simplification of how Nix interacts with the operating system:
-It uses files as build inputs, and build outputs are files again.
-On the operating system, files can be run as processes, which in turn operate on files.
-A build function also amounts to an operating system process (not depicted).
-
-```
-+-----------------------------------------------------------------+
-|                               Nix                               |
-|                  [ command line interface ]-------,             |
-|                               |                   |             |
-|                           evaluates               |             |
-|                               |                manages          |
-|                               V                   |             |
-|                  [ configuration language ]       |             |
-|                               |                   |             |
-| +-----------------------------|-------------------V-----------+ |
-| | store                  evaluates to                         | |
-| |                             |                               | |
-| |             referenced by   V       builds                  | |
-| | [ build input ] ---> [ build plan ] ---> [ build result ]   | |
-| |          ^                                        |         | |
-| +----------|----------------------------------------|---------+ |
-+------------|----------------------------------------|-----------+
-             |                                        |
-    file system object                           store path
-             |                                        |
-+------------|----------------------------------------|-----------+
-| operating system        +------------+              |           |
-|            '----------- |            | <------------'           |
-|                         |    file    |                          |
-|                ,------- |            | <------,                 |
-|                |        +------------+        |                 |
-|    execute as  |                              |  read, write, execute |
-|                |        +------------+        |                 |
-|                '------> |  process   | -------'                 |
-|                         +------------+                          |
-+-----------------------------------------------------------------+
-```
-
-There exist different types of stores, which all follow this model.
-Examples:
-- store on the local file system
-- remote store accessible via SSH
-- binary cache store accessible via HTTP
-
-To make store objects accessible to processes, stores ultimately have to expose store objects through the file system.

doc/manual/src/architecture/store/store/build-system-terminology.md (deleted)

@@ -1,32 +0,0 @@
-# A [Rosetta stone][rosetta-stone] for build system terminology
-
-The Nix store's design is comparable to other build systems.
-Usage of terms is, for historic reasons, not entirely consistent within the Nix ecosystem, and still subject to slow change.
-
-The following translation table points out similarities and equivalent terms, to help clarify their meaning and inform consistent use in the future.
-
-| generic build system             | Nix              | [Bazel][bazel]                                                        | [Build Systems à la Carte][bsalc] | programming language     |
-| -------------------------------- | ---------------- | --------------------------------------------------------------------- | --------------------------------- | ------------------------ |
-| data (build input, build result) | store object     | [artifact][bazel-artifact]                                            | value                             | value                    |
-| build instructions               | builder          | ([depends on action type][bazel-actions])                             | function                          | function                 |
-| build task                       | derivation       | [action][bazel-action]                                                | `Task`                            | [thunk][thunk]           |
-| build plan                       | derivation graph | [action graph][bazel-action-graph], [build graph][bazel-build-graph]  | `Tasks`                           | [call graph][call-graph] |
-| build                            | build            | build                                                                 | application of `Build`            | evaluation               |
-| persistence layer                | store            | [action cache][bazel-action-cache]                                    | `Store`                           | heap                     |
-
-All of these systems share features of [declarative programming][declarative-programming] languages, a key insight first put forward by Eelco Dolstra et al. in [Imposing a Memory Management Discipline on Software Deployment][immdsd] (2004), elaborated in his PhD thesis [The Purely Functional Software Deployment Model][phd-thesis] (2006), and further refined by Andrey Mokhov et al. in [Build Systems à la Carte][bsalc] (2018).
-
-[rosetta-stone]: https://en.m.wikipedia.org/wiki/Rosetta_Stone
-[bazel]: https://bazel.build/start/bazel-intro
-[bazel-artifact]: https://bazel.build/reference/glossary#artifact
-[bazel-actions]: https://docs.bazel.build/versions/main/skylark/lib/actions.html
-[bazel-action]: https://bazel.build/reference/glossary#action
-[bazel-action-graph]: https://bazel.build/reference/glossary#action-graph
-[bazel-build-graph]: https://bazel.build/reference/glossary#build-graph
-[bazel-action-cache]: https://bazel.build/reference/glossary#action-cache
-[thunk]: https://en.m.wikipedia.org/wiki/Thunk
-[call-graph]: https://en.m.wikipedia.org/wiki/Call_graph
-[declarative-programming]: https://en.m.wikipedia.org/wiki/Declarative_programming
-[immdsd]: https://edolstra.github.io/pubs/immdsd-icse2004-final.pdf
-[phd-thesis]: https://edolstra.github.io/pubs/phd-thesis.pdf
-[bsalc]: https://www.microsoft.com/en-us/research/uploads/prod/2018/03/build-systems.pdf

doc/manual/src/architecture/store/store/closure.md (deleted)

@@ -1,29 +0,0 @@
-# Closure
-
-Nix stores ensure [referential integrity][referential-integrity]: for each store object in the store, all the store objects it references must also be in the store.
-
-The set of all store objects reachable by following references from a given initial set of store objects is called a *closure*.
-
-Adding, building, copying and deleting store objects must be done in a way that preserves referential integrity:
-
-- A newly added store object cannot have references, unless it is a build task.
-
-- Build results must only refer to store objects in the closure of the build inputs.
-
-  Building a store object will add appropriate references, according to the build task.
-
-- Store objects being copied must refer to objects already in the destination store.
-
-  Recursive copying must either proceed in dependency order or be atomic.
-
-- We can only safely delete store objects which are not reachable from any reference still in use.
-
-  <!-- more details in section on garbage collection, link to it once it exists -->
-
-[referential-integrity]: https://en.m.wikipedia.org/wiki/Referential_integrity
-[garbage-collection]: https://en.m.wikipedia.org/wiki/Garbage_collection_(computer_science)
-[immutable-object]: https://en.m.wikipedia.org/wiki/Immutable_object
-[opaque-data-type]: https://en.m.wikipedia.org/wiki/Opaque_data_type
-[unique-identifier]: https://en.m.wikipedia.org/wiki/Unique_identifier

doc/manual/src/command-ref/nix-copy-closure.md

@@ -30,8 +30,8 @@ Since `nix-copy-closure` calls `ssh`, you may be asked to type in the
 appropriate password or passphrase. In fact, you may be asked _twice_
 because `nix-copy-closure` currently connects twice to the remote
 machine, first to get the set of paths missing on the target machine,
-and second to send the dump of those paths. If this bothers you, use
-`ssh-agent`.
+and second to send the dump of those paths. When using public key
+authentication, you can avoid typing the passphrase with `ssh-agent`.
 
 # Options
 

doc/manual/src/contributing/hacking.md

@@ -42,7 +42,7 @@ $ nix develop
 ```
 
 To get a shell with a different compilation environment (e.g. stdenv,
-gccStdenv, clangStdenv, clang11Stdenv):
+gccStdenv, clangStdenv, clang11Stdenv, ccacheStdenv):
 
 ```console
 $ nix-shell -A devShells.x86_64-linux.clang11StdenvPackages

@@ -54,6 +54,9 @@ or if you have a flake-enabled nix:
 $ nix develop .#clang11StdenvPackages
 ```
 
+Note: you can use `ccacheStdenv` to drastically improve rebuild
+time. By default, ccache keeps artifacts in `~/.cache/ccache/`.
+
 To build Nix itself in this shell:
 
 ```console

@@ -83,9 +86,7 @@ by:
 $ nix develop
 ```
 
-## Testing
+## Running tests
 
-Nix comes with three different flavors of tests: unit, functional and integration.
-
 ### Unit-tests
 

@@ -108,3 +109,72 @@ These tests include everything that needs to interact with external services or
 Because these tests are expensive and require more than what the standard github-actions setup provides, they only run on the master branch (on <https://hydra.nixos.org/jobset/nix/master>).
 
 You can run them manually with `nix build .#hydraJobs.tests.{testName}` or `nix-build -A hydraJobs.tests.{testName}`
+
+### Installer tests
+
+After a one-time setup, the Nix repository's GitHub Actions continuous integration (CI) workflow can test the installer each time you push to a branch.
+
+Creating a Cachix cache for your installer tests and adding its authorization token to GitHub enables [two installer-specific jobs in the CI workflow](https://github.com/NixOS/nix/blob/88a45d6149c0e304f6eb2efcc2d7a4d0d569f8af/.github/workflows/ci.yml#L50-L91):
+
+- The `installer` job generates installers for the platforms below and uploads them to your Cachix cache:
+  - `x86_64-linux`
+  - `armv6l-linux`
+  - `armv7l-linux`
+  - `x86_64-darwin`
+
+- The `installer_test` job (which runs on `ubuntu-latest` and `macos-latest`) will try to install Nix with the cached installer and run a trivial Nix command.
+
+#### One-time setup
+
+1. Have a GitHub account with a fork of the [Nix repository](https://github.com/NixOS/nix).
+2. At cachix.org:
+   - Create or log in to an account.
+   - Create a Cachix cache using the format `<github-username>-nix-install-tests`.
+   - Navigate to the new cache > Settings > Auth Tokens.
+   - Generate a new Cachix auth token and copy the generated value.
+3. At github.com:
+   - Navigate to your Nix fork > Settings > Secrets > Actions > New repository secret.
+   - Name the secret `CACHIX_AUTH_TOKEN`.
+   - Paste the copied value of the Cachix cache auth token.
+
+#### Using the CI-generated installer for manual testing
+
+After the CI run completes, you can check the output to extract the installer URL:
+1. Click into the detailed view of the CI run.
+2. Click into any `installer_test` run (the URL you're here to extract will be the same in all of them).
+3. Click into the `Run cachix/install-nix-action@v...` step and click the detail triangle next to the first log line (it will also be `Run cachix/install-nix-action@v...`)
+4. Copy the value of `install_url`
+5. To generate an install command, plug this `install_url` and your GitHub username into this template:
+
+   ```console
+   sh <(curl -L <install_url>) --tarball-url-prefix https://<github-username>-nix-install-tests.cachix.org/serve
+   ```
+
+<!-- #### Manually generating test installers
+
+There's obviously a manual way to do this, and it's still the only way for
+platforms that lack GA runners.
+
+I did do this back in Fall 2020 (before the GA approach encouraged here). I'll
+sketch what I recall in case it encourages someone to fill in detail, but: I
+didn't know what I was doing at the time and had to fumble/ask around a lot--
+so I don't want to uphold any of it as "right". It may have been dumb or
+the _hard_ way from the getgo. Fundamentals may have changed since.
+
+Here's the build command I used to do this on and for x86_64-darwin:
+nix build --out-link /tmp/foo ".#checks.x86_64-darwin.binaryTarball"
+
+I used the stable out-link to make it easier to script the next steps:
+link=$(readlink /tmp/foo)
+cp $link/*-darwin.tar.xz ~/somewheres
+
+I've lost the last steps and am just going from memory:
+
+From here, I think I had to extract and modify the `install` script to point
+it at this tarball (which I scped to my own site, but it might make more sense
+to just share them locally). I extracted this script once and then just
+search/replaced in it for each new build.
+
+The installer now supports a `--tarball-url-prefix` flag which _may_ have
+solved this need?
+-->

doc/manual/src/glossary.md

@@ -7,10 +7,44 @@
   translated into low-level *store derivations* (implicitly by
   `nix-env` and `nix-build`, or explicitly by `nix-instantiate`).
 
+- [content-addressed derivation]{#gloss-content-addressed-derivation}\
+  A derivation which has the
+  [`__contentAddressed`](language/advanced-attributes.md#adv-attr-__contentAddressed)
+  attribute set to `true`.
+
+- [fixed-output derivation]{#gloss-fixed-output-derivation}\
+  A derivation which includes the
+  [`outputHash`](language/advanced-attributes.md#adv-attr-outputHash) attribute.
+
 - [store]{#gloss-store}\
   The location in the file system where store objects live. Typically
   `/nix/store`.
+
+  From the perspective of the location where Nix is
+  invoked, the Nix store can be referred to
+  as a "_local_" or a "_remote_" one:
+
+  + A *local store* exists on the filesystem of
+    the machine where Nix is invoked. You can use other
+    local stores by passing the `--store` flag to the
+    `nix` command. Local stores can be used for building derivations.
+
+  + A *remote store* exists anywhere other than the
+    local filesystem. One example is the `/nix/store`
+    directory on another machine, accessed via `ssh` or
+    served by the `nix-serve` Perl script.
+
+- [chroot store]{#gloss-chroot-store}\
+  A local store whose canonical path is anything other than `/nix/store`.
+
+- [binary cache]{#gloss-binary-cache}\
+  A *binary cache* is a Nix store which uses a different format: its
+  metadata and signatures are kept in `.narinfo` files rather than in a
+  Nix database. This different format simplifies serving store objects
+  over the network, but cannot host builds. Examples of binary caches
+  include S3 buckets and the [NixOS binary
+  cache](https://cache.nixos.org).
+
 - [store path]{#gloss-store-path}\
   The location in the file system of a store object, i.e., an
   immediate child of the Nix store directory.

@@ -22,6 +56,19 @@
   derivation outputs (objects produced by running a build action), or
   derivations (files describing a build action).
 
+- [input-addressed store object]{#gloss-input-addressed-store-object}\
+  A store object produced by building a
+  non-[content-addressed](#gloss-content-addressed-derivation),
+  non-[fixed-output](#gloss-fixed-output-derivation)
+  derivation.
+
+- [output-addressed store object]{#gloss-output-addressed-store-object}\
+  A store object whose store path hashes its content. This
+  includes derivations, the outputs of
+  [content-addressed derivations](#gloss-content-addressed-derivation),
+  and the outputs of
+  [fixed-output derivations](#gloss-fixed-output-derivation).
+
 - [substitute]{#gloss-substitute}\
   A substitute is a command invocation stored in the Nix database that
   describes how to build a store object, bypassing the normal build

@@ -29,6 +76,11 @@
   store object by downloading a pre-built version of the store object
   from some server.
 
+- [substituter]{#gloss-substituter}\
+  A *substituter* is an additional store from which Nix will
+  copy store objects it doesn't have. For details, see the
+  [`substituters` option](command-ref/conf-file.html#conf-substituters).
+
 - [purity]{#gloss-purity}\
   The assumption that equal Nix derivations when run always produce
   the same output. This cannot be guaranteed in general (e.g., a
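
Editor's note: an illustrative sketch (not part of the commit) of the two derivation kinds named in the new glossary entries; the builder, names, and hash below are placeholders.

```nix
{
  # fixed-output derivation: declares `outputHash`, so the output path is
  # determined by the declared hash rather than by the derivation's inputs
  fixedOutput = builtins.derivation {
    name = "example-fixed-output";
    system = builtins.currentSystem;
    builder = "/bin/sh";
    args = [ "-c" "echo hello > $out" ];
    outputHashMode = "flat";
    outputHashAlgo = "sha256";
    outputHash = "0000000000000000000000000000000000000000000000000000"; # placeholder
  };

  # content-addressed derivation: sets `__contentAddressed = true`
  # (requires the `ca-derivations` experimental feature)
  contentAddressed = builtins.derivation {
    name = "example-content-addressed";
    system = builtins.currentSystem;
    builder = "/bin/sh";
    args = [ "-c" "echo hello > $out" ];
    __contentAddressed = true;
    outputHashMode = "recursive";
    outputHashAlgo = "sha256";
  };
}
```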

doc/manual/src/release-notes/rl-next.md

@@ -1,5 +1,11 @@
 # Release X.Y (202?-??-??)
 
+* `<nix/fetchurl.nix>` now accepts an additional argument `impure` which
+  defaults to `false`. If it is set to `true`, the `hash` and `sha256`
+  arguments will be ignored and the resulting derivation will have
+  `__impure` set to `true`, making it an impure derivation.
+
 * Error traces have been reworked to provide detailed explanations and more
   accurate error locations. A short excerpt of the trace is now shown by
   default when an error occurs.
+>>>>>>> origin/master
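
Editor's note: a hypothetical usage sketch of the `impure` argument described in the release note above; the URL is a placeholder, and impure derivations need the `impure-derivations` experimental feature.

```nix
import <nix/fetchurl.nix> {
  url = "https://example.org/some-tarball.tar.xz";
  # with impure = true, the `hash`/`sha256` arguments are ignored and the
  # resulting derivation has `__impure = true`
  impure = true;
}
```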

doc/manual/utils.nix

@@ -5,6 +5,32 @@ rec {
 
   concatStrings = concatStringsSep "";
 
+  replaceStringsRec = from: to: string:
+    # recursively replace occurrences of `from` with `to` within `string`
+    # example:
+    #     replaceStringRec "--" "-" "hello-----world"
+    #     => "hello-world"
+    let
+      replaced = replaceStrings [ from ] [ to ] string;
+    in
+      if replaced == string then string else replaceStringsRec from to replaced;
+
+  squash = replaceStringsRec "\n\n\n" "\n\n";
+
+  trim = string:
+    # trim trailing spaces and squash non-leading spaces
+    let
+      trimLine = line:
+        let
+          # separate leading spaces from the rest
+          parts = split "(^ *)" line;
+          spaces = head (elemAt parts 1);
+          rest = elemAt parts 2;
+          # drop trailing spaces
+          body = head (split " *$" rest);
+        in spaces + replaceStringsRec "  " " " body;
+    in concatStringsSep "\n" (map trimLine (splitLines string));
+
   # FIXME: O(n^2)
   unique = foldl' (acc: e: if elem e acc then acc else acc ++ [ e ]) [];
 
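
Editor's note: a small sketch (not part of the commit) showing what the new helpers evaluate to, assuming it is evaluated next to doc/manual/utils.nix.

```nix
with import ./utils.nix;
{
  squashed = squash "a\n\n\n\nb";                # => "a\n\nb"
  trimmed  = trim "  some   indented text   ";   # => "  some indented text"
}
```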

docker.nix

@@ -33,7 +33,7 @@ let
 
   root = {
     uid = 0;
-    shell = "/bin/bash";
+    shell = "${pkgs.bashInteractive}/bin/bash";
    home = "/root";
    gid = 0;
  };

flake.nix

@@ -23,7 +23,7 @@
 
       crossSystems = [ "armv6l-linux" "armv7l-linux" ];
 
-      stdenvs = [ "gccStdenv" "clangStdenv" "clang11Stdenv" "stdenv" "libcxxStdenv" ];
+      stdenvs = [ "gccStdenv" "clangStdenv" "clang11Stdenv" "stdenv" "libcxxStdenv" "ccacheStdenv" ];
 
       forAllSystems = f: nixpkgs.lib.genAttrs systems (system: f system);
       forAllSystemsAndStdenvs = f: forAllSystems (system:

@@ -546,6 +546,11 @@
         # againstLatestStable = testNixVersions pkgs pkgs.nix pkgs.nixStable;
       } "touch $out");
 
+      installerTests = import ./tests/installer {
+        binaryTarballs = self.hydraJobs.binaryTarball;
+        inherit nixpkgsFor;
+      };
+
     };
 
     checks = forAllSystems (system: {

scripts/install-multi-user.sh

@@ -37,6 +37,19 @@ readonly PROFILE_TARGETS=("/etc/bashrc" "/etc/profile.d/nix.sh" "/etc/zshrc" "/e
 readonly PROFILE_BACKUP_SUFFIX=".backup-before-nix"
 readonly PROFILE_NIX_FILE="$NIX_ROOT/var/nix/profiles/default/etc/profile.d/nix-daemon.sh"
 
+# Fish has different syntax than zsh/bash, treat it separate
+readonly PROFILE_FISH_SUFFIX="conf.d/nix.fish"
+readonly PROFILE_FISH_PREFIXES=(
+    # each of these are common values of $__fish_sysconf_dir,
+    # under which Fish will look for a file named
+    # $PROFILE_FISH_SUFFIX.
+    "/etc/fish"              # standard
+    "/usr/local/etc/fish"    # their installer .pkg for macOS
+    "/opt/homebrew/etc/fish" # homebrew
+    "/opt/local/etc/fish"    # macports
+)
+readonly PROFILE_NIX_FILE_FISH="$NIX_ROOT/var/nix/profiles/default/etc/profile.d/nix-daemon.fish"
+
 readonly NIX_INSTALLED_NIX="@nix@"
 readonly NIX_INSTALLED_CACERT="@cacert@"
 #readonly NIX_INSTALLED_NIX="/nix/store/j8dbv5w6jl34caywh2ygdy88knx1mdf7-nix-2.3.6"

@@ -362,7 +375,7 @@ finish_fail() {
     finish_cleanup
 
     failure <<EOF
-Jeeze, something went wrong. If you can take all the output and open
+Oh no, something went wrong. If you can take all the output and open
 an issue, we'd love to fix the problem so nobody else has this issue.
 
 :(

@@ -828,6 +841,19 @@ fi
 EOF
 }
 
+# Fish has differing syntax
+fish_source_lines() {
+    cat <<EOF
+
+# Nix
+if test -e '$PROFILE_NIX_FILE_FISH'
+  . '$PROFILE_NIX_FILE_FISH'
+end
+# End Nix
+
+EOF
+}
+
 configure_shell_profile() {
     task "Setting up shell profiles: ${PROFILE_TARGETS[*]}"
     for profile_target in "${PROFILE_TARGETS[@]}"; do

@@ -849,6 +875,27 @@ configure_shell_profile() {
             tee -a "$profile_target"
         fi
     done
+
+    task "Setting up shell profiles for Fish with ${PROFILE_FISH_SUFFIX} inside ${PROFILE_FISH_PREFIXES[*]}"
+    for fish_prefix in "${PROFILE_FISH_PREFIXES[@]}"; do
+        if [ ! -d "$fish_prefix" ]; then
+            # this specific prefix (ie: /etc/fish) is very likely to exist
+            # if Fish is installed with this sysconfdir.
+            continue
+        fi
+
+        profile_target="${fish_prefix}/${PROFILE_FISH_SUFFIX}"
+        conf_dir=$(dirname "$profile_target")
+        if [ ! -d "$conf_dir" ]; then
+            _sudo "create $conf_dir for our Fish hook" \
+                mkdir "$conf_dir"
+        fi
+
+        fish_source_lines \
+            | _sudo "write nix-daemon settings to $profile_target" \
+                  tee "$profile_target"
+    done
 
     # TODO: should we suggest '. $PROFILE_NIX_FILE'? It would get them on
     # their way less disruptively, but a counter-argument is that they won't
     # immediately notice if something didn't get set up right?

scripts/install-nix-from-closure.sh

@@ -209,31 +209,50 @@ if [ -z "$NIX_INSTALLER_NO_CHANNEL_ADD" ]; then
 fi
 
 added=
-p=$HOME/.nix-profile/etc/profile.d/nix.sh
+p=
+p_sh=$HOME/.nix-profile/etc/profile.d/nix.sh
+p_fish=$HOME/.nix-profile/etc/profile.d/nix.fish
 if [ -z "$NIX_INSTALLER_NO_MODIFY_PROFILE" ]; then
     # Make the shell source nix.sh during login.
     for i in .bash_profile .bash_login .profile; do
         fn="$HOME/$i"
         if [ -w "$fn" ]; then
-            if ! grep -q "$p" "$fn"; then
+            if ! grep -q "$p_sh" "$fn"; then
                 echo "modifying $fn..." >&2
-                printf '\nif [ -e %s ]; then . %s; fi # added by Nix installer\n' "$p" "$p" >> "$fn"
+                printf '\nif [ -e %s ]; then . %s; fi # added by Nix installer\n' "$p_sh" "$p_sh" >> "$fn"
             fi
             added=1
+            p=${p_sh}
             break
         fi
     done
     for i in .zshenv .zshrc; do
         fn="$HOME/$i"
         if [ -w "$fn" ]; then
-            if ! grep -q "$p" "$fn"; then
+            if ! grep -q "$p_sh" "$fn"; then
                 echo "modifying $fn..." >&2
-                printf '\nif [ -e %s ]; then . %s; fi # added by Nix installer\n' "$p" "$p" >> "$fn"
+                printf '\nif [ -e %s ]; then . %s; fi # added by Nix installer\n' "$p_sh" "$p_sh" >> "$fn"
             fi
             added=1
+            p=${p_sh}
             break
         fi
     done
+
+    if [ -d "$HOME/.config/fish" ]; then
+        fishdir=$HOME/.config/fish/conf.d
+        if [ ! -d "$fishdir" ]; then
+            mkdir -p "$fishdir"
+        fi
+
+        fn="$fishdir/nix.fish"
+        echo "placing $fn..." >&2
+        printf '\nif test -e %s; . %s; end # added by Nix installer\n' "$p_fish" "$p_fish" > "$fn"
+        added=1
+        p=${p_fish}
+    fi
+else
+    p=${p_sh}
 fi
 
 if [ -z "$added" ]; then
@@ -40,12 +40,12 @@ case "$(uname -s).$(uname -m)" in
         path=@tarballPath_aarch64-linux@
         system=aarch64-linux
         ;;
-    Linux.armv6l_linux)
+    Linux.armv6l)
         hash=@tarballHash_armv6l-linux@
         path=@tarballPath_armv6l-linux@
         system=armv6l-linux
         ;;
-    Linux.armv7l_linux)
+    Linux.armv7l)
         hash=@tarballHash_armv7l-linux@
         path=@tarballPath_armv7l-linux@
         system=armv7l-linux

@@ -6,6 +6,8 @@ noinst-scripts += $(nix_noinst_scripts)
 profiledir = $(sysconfdir)/profile.d
 
 $(eval $(call install-file-as, $(d)/nix-profile.sh, $(profiledir)/nix.sh, 0644))
+$(eval $(call install-file-as, $(d)/nix-profile.fish, $(profiledir)/nix.fish, 0644))
 $(eval $(call install-file-as, $(d)/nix-profile-daemon.sh, $(profiledir)/nix-daemon.sh, 0644))
+$(eval $(call install-file-as, $(d)/nix-profile-daemon.fish, $(profiledir)/nix-daemon.fish, 0644))
 
 clean-files += $(nix_noinst_scripts)
scripts/nix-profile-daemon.fish.in (new file, 35 lines)

# Only execute this file once per shell.
if test -n "$__ETC_PROFILE_NIX_SOURCED"
  exit
end

set __ETC_PROFILE_NIX_SOURCED 1

set --export NIX_PROFILES "@localstatedir@/nix/profiles/default $HOME/.nix-profile"

# Set $NIX_SSL_CERT_FILE so that Nixpkgs applications like curl work.
if test -n "$NIX_SSH_CERT_FILE"
  : # Allow users to override the NIX_SSL_CERT_FILE
else if test -e /etc/ssl/certs/ca-certificates.crt # NixOS, Ubuntu, Debian, Gentoo, Arch
  set --export NIX_SSL_CERT_FILE /etc/ssl/certs/ca-certificates.crt
else if test -e /etc/ssl/ca-bundle.pem # openSUSE Tumbleweed
  set --export NIX_SSL_CERT_FILE /etc/ssl/ca-bundle.pem
else if test -e /etc/ssl/certs/ca-bundle.crt # Old NixOS
  set --export NIX_SSL_CERT_FILE /etc/ssl/certs/ca-bundle.crt
else if test -e /etc/pki/tls/certs/ca-bundle.crt # Fedora, CentOS
  set --export NIX_SSL_CERT_FILE /etc/pki/tls/certs/ca-bundle.crt
else if test -e "$NIX_LINK/etc/ssl/certs/ca-bundle.crt" # fall back to cacert in Nix profile
  set --export NIX_SSL_CERT_FILE "$NIX_LINK/etc/ssl/certs/ca-bundle.crt"
else if test -e "$NIX_LINK/etc/ca-bundle.crt" # old cacert in Nix profile
  set --export NIX_SSL_CERT_FILE "$NIX_LINK/etc/ca-bundle.crt"
else
  # Fall back to what is in the nix profiles, favouring whatever is defined last.
  for i in $NIX_PROFILES
    if test -e "$i/etc/ssl/certs/ca-bundle.crt"
      set --export NIX_SSL_CERT_FILE "$i/etc/ssl/certs/ca-bundle.crt"
    end
  end
end

fish_add_path --prepend --global "@localstatedir@/nix/profiles/default/bin"
fish_add_path --prepend --global "$HOME/.nix-profile/bin"
scripts/nix-profile.fish.in (new file, 35 lines)

if test -n "$HOME" && test -n "$USER"

  # Set up the per-user profile.

  set NIX_LINK $HOME/.nix-profile

  # Set up environment.
  # This part should be kept in sync with nixpkgs:nixos/modules/programs/environment.nix
  set --export NIX_PROFILES "@localstatedir@/nix/profiles/default $HOME/.nix-profile"

  # Set $NIX_SSL_CERT_FILE so that Nixpkgs applications like curl work.
  if test -n "$NIX_SSH_CERT_FILE"
    : # Allow users to override the NIX_SSL_CERT_FILE
  else if test -e /etc/ssl/certs/ca-certificates.crt # NixOS, Ubuntu, Debian, Gentoo, Arch
    set --export NIX_SSL_CERT_FILE /etc/ssl/certs/ca-certificates.crt
  else if test -e /etc/ssl/ca-bundle.pem # openSUSE Tumbleweed
    set --export NIX_SSL_CERT_FILE /etc/ssl/ca-bundle.pem
  else if test -e /etc/ssl/certs/ca-bundle.crt # Old NixOS
    set --export NIX_SSL_CERT_FILE /etc/ssl/certs/ca-bundle.crt
  else if test -e /etc/pki/tls/certs/ca-bundle.crt # Fedora, CentOS
    set --export NIX_SSL_CERT_FILE /etc/pki/tls/certs/ca-bundle.crt
  else if test -e "$NIX_LINK/etc/ssl/certs/ca-bundle.crt" # fall back to cacert in Nix profile
    set --export NIX_SSL_CERT_FILE "$NIX_LINK/etc/ssl/certs/ca-bundle.crt"
  else if test -e "$NIX_LINK/etc/ca-bundle.crt" # old cacert in Nix profile
    set --export NIX_SSL_CERT_FILE "$NIX_LINK/etc/ca-bundle.crt"
  end

  # Only use MANPATH if it is already set. In general `man` will just simply
  # pick up `.nix-profile/share/man` because is it close to `.nix-profile/bin`
  # which is in the $PATH. For more info, run `manpath -d`.
  set --export --prepend --path MANPATH "$NIX_LINK/share/man"

  fish_add_path --prepend --global "$NIX_LINK/bin"
  set --erase NIX_LINK
end
@@ -1,7 +1,6 @@
 if [ -n "$HOME" ] && [ -n "$USER" ]; then
 
     # Set up the per-user profile.
-    # This part should be kept in sync with nixpkgs:nixos/modules/programs/shell.nix
 
     NIX_LINK=$HOME/.nix-profile
 
@@ -88,7 +88,8 @@ EvalCommand::EvalCommand()
 {
     addFlag({
         .longName = "debugger",
-        .description = "start an interactive environment if evaluation fails",
+        .description = "Start an interactive environment if evaluation fails.",
+        .category = MixEvalArgs::category,
         .handler = {&startReplOnEvalErrors, true},
     });
 }

@@ -13,8 +13,6 @@ namespace nix {
 
 MixEvalArgs::MixEvalArgs()
 {
-    auto category = "Common evaluation options";
-
     addFlag({
         .longName = "arg",
         .description = "Pass the value *expr* as the argument *name* to Nix functions.",

@@ -10,6 +10,8 @@ class Bindings;
 
 struct MixEvalArgs : virtual Args
 {
+    static constexpr auto category = "Common evaluation options";
+
     MixEvalArgs();
 
     Bindings * getAutoArgs(EvalState & state);

@@ -242,7 +242,11 @@ void NixRepl::mainLoop()
 
     // Allow nix-repl specific settings in .inputrc
     rl_readline_name = "nix-repl";
+    try {
     createDirs(dirOf(historyFile));
+    } catch (SysError & e) {
+        logWarning(e.info());
+    }
 #ifndef READLINE
     el_hist_size = 1000;
 #endif

@@ -1046,7 +1050,7 @@ struct CmdRepl : InstallablesCommand
         evalSettings.pureEval = false;
     }
 
-    void prepare()
+    void prepare() override
     {
         if (!settings.isExperimentalFeatureEnabled(Xp::ReplFlake) && !(file) && this->_installables.size() >= 1) {
             warn("future versions of Nix will require using `--file` to load a file");
@@ -12,13 +12,13 @@
 , executable ? false
 , unpack ? false
 , name ? baseNameOf (toString url)
+, impure ? false
 }:
 
-derivation {
+derivation ({
   builder = "builtin:fetchurl";
 
   # New-style output content requirements.
-  inherit outputHashAlgo outputHash;
   outputHashMode = if unpack || executable then "recursive" else "flat";
 
   inherit name url executable unpack;

@@ -38,4 +38,6 @@ derivation {
 
   # To make "nix-prefetch-url" work.
   urls = [ url ];
-}
+} // (if impure
+  then { __impure = true; }
+  else { inherit outputHashAlgo outputHash; }))
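The new `impure` parameter switches between an impure derivation and a fixed-output one by merging exactly one of two attribute sets into the arguments of `derivation`. A minimal Nix sketch of that `//`-with-`if` pattern, outside the context of this file (the attribute names besides `__impure` and all values here are placeholders for illustration, not part of the commit):

```nix
let
  impure = false;
  base = { name = "example"; url = "http://example.org/file"; };
in
  # The right-hand operand of // is chosen at evaluation time, so exactly one
  # of the two attribute sets ends up in the resulting set.
  base // (if impure
    then { __impure = true; }
    else { outputHashAlgo = "sha256"; outputHash = "placeholder"; })
```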
@@ -68,7 +68,7 @@ void ConfigFile::apply()
             }
         }
         if (!trusted) {
-            warn("ignoring untrusted flake configuration setting '%s'", name);
+            warn("ignoring untrusted flake configuration setting '%s'.\nPass '%s' to trust it", name, "--accept-flake-config");
             continue;
         }
     }

@@ -483,12 +483,12 @@ LockedFlake lockFlake(
                 } else if (auto follows = std::get_if<1>(&i.second)) {
                     if (! trustLock) {
                         // It is possible that the flake has changed,
-                        // so we must confirm all the follows that are in the lockfile are also in the flake.
+                        // so we must confirm all the follows that are in the lock file are also in the flake.
                         auto overridePath(inputPath);
                         overridePath.push_back(i.first);
                         auto o = overrides.find(overridePath);
                         // If the override disappeared, we have to refetch the flake,
-                        // since some of the inputs may not be present in the lockfile.
+                        // since some of the inputs may not be present in the lock file.
                         if (o == overrides.end()) {
                             mustRefetch = true;
                             // There's no point populating the rest of the fake inputs,

@@ -36,7 +36,7 @@ LockedNode::LockedNode(const nlohmann::json & json)
     , isFlake(json.find("flake") != json.end() ? (bool) json["flake"] : true)
 {
     if (!lockedRef.input.isLocked())
-        throw Error("lockfile contains mutable lock '%s'",
+        throw Error("lock file contains mutable lock '%s'",
             fetchers::attrsToJSON(lockedRef.input.toAttrs()));
 }
 
@@ -2423,8 +2423,8 @@ static RegisterPrimOp primop_intersectAttrs({
     .name = "__intersectAttrs",
     .args = {"e1", "e2"},
     .doc = R"(
-      Return a set consisting of the attributes in the set *e2* that also
-      exist in the set *e1*.
+      Return a set consisting of the attributes in the set *e2* which have the
+      same name as some attribute in *e1*.
     )",
     .fun = prim_intersectAttrs,
 });
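The reworded documentation is easiest to check against a concrete evaluation. A small Nix example of the behaviour it describes (the attribute names and values are chosen here purely for illustration):

```nix
# The result keeps the values from the second set, restricted to the
# attribute names that also occur in the first set.
builtins.intersectAttrs { a = 1; b = 2; } { b = 30; c = 40; }
# => { b = 30; }
```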
@@ -3818,8 +3818,8 @@ static RegisterPrimOp primop_parseDrvName({
     .args = {"s"},
     .doc = R"(
       Split the string *s* into a package name and version. The package
-      name is everything up to but not including the first dash followed
-      by a digit, and the version is everything following that dash. The
+      name is everything up to but not including the first dash not followed
+      by a letter, and the version is everything following that dash. The
       result is returned in a set `{ name, version }`. Thus,
       `builtins.parseDrvName "nix-0.12pre12876"` returns `{ name =
       "nix"; version = "0.12pre12876"; }`.
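A short Nix illustration of the corrected rule, taken from the `xf86-video-i810` case exercised by the language tests touched elsewhere in this commit: dashes followed by a letter stay part of the name, and the first dash not followed by a letter starts the version.

```nix
builtins.parseDrvName "xf86-video-i810-1.7.4"
# => { name = "xf86-video-i810"; version = "1.7.4"; }
```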
@@ -32,6 +32,7 @@ MixCommonArgs::MixCommonArgs(const std::string & programName)
     addFlag({
         .longName = "option",
         .description = "Set the Nix configuration setting *name* to *value* (overriding `nix.conf`).",
+        .category = miscCategory,
         .labels = {"name", "value"},
         .handler = {[](std::string name, std::string value) {
             try {

@@ -6,6 +6,7 @@ namespace nix {
 
 //static constexpr auto commonArgsCategory = "Miscellaneous common options";
 static constexpr auto loggingCategory = "Logging-related options";
+static constexpr auto miscCategory = "Miscellaneous global options";
 
 class MixCommonArgs : public virtual Args
 {

@@ -503,7 +503,7 @@ public:
         return s[0];
     }
 
-    virtual void setPrintBuildLogs(bool printBuildLogs)
+    void setPrintBuildLogs(bool printBuildLogs) override
     {
         this->printBuildLogs = printBuildLogs;
     }

@@ -4,6 +4,7 @@
 #include "gc-store.hh"
 #include "util.hh"
 #include "loggers.hh"
+#include "progress-bar.hh"
 
 #include <algorithm>
 #include <cctype>

@@ -422,6 +423,8 @@ RunPager::RunPager()
     if (!pager) pager = getenv("PAGER");
     if (pager && ((std::string) pager == "" || (std::string) pager == "cat")) return;
 
+    stopProgressBar();
+
     Pipe toPager;
     toPager.create();

@@ -113,5 +113,25 @@ struct PrintFreed
 /* Install a SIGSEGV handler to detect stack overflows. */
 void detectStackOverflow();
 
+/* Pluggable behavior to run in case of a stack overflow.
+
+   Default value: defaultStackOverflowHandler.
+
+   This is called by the handler installed by detectStackOverflow().
+
+   This gives Nix library consumers a limit opportunity to report the error
+   condition. The handler should exit the process.
+   See defaultStackOverflowHandler() for a reference implementation.
+
+   NOTE: Use with diligence, because this runs in the signal handler, with very
+   limited stack space and a potentially a corrupted heap, all while the failed
+   thread is blocked indefinitely. All functions called must be reentrant. */
+extern std::function<void(siginfo_t * info, void * ctx)> stackOverflowHandler;
+
+/* The default, robust implementation of stackOverflowHandler.
+
+   Prints an error message directly to stderr using a syscall instead of the
+   logger. Exits the process immediately after. */
+void defaultStackOverflowHandler(siginfo_t * info, void * ctx);
+
 }

@@ -1,4 +1,5 @@
 #include "error.hh"
+#include "shared.hh"
 
 #include <cstring>
 #include <cstddef>

@@ -29,9 +30,7 @@ static void sigsegvHandler(int signo, siginfo_t * info, void * ctx)
     ptrdiff_t diff = (char *) info->si_addr - sp;
     if (diff < 0) diff = -diff;
     if (diff < 4096) {
-        char msg[] = "error: stack overflow (possible infinite recursion)\n";
-        [[gnu::unused]] auto res = write(2, msg, strlen(msg));
-        _exit(1); // maybe abort instead?
+        nix::stackOverflowHandler(info, ctx);
     }
 }
 

@@ -67,5 +66,12 @@ void detectStackOverflow()
 #endif
 }
 
+std::function<void(siginfo_t * info, void * ctx)> stackOverflowHandler(defaultStackOverflowHandler);
+
+void defaultStackOverflowHandler(siginfo_t * info, void * ctx) {
+    char msg[] = "error: stack overflow (possible infinite recursion)\n";
+    [[gnu::unused]] auto res = write(2, msg, strlen(msg));
+    _exit(1); // maybe abort instead?
+}
+
 }
@@ -1594,6 +1594,8 @@ void LocalDerivationGoal::runChild()
     /* Warning: in the child we should absolutely not make any SQLite
        calls! */
 
+    bool sendException = true;
+
     try { /* child */
 
         commonChildInit(builderOut);

@@ -2050,6 +2052,8 @@ void LocalDerivationGoal::runChild()
         /* Indicate that we managed to set up the build environment. */
         writeFull(STDERR_FILENO, std::string("\2\n"));
 
+        sendException = false;
+
         /* Execute the program. This should not return. */
         if (drv->isBuiltin()) {
             try {

@@ -2103,10 +2107,13 @@ void LocalDerivationGoal::runChild()
             throw SysError("executing '%1%'", drv->builder);
 
     } catch (Error & e) {
+        if (sendException) {
         writeFull(STDERR_FILENO, "\1\n");
         FdSink sink(STDERR_FILENO);
         sink << e;
         sink.flush();
+        } else
+            std::cerr << e.msg();
         _exit(1);
     }
 }

@@ -239,6 +239,8 @@ struct ClientSettings
             else if (trusted
                 || name == settings.buildTimeout.name
                 || name == settings.buildRepeat.name
+                || name == settings.maxSilentTime.name
+                || name == settings.pollInterval.name
                 || name == "connect-timeout"
                 || (name == "builders" && value == ""))
                 settings.set(name, value);

@@ -322,7 +322,6 @@ struct curlFileTransfer : public FileTransfer
         }
 
         if (request.verifyTLS) {
-            debug("verify TLS: Nix CA file = '%s'", settings.caFile);
             if (settings.caFile != "")
                 curl_easy_setopt(req, CURLOPT_CAINFO, settings.caFile.c_str());
         } else {
@@ -619,6 +619,17 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results)
         Path path = storeDir + "/" + std::string(baseName);
         Path realPath = realStoreDir + "/" + std::string(baseName);
 
+        /* There may be temp directories in the store that are still in use
+           by another process. We need to be sure that we can acquire an
+           exclusive lock before deleting them. */
+        if (baseName.find("tmp-", 0) == 0) {
+            AutoCloseFD tmpDirFd = open(realPath.c_str(), O_RDONLY | O_DIRECTORY);
+            if (tmpDirFd.get() == -1 || !lockFile(tmpDirFd.get(), ltWrite, false)) {
+                debug("skipping locked tempdir '%s'", realPath);
+                return;
+            }
+        }
+
         printInfo("deleting '%1%'", path);
 
         results.paths.insert(path);

@@ -154,13 +154,9 @@ StringSet Settings::getDefaultExtraPlatforms()
     // machines. Note that we can’t force processes from executing
     // x86_64 in aarch64 environments or vice versa since they can
     // always exec with their own binary preferences.
-    if (pathExists("/Library/Apple/System/Library/LaunchDaemons/com.apple.oahd.plist") ||
-        pathExists("/System/Library/LaunchDaemons/com.apple.oahd.plist")) {
-        if (std::string{SYSTEM} == "x86_64-darwin")
-            extraPlatforms.insert("aarch64-darwin");
-        else if (std::string{SYSTEM} == "aarch64-darwin")
+    if (std::string{SYSTEM} == "aarch64-darwin" &&
+        runProgram(RunOptions {.program = "arch", .args = {"-arch", "x86_64", "/usr/bin/true"}, .mergeStderrToStdout = true}).first == 0)
         extraPlatforms.insert("x86_64-darwin");
-    }
 #endif
 
     return extraPlatforms;

@@ -560,9 +560,15 @@ public:
         R"(
           If set to `true` (the default), any non-content-addressed path added
           or copied to the Nix store (e.g. when substituting from a binary
-          cache) must have a valid signature, that is, be signed using one of
-          the keys listed in `trusted-public-keys` or `secret-key-files`. Set
-          to `false` to disable signature checking.
+          cache) must have a signature by a trusted key. A trusted key is one
+          listed in `trusted-public-keys`, or a public key counterpart to a
+          private key stored in a file listed in `secret-key-files`.
+
+          Set to `false` to disable signature checking and trust all
+          non-content-addressed paths unconditionally.
+
+          (Content-addressed paths are inherently trustworthy and thus
+          unaffected by this configuration option.)
         )"};
 
     Setting<StringSet> extraPlatforms{

@@ -613,6 +619,14 @@ public:
           are tried based on their Priority value, which each substituter can set
           independently. Lower value means higher priority.
          The default is `https://cache.nixos.org`, with a Priority of 40.
+
+          Nix will copy a store path from a remote store only if one
+          of the following is true:
+
+          - the store object is signed by one of the [`trusted-public-keys`](#conf-trusted-public-keys)
+          - the substituter is in the [`trusted-substituters`](#conf-trusted-substituters) list
+          - the [`require-sigs`](#conf-require-sigs) option has been set to `false`
+          - the store object is [output-addressed](glossary.md#gloss-output-addressed-store-object)
         )",
         {"binary-caches"}};
 
@@ -158,7 +158,7 @@ void migrateCASchema(SQLite& db, Path schemaPath, AutoCloseFD& lockFd)
             txn.commit();
         }
 
-        writeFile(schemaPath, fmt("%d", nixCASchemaVersion));
+        writeFile(schemaPath, fmt("%d", nixCASchemaVersion), 0666, true);
         lockFile(lockFd.get(), ltRead, true);
     }
 }

@@ -281,7 +281,7 @@ LocalStore::LocalStore(const Params & params)
         else if (curSchema == 0) { /* new store */
             curSchema = nixSchemaVersion;
             openDB(*state, true);
-            writeFile(schemaPath, (format("%1%") % nixSchemaVersion).str());
+            writeFile(schemaPath, (format("%1%") % nixSchemaVersion).str(), 0666, true);
         }
 
         else if (curSchema < nixSchemaVersion) {

@@ -329,7 +329,7 @@ LocalStore::LocalStore(const Params & params)
                 txn.commit();
             }
 
-            writeFile(schemaPath, (format("%1%") % nixSchemaVersion).str());
+            writeFile(schemaPath, (format("%1%") % nixSchemaVersion).str(), 0666, true);
 
             lockFile(globalLock.get(), ltRead, true);
         }

@@ -751,7 +751,7 @@ void LocalStore::registerDrvOutput(const Realisation & info, CheckSigsFlag check
     if (checkSigs == NoCheckSigs || !realisationIsUntrusted(info))
         registerDrvOutput(info);
     else
-        throw Error("cannot register realisation '%s' because it lacks a valid signature", info.outPath.to_string());
+        throw Error("cannot register realisation '%s' because it lacks a signature by a trusted key", info.outPath.to_string());
 }
 
 void LocalStore::registerDrvOutput(const Realisation & info)

@@ -1266,7 +1266,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
     RepairFlag repair, CheckSigsFlag checkSigs)
 {
     if (checkSigs && pathInfoIsUntrusted(info))
-        throw Error("cannot add path '%s' because it lacks a valid signature", printStorePath(info.path));
+        throw Error("cannot add path '%s' because it lacks a signature by a trusted key", printStorePath(info.path));
 
     addTempRoot(info.path);
 

@@ -1382,13 +1382,15 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name
 
     std::unique_ptr<AutoDelete> delTempDir;
     Path tempPath;
+    Path tempDir;
+    AutoCloseFD tempDirFd;
 
     if (!inMemory) {
         /* Drain what we pulled so far, and then keep on pulling */
         StringSource dumpSource { dump };
         ChainSource bothSource { dumpSource, source };
 
-        auto tempDir = createTempDir(realStoreDir, "add");
+        std::tie(tempDir, tempDirFd) = createTempDirInStore();
         delTempDir = std::make_unique<AutoDelete>(tempDir);
         tempPath = tempDir + "/x";
 

@@ -1507,18 +1509,24 @@ StorePath LocalStore::addTextToStore(
 
 
 /* Create a temporary directory in the store that won't be
-   garbage-collected. */
-Path LocalStore::createTempDirInStore()
+   garbage-collected until the returned FD is closed. */
+std::pair<Path, AutoCloseFD> LocalStore::createTempDirInStore()
 {
-    Path tmpDir;
+    Path tmpDirFn;
+    AutoCloseFD tmpDirFd;
+    bool lockedByUs = false;
     do {
         /* There is a slight possibility that `tmpDir' gets deleted by
-           the GC between createTempDir() and addTempRoot(), so repeat
-           until `tmpDir' exists. */
-        tmpDir = createTempDir(realStoreDir);
-        addTempRoot(parseStorePath(tmpDir));
-    } while (!pathExists(tmpDir));
-    return tmpDir;
+           the GC between createTempDir() and when we acquire a lock on it.
+           We'll repeat until 'tmpDir' exists and we've locked it. */
+        tmpDirFn = createTempDir(realStoreDir, "tmp");
+        tmpDirFd = open(tmpDirFn.c_str(), O_RDONLY | O_DIRECTORY);
+        if (tmpDirFd.get() < 0) {
+            continue;
+        }
+        lockedByUs = lockFile(tmpDirFd.get(), ltWrite, true);
+    } while (!pathExists(tmpDirFn) || !lockedByUs);
+    return {tmpDirFn, std::move(tmpDirFd)};
 }
 
@@ -256,7 +256,7 @@ private:
 
     void findRuntimeRoots(Roots & roots, bool censor);
 
-    Path createTempDirInStore();
+    std::pair<Path, AutoCloseFD> createTempDirInStore();
 
     void checkDerivationOutputs(const StorePath & drvPath, const Derivation & drv);
 

@@ -75,6 +75,9 @@ struct NarAccessor : public FSAccessor
             createMember(path, {FSAccessor::Type::tRegular, false, 0, 0});
         }
 
+        void closeRegularFile() override
+        { }
+
         void isExecutable() override
         {
             parents.top()->isExecutable = true;

@@ -1363,9 +1363,9 @@ std::shared_ptr<Store> openFromNonUri(const std::string & uri, const Store::Para
             } catch (Error & e) {
                 return std::make_shared<LocalStore>(params);
             }
-            warn("'/nix' does not exist, so Nix will use '%s' as a chroot store", chrootStore);
+            warn("'%s' does not exist, so Nix will use '%s' as a chroot store", stateDir, chrootStore);
         } else
-            debug("'/nix' does not exist, so Nix will use '%s' as a chroot store", chrootStore);
+            debug("'%s' does not exist, so Nix will use '%s' as a chroot store", stateDir, chrootStore);
         Store::Params params2;
         params2["root"] = chrootStore;
         return std::make_shared<LocalStore>(params2);

@@ -234,6 +234,7 @@ static void parse(ParseSink & sink, Source & source, const Path & path)
 
         else if (s == "contents" && type == tpRegular) {
             parseContents(sink, source, path);
+            sink.closeRegularFile();
         }
 
         else if (s == "executable" && type == tpRegular) {

@@ -324,6 +325,12 @@ struct RestoreSink : ParseSink
         if (!fd) throw SysError("creating file '%1%'", p);
     }
 
+    void closeRegularFile() override
+    {
+        /* Call close explicitly to make sure the error is checked */
+        fd.close();
+    }
+
     void isExecutable() override
     {
         struct stat st;

@@ -60,6 +60,7 @@ struct ParseSink
     virtual void createDirectory(const Path & path) { };
 
     virtual void createRegularFile(const Path & path) { };
+    virtual void closeRegularFile() { };
     virtual void isExecutable() { };
     virtual void preallocateContents(uint64_t size) { };
     virtual void receiveContents(std::string_view data) { };
@@ -216,7 +216,7 @@ nlohmann::json Args::toJSON()
         if (flag->shortName)
             j["shortName"] = std::string(1, flag->shortName);
         if (flag->description != "")
-            j["description"] = flag->description;
+            j["description"] = trim(flag->description);
         j["category"] = flag->category;
         if (flag->handler.arity != ArityAny)
             j["arity"] = flag->handler.arity;

@@ -237,7 +237,7 @@ nlohmann::json Args::toJSON()
     }
 
     auto res = nlohmann::json::object();
-    res["description"] = description();
+    res["description"] = trim(description());
     res["flags"] = std::move(flags);
     res["args"] = std::move(args);
     auto s = doc();

@@ -379,7 +379,7 @@ nlohmann::json MultiCommand::toJSON()
         auto j = command->toJSON();
         auto cat = nlohmann::json::object();
         cat["id"] = command->category();
-        cat["description"] = categories[command->category()];
+        cat["description"] = trim(categories[command->category()]);
         j["category"] = std::move(cat);
         cmds[name] = std::move(j);
     }

@@ -353,7 +353,7 @@ void readFile(const Path & path, Sink & sink)
 }
 
 
-void writeFile(const Path & path, std::string_view s, mode_t mode)
+void writeFile(const Path & path, std::string_view s, mode_t mode, bool sync)
 {
     AutoCloseFD fd = open(path.c_str(), O_WRONLY | O_TRUNC | O_CREAT | O_CLOEXEC, mode);
     if (!fd)

@@ -364,10 +364,16 @@ void writeFile(const Path & path, std::string_view s, mode_t mode)
         e.addTrace({}, "writing file '%1%'", path);
         throw;
     }
+    if (sync)
+        fd.fsync();
+    // Explicitly close to make sure exceptions are propagated.
+    fd.close();
+    if (sync)
+        syncParent(path);
 }
 
 
-void writeFile(const Path & path, Source & source, mode_t mode)
+void writeFile(const Path & path, Source & source, mode_t mode, bool sync)
 {
     AutoCloseFD fd = open(path.c_str(), O_WRONLY | O_TRUNC | O_CREAT | O_CLOEXEC, mode);
     if (!fd)

@@ -386,6 +392,20 @@ void writeFile(const Path & path, Source & source, mode_t mode)
         e.addTrace({}, "writing file '%1%'", path);
         throw;
     }
+    if (sync)
+        fd.fsync();
+    // Explicitly close to make sure exceptions are propagated.
+    fd.close();
+    if (sync)
+        syncParent(path);
+}
+
+void syncParent(const Path & path)
+{
+    AutoCloseFD fd = open(dirOf(path).c_str(), O_RDONLY, 0);
+    if (!fd)
+        throw SysError("opening file '%1%'", path);
+    fd.fsync();
 }
 
 std::string readLine(int fd)

@@ -841,6 +861,20 @@ void AutoCloseFD::close()
         }
     }
 }
 
+void AutoCloseFD::fsync()
+{
+    if (fd != -1) {
+        int result;
+#if __APPLE__
+        result = ::fcntl(fd, F_FULLFSYNC);
+#else
+        result = ::fsync(fd);
+#endif
+        if (result == -1)
+            throw SysError("fsync file descriptor %1%", fd);
+    }
+}
+
 
 AutoCloseFD::operator bool() const
 {

@@ -115,9 +115,12 @@ std::string readFile(const Path & path);
 void readFile(const Path & path, Sink & sink);
 
 /* Write a string to a file. */
-void writeFile(const Path & path, std::string_view s, mode_t mode = 0666);
+void writeFile(const Path & path, std::string_view s, mode_t mode = 0666, bool sync = false);
 
-void writeFile(const Path & path, Source & source, mode_t mode = 0666);
+void writeFile(const Path & path, Source & source, mode_t mode = 0666, bool sync = false);
+
+/* Flush a file's parent directory to disk */
+void syncParent(const Path & path);
 
 /* Read a line from a file descriptor. */
 std::string readLine(int fd);

@@ -231,6 +234,7 @@ public:
     explicit operator bool() const;
     int release();
     void close();
+    void fsync();
 };
 
|
@ -85,7 +85,6 @@ static void main_nix_build(int argc, char * * argv)
|
||||||
Strings attrPaths;
|
Strings attrPaths;
|
||||||
Strings left;
|
Strings left;
|
||||||
RepairFlag repair = NoRepair;
|
RepairFlag repair = NoRepair;
|
||||||
Path gcRoot;
|
|
||||||
BuildMode buildMode = bmNormal;
|
BuildMode buildMode = bmNormal;
|
||||||
bool readStdin = false;
|
bool readStdin = false;
|
||||||
|
|
||||||
|
@ -167,9 +166,6 @@ static void main_nix_build(int argc, char * * argv)
|
||||||
else if (*arg == "--out-link" || *arg == "-o")
|
else if (*arg == "--out-link" || *arg == "-o")
|
||||||
outLink = getArg(*arg, arg, end);
|
outLink = getArg(*arg, arg, end);
|
||||||
|
|
||||||
else if (*arg == "--add-root")
|
|
||||||
gcRoot = getArg(*arg, arg, end);
|
|
||||||
|
|
||||||
else if (*arg == "--dry-run")
|
else if (*arg == "--dry-run")
|
||||||
dryRun = true;
|
dryRun = true;
|
||||||
|
|
||||||
|
|
|
@ -246,6 +246,7 @@ struct Common : InstallableCommand, MixProfile
|
||||||
"NIX_LOG_FD",
|
"NIX_LOG_FD",
|
||||||
"NIX_REMOTE",
|
"NIX_REMOTE",
|
||||||
"PPID",
|
"PPID",
|
||||||
|
"SHELL",
|
||||||
"SHELLOPTS",
|
"SHELLOPTS",
|
||||||
"SSL_CERT_FILE", // FIXME: only want to ignore /no-cert-file.crt
|
"SSL_CERT_FILE", // FIXME: only want to ignore /no-cert-file.crt
|
||||||
"TEMP",
|
"TEMP",
|
||||||
|
|
|
@ -66,6 +66,12 @@ R""(
|
||||||
`nixpkgs#glibc` in `~/my-glibc` and want to compile another package
|
`nixpkgs#glibc` in `~/my-glibc` and want to compile another package
|
||||||
against it.
|
against it.
|
||||||
|
|
||||||
|
* Run a series of script commands:
|
||||||
|
|
||||||
|
```console
|
||||||
|
# nix develop --command bash -c "mkdir build && cmake .. && make"
|
||||||
|
```
|
||||||
|
|
||||||
# Description
|
# Description
|
||||||
|
|
||||||
`nix develop` starts a `bash` shell that provides an interactive build
|
`nix develop` starts a `bash` shell that provides an interactive build
|
||||||
|
|
|
@ -74,6 +74,7 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs
|
||||||
addFlag({
|
addFlag({
|
||||||
.longName = "help",
|
.longName = "help",
|
||||||
.description = "Show usage information.",
|
.description = "Show usage information.",
|
||||||
|
.category = miscCategory,
|
||||||
.handler = {[&]() { throw HelpRequested(); }},
|
.handler = {[&]() { throw HelpRequested(); }},
|
||||||
});
|
});
|
||||||
|
|
||||||
|
@ -88,6 +89,7 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs
|
||||||
addFlag({
|
addFlag({
|
||||||
.longName = "version",
|
.longName = "version",
|
||||||
.description = "Show version information.",
|
.description = "Show version information.",
|
||||||
|
.category = miscCategory,
|
||||||
.handler = {[&]() { showVersion = true; }},
|
.handler = {[&]() { showVersion = true; }},
|
||||||
});
|
});
|
||||||
|
|
||||||
|
@ -95,12 +97,14 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs
|
||||||
.longName = "offline",
|
.longName = "offline",
|
||||||
.aliases = {"no-net"}, // FIXME: remove
|
.aliases = {"no-net"}, // FIXME: remove
|
||||||
.description = "Disable substituters and consider all previously downloaded files up-to-date.",
|
.description = "Disable substituters and consider all previously downloaded files up-to-date.",
|
||||||
|
.category = miscCategory,
|
||||||
.handler = {[&]() { useNet = false; }},
|
.handler = {[&]() { useNet = false; }},
|
||||||
});
|
});
|
||||||
|
|
||||||
addFlag({
|
addFlag({
|
||||||
.longName = "refresh",
|
.longName = "refresh",
|
||||||
.description = "Consider all previously downloaded files out-of-date.",
|
.description = "Consider all previously downloaded files out-of-date.",
|
||||||
|
.category = miscCategory,
|
||||||
.handler = {[&]() { refresh = true; }},
|
.handler = {[&]() { refresh = true; }},
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
@ -187,7 +191,7 @@ static void showHelp(std::vector<std::string> subcommand, MultiCommand & topleve
|
||||||
*vUtils);
|
*vUtils);
|
||||||
|
|
||||||
auto attrs = state.buildBindings(16);
|
auto attrs = state.buildBindings(16);
|
||||||
attrs.alloc("command").mkString(toplevel.toJSON().dump());
|
attrs.alloc("toplevel").mkString(toplevel.toJSON().dump());
|
||||||
|
|
||||||
auto vRes = state.allocValue();
|
auto vRes = state.allocValue();
|
||||||
state.callFunction(*vGenerateManpage, state.allocValue()->mkAttrs(attrs), *vRes, noPos);
|
state.callFunction(*vGenerateManpage, state.allocValue()->mkAttrs(attrs), *vRes, noPos);
|
||||||
|
@ -325,7 +329,7 @@ void mainWrapped(int argc, char * * argv)
|
||||||
std::cout << "attrs\n"; break;
|
std::cout << "attrs\n"; break;
|
||||||
}
|
}
|
||||||
for (auto & s : *completions)
|
for (auto & s : *completions)
|
||||||
std::cout << s.completion << "\t" << s.description << "\n";
|
std::cout << s.completion << "\t" << trim(s.description) << "\n";
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|
|
@ -22,7 +22,7 @@ R""(
|
||||||
|
|
||||||
```console
|
```console
|
||||||
# nix copy --to /tmp/nix --trusted-public-keys '' nixpkgs#hello
|
# nix copy --to /tmp/nix --trusted-public-keys '' nixpkgs#hello
|
||||||
cannot add path '/nix/store/zy9wbxwcygrwnh8n2w9qbbcr6zk87m26-libunistring-0.9.10' because it lacks a valid signature
|
cannot add path '/nix/store/zy9wbxwcygrwnh8n2w9qbbcr6zk87m26-libunistring-0.9.10' because it lacks a signature by a trusted key
|
||||||
```
|
```
|
||||||
|
|
||||||
* Create a content-addressed representation of the current NixOS
|
* Create a content-addressed representation of the current NixOS
|
||||||
|
|
|
@ -23,6 +23,12 @@ R""(
|
||||||
Hi everybody!
|
Hi everybody!
|
||||||
```
|
```
|
||||||
|
|
||||||
|
* Run multiple commands in a shell environment:
|
||||||
|
|
||||||
|
```console
|
||||||
|
# nix shell nixpkgs#gnumake -c sh -c "cd src && make"
|
||||||
|
```
|
||||||
|
|
||||||
* Run GNU Hello in a chroot store:
|
* Run GNU Hello in a chroot store:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
|
|
|
@ -41,7 +41,7 @@ struct CmdVerify : StorePathsCommand
|
||||||
addFlag({
|
addFlag({
|
||||||
.longName = "sigs-needed",
|
.longName = "sigs-needed",
|
||||||
.shortName = 'n',
|
.shortName = 'n',
|
||||||
.description = "Require that each path has at least *n* valid signatures.",
|
.description = "Require that each path is signed by at least *n* different keys.",
|
||||||
.labels = {"n"},
|
.labels = {"n"},
|
||||||
.handler = {&sigsNeeded}
|
.handler = {&sigsNeeded}
|
||||||
});
|
});
|
||||||
|
|
|
@ -18,9 +18,6 @@ nix-build --no-out-link dependencies.nix --dry-run 2>&1 | grep "will be built"
|
||||||
# Now new command:
|
# Now new command:
|
||||||
nix build -f dependencies.nix --dry-run 2>&1 | grep "will be built"
|
nix build -f dependencies.nix --dry-run 2>&1 | grep "will be built"
|
||||||
|
|
||||||
# TODO: XXX: FIXME: #1793
|
|
||||||
# Disable this part of the test until the problem is resolved:
|
|
||||||
if [ -n "$ISSUE_1795_IS_FIXED" ]; then
|
|
||||||
clearStore
|
clearStore
|
||||||
clearCache
|
clearCache
|
||||||
|
|
||||||
|
@ -28,7 +25,6 @@ clearCache
|
||||||
nix build -f dependencies.nix --dry-run 2>&1 | grep "will be built"
|
nix build -f dependencies.nix --dry-run 2>&1 | grep "will be built"
|
||||||
# Now old command:
|
# Now old command:
|
||||||
nix-build --no-out-link dependencies.nix --dry-run 2>&1 | grep "will be built"
|
nix-build --no-out-link dependencies.nix --dry-run 2>&1 | grep "will be built"
|
||||||
fi
|
|
||||||
|
|
||||||
###################################################
|
###################################################
|
||||||
# Check --dry-run doesn't create links with --dry-run
|
# Check --dry-run doesn't create links with --dry-run
|
||||||
|
|
tests/installer/default.nix (new file, 220 lines)

{ binaryTarballs
, nixpkgsFor
}:

let

  installScripts = {
    install-default = {
      script = ''
        tar -xf ./nix.tar.xz
        mv ./nix-* nix
        ./nix/install --no-channel-add
      '';
    };

    install-force-no-daemon = {
      script = ''
        tar -xf ./nix.tar.xz
        mv ./nix-* nix
        ./nix/install --no-daemon
      '';
    };

    install-force-daemon = {
      script = ''
        tar -xf ./nix.tar.xz
        mv ./nix-* nix
        ./nix/install --daemon --no-channel-add
      '';
    };
  };

  disableSELinux = "sudo setenforce 0";

  images = {

    /*
    "ubuntu-14-04" = {
      image = import <nix/fetchurl.nix> {
        url = "https://app.vagrantup.com/ubuntu/boxes/trusty64/versions/20190514.0.0/providers/virtualbox.box";
        hash = "sha256-iUUXyRY8iW7DGirb0zwGgf1fRbLA7wimTJKgP7l/OQ8=";
      };
      rootDisk = "box-disk1.vmdk";
      system = "x86_64-linux";
    };
    */

    "ubuntu-16-04" = {
      image = import <nix/fetchurl.nix> {
        url = "https://app.vagrantup.com/generic/boxes/ubuntu1604/versions/4.1.12/providers/libvirt.box";
        hash = "sha256-lO4oYQR2tCh5auxAYe6bPOgEqOgv3Y3GC1QM1tEEEU8=";
      };
      rootDisk = "box.img";
      system = "x86_64-linux";
    };

    "ubuntu-22-04" = {
      image = import <nix/fetchurl.nix> {
        url = "https://app.vagrantup.com/generic/boxes/ubuntu2204/versions/4.1.12/providers/libvirt.box";
        hash = "sha256-HNll0Qikw/xGIcogni5lz01vUv+R3o8xowP2EtqjuUQ=";
      };
      rootDisk = "box.img";
      system = "x86_64-linux";
    };

    "fedora-36" = {
      image = import <nix/fetchurl.nix> {
        url = "https://app.vagrantup.com/generic/boxes/fedora36/versions/4.1.12/providers/libvirt.box";
        hash = "sha256-rxPgnDnFkTDwvdqn2CV3ZUo3re9AdPtSZ9SvOHNvaks=";
      };
      rootDisk = "box.img";
      system = "x86_64-linux";
      postBoot = disableSELinux;
    };

    # Currently fails with 'error while loading shared libraries:
    # libsodium.so.23: cannot stat shared object: Invalid argument'.
    /*
    "rhel-6" = {
      image = import <nix/fetchurl.nix> {
        url = "https://app.vagrantup.com/generic/boxes/rhel6/versions/4.1.12/providers/libvirt.box";
        hash = "sha256-QwzbvRoRRGqUCQptM7X/InRWFSP2sqwRt2HaaO6zBGM=";
      };
      rootDisk = "box.img";
      system = "x86_64-linux";
    };
    */

    "rhel-7" = {
      image = import <nix/fetchurl.nix> {
        url = "https://app.vagrantup.com/generic/boxes/rhel7/versions/4.1.12/providers/libvirt.box";
        hash = "sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U=";
      };
      rootDisk = "box.img";
      system = "x86_64-linux";
    };

    "rhel-8" = {
      image = import <nix/fetchurl.nix> {
        url = "https://app.vagrantup.com/generic/boxes/rhel8/versions/4.1.12/providers/libvirt.box";
        hash = "sha256-zFOPjSputy1dPgrQRixBXmlyN88cAKjJ21VvjSWUCUY=";
      };
      rootDisk = "box.img";
      system = "x86_64-linux";
      postBoot = disableSELinux;
    };

    "rhel-9" = {
      image = import <nix/fetchurl.nix> {
        url = "https://app.vagrantup.com/generic/boxes/rhel9/versions/4.1.12/providers/libvirt.box";
        hash = "sha256-vL/FbB3kK1rcSaR627nWmScYGKGk4seSmAdq6N5diMg=";
      };
      rootDisk = "box.img";
      system = "x86_64-linux";
      postBoot = disableSELinux;
      extraQemuOpts = "-cpu Westmere-v2";
    };

  };

  makeTest = imageName: testName:
    let image = images.${imageName}; in
    with nixpkgsFor.${image.system};
    runCommand
      "installer-test-${imageName}-${testName}"
      { buildInputs = [ qemu_kvm openssh ];
        image = image.image;
        postBoot = image.postBoot or "";
        installScript = installScripts.${testName}.script;
        binaryTarball = binaryTarballs.${system};
      }
      ''
        shopt -s nullglob

        echo "Unpacking Vagrant box $image..."
        tar xvf $image

        image_type=$(qemu-img info ${image.rootDisk} | sed 's/file format: \(.*\)/\1/; t; d')

        qemu-img create -b ./${image.rootDisk} -F "$image_type" -f qcow2 ./disk.qcow2

        extra_qemu_opts="${image.extraQemuOpts or ""}"

        # Add the config disk, required by the Ubuntu images.
        config_drive=$(echo *configdrive.vmdk || true)
        if [[ -n $config_drive ]]; then
          extra_qemu_opts+=" -drive id=disk2,file=$config_drive,if=virtio"
        fi

        echo "Starting qemu..."
        qemu-kvm -m 4096 -nographic \
          -drive id=disk1,file=./disk.qcow2,if=virtio \
          -netdev user,id=net0,restrict=yes,hostfwd=tcp::20022-:22 -device virtio-net-pci,netdev=net0 \
          $extra_qemu_opts &
        qemu_pid=$!
        trap "kill $qemu_pid" EXIT

        if ! [ -e ./vagrant_insecure_key ]; then
          cp ${./vagrant_insecure_key} vagrant_insecure_key
        fi

        chmod 0400 ./vagrant_insecure_key

        ssh_opts="-o StrictHostKeyChecking=no -o HostKeyAlgorithms=+ssh-rsa -o PubkeyAcceptedKeyTypes=+ssh-rsa -i ./vagrant_insecure_key"
        ssh="ssh -p 20022 -q $ssh_opts vagrant@localhost"

        echo "Waiting for SSH..."
        for ((i = 0; i < 120; i++)); do
          echo "[ssh] Trying to connect..."
          if $ssh -- true; then
            echo "[ssh] Connected!"
            break
          fi
          if ! kill -0 $qemu_pid; then
            echo "qemu died unexpectedly"
            exit 1
          fi
          sleep 1
        done

        if [[ -n $postBoot ]]; then
          echo "Running post-boot commands..."
          $ssh "set -ex; $postBoot"
        fi

        echo "Copying installer..."
        scp -P 20022 $ssh_opts $binaryTarball/nix-*.tar.xz vagrant@localhost:nix.tar.xz

        echo "Running installer..."
        $ssh "set -eux; $installScript"

        echo "Testing Nix installation..."
        $ssh <<EOF
          set -ex

          # FIXME: get rid of this; ideally ssh should just work.
          source ~/.bash_profile || true
          source ~/.bash_login || true
          source ~/.profile || true
          source /etc/bashrc || true

          nix-env --version
          nix --extra-experimental-features nix-command store ping

          out=\$(nix-build --no-substitute -E 'derivation { name = "foo"; system = "x86_64-linux"; builder = "/bin/sh"; args = ["-c" "echo foobar > \$out"]; }')
          [[ \$(cat \$out) = foobar ]]
        EOF

        echo "Done!"
        touch $out
      '';

in

builtins.mapAttrs (imageName: image:
  { ${image.system} = builtins.mapAttrs (testName: test:
      makeTest imageName testName
    ) installScripts;
  }
) images
tests/installer/vagrant_insecure_key (new file, 27 lines)

-----BEGIN RSA PRIVATE KEY-----
MIIEogIBAAKCAQEA6NF8iallvQVp22WDkTkyrtvp9eWW6A8YVr+kz4TjGYe7gHzI
w+niNltGEFHzD8+v1I2YJ6oXevct1YeS0o9HZyN1Q9qgCgzUFtdOKLv6IedplqoP
kcmF0aYet2PkEDo3MlTBckFXPITAMzF8dJSIFo9D8HfdOV0IAdx4O7PtixWKn5y2
hMNG0zQPyUecp4pzC6kivAIhyfHilFR61RGL+GPXQ2MWZWFYbAGjyiYJnAmCP3NO
Td0jMZEnDkbUvxhMmBYSdETk1rRgm+R4LOzFUGaHqHDLKLX+FIPKcF96hrucXzcW
yLbIbEgE98OHlnVYCzRdK8jlqm8tehUc9c9WhQIBIwKCAQEA4iqWPJXtzZA68mKd
ELs4jJsdyky+ewdZeNds5tjcnHU5zUYE25K+ffJED9qUWICcLZDc81TGWjHyAqD1
Bw7XpgUwFgeUJwUlzQurAv+/ySnxiwuaGJfhFM1CaQHzfXphgVml+fZUvnJUTvzf
TK2Lg6EdbUE9TarUlBf/xPfuEhMSlIE5keb/Zz3/LUlRg8yDqz5w+QWVJ4utnKnK
iqwZN0mwpwU7YSyJhlT4YV1F3n4YjLswM5wJs2oqm0jssQu/BT0tyEXNDYBLEF4A
sClaWuSJ2kjq7KhrrYXzagqhnSei9ODYFShJu8UWVec3Ihb5ZXlzO6vdNQ1J9Xsf
4m+2ywKBgQD6qFxx/Rv9CNN96l/4rb14HKirC2o/orApiHmHDsURs5rUKDx0f9iP
cXN7S1uePXuJRK/5hsubaOCx3Owd2u9gD6Oq0CsMkE4CUSiJcYrMANtx54cGH7Rk
EjFZxK8xAv1ldELEyxrFqkbE4BKd8QOt414qjvTGyAK+OLD3M2QdCQKBgQDtx8pN
CAxR7yhHbIWT1AH66+XWN8bXq7l3RO/ukeaci98JfkbkxURZhtxV/HHuvUhnPLdX
3TwygPBYZFNo4pzVEhzWoTtnEtrFueKxyc3+LjZpuo+mBlQ6ORtfgkr9gBVphXZG
YEzkCD3lVdl8L4cw9BVpKrJCs1c5taGjDgdInQKBgHm/fVvv96bJxc9x1tffXAcj
3OVdUN0UgXNCSaf/3A/phbeBQe9xS+3mpc4r6qvx+iy69mNBeNZ0xOitIjpjBo2+
dBEjSBwLk5q5tJqHmy/jKMJL4n9ROlx93XS+njxgibTvU6Fp9w+NOFD/HvxB3Tcz
6+jJF85D5BNAG3DBMKBjAoGBAOAxZvgsKN+JuENXsST7F89Tck2iTcQIT8g5rwWC
P9Vt74yboe2kDT531w8+egz7nAmRBKNM751U/95P9t88EDacDI/Z2OwnuFQHCPDF
llYOUI+SpLJ6/vURRbHSnnn8a/XG+nzedGH5JGqEJNQsz+xT2axM0/W/CRknmGaJ
kda/AoGANWrLCz708y7VYgAtW2Uf1DPOIYMdvo6fxIB5i9ZfISgcJ/bbCUkFrhoH
+vq/5CIWxCPp0f85R4qxxQ5ihxJ0YDQT9Jpx4TMss4PSavPaBH3RXow5Ohe+bYoQ
NE5OgEXk2wVfZczCZpigBKbKZHNYcelXtTt/nP3rsCuGcM4h53s=
-----END RSA PRIVATE KEY-----
@@ -4,6 +4,7 @@ let
   name2 = "hello";
   name3 = "915resolution-0.5.2";
   name4 = "xf86-video-i810-1.7.4";
+  name5 = "name-that-ends-with-dash--1.0";
 
   eq = 0;
   lt = builtins.sub 0 1;
@@ -23,6 +24,8 @@ let
     ((builtins.parseDrvName name3).version == "0.5.2")
     ((builtins.parseDrvName name4).name == "xf86-video-i810")
     ((builtins.parseDrvName name4).version == "1.7.4")
+    ((builtins.parseDrvName name5).name == "name-that-ends-with-dash")
+    ((builtins.parseDrvName name5).version == "-1.0")
     (versionTest "1.0" "2.3" lt)
     (versionTest "2.1" "2.3" lt)
     (versionTest "2.3" "2.3" eq)
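The two new assertions pin down the edge case of a name that itself ends in a dash: the version starts at the second dash, so the resulting version string keeps its leading `-`. Evaluating the same expressions directly (a sketch outside the test harness, using only the value defined in the test above):

```nix
let name5 = "name-that-ends-with-dash--1.0"; in
{
  name = (builtins.parseDrvName name5).name;       # "name-that-ends-with-dash"
  version = (builtins.parseDrvName name5).version; # "-1.0"
}
```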
@@ -81,7 +81,7 @@ info=$(nix path-info --store file://$cacheDir --json $outPath2)
 [[ $info =~ 'cache1.example.org' ]]
 [[ $info =~ 'cache2.example.org' ]]
 
-# Copying to a diverted store should fail due to a lack of valid signatures.
+# Copying to a diverted store should fail due to a lack of signatures by trusted keys.
 chmod -R u+w $TEST_ROOT/store0 || true
 rm -rf $TEST_ROOT/store0
 (! nix copy --to $TEST_ROOT/store0 $outPath)