forked from lix-project/lix

commit 4aaf0ee52e
Merge branch 'master' into referenceablePaths
.github/PULL_REQUEST_TEMPLATE.md (vendored, new file, 28 lines)
@@ -0,0 +1,28 @@
# Motivation
<!-- Briefly explain what the change is about and why it is desirable. -->

# Context
<!-- Provide context. Reference open issues if available. -->

<!-- Non-trivial change: Briefly outline the implementation strategy. -->

<!-- Invasive change: Discuss alternative designs or approaches you considered. -->

<!-- Large change: Provide instructions to reviewers how to read the diff. -->

# Checklist for maintainers

<!-- Contributors: please leave this as is -->

Maintainers: tick if completed or explain if not relevant

- [ ] agreed on idea
- [ ] agreed on implementation strategy
- [ ] tests, as appropriate
  - functional tests - `tests/**.sh`
  - unit tests - `src/*/tests`
  - integration tests - `tests/nixos/*`
- [ ] documentation in the manual
- [ ] code and comments are self-explanatory
- [ ] commit message explains why the change was made
- [ ] new feature or bug fix: updated release notes
.github/workflows/backport.yml (vendored, 2 changed lines)
@@ -21,7 +21,7 @@ jobs:
 fetch-depth: 0
 - name: Create backport PRs
 # should be kept in sync with `version`
-uses: zeebe-io/backport-action@v1.0.1
+uses: zeebe-io/backport-action@v1.1.0
 with:
 # Config README: https://github.com/zeebe-io/backport-action#backport-action
 github_token: ${{ secrets.GITHUB_TOKEN }}
boehmgc-coroutine-sp-fallback.diff (new file, 77 lines; the added file is itself a patch)
@@ -0,0 +1,77 @@
diff --git a/darwin_stop_world.c b/darwin_stop_world.c
index 3dbaa3fb..36a1d1f7 100644
--- a/darwin_stop_world.c
+++ b/darwin_stop_world.c
@@ -352,6 +352,7 @@ GC_INNER void GC_push_all_stacks(void)
int nthreads = 0;
word total_size = 0;
mach_msg_type_number_t listcount = (mach_msg_type_number_t)THREAD_TABLE_SZ;
+ size_t stack_limit;
if (!EXPECT(GC_thr_initialized, TRUE))
GC_thr_init();

@@ -407,6 +408,19 @@ GC_INNER void GC_push_all_stacks(void)
GC_push_all_stack_sections(lo, hi, p->traced_stack_sect);
}
if (altstack_lo) {
+ // When a thread goes into a coroutine, we lose its original sp until
+ // control flow returns to the thread.
+ // While in the coroutine, the sp points outside the thread stack,
+ // so we can detect this and push the entire thread stack instead,
+ // as an approximation.
+ // We assume that the coroutine has similarly added its entire stack.
+ // This could be made accurate by cooperating with the application
+ // via new functions and/or callbacks.
+ stack_limit = pthread_get_stacksize_np(p->id);
+ if (altstack_lo >= altstack_hi || altstack_lo < altstack_hi - stack_limit) { // sp outside stack
+ altstack_lo = altstack_hi - stack_limit;
+ }
+
total_size += altstack_hi - altstack_lo;
GC_push_all_stack(altstack_lo, altstack_hi);
}
diff --git a/pthread_stop_world.c b/pthread_stop_world.c
index b5d71e62..aed7b0bf 100644
--- a/pthread_stop_world.c
+++ b/pthread_stop_world.c
@@ -768,6 +768,8 @@ STATIC void GC_restart_handler(int sig)
/* world is stopped. Should not fail if it isn't. */
GC_INNER void GC_push_all_stacks(void)
{
+ size_t stack_limit;
+ pthread_attr_t pattr;
GC_bool found_me = FALSE;
size_t nthreads = 0;
int i;
@@ -851,6 +853,31 @@ GC_INNER void GC_push_all_stacks(void)
hi = p->altstack + p->altstack_size;
/* FIXME: Need to scan the normal stack too, but how ? */
/* FIXME: Assume stack grows down */
+ } else {
+ if (pthread_getattr_np(p->id, &pattr)) {
+ ABORT("GC_push_all_stacks: pthread_getattr_np failed!");
+ }
+ if (pthread_attr_getstacksize(&pattr, &stack_limit)) {
+ ABORT("GC_push_all_stacks: pthread_attr_getstacksize failed!");
+ }
+ if (pthread_attr_destroy(&pattr)) {
+ ABORT("GC_push_all_stacks: pthread_attr_destroy failed!");
+ }
+ // When a thread goes into a coroutine, we lose its original sp until
+ // control flow returns to the thread.
+ // While in the coroutine, the sp points outside the thread stack,
+ // so we can detect this and push the entire thread stack instead,
+ // as an approximation.
+ // We assume that the coroutine has similarly added its entire stack.
+ // This could be made accurate by cooperating with the application
+ // via new functions and/or callbacks.
+ #ifndef STACK_GROWS_UP
+ if (lo >= hi || lo < hi - stack_limit) { // sp outside stack
+ lo = hi - stack_limit;
+ }
+ #else
+ #error "STACK_GROWS_UP not supported in boost_coroutine2 (as of june 2021), so we don't support it in Nix."
+ #endif
}
GC_push_all_stack_sections(lo, hi, traced_stack_sect);
# ifdef STACK_GROWS_UP
@@ -274,6 +274,12 @@ fi
 PKG_CHECK_MODULES([GTEST], [gtest_main])

+
+# Look for rapidcheck.
+# No pkg-config yet, https://github.com/emil-e/rapidcheck/issues/302
+AC_CHECK_HEADERS([rapidcheck/gtest.h], [], [], [#include <gtest/gtest.h>])
+AC_CHECK_LIB([rapidcheck], [])
+
 # Look for nlohmann/json.
 PKG_CHECK_MODULES([NLOHMANN_JSON], [nlohmann_json >= 3.9])
@@ -10,3 +10,12 @@ git-repository-url = "https://github.com/NixOS/nix"
 [preprocessor.anchors]
 renderers = ["html"]
 command = "jq --from-file doc/manual/anchors.jq"
+
+[output.linkcheck]
+# no Internet during the build (in the sandbox)
+follow-web-links = false
+
+# mdbook-linkcheck does not understand [foo]{#bar} style links, resulting in
+# excessive "Potential incomplete link" warnings. No other kind of warning was
+# produced at the time of writing.
+warning-policy = "ignore"
@@ -50,11 +50,16 @@ $(d)/src/SUMMARY.md: $(d)/src/SUMMARY.md.in $(d)/src/command-ref/new-cli

 $(d)/src/command-ref/new-cli: $(d)/nix.json $(d)/generate-manpage.nix $(bindir)/nix
 @rm -rf $@
-$(trace-gen) $(nix-eval) --write-to $@ --expr 'import doc/manual/generate-manpage.nix { toplevel = builtins.readFile $<; }'
+$(trace-gen) $(nix-eval) --write-to $@.tmp --expr 'import doc/manual/generate-manpage.nix { toplevel = builtins.readFile $<; }'
+# @docroot@: https://nixos.org/manual/nix/unstable/contributing/hacking.html#docroot-variable
+$(trace-gen) sed -i $@.tmp/*.md -e 's^@docroot@^../..^g'
+@mv $@.tmp $@

 $(d)/src/command-ref/conf-file.md: $(d)/conf-file.json $(d)/generate-options.nix $(d)/src/command-ref/conf-file-prefix.md $(bindir)/nix
 @cat doc/manual/src/command-ref/conf-file-prefix.md > $@.tmp
-$(trace-gen) $(nix-eval) --expr 'import doc/manual/generate-options.nix (builtins.fromJSON (builtins.readFile $<))' >> $@.tmp
+# @docroot@: https://nixos.org/manual/nix/unstable/contributing/hacking.html#docroot-variable
+$(trace-gen) $(nix-eval) --expr 'import doc/manual/generate-options.nix (builtins.fromJSON (builtins.readFile $<))' \
+| sed -e 's^@docroot@^..^g'>> $@.tmp
 @mv $@.tmp $@

 $(d)/nix.json: $(bindir)/nix

@@ -67,7 +72,9 @@ $(d)/conf-file.json: $(bindir)/nix

 $(d)/src/language/builtins.md: $(d)/builtins.json $(d)/generate-builtins.nix $(d)/src/language/builtins-prefix.md $(bindir)/nix
 @cat doc/manual/src/language/builtins-prefix.md > $@.tmp
-$(trace-gen) $(nix-eval) --expr 'import doc/manual/generate-builtins.nix (builtins.fromJSON (builtins.readFile $<))' >> $@.tmp
+# @docroot@: https://nixos.org/manual/nix/unstable/contributing/hacking.html#docroot-variable
+$(trace-gen) $(nix-eval) --expr 'import doc/manual/generate-builtins.nix (builtins.fromJSON (builtins.readFile $<))' \
+| sed -e 's^@docroot@^..^g' >> $@.tmp
 @cat doc/manual/src/language/builtins-suffix.md >> $@.tmp
 @mv $@.tmp $@

@@ -102,6 +109,12 @@ doc/manual/generated/man1/nix3-manpages: $(d)/src/command-ref/new-cli
 @touch $@

 $(docdir)/manual/index.html: $(MANUAL_SRCS) $(d)/book.toml $(d)/anchors.jq $(d)/custom.css $(d)/src/SUMMARY.md $(d)/src/command-ref/new-cli $(d)/src/command-ref/conf-file.md $(d)/src/language/builtins.md
-$(trace-gen) RUST_LOG=warn mdbook build doc/manual -d $(DESTDIR)$(docdir)/manual
+$(trace-gen) \
+set -euo pipefail; \
+RUST_LOG=warn mdbook build doc/manual -d $(DESTDIR)$(docdir)/manual.tmp 2>&1 \
+| { grep -Fv "because fragment resolution isn't implemented" || :; }
+@rm -rf $(DESTDIR)$(docdir)/manual
+@mv $(DESTDIR)$(docdir)/manual.tmp/html $(DESTDIR)$(docdir)/manual
+@rm -rf $(DESTDIR)$(docdir)/manual.tmp

 endif
@@ -67,6 +67,7 @@
 - [CLI guideline](contributing/cli-guideline.md)
 - [Release Notes](release-notes/release-notes.md)
   - [Release X.Y (202?-??-??)](release-notes/rl-next.md)
+  - [Release 2.13 (2023-01-17)](release-notes/rl-2.13.md)
   - [Release 2.12 (2022-12-06)](release-notes/rl-2.12.md)
   - [Release 2.11 (2022-08-25)](release-notes/rl-2.11.md)
   - [Release 2.10 (2022-07-11)](release-notes/rl-2.10.md)
@@ -68,7 +68,7 @@ It can also execute build plans to produce new data, which are made available to
 A build plan itself is a series of *build tasks*, together with their build inputs.

 > **Important**
-> A build task in Nix is called [derivation](../glossary#gloss-derivation).
+> A build task in Nix is called [derivation](../glossary.md#gloss-derivation).

 Each build task has a special build input executed as *build instructions* in order to perform the build.
 The result of a build task can be input to another build task.
@@ -11,7 +11,7 @@ Most Nix commands interpret the following environment variables:
   expressions using [paths](../language/values.md#type-path)
   enclosed in angle brackets (i.e., `<path>`),
   e.g. `/home/eelco/Dev:/etc/nixos`. It can be extended using the
-  [`-I` option](./opt-common#opt-I).
+  [`-I` option](./opt-common.md#opt-I).

 - [`NIX_IGNORE_SYMLINK_STORE`]{#env-NIX_IGNORE_SYMLINK_STORE}\
   Normally, the Nix store directory (typically `/nix/store`) is not
@@ -49,7 +49,7 @@ authentication, you can avoid typing the passphrase with `ssh-agent`.
 - `--include-outputs`\
   Also copy the outputs of [store derivation]s included in the closure.

-  [store derivation]: ../../glossary.md#gloss-store-derivation
+  [store derivation]: ../glossary.md#gloss-store-derivation

 - `--use-substitutes` / `-s`\
   Attempt to download missing paths on the target machine using Nix’s
@@ -66,11 +66,11 @@ The operation `--realise` essentially “builds” the specified store
 paths. Realisation is a somewhat overloaded term:

 - If the store path is a *derivation*, realisation ensures that the
-  output paths of the derivation are [valid](../glossary.md) (i.e.,
+  output paths of the derivation are [valid] (i.e.,
   the output path and its closure exist in the file system). This
   can be done in several ways. First, it is possible that the
   outputs are already valid, in which case we are done
-  immediately. Otherwise, there may be [substitutes](../glossary.md)
+  immediately. Otherwise, there may be [substitutes]
   that produce the outputs (e.g., by downloading them). Finally, the
   outputs can be produced by running the build task described
   by the derivation.

@@ -82,6 +82,9 @@ paths. Realisation is a somewhat overloaded term:
   produced through substitutes. If there are no (successful)
   substitutes, realisation fails.

+[valid]: ../glossary.md#gloss-validity
+[substitutes]: ../glossary.md#gloss-substitute
+
 The output path of each derivation is printed on standard output. (For
 non-derivations argument, the argument itself is printed.)

@@ -155,6 +158,12 @@ To test whether a previously-built derivation is deterministic:
 $ nix-build '<nixpkgs>' -A hello --check -K
 ```

+Use [`--read-log`](#operation---read-log) to show the stderr and stdout of a build:
+
+```console
+$ nix-store --read-log $(nix-instantiate ./test.nix)
+```
+
 # Operation `--serve`

 ## Synopsis

@@ -289,8 +298,8 @@ error: cannot delete path `/nix/store/zq0h41l75vlb4z45kzgjjmsjxvcv1qk7-mesa-6.4'

 ## Description

-The operation `--query` displays various bits of information about the
-store paths . The queries are described below. At most one query can be
+The operation `--query` displays information about [store path]s.
+The queries are described below. At most one query can be
 specified. The default query is `--outputs`.

 The paths *paths* may also be symlinks from outside of the Nix store, to

@@ -310,12 +319,12 @@ symlink.
 ## Queries

 - `--outputs`\
-  Prints out the [output paths](../glossary.md) of the store
+  Prints out the [output path]s of the store
   derivations *paths*. These are the paths that will be produced when
   the derivation is built.

 - `--requisites`; `-R`\
-  Prints out the [closure](../glossary.md) of the store path *paths*.
+  Prints out the [closure] of the given *paths*.

   This query has one option:

@@ -332,10 +341,12 @@ symlink.
   derivation and specifying the option `--include-outputs`.

 - `--references`\
-  Prints the set of [references](../glossary.md) of the store paths
+  Prints the set of [references]s of the store paths
   *paths*, that is, their immediate dependencies. (For *all*
   dependencies, use `--requisites`.)

+  [reference]: ../glossary.md#gloss-reference
+
 - `--referrers`\
   Prints the set of *referrers* of the store paths *paths*, that is,
   the store paths currently existing in the Nix store that refer to

@@ -350,11 +361,13 @@ symlink.
   in the Nix store that are dependent on *paths*.

 - `--deriver`; `-d`\
-  Prints the [deriver](../glossary.md) of the store paths *paths*. If
+  Prints the [deriver] of the store paths *paths*. If
   the path has no deriver (e.g., if it is a source file), or if the
   deriver is not known (e.g., in the case of a binary-only
   deployment), the string `unknown-deriver` is printed.

+  [deriver]: ../glossary.md#gloss-deriver
+
 - `--graph`\
   Prints the references graph of the store paths *paths* in the format
   of the `dot` tool of AT\&T's [Graphviz
@@ -92,7 +92,8 @@ $ nix develop

 The unit-tests for each Nix library (`libexpr`, `libstore`, etc..) are defined
 under `src/{library_name}/tests` using the
-[googletest](https://google.github.io/googletest/) framework.
+[googletest](https://google.github.io/googletest/) and
+[rapidcheck](https://github.com/emil-e/rapidcheck) frameworks.

 You can run the whole testsuite with `make check`, or the tests for a specific component with `make libfoo-tests_RUN`. Finer-grained filtering is also possible using the [--gtest_filter](https://google.github.io/googletest/advanced.html#running-a-subset-of-the-tests) command-line option.

@@ -249,3 +250,36 @@ search/replaced in it for each new build.
 The installer now supports a `--tarball-url-prefix` flag which _may_ have
 solved this need?
 -->
+
+### Checking links in the manual
+
+The build checks for broken internal links.
+This happens late in the process, so `nix build` is not suitable for iterating.
+To build the manual incrementally, run:
+
+```console
+make html -j $NIX_BUILD_CORES
+```
+
+In order to reflect changes to the [Makefile], clear all generated files before re-building:
+
+[Makefile]: https://github.com/NixOS/nix/blob/master/doc/manual/local.mk
+
+```console
+rm $(git ls-files doc/manual/ -o | grep -F '.md') && rmdir doc/manual/src/command-ref/new-cli && make html -j $NIX_BUILD_CORES
+```
+
+[`mdbook-linkcheck`] does not implement checking [URI fragments] yet.
+
+[`mdbook-linkcheck`]: https://github.com/Michael-F-Bryan/mdbook-linkcheck
+[URI fragments]: https://en.m.wikipedia.org/wiki/URI_fragment
+
+#### `@docroot@` variable
+
+`@docroot@` provides a base path for links that occur in reusable snippets or other documentation that doesn't have a base path of its own.
+
+If a broken link occurs in a snippet that was inserted into multiple generated files in different directories, use `@docroot@` to reference the `doc/manual/src` directory.
+
+If the `@docroot@` literal appears in an error message from the `mdbook-linkcheck` tool, the `@docroot@` replacement needs to be applied to the generated source file that mentions it.
+See existing `@docroot@` logic in the [Makefile].
+Regular markdown files used for the manual have a base path of their own and they can use relative paths instead of `@docroot@`.
@@ -19,6 +19,17 @@

   [store derivation]: #gloss-store-derivation

+- [realise]{#gloss-realise}, realisation\
+  Ensure a [store path] is [valid][validity].
+
+  This means either running the `builder` executable as specified in the corresponding [derivation] or fetching a pre-built [store object] from a [substituter].
+
+  See [`nix-build`](./command-ref/nix-build.md) and [`nix-store --realise`](./command-ref/nix-store.md#operation---realise).
+
+  See [`nix build`](./command-ref/new-cli/nix3-build.md) (experimental).
+
+  [realise]: #gloss-realise
+
 - [content-addressed derivation]{#gloss-content-addressed-derivation}\
   A derivation which has the
   [`__contentAddressed`](./language/advanced-attributes.md#adv-attr-__contentAddressed)

@@ -101,6 +112,8 @@
   copy store objects it doesn't have. For details, see the
   [`substituters` option](./command-ref/conf-file.md#conf-substituters).

+  [substituter]: #gloss-substituter
+
 - [purity]{#gloss-purity}\
   The assumption that equal Nix derivations when run always produce
   the same output. This cannot be guaranteed in general (e.g., a

@@ -143,19 +156,25 @@
   to path `Q`, then `Q` is in the closure of `P`. Further, if `Q`
   references `R` then `R` is also in the closure of `P`.

+  [closure]: #gloss-closure
+
 - [output path]{#gloss-output-path}\
   A [store path] produced by a [derivation].

   [output path]: #gloss-output-path

 - [deriver]{#gloss-deriver}\
-  The deriver of an *output path* is the store
-  derivation that built it.
+  The [store derivation] that produced an [output path].

 - [validity]{#gloss-validity}\
-  A store path is considered *valid* if it exists in the file system,
-  is listed in the Nix database as being valid, and if all paths in
-  its closure are also valid.
+  A store path is valid if all [store object]s in its [closure] can be read from the [store].
+
+  For a local store, this means:
+  - The store path leads to an existing [store object] in that [store].
+  - The store path is listed in the Nix database as being valid.
+  - All paths in the store path's [closure] are valid.
+
+  [validity]: #gloss-validity
+
 - [user environment]{#gloss-user-env}\
   An automatically generated store object that consists of a set of
@@ -120,10 +120,10 @@ sudo rm -rf /nix /etc/nix /etc/profile/nix.sh ~root/.nix-profile ~root/.nix-defe
 Remove build users and their group:

 ```console
-for i in $(seq 30001 30032); do
-sudo userdel $i
+for i in $(seq 1 32); do
+sudo userdel nixbld$i
 done
-sudo groupdel 30000
+sudo groupdel nixbld
 ```

 There may also be references to Nix in
@@ -191,12 +191,12 @@ This is an incomplete overview of language features, by example.
 <tr>
 <td>

-<nixpkgs>
+`<nixpkgs>`

 </td>
 <td>

-Search path. Value determined by [`$NIX_PATH` environment variable](../command-ref/env-common.md#env-NIX_PATH).
+Search path for Nix files. Value determined by [`$NIX_PATH` environment variable](../command-ref/env-common.md#env-NIX_PATH).

 </td>
 </tr>
@@ -1,28 +1,167 @@
 # Operators

-The table below lists the operators in the Nix language, in
-order of precedence (from strongest to weakest binding).
+| Name | Syntax | Associativity | Precedence |
+|----------------------------------------|--------------------------------------------|---------------|------------|
+| [Attribute selection] | *attrset* `.` *attrpath* \[ `or` *expr* \] | none | 1 |
+| Function application | *func* *expr* | left | 2 |
+| [Arithmetic negation][arithmetic] | `-` *number* | none | 3 |
+| [Has attribute] | *attrset* `?` *attrpath* | none | 4 |
+| List concatenation | *list* `++` *list* | right | 5 |
+| [Multiplication][arithmetic] | *number* `*` *number* | left | 6 |
+| [Division][arithmetic] | *number* `/` *number* | left | 6 |
+| [Subtraction][arithmetic] | *number* `-` *number* | left | 7 |
+| [Addition][arithmetic] | *number* `+` *number* | left | 7 |
+| [String concatenation] | *string* `+` *string* | left | 7 |
+| [Path concatenation] | *path* `+` *path* | left | 7 |
+| [Path and string concatenation] | *path* `+` *string* | left | 7 |
+| [String and path concatenation] | *string* `+` *path* | left | 7 |
+| Logical negation (`NOT`) | `!` *bool* | none | 8 |
+| [Update] | *attrset* `//` *attrset* | right | 9 |
+| [Less than][Comparison] | *expr* `<` *expr* | none | 10 |
+| [Less than or equal to][Comparison] | *expr* `<=` *expr* | none | 10 |
+| [Greater than][Comparison] | *expr* `>` *expr* | none | 10 |
+| [Greater than or equal to][Comparison] | *expr* `>=` *expr* | none | 10 |
+| [Equality] | *expr* `==` *expr* | none | 11 |
+| Inequality | *expr* `!=` *expr* | none | 11 |
+| Logical conjunction (`AND`) | *bool* `&&` *bool* | left | 12 |
+| Logical disjunction (`OR`) | *bool* `\|\|` *bool* | left | 13 |
+| [Logical implication] | *bool* `->` *bool* | none | 14 |
+
+[string]: ./values.md#type-string
+[path]: ./values.md#type-path
+[number]: ./values.md#type-number
+[list]: ./values.md#list
+[attribute set]: ./values.md#attribute-set
+
+## Attribute selection
+
+Select the attribute denoted by attribute path *attrpath* from [attribute set] *attrset*.
+If the attribute doesn’t exist, return *value* if provided, otherwise abort evaluation.
+
+<!-- FIXME: the following should to into its own language syntax section, but that needs more work to fit in well -->
+
+An attribute path is a dot-separated list of attribute names.
+An attribute name can be an identifier or a string.
+
+> *attrpath* = *name* [ `.` *name* ]...
+> *name* = *identifier* | *string*
+> *identifier* ~ `[a-zA-Z_][a-zA-Z0-9_'-]*`
+
+[Attribute selection]: #attribute-selection
+
+## Has attribute
+
+> *attrset* `?` *attrpath*
+
+Test whether [attribute set] *attrset* contains the attribute denoted by *attrpath*.
+The result is a [Boolean] value.
+
+[Boolean]: ./values.md#type-boolean
+
+[Has attribute]: #has-attribute
+
+## Arithmetic
+
+Numbers are type-compatible:
+Pure integer operations will always return integers, whereas any operation involving at least one floating point number return a floating point number.
+
+See also [Comparison] and [Equality].
+
+The `+` operator is overloaded to also work on strings and paths.
+
+[arithmetic]: #arithmetic
+
+## String concatenation
+
+> *string* `+` *string*
+
+Concatenate two [string]s and merge their string contexts.
+
+[String concatenation]: #string-concatenation
+
+## Path concatenation
+
+> *path* `+` *path*
+
+Concatenate two [path]s.
+The result is a path.
+
+[Path concatenation]: #path-concatenation
+
+## Path and string concatenation
+
+> *path* + *string*
+
+Concatenate *[path]* with *[string]*.
+The result is a path.
+
+> **Note**
+>
+> The string must not have a string context that refers to a [store path].
+
+[Path and string concatenation]: #path-and-string-concatenation
+
+## String and path concatenation
+
+> *string* + *path*
+
+Concatenate *[string]* with *[path]*.
+The result is a string.
+
+> **Important**
+>
+> The file or directory at *path* must exist and is copied to the [store].
+> The path appears in the result as the corresponding [store path].
+
+[store path]: ../glossary.md#gloss-store-path
+[store]: ../glossary.md#gloss-store
+
+[Path and string concatenation]: #path-and-string-concatenation
+
+## Update
+
+> *attrset1* // *attrset2*
+
+Update [attribute set] *attrset1* with names and values from *attrset2*.
+
+The returned attribute set will have of all the attributes in *attrset1* and *attrset2*.
+If an attribute name is present in both, the attribute value from the latter is taken.
+
+[Update]: #update
+
+## Comparison
+
+Comparison is
+
+- [arithmetic] for [number]s
+- lexicographic for [string]s and [path]s
+- item-wise lexicographic for [list]s:
+  elements at the same index in both lists are compared according to their type and skipped if they are equal.
+
+All comparison operators are implemented in terms of `<`, and the following equivalencies hold:
+
+| comparison | implementation |
+|--------------|-----------------------|
+| *a* `<=` *b* | `! (` *b* `<` *a* `)` |
+| *a* `>` *b* | *b* `<` *a* |
+| *a* `>=` *b* | `! (` *a* `<` *b* `)` |
+
+[Comparison]: #comparison-operators
+
+## Equality
+
+- [Attribute sets][attribute set] and [list]s are compared recursively, and therefore are fully evaluated.
+- Comparison of [function]s always returns `false`.
+- Numbers are type-compatible, see [arithmetic] operators.
+- Floating point numbers only differ up to a limited precision.
+
+[function]: ./constructs.md#functions
+
+[Equality]: #equality
+
+## Logical implication
+
+Equivalent to `!`*b1* `||` *b2*.
+
+[Logical implication]: #logical-implication
-| Name | Syntax | Associativity | Description | Precedence |
-| ------------------------ | ----------------------------------- | ------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------- |
-| Select | *e* `.` *attrpath* \[ `or` *def* \] | none | Select attribute denoted by the attribute path *attrpath* from set *e*. (An attribute path is a dot-separated list of attribute names.) If the attribute doesn’t exist, return *def* if provided, otherwise abort evaluation. | 1 |
-| Application | *e1* *e2* | left | Call function *e1* with argument *e2*. | 2 |
-| Arithmetic Negation | `-` *e* | none | Arithmetic negation. | 3 |
-| Has Attribute | *e* `?` *attrpath* | none | Test whether set *e* contains the attribute denoted by *attrpath*; return `true` or `false`. | 4 |
-| List Concatenation | *e1* `++` *e2* | right | List concatenation. | 5 |
-| Multiplication | *e1* `*` *e2*, | left | Arithmetic multiplication. | 6 |
-| Division | *e1* `/` *e2* | left | Arithmetic division. | 6 |
-| Addition | *e1* `+` *e2* | left | Arithmetic addition. | 7 |
-| Subtraction | *e1* `-` *e2* | left | Arithmetic subtraction. | 7 |
-| String Concatenation | *string1* `+` *string2* | left | String concatenation. | 7 |
-| Not | `!` *e* | none | Boolean negation. | 8 |
-| Update | *e1* `//` *e2* | right | Return a set consisting of the attributes in *e1* and *e2* (with the latter taking precedence over the former in case of equally named attributes). | 9 |
-| Less Than | *e1* `<` *e2*, | none | Arithmetic/lexicographic comparison. | 10 |
-| Less Than or Equal To | *e1* `<=` *e2* | none | Arithmetic/lexicographic comparison. | 10 |
-| Greater Than | *e1* `>` *e2* | none | Arithmetic/lexicographic comparison. | 10 |
-| Greater Than or Equal To | *e1* `>=` *e2* | none | Arithmetic/lexicographic comparison. | 10 |
-| Equality | *e1* `==` *e2* | none | Equality. | 11 |
-| Inequality | *e1* `!=` *e2* | none | Inequality. | 11 |
-| Logical AND | *e1* `&&` *e2* | left | Logical AND. | 12 |
-| Logical OR | *e1* <code>||</code> *e2* | left | Logical OR. | 13 |
-| Logical Implication | *e1* `->` *e2* | none | Logical implication (equivalent to <code>!e1 || e2</code>). | 14 |
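The rewritten operator documentation above lends itself to a quick worked example. The following snippet is illustrative only and not part of the commit; the file name `operators-example.nix` is hypothetical, and it evaluates with stock `nix-instantiate --eval --strict`:

```nix
# operators-example.nix -- hypothetical file, for illustration only
let
  attrs = { a = 1; b = 2; };
in {
  select   = attrs.a;               # attribute selection: 1
  fallback = attrs.c or 0;          # `or` default when the attribute is missing: 0
  hasAttr  = attrs ? b;             # has-attribute test: true
  update   = attrs // { b = 3; };   # update: { a = 1; b = 3; } (right operand wins)
  concat   = [ 1 2 ] ++ [ 3 ];      # list concatenation: [ 1 2 3 ]
  implies  = false -> true;         # logical implication: true
}
```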
@@ -85,9 +85,10 @@
   Numbers, which can be *integers* (like `123`) or *floating point*
   (like `123.43` or `.27e13`).

-  Numbers are type-compatible: pure integer operations will always
-  return integers, whereas any operation involving at least one
-  floating point number will have a floating point number as a result.
+  See [arithmetic] and [comparison] operators for semantics.
+
+  [arithmetic]: ./operators.md#arithmetic
+  [comparison]: ./operators.md#comparison

 - <a id="type-path" href="#type-path">Path</a>
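For reference, the number type-compatibility rule this hunk delegates to the operators page behaves as follows; a minimal illustrative evaluation, not part of the commit:

```nix
[
  (1 + 2)     # pure integer arithmetic stays integral: 3
  (1 + 2.0)   # any floating point operand makes the result a float: 3.0
  (1 < 2.5)   # comparison works across integers and floats: true
]
```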
@@ -32,13 +32,13 @@ which should print something like:
 Priority: 30

 On the client side, you can tell Nix to use your binary cache using
-`--option extra-binary-caches`, e.g.:
+`--substituters`, e.g.:

 ```console
-$ nix-env -iA nixpkgs.firefox --option extra-binary-caches http://avalon:8080/
+$ nix-env -iA nixpkgs.firefox --substituters http://avalon:8080/
 ```

-The option `extra-binary-caches` tells Nix to use this binary cache in
+The option `substituters` tells Nix to use this binary cache in
 addition to your default caches, such as <https://cache.nixos.org>.
 Thus, for any path in the closure of Firefox, Nix will first check if
 the path is available on the server `avalon` or another binary caches.

@@ -47,4 +47,4 @@ If not, it will fall back to building from source.
 You can also tell Nix to always use your binary cache by adding a line
 to the `nix.conf` configuration file like this:

-binary-caches = http://avalon:8080/ https://cache.nixos.org/
+substituters = http://avalon:8080/ https://cache.nixos.org/
doc/manual/src/release-notes/rl-2.13.md (new file, 44 lines)
@@ -0,0 +1,44 @@
# Release 2.13 (2023-01-17)

* The `repeat` and `enforce-determinism` options have been removed
  since they had been broken under many circumstances for a long time.

* You can now use [flake references] in the [old command line interface], e.g.

  [flake references]: ../command-ref/new-cli/nix3-flake.md#flake-references
  [old command line interface]: ../command-ref/main-commands.md

  ```shell-session
  # nix-build flake:nixpkgs -A hello
  # nix-build -I nixpkgs=flake:github:NixOS/nixpkgs/nixos-22.05 \
      '<nixpkgs>' -A hello
  # NIX_PATH=nixpkgs=flake:nixpkgs nix-build '<nixpkgs>' -A hello
  ```

* Instead of "antiquotation", the more common term [string interpolation](../language/string-interpolation.md) is now used consistently.
  Historical release notes were not changed.

* Error traces have been reworked to provide detailed explanations and more
  accurate error locations. A short excerpt of the trace is now shown by
  default when an error occurs.

* Allow explicitly selecting outputs in a store derivation installable, just like we can do with other sorts of installables.
  For example,
  ```shell-session
  # nix build /nix/store/gzaflydcr6sb3567hap9q6srzx8ggdgg-glibc-2.33-78.drv^dev
  ```
  now works just as
  ```shell-session
  # nix build nixpkgs#glibc^dev
  ```
  does already.

* On Linux, `nix develop` now sets the
  [*personality*](https://man7.org/linux/man-pages/man2/personality.2.html)
  for the development shell in the same way as the actual build of the
  derivation. This makes shells for `i686-linux` derivations work
  correctly on `x86_64-linux`.

* You can now disable the global flake registry by setting the `flake-registry`
  configuration option to an empty string. The same can be achieved at runtime with
  `--flake-registry ""`.
@@ -1,26 +1,11 @@
-# Release X.Y (202?-??-??)
-
-* The `repeat` and `enforce-determinism` options have been removed
-  since they had been broken under many circumstances for a long time.
-
-* You can now use [flake references] in the [old command line interface], e.g.
-
-  [flake references]: ../command-ref/new-cli/nix3-flake.md#flake-references
-  [old command line interface]: ../command-ref/main-commands.md
-
-  ```
-  # nix-build flake:nixpkgs -A hello
-  # nix-build -I nixpkgs=flake:github:NixOS/nixpkgs/nixos-22.05 \
-      '<nixpkgs>' -A hello
-  # NIX_PATH=nixpkgs=flake:nixpkgs nix-build '<nixpkgs>' -A hello
-  ```
-
-* Instead of "antiquotation", the more common term [string interpolation](../language/string-interpolation.md) is now used consistently.
-  Historical release notes were not changed.
-
-* Error traces have been reworked to provide detailed explanations and more
-  accurate error locations. A short excerpt of the trace is now shown by
-  default when an error occurs.
+* A new function `builtins.readFileType` is available. It is similar to
+  `builtins.readDir` but acts on a single file or directory.
+
+* The `builtins.readDir` function has been optimized when encountering not-yet-known
+  file types from POSIX's `readdir`. In such cases the type of each file is/was
+  discovered by making multiple syscalls. This change makes these operations
+  lazy such that these lookups will only be performed if the attribute is used.
+  This optimization affects a minority of filesystems and operating systems.

 * In derivations that use structured attributes, you can now use `unsafeDiscardReferences`
   to disable scanning a given output for runtime dependencies:
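As a sketch of the `builtins.readFileType` entry above (not part of the commit; it assumes the function returns the same type strings as the attribute values of `builtins.readDir`, i.e. `"regular"`, `"directory"`, `"symlink"`, `"unknown"`):

```nix
# Illustrative only; evaluate in a directory that contains a regular file `default.nix`.
{
  file = builtins.readFileType ./default.nix;  # "regular" (assuming the file exists)
  dir  = builtins.readFileType ./.;            # "directory"
}
```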
flake.nix (86 changed lines)
@@ -82,7 +82,9 @@
 });

 configureFlags =
-lib.optionals stdenv.isLinux [
+[
+"CXXFLAGS=-I${lib.getDev rapidcheck}/extras/gtest/include"
+] ++ lib.optionals stdenv.isLinux [
 "--with-boost=${boost}/lib"
 "--with-sandbox-shell=${sh}/bin/busybox"
 ]

@@ -96,6 +98,7 @@
 buildPackages.flex
 (lib.getBin buildPackages.lowdown-nix)
 buildPackages.mdbook
+buildPackages.mdbook-linkcheck
 buildPackages.autoconf-archive
 buildPackages.autoreconfHook
 buildPackages.pkg-config

@@ -115,6 +118,7 @@
 boost
 lowdown-nix
 gtest
+rapidcheck
 ]
 ++ lib.optionals stdenv.isLinux [libseccomp]
 ++ lib.optional (stdenv.isLinux || stdenv.isDarwin) libsodium

@@ -127,9 +131,14 @@
 });

 propagatedDeps =
-[ (boehmgc.override {
+[ ((boehmgc.override {
 enableLargeConfig = true;
+}).overrideAttrs(o: {
+patches = (o.patches or []) ++ [
+./boehmgc-coroutine-sp-fallback.diff
+];
 })
+)
 nlohmann_json
 ];
 };

@@ -400,6 +409,18 @@
 };
 };

+nixos-lib = import (nixpkgs + "/nixos/lib") { };
+
+# https://nixos.org/manual/nixos/unstable/index.html#sec-calling-nixos-tests
+runNixOSTestFor = system: test: nixos-lib.runTest {
+imports = [ test ];
+hostPkgs = nixpkgsFor.${system};
+defaults = {
+nixpkgs.pkgs = nixpkgsFor.${system};
+};
+_module.args.nixpkgs = nixpkgs;
+};
+
 in {

 # A Nixpkgs overlay that overrides the 'nix' and

@@ -456,6 +477,10 @@

 src = self;

+configureFlags = [
+"CXXFLAGS=-I${lib.getDev pkgs.rapidcheck}/extras/gtest/include"
+];
+
 enableParallelBuilding = true;

 nativeBuildInputs = nativeBuildDeps;

@@ -474,49 +499,22 @@
 };

 # System tests.
-tests.remoteBuilds = import ./tests/remote-builds.nix {
-system = "x86_64-linux";
-inherit nixpkgs;
-overlay = self.overlays.default;
-};
+tests.remoteBuilds = runNixOSTestFor "x86_64-linux" ./tests/nixos/remote-builds.nix;

-tests.nix-copy-closure = import ./tests/nix-copy-closure.nix {
-system = "x86_64-linux";
-inherit nixpkgs;
-overlay = self.overlays.default;
-};
+tests.nix-copy-closure = runNixOSTestFor "x86_64-linux" ./tests/nixos/nix-copy-closure.nix;

-tests.nssPreload = (import ./tests/nss-preload.nix rec {
-system = "x86_64-linux";
-inherit nixpkgs;
-overlay = self.overlays.default;
-});
+tests.nssPreload = runNixOSTestFor "x86_64-linux" ./tests/nixos/nss-preload.nix;

-tests.githubFlakes = (import ./tests/github-flakes.nix rec {
-system = "x86_64-linux";
-inherit nixpkgs;
-overlay = self.overlays.default;
-});
+tests.githubFlakes = runNixOSTestFor "x86_64-linux" ./tests/nixos/github-flakes.nix;

-tests.sourcehutFlakes = (import ./tests/sourcehut-flakes.nix rec {
-system = "x86_64-linux";
-inherit nixpkgs;
-overlay = self.overlays.default;
-});
+tests.sourcehutFlakes = runNixOSTestFor "x86_64-linux" ./tests/nixos/sourcehut-flakes.nix;

-tests.containers = (import ./tests/containers.nix rec {
-system = "x86_64-linux";
-inherit nixpkgs;
-overlay = self.overlays.default;
-});
+tests.containers = runNixOSTestFor "x86_64-linux" ./tests/nixos/containers/containers.nix;

 tests.setuid = nixpkgs.lib.genAttrs
 ["i686-linux" "x86_64-linux"]
-(system:
-import ./tests/setuid.nix rec {
-inherit nixpkgs system;
-overlay = self.overlays.default;
-});
+(system: runNixOSTestFor system ./tests/nixos/setuid.nix);

 # Make sure that nix-env still produces the exact same result
 # on a particular version of Nixpkgs.

@@ -531,6 +529,12 @@
 mkdir $out
 '';

+tests.nixpkgsLibTests =
+nixpkgs.lib.genAttrs systems (system:
+import (nixpkgs + "/lib/tests/release.nix")
+{ pkgs = nixpkgsFor.${system}; }
+);
+
 metrics.nixpkgs = import "${nixpkgs-regression}/pkgs/top-level/metrics.nix" {
 pkgs = nixpkgsFor.x86_64-linux;
 nixpkgs = nixpkgs-regression;

@@ -561,6 +565,7 @@
 binaryTarball = self.hydraJobs.binaryTarball.${system};
 perlBindings = self.hydraJobs.perlBindings.${system};
 installTests = self.hydraJobs.installTests.${system};
+nixpkgsLibTests = self.hydraJobs.tests.nixpkgsLibTests.${system};
 } // (nixpkgs.lib.optionalAttrs (builtins.elem system linux64BitSystems)) {
 dockerImage = self.hydraJobs.dockerImage.${system};
 });

@@ -642,6 +647,7 @@
 inherit system crossSystem;
 overlays = [ self.overlays.default ];
 };
+inherit (nixpkgsCross) lib;
 in with commonDeps { pkgs = nixpkgsCross; }; nixpkgsCross.stdenv.mkDerivation {
 name = "nix-${version}";

@@ -654,7 +660,11 @@
 nativeBuildInputs = nativeBuildDeps;
 buildInputs = buildDeps ++ propagatedDeps;

-configureFlags = [ "--sysconfdir=/etc" "--disable-doc-gen" ];
+configureFlags = [
+"CXXFLAGS=-I${lib.getDev nixpkgsCross.rapidcheck}/extras/gtest/include"
+"--sysconfdir=/etc"
+"--disable-doc-gen"
+];

 enableParallelBuilding = true;
@@ -36,17 +36,45 @@ Issues on the board progress through the following states:

 - No Status

-  Team members can add pull requests or issues to discuss or review together.
-
   During the discussion meeting, the team triages new items.
+  To be considered, issues and pull requests must have a high-level description to provide the whole team with the necessary context at a glance.
+
+  On every meeting, at least one item from each of the following categories is inspected:
+
+  1. [critical](https://github.com/NixOS/nix/labels/critical)
+  2. [security](https://github.com/NixOS/nix/labels/security)
+  3. [regression](https://github.com/NixOS/nix/labels/regression)
+  4. [bug](https://github.com/NixOS/nix/issues?q=is%3Aopen+label%3Abug+sort%3Areactions-%2B1-desc)
+
+  - [oldest pull requests](https://github.com/NixOS/nix/pulls?q=is%3Apr+is%3Aopen+sort%3Acreated-asc)
+  - [most popular pull requests](https://github.com/NixOS/nix/pulls?q=is%3Apr+is%3Aopen+sort%3Areactions-%2B1-desc)
+  - [oldest issues](https://github.com/NixOS/nix/issues?q=is%3Aissue+is%3Aopen+sort%3Acreated-asc)
+  - [most popular issues](https://github.com/NixOS/nix/issues?q=is%3Aissue+is%3Aopen+sort%3Areactions-%2B1-desc)
+
+  Team members can also add pull requests or issues they would like the whole team to consider.
+
   If there is disagreement on the general idea behind an issue or pull request, it is moved to _To discuss_, otherwise to _In review_.

 - To discuss

-  Pull requests and issues that are important and controversial are discussed by the team during discussion meetings.
+  Pull requests and issues that are deemed important and controversial are discussed by the team during discussion meetings.

   This may be where the merit of the change itself or the implementation strategy is contested by a team member.

+  As a general guideline, the order of items is determined as follows:
+
+  - Prioritise pull requests over issues
+
+    Contributors who took the time to implement concrete change proposals should not wait indefinitely.
+
+  - Prioritise fixing bugs over documentation, improvements or new features
+
+    The team values stability and accessibility higher than raw functionality.
+
+  - Interleave issues and PRs
+
+    This way issues without attempts at a solution get a chance to get addressed.
+
 - In review

   Pull requests in this column are reviewed together during work meetings.
@@ -24,12 +24,17 @@ $1
 EOF
 }

+escape_systemd_env() {
+temp_var="${1//\'/\\\'}"
+echo "${temp_var//\%/%%}"
+}
+
 # Gather all non-empty proxy environment variables into a string
 create_systemd_proxy_env() {
 vars="http_proxy https_proxy ftp_proxy no_proxy HTTP_PROXY HTTPS_PROXY FTP_PROXY NO_PROXY"
 for v in $vars; do
 if [ "x${!v:-}" != "x" ]; then
-echo "Environment=${v}=${!v}"
+echo "Environment=${v}=$(escape_systemd_env ${!v})"
 fi
 done
 }
@@ -1,3 +1,15 @@
+function add_path --argument-names new_path
+    if type -q fish_add_path
+        # fish 3.2.0 or newer
+        fish_add_path --prepend --global $new_path
+    else
+        # older versions of fish
+        if not contains $new_path $fish_user_paths
+            set --global fish_user_paths $new_path $fish_user_paths
+        end
+    end
+end
+
 # Only execute this file once per shell.
 if test -n "$__ETC_PROFILE_NIX_SOURCED"
   exit
@@ -31,5 +43,7 @@ else
   end
 end
 
-fish_add_path --prepend --global "@localstatedir@/nix/profiles/default/bin"
-fish_add_path --prepend --global "$HOME/.nix-profile/bin"
+add_path "@localstatedir@/nix/profiles/default/bin"
+add_path "$HOME/.nix-profile/bin"
+
+functions -e add_path
@@ -1,3 +1,15 @@
+function add_path --argument-names new_path
+    if type -q fish_add_path
+        # fish 3.2.0 or newer
+        fish_add_path --prepend --global $new_path
+    else
+        # older versions of fish
+        if not contains $new_path $fish_user_paths
+            set --global fish_user_paths $new_path $fish_user_paths
+        end
+    end
+end
+
 if test -n "$HOME" && test -n "$USER"
 
     # Set up the per-user profile.
@@ -32,6 +44,8 @@ if test -n "$HOME" && test -n "$USER"
         set --export --prepend --path MANPATH "$NIX_LINK/share/man"
     end
 
-    fish_add_path --prepend --global "$NIX_LINK/bin"
+    add_path "$NIX_LINK/bin"
     set --erase NIX_LINK
 end
+
+functions -e add_path
@@ -34,8 +34,8 @@ MixEvalArgs::MixEvalArgs()
         .shortName = 'I',
         .description = R"(
   Add *path* to the Nix search path. The Nix search path is
-  initialized from the colon-separated [`NIX_PATH`](./env-common.md#env-NIX_PATH) environment
-  variable, and is used to look up the location of Nix expressions using [paths](../language/values.md#type-path) enclosed in angle
+  initialized from the colon-separated [`NIX_PATH`](@docroot@/command-ref/env-common.md#env-NIX_PATH) environment
+  variable, and is used to look up the location of Nix expressions using [paths](@docroot@/language/values.md#type-path) enclosed in angle
   brackets (i.e., `<nixpkgs>`).
 
   For instance, passing
@@ -1,5 +1,6 @@
 #include "globals.hh"
 #include "installables.hh"
+#include "outputs-spec.hh"
 #include "util.hh"
 #include "command.hh"
 #include "attr-path.hh"
@@ -358,7 +359,7 @@ void completeFlakeRef(ref<Store> store, std::string_view prefix)
     }
 }
 
-DerivedPath Installable::toDerivedPath()
+DerivedPathWithInfo Installable::toDerivedPath()
 {
     auto buildables = toDerivedPaths();
     if (buildables.size() != 1)
@@ -401,18 +402,6 @@ struct InstallableStorePath : Installable
     ref<Store> store;
     DerivedPath req;
 
-    InstallableStorePath(ref<Store> store, StorePath && storePath)
-        : store(store),
-          req(storePath.isDerivation()
-              ? (DerivedPath) DerivedPath::Built {
-                  .drvPath = std::move(storePath),
-                  .outputs = {},
-              }
-              : (DerivedPath) DerivedPath::Opaque {
-                  .path = std::move(storePath),
-              })
-    { }
-
     InstallableStorePath(ref<Store> store, DerivedPath && req)
         : store(store), req(std::move(req))
     { }
@@ -422,21 +411,9 @@ struct InstallableStorePath : Installable
         return req.to_string(*store);
     }
 
-    DerivedPaths toDerivedPaths() override
-    {
-        return { req };
-    }
-
-    StorePathSet toDrvPaths(ref<Store> store) override
+    DerivedPathsWithInfo toDerivedPaths() override
     {
-        return std::visit(overloaded {
-            [&](const DerivedPath::Built & bfd) -> StorePathSet {
-                return { bfd.drvPath };
-            },
-            [&](const DerivedPath::Opaque & bo) -> StorePathSet {
-                return { getDeriver(store, *this, bo.path) };
-            },
-        }, req.raw());
+        return {{.path = req, .info = {} }};
     }
 
     std::optional<StorePath> getStorePath() override
@ -452,52 +429,24 @@ struct InstallableStorePath : Installable
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
DerivedPaths InstallableValue::toDerivedPaths()
|
|
||||||
{
|
|
||||||
DerivedPaths res;
|
|
||||||
|
|
||||||
std::map<StorePath, std::set<std::string>> drvsToOutputs;
|
|
||||||
RealisedPath::Set drvsToCopy;
|
|
||||||
|
|
||||||
// Group by derivation, helps with .all in particular
|
|
||||||
for (auto & drv : toDerivations()) {
|
|
||||||
for (auto & outputName : drv.outputsToInstall)
|
|
||||||
drvsToOutputs[drv.drvPath].insert(outputName);
|
|
||||||
drvsToCopy.insert(drv.drvPath);
|
|
||||||
}
|
|
||||||
|
|
||||||
for (auto & i : drvsToOutputs)
|
|
||||||
res.push_back(DerivedPath::Built { i.first, i.second });
|
|
||||||
|
|
||||||
return res;
|
|
||||||
}
|
|
||||||
|
|
||||||
StorePathSet InstallableValue::toDrvPaths(ref<Store> store)
|
|
||||||
{
|
|
||||||
StorePathSet res;
|
|
||||||
for (auto & drv : toDerivations())
|
|
||||||
res.insert(drv.drvPath);
|
|
||||||
return res;
|
|
||||||
}
|
|
||||||
|
|
||||||
struct InstallableAttrPath : InstallableValue
|
struct InstallableAttrPath : InstallableValue
|
||||||
{
|
{
|
||||||
SourceExprCommand & cmd;
|
SourceExprCommand & cmd;
|
||||||
RootValue v;
|
RootValue v;
|
||||||
std::string attrPath;
|
std::string attrPath;
|
||||||
OutputsSpec outputsSpec;
|
ExtendedOutputsSpec extendedOutputsSpec;
|
||||||
|
|
||||||
InstallableAttrPath(
|
InstallableAttrPath(
|
||||||
ref<EvalState> state,
|
ref<EvalState> state,
|
||||||
SourceExprCommand & cmd,
|
SourceExprCommand & cmd,
|
||||||
Value * v,
|
Value * v,
|
||||||
const std::string & attrPath,
|
const std::string & attrPath,
|
||||||
OutputsSpec outputsSpec)
|
ExtendedOutputsSpec extendedOutputsSpec)
|
||||||
: InstallableValue(state)
|
: InstallableValue(state)
|
||||||
, cmd(cmd)
|
, cmd(cmd)
|
||||||
, v(allocRootValue(v))
|
, v(allocRootValue(v))
|
||||||
, attrPath(attrPath)
|
, attrPath(attrPath)
|
||||||
, outputsSpec(std::move(outputsSpec))
|
, extendedOutputsSpec(std::move(extendedOutputsSpec))
|
||||||
{ }
|
{ }
|
||||||
|
|
||||||
std::string what() const override { return attrPath; }
|
std::string what() const override { return attrPath; }
|
||||||
|
@ -509,40 +458,54 @@ struct InstallableAttrPath : InstallableValue
|
||||||
return {vRes, pos};
|
return {vRes, pos};
|
||||||
}
|
}
|
||||||
|
|
||||||
virtual std::vector<InstallableValue::DerivationInfo> toDerivations() override;
|
DerivedPathsWithInfo toDerivedPaths() override
|
||||||
};
|
{
|
||||||
|
auto v = toValue(*state).first;
|
||||||
|
|
||||||
std::vector<InstallableValue::DerivationInfo> InstallableAttrPath::toDerivations()
|
Bindings & autoArgs = *cmd.getAutoArgs(*state);
|
||||||
{
|
|
||||||
auto v = toValue(*state).first;
|
|
||||||
|
|
||||||
Bindings & autoArgs = *cmd.getAutoArgs(*state);
|
DrvInfos drvInfos;
|
||||||
|
getDerivations(*state, *v, "", autoArgs, drvInfos, false);
|
||||||
|
|
||||||
DrvInfos drvInfos;
|
// Backward compatibility hack: group results by drvPath. This
|
||||||
getDerivations(*state, *v, "", autoArgs, drvInfos, false);
|
// helps keep .all output together.
|
||||||
|
std::map<StorePath, OutputsSpec> byDrvPath;
|
||||||
|
|
||||||
std::vector<DerivationInfo> res;
|
for (auto & drvInfo : drvInfos) {
|
||||||
for (auto & drvInfo : drvInfos) {
|
auto drvPath = drvInfo.queryDrvPath();
|
||||||
auto drvPath = drvInfo.queryDrvPath();
|
if (!drvPath)
|
||||||
if (!drvPath)
|
throw Error("'%s' is not a derivation", what());
|
||||||
throw Error("'%s' is not a derivation", what());
|
|
||||||
|
|
||||||
std::set<std::string> outputsToInstall;
|
auto newOutputs = std::visit(overloaded {
|
||||||
|
[&](const ExtendedOutputsSpec::Default & d) -> OutputsSpec {
|
||||||
|
std::set<std::string> outputsToInstall;
|
||||||
|
for (auto & output : drvInfo.queryOutputs(false, true))
|
||||||
|
outputsToInstall.insert(output.first);
|
||||||
|
return OutputsSpec::Names { std::move(outputsToInstall) };
|
||||||
|
},
|
||||||
|
[&](const ExtendedOutputsSpec::Explicit & e) -> OutputsSpec {
|
||||||
|
return e;
|
||||||
|
},
|
||||||
|
}, extendedOutputsSpec.raw());
|
||||||
|
|
||||||
if (auto outputNames = std::get_if<OutputNames>(&outputsSpec))
|
auto [iter, didInsert] = byDrvPath.emplace(*drvPath, newOutputs);
|
||||||
outputsToInstall = *outputNames;
|
|
||||||
else
|
|
||||||
for (auto & output : drvInfo.queryOutputs(false, std::get_if<DefaultOutputs>(&outputsSpec)))
|
|
||||||
outputsToInstall.insert(output.first);
|
|
||||||
|
|
||||||
res.push_back(DerivationInfo {
|
if (!didInsert)
|
||||||
.drvPath = *drvPath,
|
iter->second = iter->second.union_(newOutputs);
|
||||||
.outputsToInstall = std::move(outputsToInstall)
|
}
|
||||||
});
|
|
||||||
|
DerivedPathsWithInfo res;
|
||||||
|
for (auto & [drvPath, outputs] : byDrvPath)
|
||||||
|
res.push_back({
|
||||||
|
.path = DerivedPath::Built {
|
||||||
|
.drvPath = drvPath,
|
||||||
|
.outputs = outputs,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
return res;
|
||||||
}
|
}
|
||||||
|
};
|
||||||
return res;
|
|
||||||
}
|
|
||||||
|
|
||||||
std::vector<std::string> InstallableFlake::getActualAttrPaths()
|
std::vector<std::string> InstallableFlake::getActualAttrPaths()
|
||||||
{
|
{
|
||||||
|
@@ -615,7 +578,7 @@ InstallableFlake::InstallableFlake(
     ref<EvalState> state,
     FlakeRef && flakeRef,
     std::string_view fragment,
-    OutputsSpec outputsSpec,
+    ExtendedOutputsSpec extendedOutputsSpec,
     Strings attrPaths,
     Strings prefixes,
     const flake::LockFlags & lockFlags)
@@ -623,14 +586,14 @@
     flakeRef(flakeRef),
     attrPaths(fragment == "" ? attrPaths : Strings{(std::string) fragment}),
     prefixes(fragment == "" ? Strings{} : prefixes),
-    outputsSpec(std::move(outputsSpec)),
+    extendedOutputsSpec(std::move(extendedOutputsSpec)),
     lockFlags(lockFlags)
 {
     if (cmd && cmd->getAutoArgs(*state)->size())
         throw UsageError("'--arg' and '--argstr' are incompatible with flakes");
 }
 
-std::tuple<std::string, FlakeRef, InstallableValue::DerivationInfo> InstallableFlake::toDerivation()
+DerivedPathsWithInfo InstallableFlake::toDerivedPaths()
 {
     Activity act(*logger, lvlTalkative, actUnknown, fmt("evaluating derivation '%s'", what()));
 
@ -638,56 +601,84 @@ std::tuple<std::string, FlakeRef, InstallableValue::DerivationInfo> InstallableF
|
||||||
|
|
||||||
auto attrPath = attr->getAttrPathStr();
|
auto attrPath = attr->getAttrPathStr();
|
||||||
|
|
||||||
if (!attr->isDerivation())
|
if (!attr->isDerivation()) {
|
||||||
throw Error("flake output attribute '%s' is not a derivation", attrPath);
|
|
||||||
|
// FIXME: use eval cache?
|
||||||
|
auto v = attr->forceValue();
|
||||||
|
|
||||||
|
if (v.type() == nPath) {
|
||||||
|
PathSet context;
|
||||||
|
auto storePath = state->copyPathToStore(context, Path(v.path));
|
||||||
|
return {{
|
||||||
|
.path = DerivedPath::Opaque {
|
||||||
|
.path = std::move(storePath),
|
||||||
|
}
|
||||||
|
}};
|
||||||
|
}
|
||||||
|
|
||||||
|
else if (v.type() == nString) {
|
||||||
|
PathSet context;
|
||||||
|
auto s = state->forceString(v, context, noPos, fmt("while evaluating the flake output attribute '%s'", attrPath));
|
||||||
|
auto storePath = state->store->maybeParseStorePath(s);
|
||||||
|
if (storePath && context.count(std::string(s))) {
|
||||||
|
return {{
|
||||||
|
.path = DerivedPath::Opaque {
|
||||||
|
.path = std::move(*storePath),
|
||||||
|
}
|
||||||
|
}};
|
||||||
|
} else
|
||||||
|
throw Error("flake output attribute '%s' evaluates to the string '%s' which is not a store path", attrPath, s);
|
||||||
|
}
|
||||||
|
|
||||||
|
else
|
||||||
|
throw Error("flake output attribute '%s' is not a derivation or path", attrPath);
|
||||||
|
}
|
||||||
|
|
||||||
auto drvPath = attr->forceDerivation();
|
auto drvPath = attr->forceDerivation();
|
||||||
|
|
||||||
std::set<std::string> outputsToInstall;
|
|
||||||
std::optional<NixInt> priority;
|
std::optional<NixInt> priority;
|
||||||
|
|
||||||
if (auto aOutputSpecified = attr->maybeGetAttr(state->sOutputSpecified)) {
|
if (attr->maybeGetAttr(state->sOutputSpecified)) {
|
||||||
if (aOutputSpecified->getBool()) {
|
} else if (auto aMeta = attr->maybeGetAttr(state->sMeta)) {
|
||||||
if (auto aOutputName = attr->maybeGetAttr("outputName"))
|
|
||||||
outputsToInstall = { aOutputName->getString() };
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
else if (auto aMeta = attr->maybeGetAttr(state->sMeta)) {
|
|
||||||
if (auto aOutputsToInstall = aMeta->maybeGetAttr("outputsToInstall"))
|
|
||||||
for (auto & s : aOutputsToInstall->getListOfStrings())
|
|
||||||
outputsToInstall.insert(s);
|
|
||||||
if (auto aPriority = aMeta->maybeGetAttr("priority"))
|
if (auto aPriority = aMeta->maybeGetAttr("priority"))
|
||||||
priority = aPriority->getInt();
|
priority = aPriority->getInt();
|
||||||
}
|
}
|
||||||
|
|
||||||
if (outputsToInstall.empty() || std::get_if<AllOutputs>(&outputsSpec)) {
|
return {{
|
||||||
outputsToInstall.clear();
|
.path = DerivedPath::Built {
|
||||||
if (auto aOutputs = attr->maybeGetAttr(state->sOutputs))
|
.drvPath = std::move(drvPath),
|
||||||
for (auto & s : aOutputs->getListOfStrings())
|
.outputs = std::visit(overloaded {
|
||||||
outputsToInstall.insert(s);
|
[&](const ExtendedOutputsSpec::Default & d) -> OutputsSpec {
|
||||||
}
|
std::set<std::string> outputsToInstall;
|
||||||
|
if (auto aOutputSpecified = attr->maybeGetAttr(state->sOutputSpecified)) {
|
||||||
|
if (aOutputSpecified->getBool()) {
|
||||||
|
if (auto aOutputName = attr->maybeGetAttr("outputName"))
|
||||||
|
outputsToInstall = { aOutputName->getString() };
|
||||||
|
}
|
||||||
|
} else if (auto aMeta = attr->maybeGetAttr(state->sMeta)) {
|
||||||
|
if (auto aOutputsToInstall = aMeta->maybeGetAttr("outputsToInstall"))
|
||||||
|
for (auto & s : aOutputsToInstall->getListOfStrings())
|
||||||
|
outputsToInstall.insert(s);
|
||||||
|
}
|
||||||
|
|
||||||
if (outputsToInstall.empty())
|
if (outputsToInstall.empty())
|
||||||
outputsToInstall.insert("out");
|
outputsToInstall.insert("out");
|
||||||
|
|
||||||
if (auto outputNames = std::get_if<OutputNames>(&outputsSpec))
|
return OutputsSpec::Names { std::move(outputsToInstall) };
|
||||||
outputsToInstall = *outputNames;
|
},
|
||||||
|
[&](const ExtendedOutputsSpec::Explicit & e) -> OutputsSpec {
|
||||||
auto drvInfo = DerivationInfo {
|
return e;
|
||||||
.drvPath = std::move(drvPath),
|
},
|
||||||
.outputsToInstall = std::move(outputsToInstall),
|
}, extendedOutputsSpec.raw()),
|
||||||
.priority = priority,
|
},
|
||||||
};
|
.info = {
|
||||||
|
.priority = priority,
|
||||||
return {attrPath, getLockedFlake()->flake.lockedRef, std::move(drvInfo)};
|
.originalRef = flakeRef,
|
||||||
}
|
.resolvedRef = getLockedFlake()->flake.lockedRef,
|
||||||
|
.attrPath = attrPath,
|
||||||
std::vector<InstallableValue::DerivationInfo> InstallableFlake::toDerivations()
|
.extendedOutputsSpec = extendedOutputsSpec,
|
||||||
{
|
}
|
||||||
std::vector<DerivationInfo> res;
|
}};
|
||||||
res.push_back(std::get<2>(toDerivation()));
|
|
||||||
return res;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
std::pair<Value *, PosIdx> InstallableFlake::toValue(EvalState & state)
|
std::pair<Value *, PosIdx> InstallableFlake::toValue(EvalState & state)
|
||||||
|
@@ -802,12 +793,12 @@ std::vector<std::shared_ptr<Installable>> SourceExprCommand::parseInstallables(
         }
 
         for (auto & s : ss) {
-            auto [prefix, outputsSpec] = parseOutputsSpec(s);
+            auto [prefix, extendedOutputsSpec] = ExtendedOutputsSpec::parse(s);
             result.push_back(
                 std::make_shared<InstallableAttrPath>(
                     state, *this, vFile,
-                    prefix == "." ? "" : prefix,
-                    outputsSpec));
+                    prefix == "." ? "" : std::string { prefix },
+                    extendedOutputsSpec));
         }
 
     } else {
@ -815,24 +806,46 @@ std::vector<std::shared_ptr<Installable>> SourceExprCommand::parseInstallables(
|
||||||
for (auto & s : ss) {
|
for (auto & s : ss) {
|
||||||
std::exception_ptr ex;
|
std::exception_ptr ex;
|
||||||
|
|
||||||
auto found = s.rfind('^');
|
auto [prefix_, extendedOutputsSpec_] = ExtendedOutputsSpec::parse(s);
|
||||||
if (found != std::string::npos) {
|
// To avoid clang's pedantry
|
||||||
try {
|
auto prefix = std::move(prefix_);
|
||||||
result.push_back(std::make_shared<InstallableStorePath>(
|
auto extendedOutputsSpec = std::move(extendedOutputsSpec_);
|
||||||
store,
|
|
||||||
DerivedPath::Built::parse(*store, s.substr(0, found), s.substr(found + 1))));
|
|
||||||
continue;
|
|
||||||
} catch (BadStorePath &) {
|
|
||||||
} catch (...) {
|
|
||||||
if (!ex)
|
|
||||||
ex = std::current_exception();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
found = s.find('/');
|
auto found = prefix.find('/');
|
||||||
if (found != std::string::npos) {
|
if (found != std::string::npos) {
|
||||||
try {
|
try {
|
||||||
result.push_back(std::make_shared<InstallableStorePath>(store, store->followLinksToStorePath(s)));
|
auto derivedPath = std::visit(overloaded {
|
||||||
|
// If the user did not use ^, we treat the output more liberally.
|
||||||
|
[&](const ExtendedOutputsSpec::Default &) -> DerivedPath {
|
||||||
|
// First, we accept a symlink chain or an actual store path.
|
||||||
|
auto storePath = store->followLinksToStorePath(prefix);
|
||||||
|
// Second, we see if the store path ends in `.drv` to decide what sort
|
||||||
|
// of derived path they want.
|
||||||
|
//
|
||||||
|
// This handling predates the `^` syntax. The `^*` in
|
||||||
|
// `/nix/store/hash-foo.drv^*` unambiguously means "do the
|
||||||
|
// `DerivedPath::Built` case", so plain `/nix/store/hash-foo.drv` could
|
||||||
|
// also unambiguously mean "do the DerivedPath::Opaque` case".
|
||||||
|
//
|
||||||
|
// Issue #7261 tracks reconsidering this `.drv` dispatching.
|
||||||
|
return storePath.isDerivation()
|
||||||
|
? (DerivedPath) DerivedPath::Built {
|
||||||
|
.drvPath = std::move(storePath),
|
||||||
|
.outputs = OutputsSpec::All {},
|
||||||
|
}
|
||||||
|
: (DerivedPath) DerivedPath::Opaque {
|
||||||
|
.path = std::move(storePath),
|
||||||
|
};
|
||||||
|
},
|
||||||
|
// If the user did use ^, we just do exactly what is written.
|
||||||
|
[&](const ExtendedOutputsSpec::Explicit & outputSpec) -> DerivedPath {
|
||||||
|
return DerivedPath::Built {
|
||||||
|
.drvPath = store->parseStorePath(prefix),
|
||||||
|
.outputs = outputSpec,
|
||||||
|
};
|
||||||
|
},
|
||||||
|
}, extendedOutputsSpec.raw());
|
||||||
|
result.push_back(std::make_shared<InstallableStorePath>(store, std::move(derivedPath)));
|
||||||
continue;
|
continue;
|
||||||
} catch (BadStorePath &) {
|
} catch (BadStorePath &) {
|
||||||
} catch (...) {
|
} catch (...) {
|
||||||
|
@@ -842,13 +855,13 @@ std::vector<std::shared_ptr<Installable>> SourceExprCommand::parseInstallables(
             }
 
             try {
-                auto [flakeRef, fragment, outputsSpec] = parseFlakeRefWithFragmentAndOutputsSpec(s, absPath("."));
+                auto [flakeRef, fragment] = parseFlakeRefWithFragment(std::string { prefix }, absPath("."));
                 result.push_back(std::make_shared<InstallableFlake>(
                         this,
                         getEvalState(),
                         std::move(flakeRef),
                         fragment,
-                        outputsSpec,
+                        extendedOutputsSpec,
                         getDefaultFlakeAttrPaths(),
                         getDefaultFlakeAttrPathPrefixes(),
                         lockFlags));
@@ -895,13 +908,19 @@ std::vector<std::pair<std::shared_ptr<Installable>, BuiltPathWithResult>> Instal
     if (mode == Realise::Nothing)
         settings.readOnlyMode = true;
 
+    struct Aux
+    {
+        ExtraPathInfo info;
+        std::shared_ptr<Installable> installable;
+    };
+
     std::vector<DerivedPath> pathsToBuild;
-    std::map<DerivedPath, std::vector<std::shared_ptr<Installable>>> backmap;
+    std::map<DerivedPath, std::vector<Aux>> backmap;
 
     for (auto & i : installables) {
         for (auto b : i->toDerivedPaths()) {
-            pathsToBuild.push_back(b);
-            backmap[b].push_back(i);
+            pathsToBuild.push_back(b.path);
+            backmap[b.path].push_back({.info = b.info, .installable = i});
         }
     }
 
@ -914,42 +933,18 @@ std::vector<std::pair<std::shared_ptr<Installable>, BuiltPathWithResult>> Instal
|
||||||
printMissing(store, pathsToBuild, lvlError);
|
printMissing(store, pathsToBuild, lvlError);
|
||||||
|
|
||||||
for (auto & path : pathsToBuild) {
|
for (auto & path : pathsToBuild) {
|
||||||
for (auto & installable : backmap[path]) {
|
for (auto & aux : backmap[path]) {
|
||||||
std::visit(overloaded {
|
std::visit(overloaded {
|
||||||
[&](const DerivedPath::Built & bfd) {
|
[&](const DerivedPath::Built & bfd) {
|
||||||
OutputPathMap outputs;
|
auto outputs = resolveDerivedPath(*store, bfd, &*evalStore);
|
||||||
auto drv = evalStore->readDerivation(bfd.drvPath);
|
res.push_back({aux.installable, {
|
||||||
auto outputHashes = staticOutputHashes(*evalStore, drv); // FIXME: expensive
|
.path = BuiltPath::Built { bfd.drvPath, outputs },
|
||||||
auto drvOutputs = drv.outputsAndOptPaths(*store);
|
.info = aux.info}});
|
||||||
for (auto & output : bfd.outputs) {
|
|
||||||
auto outputHash = get(outputHashes, output);
|
|
||||||
if (!outputHash)
|
|
||||||
throw Error(
|
|
||||||
"the derivation '%s' doesn't have an output named '%s'",
|
|
||||||
store->printStorePath(bfd.drvPath), output);
|
|
||||||
if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations)) {
|
|
||||||
DrvOutput outputId { *outputHash, output };
|
|
||||||
auto realisation = store->queryRealisation(outputId);
|
|
||||||
if (!realisation)
|
|
||||||
throw Error(
|
|
||||||
"cannot operate on an output of the "
|
|
||||||
"unbuilt derivation '%s'",
|
|
||||||
outputId.to_string());
|
|
||||||
outputs.insert_or_assign(output, realisation->outPath);
|
|
||||||
} else {
|
|
||||||
// If ca-derivations isn't enabled, assume that
|
|
||||||
// the output path is statically known.
|
|
||||||
auto drvOutput = get(drvOutputs, output);
|
|
||||||
assert(drvOutput);
|
|
||||||
assert(drvOutput->second);
|
|
||||||
outputs.insert_or_assign(
|
|
||||||
output, *drvOutput->second);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
res.push_back({installable, {.path = BuiltPath::Built { bfd.drvPath, outputs }}});
|
|
||||||
},
|
},
|
||||||
[&](const DerivedPath::Opaque & bo) {
|
[&](const DerivedPath::Opaque & bo) {
|
||||||
res.push_back({installable, {.path = BuiltPath::Opaque { bo.path }}});
|
res.push_back({aux.installable, {
|
||||||
|
.path = BuiltPath::Opaque { bo.path },
|
||||||
|
.info = aux.info}});
|
||||||
},
|
},
|
||||||
}, path.raw());
|
}, path.raw());
|
||||||
}
|
}
|
||||||
|
@@ -965,16 +960,22 @@ std::vector<std::pair<std::shared_ptr<Installable>, BuiltPathWithResult>> Instal
             if (!buildResult.success())
                 buildResult.rethrow();
 
-            for (auto & installable : backmap[buildResult.path]) {
+            for (auto & aux : backmap[buildResult.path]) {
                 std::visit(overloaded {
                     [&](const DerivedPath::Built & bfd) {
                         std::map<std::string, StorePath> outputs;
                         for (auto & path : buildResult.builtOutputs)
                             outputs.emplace(path.first.outputName, path.second.outPath);
-                        res.push_back({installable, {.path = BuiltPath::Built { bfd.drvPath, outputs }, .result = buildResult}});
+                        res.push_back({aux.installable, {
+                            .path = BuiltPath::Built { bfd.drvPath, outputs },
+                            .info = aux.info,
+                            .result = buildResult}});
                     },
                     [&](const DerivedPath::Opaque & bo) {
-                        res.push_back({installable, {.path = BuiltPath::Opaque { bo.path }, .result = buildResult}});
+                        res.push_back({aux.installable, {
+                            .path = BuiltPath::Opaque { bo.path },
+                            .info = aux.info,
+                            .result = buildResult}});
                     },
                 }, buildResult.path.raw());
            }
@@ -1059,7 +1060,7 @@ StorePathSet Installable::toDerivations(
             [&](const DerivedPath::Built & bfd) {
                 drvPaths.insert(bfd.drvPath);
             },
-        }, b.raw());
+        }, b.path.raw());
 
     return drvPaths;
 }
@@ -2,7 +2,7 @@
 
 #include "util.hh"
 #include "path.hh"
-#include "path-with-outputs.hh"
+#include "outputs-spec.hh"
 #include "derived-path.hh"
 #include "eval.hh"
 #include "store-api.hh"
@@ -20,7 +20,7 @@ namespace eval_cache { class EvalCache; class AttrCursor; }
 
 struct App
 {
-    std::vector<StorePathWithOutputs> context;
+    std::vector<DerivedPath> context;
     Path program;
     // FIXME: add args, sandbox settings, metadata, ...
 };
@@ -52,26 +52,42 @@ enum class OperateOn {
     Derivation
 };
 
+struct ExtraPathInfo
+{
+    std::optional<NixInt> priority;
+    std::optional<FlakeRef> originalRef;
+    std::optional<FlakeRef> resolvedRef;
+    std::optional<std::string> attrPath;
+    // FIXME: merge with DerivedPath's 'outputs' field?
+    std::optional<ExtendedOutputsSpec> extendedOutputsSpec;
+};
+
+/* A derived path with any additional info that commands might
+   need from the derivation. */
+struct DerivedPathWithInfo
+{
+    DerivedPath path;
+    ExtraPathInfo info;
+};
+
 struct BuiltPathWithResult
 {
     BuiltPath path;
+    ExtraPathInfo info;
     std::optional<BuildResult> result;
 };
 
+typedef std::vector<DerivedPathWithInfo> DerivedPathsWithInfo;
+
 struct Installable
 {
     virtual ~Installable() { }
 
     virtual std::string what() const = 0;
 
-    virtual DerivedPaths toDerivedPaths() = 0;
-
-    virtual StorePathSet toDrvPaths(ref<Store> store)
-    {
-        throw Error("'%s' cannot be converted to a derivation path", what());
-    }
+    virtual DerivedPathsWithInfo toDerivedPaths() = 0;
 
-    DerivedPath toDerivedPath();
+    DerivedPathWithInfo toDerivedPath();
 
     UnresolvedApp toApp(EvalState & state);
 
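The `ExtraPathInfo`/`DerivedPathWithInfo` pair introduced here is just a plain aggregate that rides along with each `DerivedPath`, so commands that only need the paths can ignore it, while commands such as `nix profile` can read the extra metadata when it is present. A minimal, self-contained sketch of that idea — using simplified stand-in types rather than Nix's real `DerivedPath`, `FlakeRef`, or `NixInt`, so it compiles on its own:

```cpp
#include <iostream>
#include <optional>
#include <string>
#include <vector>

// Stand-ins for Nix's real types, for illustration only.
struct DerivedPath { std::string render; };

struct ExtraPathInfo {
    std::optional<int> priority;          // NixInt in the real header
    std::optional<std::string> attrPath;  // e.g. the flake attribute that produced the path
};

struct DerivedPathWithInfo {
    DerivedPath path;
    ExtraPathInfo info;
};

using DerivedPathsWithInfo = std::vector<DerivedPathWithInfo>;

// A consumer that cares about priorities reads .info when present
// and simply skips it otherwise.
void report(const DerivedPathsWithInfo & paths)
{
    for (auto & p : paths) {
        std::cout << p.path.render;
        if (p.info.priority)
            std::cout << " (priority " << *p.info.priority << ")";
        std::cout << "\n";
    }
}

int main()
{
    DerivedPathWithInfo a{DerivedPath{"example.drv^out"}, ExtraPathInfo{5, std::string("hello")}};
    DerivedPathWithInfo b{DerivedPath{"example-source"}, ExtraPathInfo{}};
    report({a, b});
}
```

This mirrors the design choice visible in the diff: metadata is attached per derived path instead of being threaded through separate `toDerivations()`-style interfaces.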
@@ -146,19 +162,6 @@ struct InstallableValue : Installable
     ref<EvalState> state;
 
     InstallableValue(ref<EvalState> state) : state(state) {}
-
-    struct DerivationInfo
-    {
-        StorePath drvPath;
-        std::set<std::string> outputsToInstall;
-        std::optional<NixInt> priority;
-    };
-
-    virtual std::vector<DerivationInfo> toDerivations() = 0;
-
-    DerivedPaths toDerivedPaths() override;
-
-    StorePathSet toDrvPaths(ref<Store> store) override;
 };
 
 struct InstallableFlake : InstallableValue
@@ -166,7 +169,7 @@ struct InstallableFlake : InstallableValue
     FlakeRef flakeRef;
     Strings attrPaths;
    Strings prefixes;
-    OutputsSpec outputsSpec;
+    ExtendedOutputsSpec extendedOutputsSpec;
     const flake::LockFlags & lockFlags;
     mutable std::shared_ptr<flake::LockedFlake> _lockedFlake;
 
@@ -175,7 +178,7 @@ struct InstallableFlake : InstallableValue
         ref<EvalState> state,
         FlakeRef && flakeRef,
         std::string_view fragment,
-        OutputsSpec outputsSpec,
+        ExtendedOutputsSpec extendedOutputsSpec,
         Strings attrPaths,
         Strings prefixes,
         const flake::LockFlags & lockFlags);
@@ -186,9 +189,7 @@ struct InstallableFlake : InstallableValue
 
     Value * getFlakeOutputs(EvalState & state, const flake::LockedFlake & lockedFlake);
 
-    std::tuple<std::string, FlakeRef, DerivationInfo> toDerivation();
-
-    std::vector<DerivationInfo> toDerivations() override;
+    DerivedPathsWithInfo toDerivedPaths() override;
 
     std::pair<Value *, PosIdx> toValue(EvalState & state) override;
 
@@ -397,7 +397,7 @@ StringSet NixRepl::completePrefix(const std::string & prefix)
         Expr * e = parseString(expr);
         Value v;
         e->eval(*state, *env, v);
-        state->forceAttrs(v, noPos, "nevermind, it is ignored anyway");
+        state->forceAttrs(v, noPos, "while evaluating an attrset for the purpose of completion (this error should not be displayed; file an issue?)");
 
         for (auto & i : *v.attrs) {
             std::string_view name = state->symbols[i.name];
@@ -641,7 +641,12 @@ bool NixRepl::processLine(std::string line)
         Path drvPathRaw = state->store->printStorePath(drvPath);
 
         if (command == ":b" || command == ":bl") {
-            state->store->buildPaths({DerivedPath::Built{drvPath}});
+            state->store->buildPaths({
+                DerivedPath::Built {
+                    .drvPath = drvPath,
+                    .outputs = OutputsSpec::All { },
+                },
+            });
             auto drv = state->store->readDerivation(drvPath);
             logger->cout("\nThis derivation produced the following outputs:");
             for (auto & [outputName, outputPath] : state->store->queryDerivationOutputMap(drvPath)) {
@@ -300,7 +300,7 @@ struct AttrDb
             NixStringContext context;
             if (!queryAttribute.isNull(3))
                 for (auto & s : tokenizeString<std::vector<std::string>>(queryAttribute.getStr(3), ";"))
-                    context.push_back(decodeContext(cfg, s));
+                    context.push_back(NixStringContextElem::parse(cfg, s));
             return {{rowId, string_t{queryAttribute.getStr(2), context}}};
         }
         case AttrType::Bool:
@@ -592,7 +592,18 @@ string_t AttrCursor::getStringWithContext()
         if (auto s = std::get_if<string_t>(&cachedValue->second)) {
             bool valid = true;
             for (auto & c : s->second) {
-                if (!root->state.store->isValidPath(c.first)) {
+                const StorePath & path = std::visit(overloaded {
+                    [&](const NixStringContextElem::DrvDeep & d) -> const StorePath & {
+                        return d.drvPath;
+                    },
+                    [&](const NixStringContextElem::Built & b) -> const StorePath & {
+                        return b.drvPath;
+                    },
+                    [&](const NixStringContextElem::Opaque & o) -> const StorePath & {
+                        return o.path;
+                    },
+                }, c.raw());
+                if (!root->state.store->isValidPath(path)) {
                     valid = false;
                     break;
                 }
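Hunks like the one above (and several further down) all dispatch on variant-backed types such as `NixStringContextElem` and `DerivedPath` via `std::visit` with the small `overloaded` helper. For readers unfamiliar with that C++17 idiom, here is a self-contained sketch using a toy variant rather than Nix's actual types:

```cpp
#include <iostream>
#include <string>
#include <variant>

// The classic "overloaded" helper: aggregate several lambdas into a single
// visitor with one operator() per variant alternative.
template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;

// Toy stand-in for something like DerivedPath: either an opaque store path
// or a derivation plus an output name.
struct Opaque { std::string path; };
struct Built { std::string drvPath; std::string output; };
using ToyPath = std::variant<Opaque, Built>;

std::string render(const ToyPath & p)
{
    return std::visit(overloaded {
        [](const Opaque & o) { return o.path; },
        [](const Built & b) { return b.drvPath + "^" + b.output; },
    }, p);
}

int main()
{
    std::cout << render(Opaque{"/nix/store/example-source"}) << "\n";
    std::cout << render(Built{"/nix/store/example.drv", "out"}) << "\n";
}
```

The benefit, visible throughout the diff, is that adding a new alternative (such as `NixStringContextElem::DrvDeep`) forces every visitor to handle it explicitly.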
@@ -11,7 +11,9 @@
 
 #include <algorithm>
 #include <chrono>
+#include <iostream>
 #include <cstring>
+#include <optional>
 #include <unistd.h>
 #include <sys/time.h>
 #include <sys/resource.h>
@@ -517,6 +519,7 @@ EvalState::EvalState(
     static_assert(sizeof(Env) <= 16, "environment must be <= 16 bytes");
 
     /* Initialise the Nix expression search path. */
+    evalSettings.nixPath.setDefault(evalSettings.getDefaultNixPath());
     if (!evalSettings.pureEval) {
         for (auto & i : _searchPath) addToSearchPath(i);
         for (auto & i : evalSettings.nixPath.get()) addToSearchPath(i);
@@ -1927,7 +1930,9 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v)
         /* skip canonization of first path, which would only be not
            canonized in the first place if it's coming from a ./${foo} type
            path */
-        auto part = state.coerceToString(i_pos, vTmp, context, false, firstType == nString, !first, "while evaluating a path segment");
+        auto part = state.coerceToString(i_pos, vTmp, context,
+            "while evaluating a path segment",
+            false, firstType == nString, !first);
         sSize += part->size();
         s.emplace_back(std::move(part));
     }
@@ -2068,27 +2073,6 @@ std::string_view EvalState::forceString(Value & v, const PosIdx pos, std::string
 }
 
 
-/* Decode a context string ‘!<name>!<path>’ into a pair <path,
-   name>. */
-NixStringContextElem decodeContext(const Store & store, std::string_view s)
-{
-    if (s.at(0) == '!') {
-        size_t index = s.find("!", 1);
-        return {
-            store.parseStorePath(s.substr(index + 1)),
-            std::string(s.substr(1, index - 1)),
-        };
-    } else
-        return {
-            store.parseStorePath(
-                s.at(0) == '/'
-                ? s
-                : s.substr(1)),
-            "",
-        };
-}
-
-
 void copyContext(const Value & v, PathSet & context)
 {
     if (v.string.context)
@@ -2103,7 +2087,7 @@ NixStringContext Value::getContext(const Store & store)
     assert(internalType == tString);
     if (string.context)
         for (const char * * p = string.context; *p; ++p)
-            res.push_back(decodeContext(store, *p));
+            res.push_back(NixStringContextElem::parse(store, *p));
     return res;
 }
 
@@ -2144,15 +2128,16 @@ std::optional<std::string> EvalState::tryAttrsToString(const PosIdx pos, Value &
     if (i != v.attrs->end()) {
         Value v1;
         callFunction(*i->value, v, v1, pos);
-        return coerceToString(pos, v1, context, coerceMore, copyToStore,
-            "while evaluating the result of the `toString` attribute").toOwned();
+        return coerceToString(pos, v1, context,
+            "while evaluating the result of the `__toString` attribute",
+            coerceMore, copyToStore).toOwned();
     }
 
     return {};
 }
 
-BackedStringView EvalState::coerceToString(const PosIdx pos, Value & v, PathSet & context,
-    bool coerceMore, bool copyToStore, bool canonicalizePath, std::string_view errorCtx)
+BackedStringView EvalState::coerceToString(const PosIdx pos, Value &v, PathSet &context,
+    std::string_view errorCtx, bool coerceMore, bool copyToStore, bool canonicalizePath)
 {
     forceValue(v, pos);
 
@@ -2166,7 +2151,7 @@ BackedStringView EvalState::coerceToString(const PosIdx pos, Value & v, PathSet
         if (canonicalizePath)
             path = canonPath(*path);
         if (copyToStore)
-            path = copyPathToStore(context, std::move(path).toOwned());
+            path = store->printStorePath(copyPathToStore(context, std::move(path).toOwned()));
         return path;
     }
 
@@ -2175,13 +2160,23 @@ BackedStringView EvalState::coerceToString(const PosIdx pos, Value & v, PathSet
             if (maybeString)
                 return std::move(*maybeString);
             auto i = v.attrs->find(sOutPath);
-            if (i == v.attrs->end())
-                error("cannot coerce a set to a string", showType(v)).withTrace(pos, errorCtx).debugThrow<TypeError>();
-            return coerceToString(pos, *i->value, context, coerceMore, copyToStore, canonicalizePath, errorCtx);
+            if (i == v.attrs->end()) {
+                error("cannot coerce %1% to a string", showType(v))
+                    .withTrace(pos, errorCtx)
+                    .debugThrow<TypeError>();
+            }
+            return coerceToString(pos, *i->value, context, errorCtx,
+                coerceMore, copyToStore, canonicalizePath);
         }
 
-        if (v.type() == nExternal)
-            return v.external->coerceToString(positions[pos], context, coerceMore, copyToStore, errorCtx);
+        if (v.type() == nExternal) {
+            try {
+                return v.external->coerceToString(positions[pos], context, coerceMore, copyToStore);
+            } catch (Error & e) {
+                e.addTrace(nullptr, errorCtx);
+                throw;
+            }
+        }
 
         if (coerceMore) {
             /* Note that `false' is represented as an empty string for
@@ -2196,8 +2191,9 @@ BackedStringView EvalState::coerceToString(const PosIdx pos, Value & v, PathSet
             std::string result;
             for (auto [n, v2] : enumerate(v.listItems())) {
                 try {
-                    result += *coerceToString(noPos, *v2, context, coerceMore, copyToStore, canonicalizePath,
-                        "while evaluating one element of the list");
+                    result += *coerceToString(noPos, *v2, context,
+                        "while evaluating one element of the list",
+                        coerceMore, copyToStore, canonicalizePath);
                 } catch (Error & e) {
                     e.addTrace(positions[pos], errorCtx);
                     throw;
@@ -2211,37 +2207,39 @@ BackedStringView EvalState::coerceToString(const PosIdx pos, Value & v, PathSet
             }
         }
 
-        error("cannot coerce %1% to a string", showType(v)).withTrace(pos, errorCtx).debugThrow<TypeError>();
+        error("cannot coerce %1% to a string", showType(v))
+            .withTrace(pos, errorCtx)
+            .debugThrow<TypeError>();
 }
 
 
-std::string EvalState::copyPathToStore(PathSet & context, const Path & path)
+StorePath EvalState::copyPathToStore(PathSet & context, const Path & path)
 {
     if (nix::isDerivation(path))
         error("file names are not allowed to end in '%1%'", drvExtension).debugThrow<EvalError>();
 
-    Path dstPath;
-    auto i = srcToStore.find(path);
-    if (i != srcToStore.end())
-        dstPath = store->printStorePath(i->second);
-    else {
-        auto p = settings.readOnlyMode
+    auto dstPath = [&]() -> StorePath
+    {
+        auto i = srcToStore.find(path);
+        if (i != srcToStore.end()) return i->second;
+
+        auto dstPath = settings.readOnlyMode
             ? store->computeStorePathForPath(std::string(baseNameOf(path)), checkSourcePath(path)).first
             : store->addToStore(std::string(baseNameOf(path)), checkSourcePath(path), FileIngestionMethod::Recursive, htSHA256, defaultPathFilter, repair);
-        dstPath = store->printStorePath(p);
-        allowPath(p);
-        srcToStore.insert_or_assign(path, std::move(p));
-        printMsg(lvlChatty, "copied source '%1%' -> '%2%'", path, dstPath);
-    }
+        allowPath(dstPath);
+        srcToStore.insert_or_assign(path, dstPath);
+        printMsg(lvlChatty, "copied source '%1%' -> '%2%'", path, store->printStorePath(dstPath));
+        return dstPath;
+    }();
 
-    context.insert(dstPath);
+    context.insert(store->printStorePath(dstPath));
     return dstPath;
 }
 
 
 Path EvalState::coerceToPath(const PosIdx pos, Value & v, PathSet & context, std::string_view errorCtx)
 {
-    auto path = coerceToString(pos, v, context, false, false, true, errorCtx).toOwned();
+    auto path = coerceToString(pos, v, context, errorCtx, false, false, true).toOwned();
     if (path == "" || path[0] != '/')
         error("string '%1%' doesn't represent an absolute path", path).withTrace(pos, errorCtx).debugThrow<EvalError>();
     return path;
@@ -2250,7 +2248,7 @@ Path EvalState::coerceToPath(const PosIdx pos, Value & v, PathSet & context, std
 
 StorePath EvalState::coerceToStorePath(const PosIdx pos, Value & v, PathSet & context, std::string_view errorCtx)
 {
-    auto path = coerceToString(pos, v, context, false, false, true, errorCtx).toOwned();
+    auto path = coerceToString(pos, v, context, errorCtx, false, false, true).toOwned();
     if (auto storePath = store->maybeParseStorePath(path))
         return *storePath;
     error("path '%1%' is not in the Nix store", path).withTrace(pos, errorCtx).debugThrow<EvalError>();
@@ -2454,13 +2452,11 @@ void EvalState::printStats()
 }
 
 
-std::string ExternalValueBase::coerceToString(const Pos & pos, PathSet & context, bool copyMore, bool copyToStore, std::string_view errorCtx) const
+std::string ExternalValueBase::coerceToString(const Pos & pos, PathSet & context, bool copyMore, bool copyToStore) const
 {
-    auto e = TypeError({
+    throw TypeError({
         .msg = hintfmt("cannot coerce %1% to a string", showType())
     });
-    e.addTrace(pos, errorCtx);
-    throw e;
 }
 
 
|
@ -2477,30 +2473,35 @@ std::ostream & operator << (std::ostream & str, const ExternalValueBase & v) {
|
||||||
|
|
||||||
EvalSettings::EvalSettings()
|
EvalSettings::EvalSettings()
|
||||||
{
|
{
|
||||||
auto var = getEnv("NIX_PATH");
|
|
||||||
if (var) nixPath = parseNixPath(*var);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/* impure => NIX_PATH or a default path
|
||||||
|
* restrict-eval => NIX_PATH
|
||||||
|
* pure-eval => empty
|
||||||
|
*/
|
||||||
Strings EvalSettings::getDefaultNixPath()
|
Strings EvalSettings::getDefaultNixPath()
|
||||||
{
|
{
|
||||||
Strings res;
|
if (pureEval)
|
||||||
auto add = [&](const Path & p, const std::string & s = std::string()) {
|
return {};
|
||||||
if (pathExists(p)) {
|
|
||||||
if (s.empty()) {
|
auto var = getEnv("NIX_PATH");
|
||||||
res.push_back(p);
|
if (var) {
|
||||||
} else {
|
return parseNixPath(*var);
|
||||||
res.push_back(s + "=" + p);
|
} else if (restrictEval) {
|
||||||
}
|
return {};
|
||||||
}
|
} else {
|
||||||
};
|
Strings res;
|
||||||
|
auto add = [&](const Path & p, const std::optional<std::string> & s = std::nullopt) {
|
||||||
|
if (pathExists(p))
|
||||||
|
res.push_back(s ? *s + "=" + p : p);
|
||||||
|
};
|
||||||
|
|
||||||
if (!evalSettings.restrictEval && !evalSettings.pureEval) {
|
|
||||||
add(getHome() + "/.nix-defexpr/channels");
|
add(getHome() + "/.nix-defexpr/channels");
|
||||||
add(settings.nixStateDir + "/profiles/per-user/root/channels/nixpkgs", "nixpkgs");
|
add(settings.nixStateDir + "/profiles/per-user/root/channels/nixpkgs", "nixpkgs");
|
||||||
add(settings.nixStateDir + "/profiles/per-user/root/channels");
|
add(settings.nixStateDir + "/profiles/per-user/root/channels");
|
||||||
}
|
|
||||||
|
|
||||||
return res;
|
return res;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
bool EvalSettings::isPseudoUrl(std::string_view s)
|
bool EvalSettings::isPseudoUrl(std::string_view s)
|
||||||
|
|
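The comment above spells out the new precedence: pure evaluation yields an empty search path, `NIX_PATH` wins when set, restricted evaluation without `NIX_PATH` is also empty, and only the fully impure case falls back to channel defaults. A simplified, self-contained sketch of just that decision (the real `getDefaultNixPath()` also splits `NIX_PATH` on `:` and skips default directories that do not exist; the channel paths below are illustrative, assuming the usual `/nix/var/nix` state directory):

```cpp
#include <cstdlib>
#include <string>
#include <vector>

// Illustrative stand-in for the precedence implemented by getDefaultNixPath().
std::vector<std::string> defaultNixPath(bool pureEval, bool restrictEval)
{
    if (pureEval) return {};                         // pure-eval => empty
    if (const char * var = std::getenv("NIX_PATH"))
        return {var};                                // NIX_PATH, if set, is used as-is here
    if (restrictEval) return {};                     // restrict-eval without NIX_PATH => empty
    return {                                         // impure fallback: user and root channels
        "~/.nix-defexpr/channels",
        "nixpkgs=/nix/var/nix/profiles/per-user/root/channels/nixpkgs",
        "/nix/var/nix/profiles/per-user/root/channels",
    };
}
```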
|
@ -203,6 +203,9 @@ public:
|
||||||
throw std::move(error);
|
throw std::move(error);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// This is dangerous, but gets in line with the idea that error creation and
|
||||||
|
// throwing should not allocate on the stack of hot functions.
|
||||||
|
// as long as errors are immediately thrown, it works.
|
||||||
ErrorBuilder * errorBuilder;
|
ErrorBuilder * errorBuilder;
|
||||||
|
|
||||||
template<typename... Args>
|
template<typename... Args>
|
||||||
|
@ -375,11 +378,11 @@ public:
|
||||||
booleans and lists to a string. If `copyToStore' is set,
|
booleans and lists to a string. If `copyToStore' is set,
|
||||||
referenced paths are copied to the Nix store as a side effect. */
|
referenced paths are copied to the Nix store as a side effect. */
|
||||||
BackedStringView coerceToString(const PosIdx pos, Value & v, PathSet & context,
|
BackedStringView coerceToString(const PosIdx pos, Value & v, PathSet & context,
|
||||||
|
std::string_view errorCtx,
|
||||||
bool coerceMore = false, bool copyToStore = true,
|
bool coerceMore = false, bool copyToStore = true,
|
||||||
bool canonicalizePath = true,
|
bool canonicalizePath = true);
|
||||||
std::string_view errorCtx = "");
|
|
||||||
|
|
||||||
std::string copyPathToStore(PathSet & context, const Path & path);
|
StorePath copyPathToStore(PathSet & context, const Path & path);
|
||||||
|
|
||||||
/* Path coercion. Converts strings, paths and derivations to a
|
/* Path coercion. Converts strings, paths and derivations to a
|
||||||
path. The result is guaranteed to be a canonicalised, absolute
|
path. The result is guaranteed to be a canonicalised, absolute
|
||||||
|
@ -551,10 +554,6 @@ struct DebugTraceStacker {
|
||||||
std::string_view showType(ValueType type);
|
std::string_view showType(ValueType type);
|
||||||
std::string showType(const Value & v);
|
std::string showType(const Value & v);
|
||||||
|
|
||||||
/* Decode a context string ‘!<name>!<path>’ into a pair <path,
|
|
||||||
name>. */
|
|
||||||
NixStringContextElem decodeContext(const Store & store, std::string_view s);
|
|
||||||
|
|
||||||
/* If `path' refers to a directory, then append "/default.nix". */
|
/* If `path' refers to a directory, then append "/default.nix". */
|
||||||
Path resolveExprPath(Path path);
|
Path resolveExprPath(Path path);
|
||||||
|
|
||||||
|
@ -571,7 +570,7 @@ struct EvalSettings : Config
|
||||||
{
|
{
|
||||||
EvalSettings();
|
EvalSettings();
|
||||||
|
|
||||||
static Strings getDefaultNixPath();
|
Strings getDefaultNixPath();
|
||||||
|
|
||||||
static bool isPseudoUrl(std::string_view s);
|
static bool isPseudoUrl(std::string_view s);
|
||||||
|
|
||||||
|
@ -581,8 +580,15 @@ struct EvalSettings : Config
|
||||||
"Whether builtin functions that allow executing native code should be enabled."};
|
"Whether builtin functions that allow executing native code should be enabled."};
|
||||||
|
|
||||||
Setting<Strings> nixPath{
|
Setting<Strings> nixPath{
|
||||||
this, getDefaultNixPath(), "nix-path",
|
this, {}, "nix-path",
|
||||||
"List of directories to be searched for `<...>` file references."};
|
R"(
|
||||||
|
List of directories to be searched for `<...>` file references.
|
||||||
|
|
||||||
|
If [pure evaluation](#conf-pure-eval) is disabled,
|
||||||
|
this is initialised using the [`NIX_PATH`](@docroot@/command-ref/env-common.md#env-NIX_PATH)
|
||||||
|
environment variable, or, if it is unset and [restricted evaluation](#conf-restrict-eval)
|
||||||
|
is disabled, a default search path including the user's and `root`'s channels.
|
||||||
|
)"};
|
||||||
|
|
||||||
Setting<bool> restrictEval{
|
Setting<bool> restrictEval{
|
||||||
this, false, "restrict-eval",
|
this, false, "restrict-eval",
|
||||||
|
|
|
@ -264,7 +264,7 @@ static Flake getFlake(
|
||||||
PathSet emptyContext = {};
|
PathSet emptyContext = {};
|
||||||
flake.config.settings.emplace(
|
flake.config.settings.emplace(
|
||||||
state.symbols[setting.name],
|
state.symbols[setting.name],
|
||||||
state.coerceToString(setting.pos, *setting.value, emptyContext, false, true, true, "") .toOwned());
|
state.coerceToString(setting.pos, *setting.value, emptyContext, "", false, true, true) .toOwned());
|
||||||
}
|
}
|
||||||
else if (setting.value->type() == nInt)
|
else if (setting.value->type() == nInt)
|
||||||
flake.config.settings.emplace(
|
flake.config.settings.emplace(
|
||||||
|
|
|
@ -238,15 +238,15 @@ std::pair<fetchers::Tree, FlakeRef> FlakeRef::fetchTree(ref<Store> store) const
|
||||||
return {std::move(tree), FlakeRef(std::move(lockedInput), subdir)};
|
return {std::move(tree), FlakeRef(std::move(lockedInput), subdir)};
|
||||||
}
|
}
|
||||||
|
|
||||||
std::tuple<FlakeRef, std::string, OutputsSpec> parseFlakeRefWithFragmentAndOutputsSpec(
|
std::tuple<FlakeRef, std::string, ExtendedOutputsSpec> parseFlakeRefWithFragmentAndExtendedOutputsSpec(
|
||||||
const std::string & url,
|
const std::string & url,
|
||||||
const std::optional<Path> & baseDir,
|
const std::optional<Path> & baseDir,
|
||||||
bool allowMissing,
|
bool allowMissing,
|
||||||
bool isFlake)
|
bool isFlake)
|
||||||
{
|
{
|
||||||
auto [prefix, outputsSpec] = parseOutputsSpec(url);
|
auto [prefix, extendedOutputsSpec] = ExtendedOutputsSpec::parse(url);
|
||||||
auto [flakeRef, fragment] = parseFlakeRefWithFragment(prefix, baseDir, allowMissing, isFlake);
|
auto [flakeRef, fragment] = parseFlakeRefWithFragment(std::string { prefix }, baseDir, allowMissing, isFlake);
|
||||||
return {std::move(flakeRef), fragment, outputsSpec};
|
return {std::move(flakeRef), fragment, extendedOutputsSpec};
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -3,7 +3,7 @@
|
||||||
#include "types.hh"
|
#include "types.hh"
|
||||||
#include "hash.hh"
|
#include "hash.hh"
|
||||||
#include "fetchers.hh"
|
#include "fetchers.hh"
|
||||||
#include "path-with-outputs.hh"
|
#include "outputs-spec.hh"
|
||||||
|
|
||||||
#include <variant>
|
#include <variant>
|
||||||
|
|
||||||
|
@ -80,7 +80,7 @@ std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
|
||||||
std::optional<std::pair<FlakeRef, std::string>> maybeParseFlakeRefWithFragment(
|
std::optional<std::pair<FlakeRef, std::string>> maybeParseFlakeRefWithFragment(
|
||||||
const std::string & url, const std::optional<Path> & baseDir = {});
|
const std::string & url, const std::optional<Path> & baseDir = {});
|
||||||
|
|
||||||
std::tuple<FlakeRef, std::string, OutputsSpec> parseFlakeRefWithFragmentAndOutputsSpec(
|
std::tuple<FlakeRef, std::string, ExtendedOutputsSpec> parseFlakeRefWithFragmentAndExtendedOutputsSpec(
|
||||||
const std::string & url,
|
const std::string & url,
|
||||||
const std::optional<Path> & baseDir = {},
|
const std::optional<Path> & baseDir = {},
|
||||||
bool allowMissing = false,
|
bool allowMissing = false,
|
||||||
|
|
|
@ -6,6 +6,7 @@ libexpr_DIR := $(d)
|
||||||
|
|
||||||
libexpr_SOURCES := \
|
libexpr_SOURCES := \
|
||||||
$(wildcard $(d)/*.cc) \
|
$(wildcard $(d)/*.cc) \
|
||||||
|
$(wildcard $(d)/value/*.cc) \
|
||||||
$(wildcard $(d)/primops/*.cc) \
|
$(wildcard $(d)/primops/*.cc) \
|
||||||
$(wildcard $(d)/flake/*.cc) \
|
$(wildcard $(d)/flake/*.cc) \
|
||||||
$(d)/lexer-tab.cc \
|
$(d)/lexer-tab.cc \
|
||||||
|
@ -37,6 +38,8 @@ clean-files += $(d)/parser-tab.cc $(d)/parser-tab.hh $(d)/lexer-tab.cc $(d)/lexe
|
||||||
|
|
||||||
$(eval $(call install-file-in, $(d)/nix-expr.pc, $(libdir)/pkgconfig, 0644))
|
$(eval $(call install-file-in, $(d)/nix-expr.pc, $(libdir)/pkgconfig, 0644))
|
||||||
|
|
||||||
|
$(foreach i, $(wildcard src/libexpr/value/*.hh), \
|
||||||
|
$(eval $(call install-file-in, $(i), $(includedir)/nix/value, 0644)))
|
||||||
$(foreach i, $(wildcard src/libexpr/flake/*.hh), \
|
$(foreach i, $(wildcard src/libexpr/flake/*.hh), \
|
||||||
$(eval $(call install-file-in, $(i), $(includedir)/nix/flake, 0644)))
|
$(eval $(call install-file-in, $(i), $(includedir)/nix/flake, 0644)))
|
||||||
|
|
||||||
|
|
|
@@ -43,16 +43,32 @@ StringMap EvalState::realiseContext(const PathSet & context)
 std::vector<DerivedPath::Built> drvs;
 StringMap res;

-for (auto & i : context) {
-auto [ctx, outputName] = decodeContext(*store, i);
-auto ctxS = store->printStorePath(ctx);
-if (!store->isValidPath(ctx))
-debugThrowLastTrace(InvalidPathError(store->printStorePath(ctx)));
-if (!outputName.empty() && ctx.isDerivation()) {
-drvs.push_back({ctx, {outputName}});
-} else {
-res.insert_or_assign(ctxS, ctxS);
-}
+for (auto & c_ : context) {
+auto ensureValid = [&](const StorePath & p) {
+if (!store->isValidPath(p))
+debugThrowLastTrace(InvalidPathError(store->printStorePath(p)));
+};
+auto c = NixStringContextElem::parse(*store, c_);
+std::visit(overloaded {
+[&](const NixStringContextElem::Built & b) {
+drvs.push_back(DerivedPath::Built {
+.drvPath = b.drvPath,
+.outputs = OutputsSpec::Names { b.output },
+});
+ensureValid(b.drvPath);
+},
+[&](const NixStringContextElem::Opaque & o) {
+auto ctxS = store->printStorePath(o.path);
+res.insert_or_assign(ctxS, ctxS);
+ensureValid(o.path);
+},
+[&](const NixStringContextElem::DrvDeep & d) {
+/* Treat same as Opaque */
+auto ctxS = store->printStorePath(d.drvPath);
+res.insert_or_assign(ctxS, ctxS);
+ensureValid(d.drvPath);
+},
+}, c.raw());
 }

 if (drvs.empty()) return {};

@@ -68,16 +84,12 @@ StringMap EvalState::realiseContext(const PathSet & context)
 store->buildPaths(buildReqs);

 /* Get all the output paths corresponding to the placeholders we had */
-for (auto & [drvPath, outputs] : drvs) {
-const auto outputPaths = store->queryDerivationOutputMap(drvPath);
-for (auto & outputName : outputs) {
-auto outputPath = get(outputPaths, outputName);
-if (!outputPath)
-debugThrowLastTrace(Error("derivation '%s' does not have an output named '%s'",
-store->printStorePath(drvPath), outputName));
+for (auto & drv : drvs) {
+auto outputs = resolveDerivedPath(*store, drv);
+for (auto & [outputName, outputPath] : outputs) {
 res.insert_or_assign(
-downstreamPlaceholder(*store, drvPath, outputName),
-store->printStorePath(*outputPath)
+downstreamPlaceholder(*store, drv.drvPath, outputName),
+store->printStorePath(outputPath)
 );
 }
 }
@@ -240,6 +252,7 @@ static RegisterPrimOp primop_scopedImport(RegisterPrimOp::Info {
 static RegisterPrimOp primop_import({
 .name = "import",
 .args = {"path"},
+// TODO turn "normal path values" into link below
 .doc = R"(
 Load, parse and return the Nix expression in the file *path*. If
 *path* is a directory, the file ` default.nix ` in that directory

@@ -253,7 +266,7 @@ static RegisterPrimOp primop_import({
 >
 > Unlike some languages, `import` is a regular function in Nix.
 > Paths using the angle bracket syntax (e.g., `import` *\<foo\>*)
-> are [normal path values](language-values.md).
+> are normal [path values](@docroot@/language/values.md#type-path).

 A Nix expression loaded by `import` must not contain any *free
 variables* (identifiers that are not defined in the Nix expression
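As a quick illustration of the `import` behaviour documented in the hunk above (the file `./foo.nix` is hypothetical; angle-bracket paths resolve through the search path / `NIX_PATH`):

```nix
let
  # assumes a hypothetical ./foo.nix containing: { greeting = "hello"; }
  foo = import ./foo.nix;
  # angle-bracket syntax: a normal path value looked up in the search path
  pkgs = import <nixpkgs> { };
in
  foo.greeting   # evaluates to "hello" under the assumption above
```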
@@ -337,26 +350,22 @@ void prim_exec(EvalState & state, const PosIdx pos, Value * * args, Value & v)
 auto elems = args[0]->listElems();
 auto count = args[0]->listSize();
 if (count == 0)
-state.debugThrowLastTrace(EvalError({
-.msg = hintfmt("at least one argument to 'exec' required"),
-.errPos = state.positions[pos]
-}));
+state.error("at least one argument to 'exec' required").atPos(pos).debugThrow<EvalError>();
 PathSet context;
-auto program = state.coerceToString(pos, *elems[0], context, false, false,
-"while evaluating the first element of the argument passed to builtins.exec").toOwned();
+auto program = state.coerceToString(pos, *elems[0], context,
+"while evaluating the first element of the argument passed to builtins.exec",
+false, false).toOwned();
 Strings commandArgs;
 for (unsigned int i = 1; i < args[0]->listSize(); ++i) {
-commandArgs.push_back(state.coerceToString(pos, *elems[i], context, false, false,
-"while evaluating an element of the argument passed to builtins.exec").toOwned());
+commandArgs.push_back(
+state.coerceToString(pos, *elems[i], context,
+"while evaluating an element of the argument passed to builtins.exec",
+false, false).toOwned());
 }
 try {
 auto _ = state.realiseContext(context); // FIXME: Handle CA derivations
 } catch (InvalidPathError & e) {
-state.debugThrowLastTrace(EvalError({
-.msg = hintfmt("cannot execute '%1%', since path '%2%' is not valid",
-program, e.path),
-.errPos = state.positions[pos]
-}));
+state.error("cannot execute '%1%', since path '%2%' is not valid", program, e.path).atPos(pos).debugThrow<EvalError>();
 }

 auto output = runProgram(program, true, commandArgs);
@@ -585,7 +594,8 @@ struct CompareValues
 state.error("cannot compare %s with %s; values of that type are incomparable", showType(*v1), showType(*v2)).debugThrow<EvalError>();
 }
 } catch (Error & e) {
-e.addTrace(nullptr, errorCtx);
+if (!errorCtx.empty())
+e.addTrace(nullptr, errorCtx);
 throw;
 }
 }

@@ -607,15 +617,7 @@ static Bindings::iterator getAttr(
 {
 Bindings::iterator value = attrSet->find(attrSym);
 if (value == attrSet->end()) {
-throw TypeError({
-.msg = hintfmt("attribute '%s' missing %s", state.symbols[attrSym], normaltxt(errorCtx)),
-.errPos = state.positions[attrSet->pos],
-});
-// TODO XXX
-// Adding another trace for the function name to make it clear
-// which call received wrong arguments.
-//e.addTrace(state.positions[pos], hintfmt("while invoking '%s'", funcName));
-//state.debugThrowLastTrace(e);
+state.error("attribute '%s' missing", state.symbols[attrSym]).withTrace(noPos, errorCtx).debugThrow<TypeError>();
 }
 return value;
 }

@@ -788,8 +790,10 @@ static void prim_addErrorContext(EvalState & state, const PosIdx pos, Value * *
 v = *args[1];
 } catch (Error & e) {
 PathSet context;
-e.addTrace(nullptr, state.coerceToString(pos, *args[0], context,
-"while evaluating the error message passed to builtins.addErrorContext").toOwned());
+auto message = state.coerceToString(pos, *args[0], context,
+"while evaluating the error message passed to builtins.addErrorContext",
+false, false).toOwned();
+e.addTrace(nullptr, message, true);
 throw;
 }
 }
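A minimal sketch of how the reworked `builtins.addErrorContext` call above behaves at the Nix level: the coerced message is attached as a trace if the second argument fails to evaluate (behaviour inferred from this hunk, not a full spec):

```nix
# If the throw fires, "while checking the example value" should appear
# as a trace on the resulting error; a successful value passes through unchanged.
builtins.addErrorContext "while checking the example value"
  (builtins.throw "boom")
```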
@@ -993,6 +997,7 @@ static void prim_second(EvalState & state, const PosIdx pos, Value * * args, Val
 * Derivations
 *************************************************************/

+static void derivationStrictInternal(EvalState & state, const std::string & name, Bindings * attrs, Value & v);

 /* Construct (as a unobservable side effect) a Nix derivation
 expression that performs the derivation described by the argument

@@ -1003,32 +1008,68 @@ static void prim_second(EvalState & state, const PosIdx pos, Value * * args, Val
 derivation. */
 static void prim_derivationStrict(EvalState & state, const PosIdx pos, Value * * args, Value & v)
 {
-using nlohmann::json;
 state.forceAttrs(*args[0], pos, "while evaluating the argument passed to builtins.derivationStrict");

+Bindings * attrs = args[0]->attrs;

 /* Figure out the name first (for stack backtraces). */
-Bindings::iterator attr = getAttr(state, state.sName, args[0]->attrs, "in the attrset passed as argument to builtins.derivationStrict");
+Bindings::iterator nameAttr = getAttr(state, state.sName, attrs, "in the attrset passed as argument to builtins.derivationStrict");

 std::string drvName;
-const auto posDrvName = attr->pos;
 try {
-drvName = state.forceStringNoCtx(*attr->value, pos, "while evaluating the `name` attribute passed to builtins.derivationStrict");
+drvName = state.forceStringNoCtx(*nameAttr->value, pos, "while evaluating the `name` attribute passed to builtins.derivationStrict");
 } catch (Error & e) {
-e.addTrace(state.positions[posDrvName], "while evaluating the derivation attribute 'name'");
+e.addTrace(state.positions[nameAttr->pos], "while evaluating the derivation attribute 'name'");
 throw;
 }

+try {
+derivationStrictInternal(state, drvName, attrs, v);
+} catch (Error & e) {
+Pos pos = state.positions[nameAttr->pos];
+/*
+* Here we make two abuses of the error system
+*
+* 1. We print the location as a string to avoid a code snippet being
+* printed. While the location of the name attribute is a good hint, the
+* exact code there is irrelevant.
+*
+* 2. We mark this trace as a frame trace, meaning that we stop printing
+* less important traces from now on. In particular, this prevents the
+* display of the automatic "while calling builtins.derivationStrict"
+* trace, which is of little use for the public we target here.
+*
+* Please keep in mind that error reporting is done on a best-effort
+* basis in nix. There is no accurate location for a derivation, as it
+* often results from the composition of several functions
+* (derivationStrict, derivation, mkDerivation, mkPythonModule, etc.)
+*/
+e.addTrace(nullptr, hintfmt(
+"while evaluating derivation '%s'\n"
+" whose name attribute is located at %s",
+drvName, pos), true);
+throw;
+}
+}

+static void derivationStrictInternal(EvalState & state, const std::string &
+drvName, Bindings * attrs, Value & v)
+{
 /* Check whether attributes should be passed as a JSON file. */
+using nlohmann::json;
 std::optional<json> jsonObject;
-attr = args[0]->attrs->find(state.sStructuredAttrs);
-if (attr != args[0]->attrs->end() && state.forceBool(*attr->value, pos, "while evaluating the `__structuredAttrs` attribute passed to builtins.derivationStrict"))
+auto attr = attrs->find(state.sStructuredAttrs);
+if (attr != attrs->end() &&
+state.forceBool(*attr->value, noPos,
+"while evaluating the `__structuredAttrs` "
+"attribute passed to builtins.derivationStrict"))
 jsonObject = json::object();

 /* Check whether null attributes should be ignored. */
 bool ignoreNulls = false;
-attr = args[0]->attrs->find(state.sIgnoreNulls);
-if (attr != args[0]->attrs->end())
-ignoreNulls = state.forceBool(*attr->value, pos, "while evaluating the `__ignoreNulls` attribute passed to builtins.derivationStrict");
+attr = attrs->find(state.sIgnoreNulls);
+if (attr != attrs->end())
+ignoreNulls = state.forceBool(*attr->value, noPos, "while evaluating the `__ignoreNulls` attribute " "passed to builtins.derivationStrict");

 /* Build the derivation expression by processing the attributes. */
 Derivation drv;
@@ -1045,7 +1086,7 @@ static void prim_derivationStrict(EvalState & state, const PosIdx pos, Value * *
 StringSet outputs;
 outputs.insert("out");

-for (auto & i : args[0]->attrs->lexicographicOrder(state.symbols)) {
+for (auto & i : attrs->lexicographicOrder(state.symbols)) {
 if (i->name == state.sIgnoreNulls) continue;
 const std::string & key = state.symbols[i->name];
 vomit("processing attribute '%1%'", key);

@@ -1056,7 +1097,7 @@ static void prim_derivationStrict(EvalState & state, const PosIdx pos, Value * *
 else
 state.debugThrowLastTrace(EvalError({
 .msg = hintfmt("invalid value '%s' for 'outputHashMode' attribute", s),
-.errPos = state.positions[posDrvName]
+.errPos = state.positions[noPos]
 }));
 };

@@ -1066,7 +1107,7 @@ static void prim_derivationStrict(EvalState & state, const PosIdx pos, Value * *
 if (outputs.find(j) != outputs.end())
 state.debugThrowLastTrace(EvalError({
 .msg = hintfmt("duplicate derivation output '%1%'", j),
-.errPos = state.positions[posDrvName]
+.errPos = state.positions[noPos]
 }));
 /* !!! Check whether j is a valid attribute
 name. */

@@ -1076,34 +1117,35 @@ static void prim_derivationStrict(EvalState & state, const PosIdx pos, Value * *
 if (j == "drv")
 state.debugThrowLastTrace(EvalError({
 .msg = hintfmt("invalid derivation output name 'drv'" ),
-.errPos = state.positions[posDrvName]
+.errPos = state.positions[noPos]
 }));
 outputs.insert(j);
 }
 if (outputs.empty())
 state.debugThrowLastTrace(EvalError({
 .msg = hintfmt("derivation cannot have an empty set of outputs"),
-.errPos = state.positions[posDrvName]
+.errPos = state.positions[noPos]
 }));
 };

 try {
+// This try-catch block adds context for most errors.
+// Use this empty error context to signify that we defer to it.
+const std::string_view context_below("");

 if (ignoreNulls) {
-state.forceValue(*i->value, pos);
+state.forceValue(*i->value, noPos);
 if (i->value->type() == nNull) continue;
 }

 if (i->name == state.sContentAddressed) {
-contentAddressed = state.forceBool(*i->value, pos,
-"while evaluating the `__contentAddressed` attribute passed to builtins.derivationStrict");
+contentAddressed = state.forceBool(*i->value, noPos, context_below);
 if (contentAddressed)
 settings.requireExperimentalFeature(Xp::CaDerivations);
 }

 else if (i->name == state.sImpure) {
-isImpure = state.forceBool(*i->value, pos,
-"while evaluating the 'impure' attribute passed to builtins.derivationStrict");
+isImpure = state.forceBool(*i->value, noPos, context_below);
 if (isImpure)
 settings.requireExperimentalFeature(Xp::ImpureDerivations);
 }
@@ -1111,11 +1153,11 @@ static void prim_derivationStrict(EvalState & state, const PosIdx pos, Value * *
 /* The `args' attribute is special: it supplies the
 command-line arguments to the builder. */
 else if (i->name == state.sArgs) {
-state.forceList(*i->value, pos,
-"while evaluating the `args` attribute passed to builtins.derivationStrict");
+state.forceList(*i->value, noPos, context_below);
 for (auto elem : i->value->listItems()) {
-auto s = state.coerceToString(posDrvName, *elem, context, true,
-"while evaluating an element of the `args` argument passed to builtins.derivationStrict").toOwned();
+auto s = state.coerceToString(noPos, *elem, context,
+"while evaluating an element of the argument list",
+true).toOwned();
 drv.args.push_back(s);
 }
 }

@@ -1128,29 +1170,29 @@ static void prim_derivationStrict(EvalState & state, const PosIdx pos, Value * *

 if (i->name == state.sStructuredAttrs) continue;

-(*jsonObject)[key] = printValueAsJSON(state, true, *i->value, pos, context);
+(*jsonObject)[key] = printValueAsJSON(state, true, *i->value, noPos, context);

 if (i->name == state.sBuilder)
-drv.builder = state.forceString(*i->value, context, posDrvName, "while evaluating the `builder` attribute passed to builtins.derivationStrict");
+drv.builder = state.forceString(*i->value, context, noPos, context_below);
 else if (i->name == state.sSystem)
-drv.platform = state.forceStringNoCtx(*i->value, posDrvName, "while evaluating the `system` attribute passed to builtins.derivationStrict");
+drv.platform = state.forceStringNoCtx(*i->value, noPos, context_below);
 else if (i->name == state.sOutputHash)
-outputHash = state.forceStringNoCtx(*i->value, posDrvName, "while evaluating the `outputHash` attribute passed to builtins.derivationStrict");
+outputHash = state.forceStringNoCtx(*i->value, noPos, context_below);
 else if (i->name == state.sOutputHashAlgo)
-outputHashAlgo = state.forceStringNoCtx(*i->value, posDrvName, "while evaluating the `outputHashAlgo` attribute passed to builtins.derivationStrict");
+outputHashAlgo = state.forceStringNoCtx(*i->value, noPos, context_below);
 else if (i->name == state.sOutputHashMode)
-handleHashMode(state.forceStringNoCtx(*i->value, posDrvName, "while evaluating the `outputHashMode` attribute passed to builtins.derivationStrict"));
+handleHashMode(state.forceStringNoCtx(*i->value, noPos, context_below));
 else if (i->name == state.sOutputs) {
 /* Require ‘outputs’ to be a list of strings. */
-state.forceList(*i->value, posDrvName, "while evaluating the `outputs` attribute passed to builtins.derivationStrict");
+state.forceList(*i->value, noPos, context_below);
 Strings ss;
 for (auto elem : i->value->listItems())
-ss.emplace_back(state.forceStringNoCtx(*elem, posDrvName, "while evaluating an element of the `outputs` attribute passed to builtins.derivationStrict"));
+ss.emplace_back(state.forceStringNoCtx(*elem, noPos, context_below));
 handleOutputs(ss);
 }

 } else {
-auto s = state.coerceToString(i->pos, *i->value, context, true, "while evaluating an attribute passed to builtins.derivationStrict").toOwned();
+auto s = state.coerceToString(noPos, *i->value, context, context_below, true).toOwned();
 drv.env.emplace(key, s);
 if (i->name == state.sBuilder) drv.builder = std::move(s);
 else if (i->name == state.sSystem) drv.platform = std::move(s);

@@ -1164,8 +1206,8 @@ static void prim_derivationStrict(EvalState & state, const PosIdx pos, Value * *
 }

 } catch (Error & e) {
-e.addTrace(nullptr,
-hintfmt("while evaluating the attribute '%1%' of the derivation '%2%'", key, drvName),
+e.addTrace(state.positions[i->pos],
+hintfmt("while evaluating attribute '%1%' of derivation '%2%'", key, drvName),
 true);
 throw;
 }
@@ -1179,55 +1221,51 @@ static void prim_derivationStrict(EvalState & state, const PosIdx pos, Value * *
 /* Everything in the context of the strings in the derivation
 attributes should be added as dependencies of the resulting
 derivation. */
-for (auto & path : context) {
-/* Paths marked with `=' denote that the path of a derivation
-is explicitly passed to the builder. Since that allows the
-builder to gain access to every path in the dependency
-graph of the derivation (including all outputs), all paths
-in the graph must be added to this derivation's list of
-inputs to ensure that they are available when the builder
-runs. */
-if (path.at(0) == '=') {
-/* !!! This doesn't work if readOnlyMode is set. */
-StorePathSet refs;
-state.store->computeFSClosure(state.store->parseStorePath(std::string_view(path).substr(1)), refs);
-for (auto & j : refs) {
-drv.inputSrcs.insert(j);
-if (j.isDerivation())
-drv.inputDrvs[j] = state.store->readDerivation(j).outputNames();
-}
-}
-/* Handle derivation outputs of the form ‘!<name>!<path>’. */
-else if (path.at(0) == '!') {
-auto ctx = decodeContext(*state.store, path);
-drv.inputDrvs[ctx.first].insert(ctx.second);
-}
-/* Otherwise it's a source file. */
-else
-drv.inputSrcs.insert(state.store->parseStorePath(path));
+for (auto & c_ : context) {
+auto c = NixStringContextElem::parse(*state.store, c_);
+std::visit(overloaded {
+/* Since this allows the builder to gain access to every
+path in the dependency graph of the derivation (including
+all outputs), all paths in the graph must be added to
+this derivation's list of inputs to ensure that they are
+available when the builder runs. */
+[&](const NixStringContextElem::DrvDeep & d) {
+/* !!! This doesn't work if readOnlyMode is set. */
+StorePathSet refs;
+state.store->computeFSClosure(d.drvPath, refs);
+for (auto & j : refs) {
+drv.inputSrcs.insert(j);
+if (j.isDerivation())
+drv.inputDrvs[j] = state.store->readDerivation(j).outputNames();
+}
+},
+[&](const NixStringContextElem::Built & b) {
+drv.inputDrvs[b.drvPath].insert(b.output);
+},
+[&](const NixStringContextElem::Opaque & o) {
+drv.inputSrcs.insert(o.path);
+},
+}, c.raw());
 }

 /* Do we have all required attributes? */
 if (drv.builder == "")
 state.debugThrowLastTrace(EvalError({
 .msg = hintfmt("required attribute 'builder' missing"),
-.errPos = state.positions[posDrvName]
+.errPos = state.positions[noPos]
 }));

 if (drv.platform == "")
 state.debugThrowLastTrace(EvalError({
 .msg = hintfmt("required attribute 'system' missing"),
-.errPos = state.positions[posDrvName]
+.errPos = state.positions[noPos]
 }));

 /* Check whether the derivation name is valid. */
 if (isDerivation(drvName))
 state.debugThrowLastTrace(EvalError({
 .msg = hintfmt("derivation names are not allowed to end in '%s'", drvExtension),
-.errPos = state.positions[posDrvName]
+.errPos = state.positions[noPos]
 }));

 if (outputHash) {
@@ -1238,7 +1276,7 @@ static void prim_derivationStrict(EvalState & state, const PosIdx pos, Value * *
 if (outputs.size() != 1 || *(outputs.begin()) != "out")
 state.debugThrowLastTrace(Error({
 .msg = hintfmt("multiple outputs are not supported in fixed-output derivations"),
-.errPos = state.positions[posDrvName]
+.errPos = state.positions[noPos]
 }));

 auto h = newHashAllowEmpty(*outputHash, parseHashTypeOpt(outputHashAlgo));

@@ -1259,7 +1297,7 @@ static void prim_derivationStrict(EvalState & state, const PosIdx pos, Value * *
 if (contentAddressed && isImpure)
 throw EvalError({
 .msg = hintfmt("derivation cannot be both content-addressed and impure"),
-.errPos = state.positions[posDrvName]
+.errPos = state.positions[noPos]
 });

 auto ht = parseHashTypeOpt(outputHashAlgo).value_or(htSHA256);

@@ -1303,7 +1341,7 @@ static void prim_derivationStrict(EvalState & state, const PosIdx pos, Value * *
 if (!h)
 throw AssertionError({
 .msg = hintfmt("derivation produced no hash for output '%s'", i),
-.errPos = state.positions[posDrvName],
+.errPos = state.positions[noPos],
 });
 auto outPath = state.store->makeOutputPath(i, *h, drvName);
 drv.env[i] = state.store->printStorePath(outPath);

@@ -1336,11 +1374,12 @@ static void prim_derivationStrict(EvalState & state, const PosIdx pos, Value * *
 drvHashes.lock()->insert_or_assign(drvPath, h);
 }

-auto attrs = state.buildBindings(1 + drv.outputs.size());
-attrs.alloc(state.sDrvPath).mkString(drvPathS, {"=" + drvPathS});
+auto result = state.buildBindings(1 + drv.outputs.size());
+result.alloc(state.sDrvPath).mkString(drvPathS, {"=" + drvPathS});
 for (auto & i : drv.outputs)
-mkOutputString(state, attrs, drvPath, drv, i);
-v.mkAttrs(attrs);
+mkOutputString(state, result, drvPath, drv, i);
+v.mkAttrs(result);
 }

 static RegisterPrimOp primop_derivationStrict(RegisterPrimOp::Info {
@@ -1483,7 +1522,9 @@ static RegisterPrimOp primop_pathExists({
 static void prim_baseNameOf(EvalState & state, const PosIdx pos, Value * * args, Value & v)
 {
 PathSet context;
-v.mkString(baseNameOf(*state.coerceToString(pos, *args[0], context, false, false, "while evaluating the first argument passed to builtins.baseNameOf")), context);
+v.mkString(baseNameOf(*state.coerceToString(pos, *args[0], context,
+"while evaluating the first argument passed to builtins.baseNameOf",
+false, false)), context);
 }

 static RegisterPrimOp primop_baseNameOf({

@@ -1503,7 +1544,9 @@ static RegisterPrimOp primop_baseNameOf({
 static void prim_dirOf(EvalState & state, const PosIdx pos, Value * * args, Value & v)
 {
 PathSet context;
-auto path = state.coerceToString(pos, *args[0], context, false, false, "while evaluating the first argument passed to builtins.dirOf");
+auto path = state.coerceToString(pos, *args[0], context,
+"while evaluating the first argument passed to builtins.dirOf",
+false, false);
 auto dir = dirOf(*path);
 if (args[0]->type() == nPath) v.mkPath(dir); else v.mkString(dir, context);
 }

@@ -1569,8 +1612,9 @@ static void prim_findFile(EvalState & state, const PosIdx pos, Value * * args, V
 i = getAttr(state, state.sPath, v2->attrs, "in an element of the __nixPath");

 PathSet context;
-auto path = state.coerceToString(pos, *i->value, context, false, false,
-"while evaluating the `path` attribute of an element of the list passed to builtins.findFile").toOwned();
+auto path = state.coerceToString(pos, *i->value, context,
+"while evaluating the `path` attribute of an element of the list passed to builtins.findFile",
+false, false).toOwned();

 try {
 auto rewrites = state.realiseContext(context);
@@ -1623,23 +1667,73 @@ static RegisterPrimOp primop_hashFile({
 .fun = prim_hashFile,
 });

+/* Stringize a directory entry enum. Used by `readFileType' and `readDir'. */
+static const char * dirEntTypeToString(unsigned char dtType)
+{
+/* Enum DT_(DIR|LNK|REG|UNKNOWN) */
+switch(dtType) {
+case DT_REG: return "regular"; break;
+case DT_DIR: return "directory"; break;
+case DT_LNK: return "symlink"; break;
+default: return "unknown"; break;
+}
+return "unknown"; /* Unreachable */
+}
+
+static void prim_readFileType(EvalState & state, const PosIdx pos, Value * * args, Value & v)
+{
+auto path = realisePath(state, pos, *args[0]);
+/* Retrieve the directory entry type and stringize it. */
+v.mkString(dirEntTypeToString(getFileType(path)));
+}
+
+static RegisterPrimOp primop_readFileType({
+.name = "__readFileType",
+.args = {"p"},
+.doc = R"(
+Determine the directory entry type of a filesystem node, being
+one of "directory", "regular", "symlink", or "unknown".
+)",
+.fun = prim_readFileType,
+});

 /* Read a directory (without . or ..) */
 static void prim_readDir(EvalState & state, const PosIdx pos, Value * * args, Value & v)
 {
 auto path = realisePath(state, pos, *args[0]);

+// Retrieve directory entries for all nodes in a directory.
+// This is similar to `getFileType` but is optimized to reduce system calls
+// on many systems.
 DirEntries entries = readDirectory(path);

 auto attrs = state.buildBindings(entries.size());

+// If we hit unknown directory entry types we may need to fallback to
+// using `getFileType` on some systems.
+// In order to reduce system calls we make each lookup lazy by using
+// `builtins.readFileType` application.
+Value * readFileType = nullptr;

 for (auto & ent : entries) {
-if (ent.type == DT_UNKNOWN)
-ent.type = getFileType(path + "/" + ent.name);
-attrs.alloc(ent.name).mkString(
-ent.type == DT_REG ? "regular" :
-ent.type == DT_DIR ? "directory" :
-ent.type == DT_LNK ? "symlink" :
-"unknown");
+auto & attr = attrs.alloc(ent.name);
+if (ent.type == DT_UNKNOWN) {
+// Some filesystems or operating systems may not be able to return
+// detailed node info quickly in this case we produce a thunk to
+// query the file type lazily.
+auto epath = state.allocValue();
+Path path2 = path + "/" + ent.name;
+epath->mkString(path2);
+if (!readFileType)
+readFileType = &state.getBuiltin("readFileType");
+attr.mkApp(readFileType, epath);
+} else {
+// This branch of the conditional is much more likely.
+// Here we just stringize the directory entry type.
+attr.mkString(dirEntTypeToString(ent.type));
+}
 }

 v.mkAttrs(attrs);
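A small usage sketch of the lazy pairing of `readDir` and the new `readFileType` primop added above; the paths are hypothetical, and `__readFileType` is reached as `builtins.readFileType`:

```nix
{
  # e.g. { "default.nix" = "regular"; "src" = "directory"; ... };
  # entries of unknown type are now thunks applying builtins.readFileType
  entries = builtins.readDir ./.;

  # e.g. "regular" (assuming a ./default.nix exists)
  kind = builtins.readFileType ./default.nix;
}
```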
@@ -1872,8 +1966,7 @@ static RegisterPrimOp primop_toFile({
 path. The file has suffix *name*. This file can be used as an
 input to derivations. One application is to write builders
 “inline”. For instance, the following Nix expression combines the
-[Nix expression for GNU Hello](expression-syntax.md) and its
-[build script](build-script.md) into one file:
+Nix expression for GNU Hello and its build script into one file:

 ```nix
 { stdenv, fetchurl, perl }:

@@ -1917,7 +2010,7 @@ static RegisterPrimOp primop_toFile({
 ```

 Note that `${configFile}` is a
-[string interpolation](language/values.md#type-string), so the result of the
+[string interpolation](@docroot@/language/values.md#type-string), so the result of the
 expression `configFile`
 (i.e., a path like `/nix/store/m7p7jfny445k...-foo.conf`) will be
 spliced into the resulting string.
@@ -2614,14 +2707,9 @@ static void prim_zipAttrsWith(EvalState & state, const PosIdx pos, Value * * arg

 for (unsigned int n = 0; n < listSize; ++n) {
 Value * vElem = listElems[n];
-try {
-state.forceAttrs(*vElem, noPos, "while evaluating a value of the list passed as second argument to builtins.zipAttrsWith");
-for (auto & attr : *vElem->attrs)
-attrsSeen[attr.name].first++;
-} catch (TypeError & e) {
-e.addTrace(state.positions[pos], hintfmt("while invoking '%s'", "zipAttrsWith"));
-state.debugThrowLastTrace(e);
-}
+state.forceAttrs(*vElem, noPos, "while evaluating a value of the list passed as second argument to builtins.zipAttrsWith");
+for (auto & attr : *vElem->attrs)
+attrsSeen[attr.name].first++;
 }

 auto attrs = state.buildBindings(attrsSeen.size());

@@ -2807,7 +2895,7 @@ static RegisterPrimOp primop_map({
 example,

 ```nix
-map (x"foo" + x) [ "bar" "bla" "abc" ]
+map (x: "foo" + x) [ "bar" "bla" "abc" ]
 ```

 evaluates to `[ "foobar" "foobla" "fooabc" ]`.
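For orientation, a hedged Nix-level example of the `zipAttrsWith` call site simplified above; the expected result follows the documented semantics (one call per attribute name with the values collected in list order):

```nix
builtins.zipAttrsWith (name: values: values) [ { a = 1; } { a = 2; b = 3; } ]
# expected: { a = [ 1 2 ]; b = [ 3 ]; }
```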
@@ -3005,13 +3093,13 @@ static void prim_genList(EvalState & state, const PosIdx pos, Value * * args, Va
 auto len = state.forceInt(*args[1], pos, "while evaluating the second argument passed to builtins.genList");

 if (len < 0)
-state.debugThrowLastTrace(EvalError({
-.msg = hintfmt("cannot create list of size %1%", len),
-.errPos = state.positions[pos]
-}));
+state.error("cannot create list of size %1%", len).debugThrow<EvalError>();
+// More strict than striclty (!) necessary, but acceptable
+// as evaluating map without accessing any values makes little sense.
+state.forceFunction(*args[0], noPos, "while evaluating the first argument passed to builtins.genList");

 state.mkList(v, len);

 for (unsigned int n = 0; n < (unsigned int) len; ++n) {
 auto arg = state.allocValue();
 arg->mkInt(n);

@@ -3059,6 +3147,8 @@ static void prim_sort(EvalState & state, const PosIdx pos, Value * * args, Value
 auto comparator = [&](Value * a, Value * b) {
 /* Optimization: if the comparator is lessThan, bypass
 callFunction. */
+/* TODO: (layus) this is absurd. An optimisation like this
+should be outside the lambda creation */
 if (args[0]->isPrimOp() && args[0]->primOp->fun == prim_lessThan)
 return CompareValues(state, noPos, "while evaluating the ordering function passed to builtins.sort")(a, b);
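A quick sanity check of `genList`, whose function argument is now forced up front by the hunk above even when no list element is ever accessed:

```nix
builtins.genList (n: n * n) 5
# evaluates to [ 0 1 4 9 16 ]
```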
@@ -3219,12 +3309,7 @@ static void prim_concatMap(EvalState & state, const PosIdx pos, Value * * args,
 for (unsigned int n = 0; n < nrLists; ++n) {
 Value * vElem = args[1]->listElems()[n];
 state.callFunction(*args[0], *vElem, lists[n], pos);
-try {
-state.forceList(lists[n], lists[n].determinePos(args[0]->determinePos(pos)), "while evaluating the return value of the function passed to buitlins.concatMap");
-} catch (TypeError &e) {
-e.addTrace(state.positions[pos], hintfmt("while invoking '%s'", "concatMap"));
-state.debugThrowLastTrace(e);
-}
+state.forceList(lists[n], lists[n].determinePos(args[0]->determinePos(pos)), "while evaluating the return value of the function passed to buitlins.concatMap");
 len += lists[n].listSize();
 }

@@ -3404,7 +3489,7 @@ static void prim_lessThan(EvalState & state, const PosIdx pos, Value * * args, V
 state.forceValue(*args[0], pos);
 state.forceValue(*args[1], pos);
 // pos is exact here, no need for a message.
-CompareValues comp(state, pos, "");
+CompareValues comp(state, noPos, "");
 v.mkBool(comp(args[0], args[1]));
 }

@@ -3431,7 +3516,9 @@ static RegisterPrimOp primop_lessThan({
 static void prim_toString(EvalState & state, const PosIdx pos, Value * * args, Value & v)
 {
 PathSet context;
-auto s = state.coerceToString(pos, *args[0], context, true, false, "while evaluating the first argument passed to builtins.toString");
+auto s = state.coerceToString(pos, *args[0], context,
+"while evaluating the first argument passed to builtins.toString",
+true, false);
 v.mkString(*s, context);
 }
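For reference, typical coercions performed by `builtins.toString`; only the position of the error-context argument changed above, not the semantics (the path is hypothetical):

```nix
{
  list = builtins.toString [ 1 2 3 ];   # "1 2 3"
  bool = builtins.toString false;       # "" (true coerces to "1")
  path = builtins.toString ./config;    # the absolute path as a string, no store copy
}
```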
@@ -3783,21 +3870,18 @@ static void prim_replaceStrings(EvalState & state, const PosIdx pos, Value * * a
 state.forceList(*args[0], pos, "while evaluating the first argument passed to builtins.replaceStrings");
 state.forceList(*args[1], pos, "while evaluating the second argument passed to builtins.replaceStrings");
 if (args[0]->listSize() != args[1]->listSize())
-state.debugThrowLastTrace(EvalError({
-.msg = hintfmt("'from' and 'to' arguments to 'replaceStrings' have different lengths"),
-.errPos = state.positions[pos]
-}));
+state.error("'from' and 'to' arguments passed to builtins.replaceStrings have different lengths").atPos(pos).debugThrow<EvalError>();

 std::vector<std::string> from;
 from.reserve(args[0]->listSize());
 for (auto elem : args[0]->listItems())
-from.emplace_back(state.forceString(*elem, pos, "while evaluating one of the strings to replace in builtins.replaceStrings"));
+from.emplace_back(state.forceString(*elem, pos, "while evaluating one of the strings to replace passed to builtins.replaceStrings"));

 std::vector<std::pair<std::string, PathSet>> to;
 to.reserve(args[1]->listSize());
 for (auto elem : args[1]->listItems()) {
 PathSet ctx;
-auto s = state.forceString(*elem, ctx, pos, "while evaluating one of the replacement strings of builtins.replaceStrings");
+auto s = state.forceString(*elem, ctx, pos, "while evaluating one of the replacement strings passed to builtins.replaceStrings");
 to.emplace_back(s, std::move(ctx));
 }
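A minimal example of the `replaceStrings` contract whose error message was reworded above; `from` and `to` must have equal length:

```nix
builtins.replaceStrings [ "oo" "a" ] [ "a" "i" ] "foobar"
# evaluates to "fabir"; mismatched list lengths now raise the
# "have different lengths" error shown in the hunk above
```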
|
@ -37,8 +37,15 @@ static void prim_unsafeDiscardOutputDependency(EvalState & state, const PosIdx p
|
||||||
auto s = state.coerceToString(pos, *args[0], context, "while evaluating the argument passed to builtins.unsafeDiscardOutputDependency");
|
auto s = state.coerceToString(pos, *args[0], context, "while evaluating the argument passed to builtins.unsafeDiscardOutputDependency");
|
||||||
|
|
||||||
PathSet context2;
|
PathSet context2;
|
||||||
for (auto & p : context)
|
for (auto && p : context) {
|
||||||
context2.insert(p.at(0) == '=' ? std::string(p, 1) : p);
|
auto c = NixStringContextElem::parse(*state.store, p);
|
||||||
|
if (auto * ptr = std::get_if<NixStringContextElem::DrvDeep>(&c)) {
|
||||||
|
context2.emplace(state.store->printStorePath(ptr->drvPath));
|
||||||
|
} else {
|
||||||
|
/* Can reuse original item */
|
||||||
|
context2.emplace(std::move(p));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
v.mkString(*s, context2);
|
v.mkString(*s, context2);
|
||||||
}
|
}
|
||||||
|
@@ -74,34 +81,20 @@ static void prim_getContext(EvalState & state, const PosIdx pos, Value * * args,
 };
 PathSet context;
 state.forceString(*args[0], context, pos, "while evaluating the argument passed to builtins.getContext");
-auto contextInfos = std::map<Path, ContextInfo>();
+auto contextInfos = std::map<StorePath, ContextInfo>();
 for (const auto & p : context) {
-Path drv;
-std::string output;
-const Path * path = &p;
-if (p.at(0) == '=') {
-drv = std::string(p, 1);
-path = &drv;
-} else if (p.at(0) == '!') {
-NixStringContextElem ctx = decodeContext(*state.store, p);
-drv = state.store->printStorePath(ctx.first);
-output = ctx.second;
-path = &drv;
-}
-auto isPath = drv.empty();
-auto isAllOutputs = (!drv.empty()) && output.empty();
-
-auto iter = contextInfos.find(*path);
-if (iter == contextInfos.end()) {
-contextInfos.emplace(*path, ContextInfo{isPath, isAllOutputs, output.empty() ? Strings{} : Strings{std::move(output)}});
-} else {
-if (isPath)
-iter->second.path = true;
-else if (isAllOutputs)
-iter->second.allOutputs = true;
-else
-iter->second.outputs.emplace_back(std::move(output));
-}
+NixStringContextElem ctx = NixStringContextElem::parse(*state.store, p);
+std::visit(overloaded {
+[&](NixStringContextElem::DrvDeep & d) {
+contextInfos[d.drvPath].allOutputs = true;
+},
+[&](NixStringContextElem::Built & b) {
+contextInfos[b.drvPath].outputs.emplace_back(std::move(b.output));
+},
+[&](NixStringContextElem::Opaque & o) {
+contextInfos[o.path].path = true;
+},
+}, ctx.raw());
 }

 auto attrs = state.buildBindings(contextInfos.size());

@@ -120,7 +113,7 @@ static void prim_getContext(EvalState & state, const PosIdx pos, Value * * args,
 for (const auto & [i, output] : enumerate(info.second.outputs))
 (outputsVal.listElems()[i] = state.allocValue())->mkString(output);
 }
-attrs.alloc(info.first).mkAttrs(infoAttrs);
+attrs.alloc(state.store->printStorePath(info.first)).mkAttrs(infoAttrs);
 }

 v.mkAttrs(attrs);
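A hedged sketch of the result shape `getContext` produces after the change above, now keyed by printed store paths; the derivation here is a made-up placeholder and the store path in the comment is only illustrative:

```nix
let
  drv = builtins.derivation {
    name = "example";
    system = "x86_64-linux";   # assumed system string
    builder = "/bin/sh";
  };
  # expected shape, roughly:
  #   { "/nix/store/<hash>-example.drv" = { outputs = [ "out" ]; }; }
  ctx = builtins.getContext "${drv}";
in
  ctx
```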
|
@ -22,7 +22,9 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a
|
||||||
for (auto & attr : *args[0]->attrs) {
|
for (auto & attr : *args[0]->attrs) {
|
||||||
std::string_view n(state.symbols[attr.name]);
|
std::string_view n(state.symbols[attr.name]);
|
||||||
if (n == "url")
|
if (n == "url")
|
||||||
url = state.coerceToString(attr.pos, *attr.value, context, false, false, "while evaluating the `url` attribute passed to builtins.fetchMercurial").toOwned();
|
url = state.coerceToString(attr.pos, *attr.value, context,
|
||||||
|
"while evaluating the `url` attribute passed to builtins.fetchMercurial",
|
||||||
|
false, false).toOwned();
|
||||||
else if (n == "rev") {
|
else if (n == "rev") {
|
||||||
// Ugly: unlike fetchGit, here the "rev" attribute can
|
// Ugly: unlike fetchGit, here the "rev" attribute can
|
||||||
// be both a revision or a branch/tag name.
|
// be both a revision or a branch/tag name.
|
||||||
|
@ -48,7 +50,9 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a
|
||||||
});
|
});
|
||||||
|
|
||||||
} else
|
} else
|
||||||
url = state.coerceToString(pos, *args[0], context, false, false, "while evaluating the first argument passed to builtins.fetchMercurial").toOwned();
|
url = state.coerceToString(pos, *args[0], context,
|
||||||
|
"while evaluating the first argument passed to builtins.fetchMercurial",
|
||||||
|
false, false).toOwned();
|
||||||
|
|
||||||
// FIXME: git externals probably can be used to bypass the URI
|
// FIXME: git externals probably can be used to bypass the URI
|
||||||
// whitelist. Ah well.
|
// whitelist. Ah well.
|
||||||
|
|
|
@ -125,7 +125,7 @@ static void fetchTree(
|
||||||
if (attr.name == state.sType) continue;
|
if (attr.name == state.sType) continue;
|
||||||
state.forceValue(*attr.value, attr.pos);
|
state.forceValue(*attr.value, attr.pos);
|
||||||
if (attr.value->type() == nPath || attr.value->type() == nString) {
|
if (attr.value->type() == nPath || attr.value->type() == nString) {
|
||||||
auto s = state.coerceToString(attr.pos, *attr.value, context, false, false, "").toOwned();
|
auto s = state.coerceToString(attr.pos, *attr.value, context, "", false, false).toOwned();
|
||||||
attrs.emplace(state.symbols[attr.name],
|
attrs.emplace(state.symbols[attr.name],
|
||||||
state.symbols[attr.name] == "url"
|
state.symbols[attr.name] == "url"
|
||||||
? type == "git"
|
? type == "git"
|
||||||
|
@ -151,7 +151,9 @@ static void fetchTree(
|
||||||
|
|
||||||
input = fetchers::Input::fromAttrs(std::move(attrs));
|
input = fetchers::Input::fromAttrs(std::move(attrs));
|
||||||
} else {
|
} else {
|
||||||
auto url = state.coerceToString(pos, *args[0], context, false, false, "while evaluating the first argument passed to the fetcher").toOwned();
|
auto url = state.coerceToString(pos, *args[0], context,
|
||||||
|
"while evaluating the first argument passed to the fetcher",
|
||||||
|
false, false).toOwned();
|
||||||
|
|
||||||
if (type == "git") {
|
if (type == "git") {
|
||||||
fetchers::Attrs attrs;
|
fetchers::Attrs attrs;
|
||||||
|
@ -218,6 +220,9 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
|
||||||
} else
|
} else
|
||||||
url = state.forceStringNoCtx(*args[0], pos, "while evaluating the url we should fetch");
|
url = state.forceStringNoCtx(*args[0], pos, "while evaluating the url we should fetch");
|
||||||
|
|
||||||
|
if (who == "fetchTarball")
|
||||||
|
url = evalSettings.resolvePseudoUrl(*url);
|
||||||
|
|
||||||
state.checkURI(*url);
|
state.checkURI(*url);
|
||||||
|
|
||||||
if (name == "")
|
if (name == "")
|
||||||
|
|
File diff suppressed because it is too large
Load diff
|
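Relating to the `fetch` hunk above (before the suppressed diff): a hedged example of the pseudo-URL resolution now applied to `fetchTarball`. I'm assuming `channel:` URLs are among the forms `resolvePseudoUrl` rewrites, and the channel name is arbitrary:

```nix
# With the change above, the pseudo-URL is rewritten to a concrete tarball
# URL before state.checkURI runs.
builtins.fetchTarball "channel:nixos-22.11"
```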
@@ -6,7 +6,9 @@ libexpr-tests_DIR := $(d)

 libexpr-tests_INSTALL_DIR :=

-libexpr-tests_SOURCES := $(wildcard $(d)/*.cc)
+libexpr-tests_SOURCES := \
+$(wildcard $(d)/*.cc) \
+$(wildcard $(d)/value/*.cc)

 libexpr-tests_CXXFLAGS += -I src/libexpr -I src/libutil -I src/libstore -I src/libexpr/tests

72
src/libexpr/tests/value/context.cc
Normal file
@@ -0,0 +1,72 @@
+#include "value/context.hh"
+
+#include "libexprtests.hh"
+
+namespace nix {
+
+// Testing of trivial expressions
+struct NixStringContextElemTest : public LibExprTest {
+const Store & store() const {
+return *LibExprTest::store;
+}
+};
+
+TEST_F(NixStringContextElemTest, empty_invalid) {
+EXPECT_THROW(
+NixStringContextElem::parse(store(), ""),
+BadNixStringContextElem);
+}
+
+TEST_F(NixStringContextElemTest, single_bang_invalid) {
+EXPECT_THROW(
+NixStringContextElem::parse(store(), "!"),
+BadNixStringContextElem);
+}
+
+TEST_F(NixStringContextElemTest, double_bang_invalid) {
+EXPECT_THROW(
+NixStringContextElem::parse(store(), "!!/"),
+BadStorePath);
+}
+
+TEST_F(NixStringContextElemTest, eq_slash_invalid) {
+EXPECT_THROW(
+NixStringContextElem::parse(store(), "=/"),
+BadStorePath);
+}
+
+TEST_F(NixStringContextElemTest, slash_invalid) {
+EXPECT_THROW(
+NixStringContextElem::parse(store(), "/"),
+BadStorePath);
+}
+
+TEST_F(NixStringContextElemTest, opaque) {
+std::string_view opaque = "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x";
+auto elem = NixStringContextElem::parse(store(), opaque);
+auto * p = std::get_if<NixStringContextElem::Opaque>(&elem);
+ASSERT_TRUE(p);
+ASSERT_EQ(p->path, store().parseStorePath(opaque));
+ASSERT_EQ(elem.to_string(store()), opaque);
+}
+
+TEST_F(NixStringContextElemTest, drvDeep) {
+std::string_view drvDeep = "=/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv";
+auto elem = NixStringContextElem::parse(store(), drvDeep);
+auto * p = std::get_if<NixStringContextElem::DrvDeep>(&elem);
+ASSERT_TRUE(p);
+ASSERT_EQ(p->drvPath, store().parseStorePath(drvDeep.substr(1)));
+ASSERT_EQ(elem.to_string(store()), drvDeep);
+}
+
+TEST_F(NixStringContextElemTest, built) {
+std::string_view built = "!foo!/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv";
+auto elem = NixStringContextElem::parse(store(), built);
+auto * p = std::get_if<NixStringContextElem::Built>(&elem);
+ASSERT_TRUE(p);
+ASSERT_EQ(p->output, "foo");
+ASSERT_EQ(p->drvPath, store().parseStorePath(built.substr(5)));
+ASSERT_EQ(elem.to_string(store()), built);
+}
+
+}
@@ -1,6 +1,7 @@
 #include "value-to-json.hh"
 #include "eval-inline.hh"
 #include "util.hh"
+#include "store-api.hh"

 #include <cstdlib>
 #include <iomanip>
@@ -35,7 +36,7 @@ json printValueAsJSON(EvalState & state, bool strict,

         case nPath:
             if (copyToStore)
-                out = state.copyPathToStore(context, v.path);
+                out = state.store->printStorePath(state.copyPathToStore(context, v.path));
             else
                 out = v.path;
             break;
@@ -3,6 +3,7 @@
 #include <cassert>

 #include "symbol-table.hh"
+#include "value/context.hh"

 #if HAVE_BOEHMGC
 #include <gc/gc_allocator.h>
@@ -67,8 +68,6 @@ class XMLWriter;

 typedef int64_t NixInt;
 typedef double NixFloat;
-typedef std::pair<StorePath, std::string> NixStringContextElem;
-typedef std::vector<NixStringContextElem> NixStringContext;

 /* External values must descend from ExternalValueBase, so that
  * type-agnostic nix functions (e.g. showType) can be implemented
@@ -90,7 +89,7 @@ class ExternalValueBase
     /* Coerce the value to a string. Defaults to uncoercable, i.e. throws an
      * error.
      */
-    virtual std::string coerceToString(const Pos & pos, PathSet & context, bool copyMore, bool copyToStore, std::string_view errorCtx) const;
+    virtual std::string coerceToString(const Pos & pos, PathSet & context, bool copyMore, bool copyToStore) const;

     /* Compare to another value of the same type. Defaults to uncomparable,
      * i.e. always false.
67
src/libexpr/value/context.cc
Normal file
@@ -0,0 +1,67 @@
+#include "value/context.hh"
+#include "store-api.hh"
+
+#include <optional>
+
+namespace nix {
+
+NixStringContextElem NixStringContextElem::parse(const Store & store, std::string_view s0)
+{
+    std::string_view s = s0;
+
+    if (s.size() == 0) {
+        throw BadNixStringContextElem(s0,
+            "String context element should never be an empty string");
+    }
+    switch (s.at(0)) {
+    case '!': {
+        s = s.substr(1); // advance string to parse after first !
+        size_t index = s.find("!");
+        // This makes index + 1 safe. Index can be the length (one after index
+        // of last character), so given any valid character index --- a
+        // successful find --- we can add one.
+        if (index == std::string_view::npos) {
+            throw BadNixStringContextElem(s0,
+                "String content element beginning with '!' should have a second '!'");
+        }
+        return NixStringContextElem::Built {
+            .drvPath = store.parseStorePath(s.substr(index + 1)),
+            .output = std::string(s.substr(0, index)),
+        };
+    }
+    case '=': {
+        return NixStringContextElem::DrvDeep {
+            .drvPath = store.parseStorePath(s.substr(1)),
+        };
+    }
+    default: {
+        return NixStringContextElem::Opaque {
+            .path = store.parseStorePath(s),
+        };
+    }
+    }
+}
+
+std::string NixStringContextElem::to_string(const Store & store) const {
+    return std::visit(overloaded {
+        [&](const NixStringContextElem::Built & b) {
+            std::string res;
+            res += '!';
+            res += b.output;
+            res += '!';
+            res += store.printStorePath(b.drvPath);
+            return res;
+        },
+        [&](const NixStringContextElem::DrvDeep & d) {
+            std::string res;
+            res += '=';
+            res += store.printStorePath(d.drvPath);
+            return res;
+        },
+        [&](const NixStringContextElem::Opaque & o) {
+            return store.printStorePath(o.path);
+        },
+    }, raw());
+}
+
+}
90
src/libexpr/value/context.hh
Normal file
@@ -0,0 +1,90 @@
+#pragma once
+
+#include "util.hh"
+#include "path.hh"
+
+#include <optional>
+
+#include <nlohmann/json_fwd.hpp>
+
+namespace nix {
+
+class BadNixStringContextElem : public Error
+{
+public:
+    std::string_view raw;
+
+    template<typename... Args>
+    BadNixStringContextElem(std::string_view raw_, const Args & ... args)
+        : Error("")
+    {
+        raw = raw_;
+        auto hf = hintfmt(args...);
+        err.msg = hintfmt("Bad String Context element: %1%: %2%", normaltxt(hf.str()), raw);
+    }
+};
+
+class Store;
+
+/* Plain opaque path to some store object.
+
+   Encoded as just the path: ‘<path>’.
+*/
+struct NixStringContextElem_Opaque {
+    StorePath path;
+};
+
+/* Path to a derivation and its entire build closure.
+
+   The path doesn't just refer to derivation itself and its closure, but
+   also all outputs of all derivations in that closure (including the
+   root derivation).
+
+   Encoded in the form ‘=<drvPath>’.
+*/
+struct NixStringContextElem_DrvDeep {
+    StorePath drvPath;
+};
+
+/* Derivation output.
+
+   Encoded in the form ‘!<output>!<drvPath>’.
+*/
+struct NixStringContextElem_Built {
+    StorePath drvPath;
+    std::string output;
+};
+
+using _NixStringContextElem_Raw = std::variant<
+    NixStringContextElem_Opaque,
+    NixStringContextElem_DrvDeep,
+    NixStringContextElem_Built
+>;
+
+struct NixStringContextElem : _NixStringContextElem_Raw {
+    using Raw = _NixStringContextElem_Raw;
+    using Raw::Raw;
+
+    using Opaque = NixStringContextElem_Opaque;
+    using DrvDeep = NixStringContextElem_DrvDeep;
+    using Built = NixStringContextElem_Built;
+
+    inline const Raw & raw() const {
+        return static_cast<const Raw &>(*this);
+    }
+    inline Raw & raw() {
+        return static_cast<Raw &>(*this);
+    }
+
+    /* Decode a context string, one of:
+       - ‘<path>’
+       - ‘=<path>’
+       - ‘!<name>!<path>’
+    */
+    static NixStringContextElem parse(const Store & store, std::string_view s);
+    std::string to_string(const Store & store) const;
+};
+
+typedef std::vector<NixStringContextElem> NixStringContext;
+
+}
@@ -363,6 +363,7 @@ void printVersion(const std::string & programName)
             << "\n";
         std::cout << "Store directory: " << settings.nixStore << "\n";
         std::cout << "State directory: " << settings.nixStateDir << "\n";
+        std::cout << "Data directory: " << settings.nixDataDir << "\n";
     }
     throw Exit();
 }
@@ -39,7 +39,6 @@ void printVersion(const std::string & programName);
 void printGCWarning();

 class Store;
-struct StorePathWithOutputs;

 void printMissing(
     ref<Store> store,
@@ -502,22 +502,9 @@ void BinaryCacheStore::addSignatures(const StorePath & storePath, const StringSe
     writeNarInfo(narInfo);
 }

-std::optional<std::string> BinaryCacheStore::getBuildLog(const StorePath & path)
+std::optional<std::string> BinaryCacheStore::getBuildLogExact(const StorePath & path)
 {
-    auto drvPath = path;
-
-    if (!path.isDerivation()) {
-        try {
-            auto info = queryPathInfo(path);
-            // FIXME: add a "Log" field to .narinfo
-            if (!info->deriver) return std::nullopt;
-            drvPath = *info->deriver;
-        } catch (InvalidPath &) {
-            return std::nullopt;
-        }
-    }
-
-    auto logPath = "log/" + std::string(baseNameOf(printStorePath(drvPath)));
+    auto logPath = "log/" + std::string(baseNameOf(printStorePath(path)));

     debug("fetching build log from binary cache '%s/%s'", getUri(), logPath);

@@ -129,7 +129,7 @@ public:

     void addSignatures(const StorePath & storePath, const StringSet & sigs) override;

-    std::optional<std::string> getBuildLog(const StorePath & path) override;
+    std::optional<std::string> getBuildLogExact(const StorePath & path) override;

     void addBuildLog(const StorePath & drvPath, std::string_view log) override;

@@ -63,7 +63,7 @@
 namespace nix {

 DerivationGoal::DerivationGoal(const StorePath & drvPath,
-    const StringSet & wantedOutputs, Worker & worker, BuildMode buildMode)
+    const OutputsSpec & wantedOutputs, Worker & worker, BuildMode buildMode)
     : Goal(worker, DerivedPath::Built { .drvPath = drvPath, .outputs = wantedOutputs })
     , useDerivation(true)
     , drvPath(drvPath)
@@ -82,7 +82,7 @@ DerivationGoal::DerivationGoal(const StorePath & drvPath,


 DerivationGoal::DerivationGoal(const StorePath & drvPath, const BasicDerivation & drv,
-    const StringSet & wantedOutputs, Worker & worker, BuildMode buildMode)
+    const OutputsSpec & wantedOutputs, Worker & worker, BuildMode buildMode)
     : Goal(worker, DerivedPath::Built { .drvPath = drvPath, .outputs = wantedOutputs })
     , useDerivation(false)
     , drvPath(drvPath)
@@ -142,18 +142,12 @@ void DerivationGoal::work()
     (this->*state)();
 }

-void DerivationGoal::addWantedOutputs(const StringSet & outputs)
+void DerivationGoal::addWantedOutputs(const OutputsSpec & outputs)
 {
-    /* If we already want all outputs, there is nothing to do. */
-    if (wantedOutputs.empty()) return;
-
-    if (outputs.empty()) {
-        wantedOutputs.clear();
-        needRestart = true;
-    } else
-        for (auto & i : outputs)
-            if (wantedOutputs.insert(i).second)
-                needRestart = true;
+    auto newWanted = wantedOutputs.union_(outputs);
+    if (!newWanted.isSubsetOf(wantedOutputs))
+        needRestart = true;
+    wantedOutputs = newWanted;
 }


@@ -390,7 +384,7 @@ void DerivationGoal::repairClosure()
     auto outputs = queryDerivationOutputMap();
     StorePathSet outputClosure;
     for (auto & i : outputs) {
-        if (!wantOutput(i.first, wantedOutputs)) continue;
+        if (!wantedOutputs.contains(i.first)) continue;
         worker.store.computeFSClosure(i.second, outputClosure);
     }

@@ -422,7 +416,7 @@ void DerivationGoal::repairClosure()
         if (drvPath2 == outputsToDrv.end())
             addWaitee(upcast_goal(worker.makePathSubstitutionGoal(i, Repair)));
         else
-            addWaitee(worker.makeDerivationGoal(drvPath2->second, StringSet(), bmRepair));
+            addWaitee(worker.makeDerivationGoal(drvPath2->second, OutputsSpec::All(), bmRepair));
     }

     if (waitees.empty()) {
@@ -544,7 +538,8 @@ void DerivationGoal::inputsRealised()
                    However, the impure derivations feature still relies on this
                    fragile way of doing things, because its builds do not have
                    a representation in the store, which is a usability problem
-                   in itself */
+                   in itself. When implementing this logic entirely with lookups
+                   make sure that they're cached. */
                 if (auto outPath = get(inputDrvOutputs, { depDrvPath, j })) {
                     worker.store.computeFSClosure(*outPath, inputPaths);
                 }
@@ -990,10 +985,15 @@ void DerivationGoal::resolvedFinished()

     StorePathSet outputPaths;

-    // `wantedOutputs` might be empty, which means “all the outputs”
-    auto realWantedOutputs = wantedOutputs;
-    if (realWantedOutputs.empty())
-        realWantedOutputs = resolvedDrv.outputNames();
+    // `wantedOutputs` might merely indicate “all the outputs”
+    auto realWantedOutputs = std::visit(overloaded {
+        [&](const OutputsSpec::All &) {
+            return resolvedDrv.outputNames();
+        },
+        [&](const OutputsSpec::Names & names) {
+            return static_cast<std::set<std::string>>(names);
+        },
+    }, wantedOutputs.raw());

     for (auto & wantedOutput : realWantedOutputs) {
         auto initialOutput = get(initialOutputs, wantedOutput);
@@ -1321,7 +1321,14 @@ std::pair<bool, DrvOutputs> DerivationGoal::checkPathValidity()
     if (!drv->type().isPure()) return { false, {} };

     bool checkHash = buildMode == bmRepair;
-    auto wantedOutputsLeft = wantedOutputs;
+    auto wantedOutputsLeft = std::visit(overloaded {
+        [&](const OutputsSpec::All &) {
+            return StringSet {};
+        },
+        [&](const OutputsSpec::Names & names) {
+            return static_cast<StringSet>(names);
+        },
+    }, wantedOutputs.raw());
     DrvOutputs validOutputs;

     for (auto & i : queryPartialDerivationOutputMap()) {
@@ -1330,7 +1337,7 @@ std::pair<bool, DrvOutputs> DerivationGoal::checkPathValidity()
             // this is an invalid output, gets catched with (!wantedOutputsLeft.empty())
             continue;
         auto & info = *initialOutput;
-        info.wanted = wantOutput(i.first, wantedOutputs);
+        info.wanted = wantedOutputs.contains(i.first);
         if (info.wanted)
             wantedOutputsLeft.erase(i.first);
         if (i.second) {
@@ -1368,7 +1375,7 @@ std::pair<bool, DrvOutputs> DerivationGoal::checkPathValidity()
             validOutputs.emplace(drvOutput, Realisation { drvOutput, info.known->path });
     }

-    // If we requested all the outputs via the empty set, we are always fine.
+    // If we requested all the outputs, we are always fine.
     // If we requested specific elements, the loop above removes all the valid
     // ones, so any that are left must be invalid.
     if (!wantedOutputsLeft.empty())
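The rewritten `addWantedOutputs` above drops the old empty-set-means-everything convention in favour of an explicit `OutputsSpec`. As a minimal illustrative sketch only (assuming just the `OutputsSpec::All`, `OutputsSpec::Names`, `union_` and `isSubsetOf` members referenced in that hunk, not code from the commit), the restart check behaves like this:

    // A goal that currently wants only "out" is asked for all outputs.
    OutputsSpec wanted = OutputsSpec::Names { "out" };
    OutputsSpec incoming = OutputsSpec::All { };
    auto newWanted = wanted.union_(incoming);          // widens to All
    bool needRestart = !newWanted.isSubsetOf(wanted);  // true: the goal now wants more than before
    wanted = newWanted;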
@@ -2,6 +2,7 @@

 #include "parsed-derivations.hh"
 #include "lock.hh"
+#include "outputs-spec.hh"
 #include "store-api.hh"
 #include "pathlocks.hh"
 #include "goal.hh"
@@ -55,7 +56,7 @@ struct DerivationGoal : public Goal

     /* The specific outputs that we need to build. Empty means all of
        them. */
-    StringSet wantedOutputs;
+    OutputsSpec wantedOutputs;

     /* Mapping from input derivations + output names to actual store
        paths. This is filled in by waiteeDone() as each dependency
@@ -128,10 +129,10 @@ struct DerivationGoal : public Goal
     std::string machineName;

     DerivationGoal(const StorePath & drvPath,
-        const StringSet & wantedOutputs, Worker & worker,
+        const OutputsSpec & wantedOutputs, Worker & worker,
         BuildMode buildMode = bmNormal);
     DerivationGoal(const StorePath & drvPath, const BasicDerivation & drv,
-        const StringSet & wantedOutputs, Worker & worker,
+        const OutputsSpec & wantedOutputs, Worker & worker,
         BuildMode buildMode = bmNormal);
     virtual ~DerivationGoal();

@@ -142,7 +143,7 @@ struct DerivationGoal : public Goal
     void work() override;

     /* Add wanted outputs to an already existing derivation goal. */
-    void addWantedOutputs(const StringSet & outputs);
+    void addWantedOutputs(const OutputsSpec & outputs);

     /* The states. */
     void getDerivation();
@@ -80,7 +80,7 @@ BuildResult Store::buildDerivation(const StorePath & drvPath, const BasicDerivat
     BuildMode buildMode)
 {
     Worker worker(*this, *this);
-    auto goal = worker.makeBasicDerivationGoal(drvPath, drv, {}, buildMode);
+    auto goal = worker.makeBasicDerivationGoal(drvPath, drv, OutputsSpec::All {}, buildMode);

     try {
         worker.run(Goals{goal});
@@ -89,7 +89,10 @@ BuildResult Store::buildDerivation(const StorePath & drvPath, const BasicDerivat
         return BuildResult {
             .status = BuildResult::MiscFailure,
             .errorMsg = e.msg(),
-            .path = DerivedPath::Built { .drvPath = drvPath },
+            .path = DerivedPath::Built {
+                .drvPath = drvPath,
+                .outputs = OutputsSpec::All { },
+            },
         };
     };
 }
@@ -130,7 +133,8 @@ void LocalStore::repairPath(const StorePath & path)
         auto info = queryPathInfo(path);
         if (info->deriver && isValidPath(*info->deriver)) {
             goals.clear();
-            goals.insert(worker.makeDerivationGoal(*info->deriver, StringSet(), bmRepair));
+            // FIXME: Should just build the specific output we need.
+            goals.insert(worker.makeDerivationGoal(*info->deriver, OutputsSpec::All { }, bmRepair));
             worker.run(goals);
         } else
             throw Error(worker.exitStatus(), "cannot repair path '%s'", printStorePath(path));
@@ -1459,7 +1459,7 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual Lo
             unknown, downloadSize, narSize);
     }

-    virtual std::optional<std::string> getBuildLog(const StorePath & path) override
+    virtual std::optional<std::string> getBuildLogExact(const StorePath & path) override
     { return std::nullopt; }

     virtual void addBuildLog(const StorePath & path, std::string_view log) override
@@ -2050,10 +2050,14 @@ void LocalDerivationGoal::runChild()
                 sandboxProfile += "(deny default (with no-log))\n";
             }

-            sandboxProfile += "(import \"sandbox-defaults.sb\")\n";
+            sandboxProfile +=
+                #include "sandbox-defaults.sb"
+                ;

             if (!derivationType.isSandboxed())
-                sandboxProfile += "(import \"sandbox-network.sb\")\n";
+                sandboxProfile +=
+                    #include "sandbox-network.sb"
+                    ;

             /* Add the output paths we'll use at build-time to the chroot */
             sandboxProfile += "(allow file-read* file-write* process-exec\n";
@@ -2096,7 +2100,9 @@ void LocalDerivationGoal::runChild()

             sandboxProfile += additionalSandboxProfile;
         } else
-            sandboxProfile += "(import \"sandbox-minimal.sb\")\n";
+            sandboxProfile +=
+                #include "sandbox-minimal.sb"
+                ;

         debug("Generated sandbox profile:");
         debug(sandboxProfile);
@@ -2121,8 +2127,6 @@ void LocalDerivationGoal::runChild()
         args.push_back(sandboxFile);
         args.push_back("-D");
         args.push_back("_GLOBAL_TMP_DIR=" + globalTmpDir);
-        args.push_back("-D");
-        args.push_back("IMPORT_DIR=" + settings.nixDataDir + "/nix/sandbox/");
         if (allowLocalNetworking) {
             args.push_back("-D");
             args.push_back(std::string("_ALLOW_LOCAL_NETWORKING=1"));
@@ -2748,7 +2752,7 @@ DrvOutputs LocalDerivationGoal::registerOutputs()
                 signRealisation(thisRealisation);
                 worker.store.registerDrvOutput(thisRealisation);
             }
-            if (wantOutput(outputName, wantedOutputs))
+            if (wantedOutputs.contains(outputName))
                 builtOutputs.emplace(thisRealisation.id, thisRealisation);
         }

|
@ -1,3 +1,5 @@
|
||||||
|
R""(
|
||||||
|
|
||||||
(define TMPDIR (param "_GLOBAL_TMP_DIR"))
|
(define TMPDIR (param "_GLOBAL_TMP_DIR"))
|
||||||
|
|
||||||
(deny default)
|
(deny default)
|
||||||
|
@ -104,3 +106,5 @@
|
||||||
(subpath "/System/Library/Apple/usr/libexec/oah")
|
(subpath "/System/Library/Apple/usr/libexec/oah")
|
||||||
(subpath "/System/Library/LaunchDaemons/com.apple.oahd.plist")
|
(subpath "/System/Library/LaunchDaemons/com.apple.oahd.plist")
|
||||||
(subpath "/Library/Apple/System/Library/LaunchDaemons/com.apple.oahd.plist"))
|
(subpath "/Library/Apple/System/Library/LaunchDaemons/com.apple.oahd.plist"))
|
||||||
|
|
||||||
|
)""
|
|
@@ -1,5 +1,9 @@
+R""(
+
 (allow default)

 ; Disallow creating setuid/setgid binaries, since that
 ; would allow breaking build user isolation.
 (deny file-write-setugid)
+
+)""
@@ -1,3 +1,5 @@
+R""(
+
 ; Allow local and remote network traffic.
 (allow network* (local ip) (remote ip))

@@ -18,3 +20,5 @@
 ; Allow access to trustd.
 (allow mach-lookup (global-name "com.apple.trustd"))
 (allow mach-lookup (global-name "com.apple.trustd.agent"))
+
+)""
|
||||||
|
|
||||||
std::shared_ptr<DerivationGoal> Worker::makeDerivationGoalCommon(
|
std::shared_ptr<DerivationGoal> Worker::makeDerivationGoalCommon(
|
||||||
const StorePath & drvPath,
|
const StorePath & drvPath,
|
||||||
const StringSet & wantedOutputs,
|
const OutputsSpec & wantedOutputs,
|
||||||
std::function<std::shared_ptr<DerivationGoal>()> mkDrvGoal)
|
std::function<std::shared_ptr<DerivationGoal>()> mkDrvGoal)
|
||||||
{
|
{
|
||||||
std::weak_ptr<DerivationGoal> & goal_weak = derivationGoals[drvPath];
|
std::weak_ptr<DerivationGoal> & goal_weak = derivationGoals[drvPath];
|
||||||
|
@ -59,7 +59,7 @@ std::shared_ptr<DerivationGoal> Worker::makeDerivationGoalCommon(
|
||||||
|
|
||||||
|
|
||||||
std::shared_ptr<DerivationGoal> Worker::makeDerivationGoal(const StorePath & drvPath,
|
std::shared_ptr<DerivationGoal> Worker::makeDerivationGoal(const StorePath & drvPath,
|
||||||
const StringSet & wantedOutputs, BuildMode buildMode)
|
const OutputsSpec & wantedOutputs, BuildMode buildMode)
|
||||||
{
|
{
|
||||||
return makeDerivationGoalCommon(drvPath, wantedOutputs, [&]() -> std::shared_ptr<DerivationGoal> {
|
return makeDerivationGoalCommon(drvPath, wantedOutputs, [&]() -> std::shared_ptr<DerivationGoal> {
|
||||||
return !dynamic_cast<LocalStore *>(&store)
|
return !dynamic_cast<LocalStore *>(&store)
|
||||||
|
@ -70,7 +70,7 @@ std::shared_ptr<DerivationGoal> Worker::makeDerivationGoal(const StorePath & drv
|
||||||
|
|
||||||
|
|
||||||
std::shared_ptr<DerivationGoal> Worker::makeBasicDerivationGoal(const StorePath & drvPath,
|
std::shared_ptr<DerivationGoal> Worker::makeBasicDerivationGoal(const StorePath & drvPath,
|
||||||
const BasicDerivation & drv, const StringSet & wantedOutputs, BuildMode buildMode)
|
const BasicDerivation & drv, const OutputsSpec & wantedOutputs, BuildMode buildMode)
|
||||||
{
|
{
|
||||||
return makeDerivationGoalCommon(drvPath, wantedOutputs, [&]() -> std::shared_ptr<DerivationGoal> {
|
return makeDerivationGoalCommon(drvPath, wantedOutputs, [&]() -> std::shared_ptr<DerivationGoal> {
|
||||||
return !dynamic_cast<LocalStore *>(&store)
|
return !dynamic_cast<LocalStore *>(&store)
|
||||||
|
|
|
@@ -140,15 +140,15 @@ public:
     /* derivation goal */
 private:
     std::shared_ptr<DerivationGoal> makeDerivationGoalCommon(
-        const StorePath & drvPath, const StringSet & wantedOutputs,
+        const StorePath & drvPath, const OutputsSpec & wantedOutputs,
         std::function<std::shared_ptr<DerivationGoal>()> mkDrvGoal);
 public:
     std::shared_ptr<DerivationGoal> makeDerivationGoal(
         const StorePath & drvPath,
-        const StringSet & wantedOutputs, BuildMode buildMode = bmNormal);
+        const OutputsSpec & wantedOutputs, BuildMode buildMode = bmNormal);
     std::shared_ptr<DerivationGoal> makeBasicDerivationGoal(
         const StorePath & drvPath, const BasicDerivation & drv,
-        const StringSet & wantedOutputs, BuildMode buildMode = bmNormal);
+        const OutputsSpec & wantedOutputs, BuildMode buildMode = bmNormal);

     /* substitution goal */
     std::shared_ptr<PathSubstitutionGoal> makePathSubstitutionGoal(const StorePath & storePath, RepairFlag repair = NoRepair, std::optional<ContentAddress> ca = std::nullopt);
@@ -222,7 +222,8 @@ struct ClientSettings
             else if (!hasSuffix(s, "/") && trusted.count(s + "/"))
                 subs.push_back(s + "/");
             else
-                warn("ignoring untrusted substituter '%s'", s);
+                warn("ignoring untrusted substituter '%s', you are not a trusted user.\n"
+                     "Run `man nix.conf` for more information on the `substituters` configuration option.", s);
         res = subs;
         return true;
     };
@@ -688,12 +688,6 @@ std::map<std::string, Hash> staticOutputHashes(Store & store, const Derivation &
 }


-bool wantOutput(const std::string & output, const std::set<std::string> & wanted)
-{
-    return wanted.empty() || wanted.find(output) != wanted.end();
-}
-
-
 static DerivationOutput readDerivationOutput(Source & in, const Store & store)
 {
     const auto pathS = readString(in);
@@ -13,6 +13,7 @@

 namespace nix {

+class Store;

 /* Abstract syntax of derivations. */

@@ -294,8 +295,6 @@ typedef std::map<StorePath, DrvHash> DrvHashes;
 // FIXME: global, though at least thread-safe.
 extern Sync<DrvHashes> drvHashes;

-bool wantOutput(const std::string & output, const std::set<std::string> & wanted);
-
 struct Source;
 struct Sink;

@@ -19,11 +19,11 @@ nlohmann::json DerivedPath::Built::toJSON(ref<Store> store) const {
     res["drvPath"] = store->printStorePath(drvPath);
     // Fallback for the input-addressed derivation case: We expect to always be
     // able to print the output paths, so let’s do it
-    const auto knownOutputs = store->queryPartialDerivationOutputMap(drvPath);
-    for (const auto & output : outputs) {
-        auto knownOutput = get(knownOutputs, output);
-        if (knownOutput && *knownOutput)
-            res["outputs"][output] = store->printStorePath(**knownOutput);
+    const auto outputMap = store->queryPartialDerivationOutputMap(drvPath);
+    for (const auto & [output, outputPathOpt] : outputMap) {
+        if (!outputs.contains(output)) continue;
+        if (outputPathOpt)
+            res["outputs"][output] = store->printStorePath(*outputPathOpt);
         else
             res["outputs"][output] = nullptr;
     }
@@ -63,7 +63,7 @@ std::string DerivedPath::Built::to_string(const Store & store) const
 {
     return store.printStorePath(drvPath)
         + "!"
-        + (outputs.empty() ? std::string { "*" } : concatStringsSep(",", outputs));
+        + outputs.to_string();
 }

 std::string DerivedPath::to_string(const Store & store) const
@@ -81,15 +81,10 @@ DerivedPath::Opaque DerivedPath::Opaque::parse(const Store & store, std::string_

 DerivedPath::Built DerivedPath::Built::parse(const Store & store, std::string_view drvS, std::string_view outputsS)
 {
-    auto drvPath = store.parseStorePath(drvS);
-    std::set<std::string> outputs;
-    if (outputsS != "*") {
-        outputs = tokenizeString<std::set<std::string>>(outputsS, ",");
-        if (outputs.empty())
-            throw Error(
-                "Explicit list of wanted outputs '%s' must not be empty. Consider using '*' as a wildcard meaning all outputs if no output in particular is wanted.", outputsS);
-    }
-    return {drvPath, outputs};
+    return {
+        .drvPath = store.parseStorePath(drvS),
+        .outputs = OutputsSpec::parse(outputsS),
+    };
 }

 DerivedPath DerivedPath::parse(const Store & store, std::string_view s)
@@ -3,6 +3,7 @@
 #include "util.hh"
 #include "path.hh"
 #include "realisation.hh"
+#include "outputs-spec.hh"

 #include <optional>

@@ -44,7 +45,7 @@ struct DerivedPathOpaque {
 */
 struct DerivedPathBuilt {
     StorePath drvPath;
-    std::set<std::string> outputs;
+    OutputsSpec outputs;

     std::string to_string(const Store & store) const;
     static DerivedPathBuilt parse(const Store & store, std::string_view, std::string_view);
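With `outputs` now an `OutputsSpec`, the `to_string`/`parse` pair above round-trips the `<drvPath>!<outputs>` syntax through `OutputsSpec::parse`. A hedged usage sketch (the store path is a deliberately elided placeholder and `store` is assumed to be a `Store &`; this is not code from the commit):

    auto b = DerivedPath::Built::parse(store, "/nix/store/...-hello.drv", "out,dev");
    // b.outputs holds OutputsSpec::Names { "out", "dev" }
    auto all = DerivedPath::Built::parse(store, "/nix/store/...-hello.drv", "*");
    // all.outputs holds OutputsSpec::All { }, so all.to_string(store) ends in "!*"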
@@ -77,60 +77,73 @@ Path LocalFSStore::addPermRoot(const StorePath & storePath, const Path & _gcRoot
 }


-void LocalStore::addTempRoot(const StorePath & path)
+void LocalStore::createTempRootsFile()
 {
-    auto state(_state.lock());
+    auto fdTempRoots(_fdTempRoots.lock());

     /* Create the temporary roots file for this process. */
-    if (!state->fdTempRoots) {
+    if (*fdTempRoots) return;

     while (1) {
         if (pathExists(fnTempRoots))
             /* It *must* be stale, since there can be no two
                processes with the same pid. */
             unlink(fnTempRoots.c_str());

-        state->fdTempRoots = openLockFile(fnTempRoots, true);
+        *fdTempRoots = openLockFile(fnTempRoots, true);

         debug("acquiring write lock on '%s'", fnTempRoots);
-        lockFile(state->fdTempRoots.get(), ltWrite, true);
+        lockFile(fdTempRoots->get(), ltWrite, true);

         /* Check whether the garbage collector didn't get in our
            way. */
         struct stat st;
-        if (fstat(state->fdTempRoots.get(), &st) == -1)
+        if (fstat(fdTempRoots->get(), &st) == -1)
             throw SysError("statting '%1%'", fnTempRoots);
         if (st.st_size == 0) break;

-        /* The garbage collector deleted this file before we could
-           get a lock. (It won't delete the file after we get a
-           lock.) Try again. */
+        /* The garbage collector deleted this file before we could get
+           a lock. (It won't delete the file after we get a lock.)
+           Try again. */
     }
+}
+
+
+void LocalStore::addTempRoot(const StorePath & path)
+{
+    createTempRootsFile();
+
+    /* Open/create the global GC lock file. */
+    {
+        auto fdGCLock(_fdGCLock.lock());
+        if (!*fdGCLock)
+            *fdGCLock = openGCLock();
     }

-    if (!state->fdGCLock)
-        state->fdGCLock = openGCLock();
-
 restart:
-    FdLock gcLock(state->fdGCLock.get(), ltRead, false, "");
+    /* Try to acquire a shared global GC lock (non-blocking). This
+       only succeeds if the garbage collector is not currently
+       running. */
+    FdLock gcLock(_fdGCLock.lock()->get(), ltRead, false, "");

     if (!gcLock.acquired) {
         /* We couldn't get a shared global GC lock, so the garbage
            collector is running. So we have to connect to the garbage
            collector and inform it about our root. */
-        if (!state->fdRootsSocket) {
+        auto fdRootsSocket(_fdRootsSocket.lock());
+
+        if (!*fdRootsSocket) {
             auto socketPath = stateDir.get() + gcSocketPath;
             debug("connecting to '%s'", socketPath);
-            state->fdRootsSocket = createUnixDomainSocket();
+            *fdRootsSocket = createUnixDomainSocket();
             try {
-                nix::connect(state->fdRootsSocket.get(), socketPath);
+                nix::connect(fdRootsSocket->get(), socketPath);
             } catch (SysError & e) {
                 /* The garbage collector may have exited, so we need to
                    restart. */
                 if (e.errNo == ECONNREFUSED) {
                     debug("GC socket connection refused");
-                    state->fdRootsSocket.close();
+                    fdRootsSocket->close();
                     goto restart;
                 }
                 throw;
@@ -139,9 +152,9 @@ void LocalStore::addTempRoot(const StorePath & path)

         try {
             debug("sending GC root '%s'", printStorePath(path));
-            writeFull(state->fdRootsSocket.get(), printStorePath(path) + "\n", false);
+            writeFull(fdRootsSocket->get(), printStorePath(path) + "\n", false);
             char c;
-            readFull(state->fdRootsSocket.get(), &c, 1);
+            readFull(fdRootsSocket->get(), &c, 1);
             assert(c == '1');
             debug("got ack for GC root '%s'", printStorePath(path));
         } catch (SysError & e) {
@@ -149,20 +162,21 @@ void LocalStore::addTempRoot(const StorePath & path)
                restart. */
             if (e.errNo == EPIPE || e.errNo == ECONNRESET) {
                 debug("GC socket disconnected");
-                state->fdRootsSocket.close();
+                fdRootsSocket->close();
                 goto restart;
             }
             throw;
         } catch (EndOfFile & e) {
             debug("GC socket disconnected");
-            state->fdRootsSocket.close();
+            fdRootsSocket->close();
             goto restart;
         }
     }

-    /* Append the store path to the temporary roots file. */
+    /* Record the store path in the temporary roots file so it will be
+       seen by a future run of the garbage collector. */
     auto s = printStorePath(path) + '\0';
-    writeFull(state->fdTempRoots.get(), s);
+    writeFull(_fdTempRoots.lock()->get(), s);
 }

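The gc.cc hunks above split the old single `_state` lock into one `Sync<AutoCloseFD>` per descriptor. A minimal sketch of the access pattern they rely on, using only names that appear in the diff (`_fdTempRoots`, `openLockFile`, `fnTempRoots`, `writeFull`); the surrounding helper function is hypothetical:

    Sync<AutoCloseFD> _fdTempRoots;

    void writeTempRoot(const std::string & s)      // hypothetical helper, for illustration
    {
        auto fdTempRoots(_fdTempRoots.lock());     // guard object holds the mutex
        if (!*fdTempRoots)
            *fdTempRoots = openLockFile(fnTempRoots, true);
        writeFull(fdTempRoots->get(), s);
    }                                              // lock released when the guard goes out of scope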
@@ -570,11 +570,15 @@ public:
         {"cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY="},
         "trusted-public-keys",
         R"(
-          A whitespace-separated list of public keys. When paths are copied
-          from another Nix store (such as a binary cache), they must be
-          signed with one of these keys. For example:
-          `cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY=
-          hydra.nixos.org-1:CNHJZBh9K4tP3EKF6FkkgeVYsS3ohTl+oS0Qa8bezVs=`.
+          A whitespace-separated list of public keys.
+
+          At least one of the following condition must be met
+          for Nix to accept copying a store object from another
+          Nix store (such as a substituter):
+
+          - the store object has been signed using a key in the trusted keys list
+          - the [`require-sigs`](#conf-require-sigs) option has been set to `false`
+          - the store object is [output-addressed](@docroot@/glossary.md#gloss-output-addressed-store-object)
         )",
         {"binary-cache-public-keys"}};

@@ -670,13 +674,14 @@ public:
           independently. Lower value means higher priority.
           The default is `https://cache.nixos.org`, with a Priority of 40.

-          Nix will copy a store path from a remote store only if one
-          of the following is true:
+          At least one of the following conditions must be met for Nix to use
+          a substituter:

-          - the store object is signed by one of the [`trusted-public-keys`](#conf-trusted-public-keys)
           - the substituter is in the [`trusted-substituters`](#conf-trusted-substituters) list
-          - the [`require-sigs`](#conf-require-sigs) option has been set to `false`
-          - the store object is [output-addressed](glossary.md#gloss-output-addressed-store-object)
+          - the user calling Nix is in the [`trusted-users`](#conf-trusted-users) list
+
+          In addition, each store path should be trusted as described
+          in [`trusted-public-keys`](#conf-trusted-public-keys)
         )",
         {"binary-caches"}};

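The reworded settings documentation above lists the conditions under which a substituter and the store objects it serves are accepted. For illustration, a `nix.conf` fragment using only the defaults and option names quoted in these hunks:

    substituters = https://cache.nixos.org
    trusted-public-keys = cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY=
    require-sigs = true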
@@ -279,7 +279,12 @@ public:

         conn->to.flush();

-        BuildResult status { .path = DerivedPath::Built { .drvPath = drvPath } };
+        BuildResult status {
+            .path = DerivedPath::Built {
+                .drvPath = drvPath,
+                .outputs = OutputsSpec::All { },
+            },
+        };
         status.status = (BuildResult::Status) readInt(conn->from);
         conn->from >> status.errorMsg;

@@ -87,20 +87,8 @@ void LocalFSStore::narFromPath(const StorePath & path, Sink & sink)

 const std::string LocalFSStore::drvsLogDir = "drvs";

-std::optional<std::string> LocalFSStore::getBuildLog(const StorePath & path_)
+std::optional<std::string> LocalFSStore::getBuildLogExact(const StorePath & path)
 {
-    auto path = path_;
-
-    if (!path.isDerivation()) {
-        try {
-            auto info = queryPathInfo(path);
-            if (!info->deriver) return std::nullopt;
-            path = *info->deriver;
-        } catch (InvalidPath &) {
-            return std::nullopt;
-        }
-    }
-
     auto baseName = path.to_string();

     for (int j = 0; j < 2; j++) {
@@ -50,7 +50,7 @@ public:
         return getRealStoreDir() + "/" + std::string(storePath, storeDir.size() + 1);
     }

-    std::optional<std::string> getBuildLog(const StorePath & path) override;
+    std::optional<std::string> getBuildLogExact(const StorePath & path) override;

 };

@@ -441,9 +441,9 @@ LocalStore::~LocalStore()
     }

     try {
-        auto state(_state.lock());
-        if (state->fdTempRoots) {
-            state->fdTempRoots = -1;
+        auto fdTempRoots(_fdTempRoots.lock());
+        if (*fdTempRoots) {
+            *fdTempRoots = -1;
             unlink(fnTempRoots.c_str());
         }
     } catch (...) {
@@ -59,15 +59,6 @@ private:
     struct Stmts;
     std::unique_ptr<Stmts> stmts;

-    /* The global GC lock */
-    AutoCloseFD fdGCLock;
-
-    /* The file to which we write our temporary roots. */
-    AutoCloseFD fdTempRoots;
-
-    /* Connection to the garbage collector. */
-    AutoCloseFD fdRootsSocket;
-
     /* The last time we checked whether to do an auto-GC, or an
        auto-GC finished. */
     std::chrono::time_point<std::chrono::steady_clock> lastGCCheck;
@@ -156,6 +147,21 @@ public:

     void addTempRoot(const StorePath & path) override;

+private:
+
+    void createTempRootsFile();
+
+    /* The file to which we write our temporary roots. */
+    Sync<AutoCloseFD> _fdTempRoots;
+
+    /* The global GC lock. */
+    Sync<AutoCloseFD> _fdGCLock;
+
+    /* Connection to the garbage collector. */
+    Sync<AutoCloseFD> _fdRootsSocket;
+
+public:
+
     void addIndirectRoot(const Path & path) override;

 private:
@@ -13,10 +13,6 @@ ifdef HOST_LINUX
 libstore_LDFLAGS += -ldl
 endif

-ifdef HOST_DARWIN
-libstore_FILES = sandbox-defaults.sb sandbox-minimal.sb sandbox-network.sb
-endif
-
 $(foreach file,$(libstore_FILES),$(eval $(call install-data-in,$(d)/$(file),$(datadir)/nix/sandbox)))

 ifeq ($(ENABLE_S3), 1)
@@ -123,8 +123,12 @@ struct AutoUserLock : UserLock

     std::vector<gid_t> getSupplementaryGIDs() override { return {}; }

-    static std::unique_ptr<UserLock> acquire(uid_t nrIds, bool useChroot)
+    static std::unique_ptr<UserLock> acquire(uid_t nrIds, bool useUserNamespace)
     {
+        #if !defined(__linux__)
+        useUserNamespace = false;
+        #endif
+
         settings.requireExperimentalFeature(Xp::AutoAllocateUids);
         assert(settings.startId > 0);
         assert(settings.uidCount % maxIdsPerBuild == 0);
@@ -157,7 +161,7 @@ struct AutoUserLock : UserLock
         auto lock = std::make_unique<AutoUserLock>();
         lock->fdUserLock = std::move(fd);
         lock->firstUid = firstUid;
-        if (useChroot)
+        if (useUserNamespace)
             lock->firstGid = firstUid;
         else {
             struct group * gr = getgrnam(settings.buildUsersGroup.get().c_str());
@@ -174,10 +178,10 @@ struct AutoUserLock : UserLock
     }
 };

-std::unique_ptr<UserLock> acquireUserLock(uid_t nrIds, bool useChroot)
+std::unique_ptr<UserLock> acquireUserLock(uid_t nrIds, bool useUserNamespace)
 {
     if (settings.autoAllocateUids)
-        return AutoUserLock::acquire(nrIds, useChroot);
+        return AutoUserLock::acquire(nrIds, useUserNamespace);
     else
         return SimpleUserLock::acquire();
 }
@@ -31,7 +31,7 @@ struct UserLock

 /* Acquire a user lock for a UID range of size `nrIds`. Note that this
    may return nullptr if no user is available. */
-std::unique_ptr<UserLock> acquireUserLock(uid_t nrIds, bool useChroot);
+std::unique_ptr<UserLock> acquireUserLock(uid_t nrIds, bool useUserNamespace);

 bool useBuildUsers();

12
src/libstore/log-store.cc
Normal file
@@ -0,0 +1,12 @@
+#include "log-store.hh"
+
+namespace nix {
+
+std::optional<std::string> LogStore::getBuildLog(const StorePath & path) {
+    auto maybePath = getBuildDerivationPath(path);
+    if (!maybePath)
+        return std::nullopt;
+    return getBuildLogExact(maybePath.value());
+}
+
+}
@@ -11,7 +11,9 @@ struct LogStore : public virtual Store

     /* Return the build log of the specified store path, if available,
        or null otherwise. */
-    virtual std::optional<std::string> getBuildLog(const StorePath & path) = 0;
+    std::optional<std::string> getBuildLog(const StorePath & path);
+
+    virtual std::optional<std::string> getBuildLogExact(const StorePath & path) = 0;

     virtual void addBuildLog(const StorePath & path, std::string_view log) = 0;

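Together with the `log-store.cc` addition shown earlier, `getBuildLog` is now a plain wrapper that maps a path to its derivation (via `getBuildDerivationPath`) and defers to the virtual `getBuildLogExact`. A hedged sketch of a caller, assuming a `ref<Store> store` and a `StorePath path` (not code from the commit):

    if (auto * logStore = dynamic_cast<LogStore *>(&*store)) {
        if (auto log = logStore->getBuildLog(path))    // derivation or output path both work
            std::cout << *log;
    }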
@@ -185,7 +185,7 @@ void Store::queryMissing(const std::vector<DerivedPath> & targets,
                 knownOutputPaths = false;
                 break;
             }
-            if (wantOutput(outputName, bfd.outputs) && !isValidPath(*pathOpt))
+            if (bfd.outputs.contains(outputName) && !isValidPath(*pathOpt))
                 invalid.insert(*pathOpt);
         }
         if (knownOutputPaths && invalid.empty()) return;
@@ -301,4 +301,47 @@ std::map<DrvOutput, StorePath> drvOutputReferences(
     return drvOutputReferences(Realisation::closure(store, inputRealisations), info->references);
 }

+OutputPathMap resolveDerivedPath(Store & store, const DerivedPath::Built & bfd, Store * evalStore_)
+{
+    auto & evalStore = evalStore_ ? *evalStore_ : store;
+
+    OutputPathMap outputs;
+    auto drv = evalStore.readDerivation(bfd.drvPath);
+    auto outputHashes = staticOutputHashes(store, drv);
+    auto drvOutputs = drv.outputsAndOptPaths(store);
+    auto outputNames = std::visit(overloaded {
+        [&](const OutputsSpec::All &) {
+            StringSet names;
+            for (auto & [outputName, _] : drv.outputs)
+                names.insert(outputName);
+            return names;
+        },
+        [&](const OutputsSpec::Names & names) {
+            return static_cast<std::set<std::string>>(names);
+        },
+    }, bfd.outputs.raw());
+    for (auto & output : outputNames) {
+        auto outputHash = get(outputHashes, output);
+        if (!outputHash)
+            throw Error(
+                "the derivation '%s' doesn't have an output named '%s'",
+                store.printStorePath(bfd.drvPath), output);
+        if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations)) {
+            DrvOutput outputId { *outputHash, output };
+            auto realisation = store.queryRealisation(outputId);
+            if (!realisation)
+                throw MissingRealisation(outputId);
+            outputs.insert_or_assign(output, realisation->outPath);
+        } else {
+            // If ca-derivations isn't enabled, assume that
+            // the output path is statically known.
+            auto drvOutput = get(drvOutputs, output);
+            assert(drvOutput);
+            assert(drvOutput->second);
+            outputs.insert_or_assign(output, *drvOutput->second);
+        }
+    }
+    return outputs;
+}
+
 }
195  src/libstore/outputs-spec.cc  Normal file

@@ -0,0 +1,195 @@
#include <regex>
#include <nlohmann/json.hpp>

#include "util.hh"
#include "regex-combinators.hh"
#include "outputs-spec.hh"
#include "path-regex.hh"

namespace nix {

bool OutputsSpec::contains(const std::string & outputName) const
{
    return std::visit(overloaded {
        [&](const OutputsSpec::All &) {
            return true;
        },
        [&](const OutputsSpec::Names & outputNames) {
            return outputNames.count(outputName) > 0;
        },
    }, raw());
}

static std::string outputSpecRegexStr =
    regex::either(
        regex::group(R"(\*)"),
        regex::group(regex::list(nameRegexStr)));

std::optional<OutputsSpec> OutputsSpec::parseOpt(std::string_view s)
{
    static std::regex regex(std::string { outputSpecRegexStr });

    std::smatch match;
    std::string s2 { s }; // until some improves std::regex
    if (!std::regex_match(s2, match, regex))
        return std::nullopt;

    if (match[1].matched)
        return { OutputsSpec::All {} };

    if (match[2].matched)
        return OutputsSpec::Names { tokenizeString<StringSet>(match[2].str(), ",") };

    assert(false);
}


OutputsSpec OutputsSpec::parse(std::string_view s)
{
    std::optional spec = parseOpt(s);
    if (!spec)
        throw Error("invalid outputs specifier '%s'", s);
    return *spec;
}


std::optional<std::pair<std::string_view, ExtendedOutputsSpec>> ExtendedOutputsSpec::parseOpt(std::string_view s)
{
    auto found = s.rfind('^');

    if (found == std::string::npos)
        return std::pair { s, ExtendedOutputsSpec::Default {} };

    auto specOpt = OutputsSpec::parseOpt(s.substr(found + 1));
    if (!specOpt)
        return std::nullopt;
    return std::pair { s.substr(0, found), ExtendedOutputsSpec::Explicit { *std::move(specOpt) } };
}


std::pair<std::string_view, ExtendedOutputsSpec> ExtendedOutputsSpec::parse(std::string_view s)
{
    std::optional spec = parseOpt(s);
    if (!spec)
        throw Error("invalid extended outputs specifier '%s'", s);
    return *spec;
}


std::string OutputsSpec::to_string() const
{
    return std::visit(overloaded {
        [&](const OutputsSpec::All &) -> std::string {
            return "*";
        },
        [&](const OutputsSpec::Names & outputNames) -> std::string {
            return concatStringsSep(",", outputNames);
        },
    }, raw());
}


std::string ExtendedOutputsSpec::to_string() const
{
    return std::visit(overloaded {
        [&](const ExtendedOutputsSpec::Default &) -> std::string {
            return "";
        },
        [&](const ExtendedOutputsSpec::Explicit & outputSpec) -> std::string {
            return "^" + outputSpec.to_string();
        },
    }, raw());
}


OutputsSpec OutputsSpec::union_(const OutputsSpec & that) const
{
    return std::visit(overloaded {
        [&](const OutputsSpec::All &) -> OutputsSpec {
            return OutputsSpec::All { };
        },
        [&](const OutputsSpec::Names & theseNames) -> OutputsSpec {
            return std::visit(overloaded {
                [&](const OutputsSpec::All &) -> OutputsSpec {
                    return OutputsSpec::All {};
                },
                [&](const OutputsSpec::Names & thoseNames) -> OutputsSpec {
                    OutputsSpec::Names ret = theseNames;
                    ret.insert(thoseNames.begin(), thoseNames.end());
                    return ret;
                },
            }, that.raw());
        },
    }, raw());
}


bool OutputsSpec::isSubsetOf(const OutputsSpec & that) const
{
    return std::visit(overloaded {
        [&](const OutputsSpec::All &) {
            return true;
        },
        [&](const OutputsSpec::Names & thoseNames) {
            return std::visit(overloaded {
                [&](const OutputsSpec::All &) {
                    return false;
                },
                [&](const OutputsSpec::Names & theseNames) {
                    bool ret = true;
                    for (auto & o : theseNames)
                        if (thoseNames.count(o) == 0)
                            ret = false;
                    return ret;
                },
            }, raw());
        },
    }, that.raw());
}

}

namespace nlohmann {

using namespace nix;

OutputsSpec adl_serializer<OutputsSpec>::from_json(const json & json) {
    auto names = json.get<StringSet>();
    if (names == StringSet({"*"}))
        return OutputsSpec::All {};
    else
        return OutputsSpec::Names { std::move(names) };
}

void adl_serializer<OutputsSpec>::to_json(json & json, OutputsSpec t) {
    std::visit(overloaded {
        [&](const OutputsSpec::All &) {
            json = std::vector<std::string>({"*"});
        },
        [&](const OutputsSpec::Names & names) {
            json = names;
        },
    }, t.raw());
}


ExtendedOutputsSpec adl_serializer<ExtendedOutputsSpec>::from_json(const json & json) {
    if (json.is_null())
        return ExtendedOutputsSpec::Default {};
    else {
        return ExtendedOutputsSpec::Explicit { json.get<OutputsSpec>() };
    }
}

void adl_serializer<ExtendedOutputsSpec>::to_json(json & json, ExtendedOutputsSpec t) {
    std::visit(overloaded {
        [&](const ExtendedOutputsSpec::Default &) {
            json = nullptr;
        },
        [&](const ExtendedOutputsSpec::Explicit & e) {
            adl_serializer<OutputsSpec>::to_json(json, e);
        },
    }, t.raw());
}

}
95  src/libstore/outputs-spec.hh  Normal file

@@ -0,0 +1,95 @@
#pragma once

#include <cassert>
#include <optional>
#include <set>
#include <variant>

#include "json-impls.hh"

namespace nix {

struct OutputNames : std::set<std::string> {
    using std::set<std::string>::set;

    /* These need to be "inherited manually" */

    OutputNames(const std::set<std::string> & s)
        : std::set<std::string>(s)
    { assert(!empty()); }

    OutputNames(std::set<std::string> && s)
        : std::set<std::string>(s)
    { assert(!empty()); }

    /* This set should always be non-empty, so we delete this
       constructor in order make creating empty ones by mistake harder.
       */
    OutputNames() = delete;
};

struct AllOutputs : std::monostate { };

typedef std::variant<AllOutputs, OutputNames> _OutputsSpecRaw;

struct OutputsSpec : _OutputsSpecRaw {
    using Raw = _OutputsSpecRaw;
    using Raw::Raw;

    /* Force choosing a variant */
    OutputsSpec() = delete;

    using Names = OutputNames;
    using All = AllOutputs;

    inline const Raw & raw() const {
        return static_cast<const Raw &>(*this);
    }

    inline Raw & raw() {
        return static_cast<Raw &>(*this);
    }

    bool contains(const std::string & output) const;

    /* Create a new OutputsSpec which is the union of this and that. */
    OutputsSpec union_(const OutputsSpec & that) const;

    /* Whether this OutputsSpec is a subset of that. */
    bool isSubsetOf(const OutputsSpec & outputs) const;

    /* Parse a string of the form 'output1,...outputN' or
       '*', returning the outputs spec. */
    static OutputsSpec parse(std::string_view s);
    static std::optional<OutputsSpec> parseOpt(std::string_view s);

    std::string to_string() const;
};

struct DefaultOutputs : std::monostate { };

typedef std::variant<DefaultOutputs, OutputsSpec> _ExtendedOutputsSpecRaw;

struct ExtendedOutputsSpec : _ExtendedOutputsSpecRaw {
    using Raw = _ExtendedOutputsSpecRaw;
    using Raw::Raw;

    using Default = DefaultOutputs;
    using Explicit = OutputsSpec;

    inline const Raw & raw() const {
        return static_cast<const Raw &>(*this);
    }

    /* Parse a string of the form 'prefix^output1,...outputN' or
       'prefix^*', returning the prefix and the extended outputs spec. */
    static std::pair<std::string_view, ExtendedOutputsSpec> parse(std::string_view s);
    static std::optional<std::pair<std::string_view, ExtendedOutputsSpec>> parseOpt(std::string_view s);

    std::string to_string() const;
};

}

JSON_IMPL(OutputsSpec)
JSON_IMPL(ExtendedOutputsSpec)
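Since the header above only documents the accepted syntax in comments, here is a small standalone sketch of that grammar for orientation. It is an illustration, not the library code: parseOutputsSpecDemo and splitExtendedDemo are hypothetical names, and the character class is the one from path-regex.hh in this diff.

#include <cassert>
#include <iostream>
#include <optional>
#include <regex>
#include <set>
#include <string>

// Illustrative re-implementation of the grammar documented above:
// "*" means all outputs, otherwise a non-empty comma-separated name list.
static std::optional<std::set<std::string>> parseOutputsSpecDemo(const std::string & s)
{
    static const std::regex spec(R"((\*)|([0-9a-zA-Z\+\-\._\?=]+(,[0-9a-zA-Z\+\-\._\?=]+)*))");
    std::smatch m;
    if (!std::regex_match(s, m, spec)) return std::nullopt;
    if (m[1].matched) return std::set<std::string>{};   // empty set stands in for "all" here
    std::set<std::string> names;
    std::string rest = m[2].str();
    size_t pos;
    while ((pos = rest.find(',')) != std::string::npos) {
        names.insert(rest.substr(0, pos));
        rest.erase(0, pos + 1);
    }
    names.insert(rest);
    return names;
}

// Split "prefix^outputs" on the *last* '^', as the parse() comment above describes.
static std::pair<std::string, std::string> splitExtendedDemo(const std::string & s)
{
    auto caret = s.rfind('^');
    if (caret == std::string::npos) return {s, ""};      // no explicit outputs: the default case
    return {s.substr(0, caret), s.substr(caret + 1)};
}

int main()
{
    assert(parseOutputsSpecDemo("*"));
    assert(parseOutputsSpecDemo("out,bin")->count("bin") == 1);
    assert(!parseOutputsSpecDemo(""));                   // empty specs are rejected
    auto [prefix, outputs] = splitExtendedDemo("nixpkgs#foo^out,bin");
    std::cout << prefix << " -> " << outputs << "\n";    // prints "nixpkgs#foo -> out,bin"
}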
@@ -3,6 +3,80 @@
 namespace nix {

+std::string ValidPathInfo::fingerprint(const Store & store) const
+{
+    if (narSize == 0)
+        throw Error("cannot calculate fingerprint of path '%s' because its size is not known",
+            store.printStorePath(path));
+    return
+        "1;" + store.printStorePath(path) + ";"
+        + narHash.to_string(Base32, true) + ";"
+        + std::to_string(narSize) + ";"
+        + concatStringsSep(",", store.printStorePathSet(references));
+}
+
+
+void ValidPathInfo::sign(const Store & store, const SecretKey & secretKey)
+{
+    sigs.insert(secretKey.signDetached(fingerprint(store)));
+}
+
+
+bool ValidPathInfo::isContentAddressed(const Store & store) const
+{
+    if (! ca) return false;
+
+    auto caPath = std::visit(overloaded {
+        [&](const TextHash & th) {
+            return store.makeTextPath(path.name(), th.hash, references);
+        },
+        [&](const FixedOutputHash & fsh) {
+            auto refs = references;
+            bool hasSelfReference = false;
+            if (refs.count(path)) {
+                hasSelfReference = true;
+                refs.erase(path);
+            }
+            return store.makeFixedOutputPath(fsh.method, fsh.hash, path.name(), refs, hasSelfReference);
+        }
+    }, *ca);
+
+    bool res = caPath == path;
+
+    if (!res)
+        printError("warning: path '%s' claims to be content-addressed but isn't", store.printStorePath(path));
+
+    return res;
+}
+
+
+size_t ValidPathInfo::checkSignatures(const Store & store, const PublicKeys & publicKeys) const
+{
+    if (isContentAddressed(store)) return maxSigs;
+
+    size_t good = 0;
+    for (auto & sig : sigs)
+        if (checkSignature(store, publicKeys, sig))
+            good++;
+    return good;
+}
+
+
+bool ValidPathInfo::checkSignature(const Store & store, const PublicKeys & publicKeys, const std::string & sig) const
+{
+    return verifyDetached(fingerprint(store), sig, publicKeys);
+}
+
+
+Strings ValidPathInfo::shortRefs() const
+{
+    Strings refs;
+    for (auto & r : references)
+        refs.push_back(std::string(r.to_string()));
+    return refs;
+}
+
+
 ValidPathInfo ValidPathInfo::read(Source & source, const Store & store, unsigned int format)
 {
     return read(source, store, format, store.parseStorePath(readString(source)));

@@ -24,6 +98,7 @@ ValidPathInfo ValidPathInfo::read(Source & source, const Store & store, unsigned
     return info;
 }

+
 void ValidPathInfo::write(
     Sink & sink,
     const Store & store,
7  src/libstore/path-regex.hh  Normal file

@@ -0,0 +1,7 @@
#pragma once

namespace nix {

static constexpr std::string_view nameRegexStr = R"([0-9a-zA-Z\+\-\._\?=]+)";

}
@@ -1,6 +1,5 @@
 #include "path-with-outputs.hh"
 #include "store-api.hh"
-#include "nlohmann/json.hpp"

 #include <regex>

@@ -16,10 +15,14 @@ std::string StorePathWithOutputs::to_string(const Store & store) const
 DerivedPath StorePathWithOutputs::toDerivedPath() const
 {
-    if (!outputs.empty() || path.isDerivation())
-        return DerivedPath::Built { path, outputs };
-    else
+    if (!outputs.empty()) {
+        return DerivedPath::Built { path, OutputsSpec::Names { outputs } };
+    } else if (path.isDerivation()) {
+        assert(outputs.empty());
+        return DerivedPath::Built { path, OutputsSpec::All { } };
+    } else {
         return DerivedPath::Opaque { path };
+    }
 }

@@ -42,7 +45,18 @@ std::variant<StorePathWithOutputs, StorePath> StorePathWithOutputs::tryFromDeriv
         return StorePathWithOutputs { bo.path };
     },
     [&](const DerivedPath::Built & bfd) -> std::variant<StorePathWithOutputs, StorePath> {
-        return StorePathWithOutputs { bfd.drvPath, bfd.outputs };
+        return StorePathWithOutputs {
+            .path = bfd.drvPath,
+            // Use legacy encoding of wildcard as empty set
+            .outputs = std::visit(overloaded {
+                [&](const OutputsSpec::All &) -> StringSet {
+                    return {};
+                },
+                [&](const OutputsSpec::Names & outputs) {
+                    return static_cast<StringSet>(outputs);
+                },
+            }, bfd.outputs.raw()),
+        };
     },
 }, p.raw());
 }

@@ -53,8 +67,8 @@ std::pair<std::string_view, StringSet> parsePathWithOutputs(std::string_view s)
     size_t n = s.find("!");
     return n == s.npos
         ? std::make_pair(s, std::set<std::string>())
-        : std::make_pair(((std::string_view) s).substr(0, n),
-            tokenizeString<std::set<std::string>>(((std::string_view) s).substr(n + 1), ","));
+        : std::make_pair(s.substr(0, n),
+            tokenizeString<std::set<std::string>>(s.substr(n + 1), ","));
 }

@@ -71,57 +85,4 @@ StorePathWithOutputs followLinksToStorePathWithOutputs(const Store & store, std:
     return StorePathWithOutputs { store.followLinksToStorePath(path), std::move(outputs) };
 }

-std::pair<std::string, OutputsSpec> parseOutputsSpec(const std::string & s)
-{
-    static std::regex regex(R"((.*)\^((\*)|([a-z]+(,[a-z]+)*)))");
-
-    std::smatch match;
-    if (!std::regex_match(s, match, regex))
-        return {s, DefaultOutputs()};
-
-    if (match[3].matched)
-        return {match[1], AllOutputs()};
-
-    return {match[1], tokenizeString<OutputNames>(match[4].str(), ",")};
-}
-
-std::string printOutputsSpec(const OutputsSpec & outputsSpec)
-{
-    if (std::get_if<DefaultOutputs>(&outputsSpec))
-        return "";
-
-    if (std::get_if<AllOutputs>(&outputsSpec))
-        return "^*";
-
-    if (auto outputNames = std::get_if<OutputNames>(&outputsSpec))
-        return "^" + concatStringsSep(",", *outputNames);
-
-    assert(false);
-}
-
-void to_json(nlohmann::json & json, const OutputsSpec & outputsSpec)
-{
-    if (std::get_if<DefaultOutputs>(&outputsSpec))
-        json = nullptr;
-
-    else if (std::get_if<AllOutputs>(&outputsSpec))
-        json = std::vector<std::string>({"*"});
-
-    else if (auto outputNames = std::get_if<OutputNames>(&outputsSpec))
-        json = *outputNames;
-}
-
-void from_json(const nlohmann::json & json, OutputsSpec & outputsSpec)
-{
-    if (json.is_null())
-        outputsSpec = DefaultOutputs();
-
-    else {
-        auto names = json.get<OutputNames>();
-        if (names == OutputNames({"*"}))
-            outputsSpec = AllOutputs();
-        else
-            outputsSpec = names;
-    }
-}
-
 }
@@ -1,13 +1,17 @@
 #pragma once

-#include <variant>
-
 #include "path.hh"
 #include "derived-path.hh"
-#include "nlohmann/json_fwd.hpp"

 namespace nix {

+/* This is a deprecated old type just for use by the old CLI, and older
+   versions of the RPC protocols. In new code don't use it; you want
+   `DerivedPath` instead.
+
+   `DerivedPath` is better because it handles more cases, and does so more
+   explicitly without devious punning tricks.
+*/
 struct StorePathWithOutputs
 {
     StorePath path;

@@ -33,25 +37,4 @@ StorePathWithOutputs parsePathWithOutputs(const Store & store, std::string_view
 StorePathWithOutputs followLinksToStorePathWithOutputs(const Store & store, std::string_view pathWithOutputs);

-typedef std::set<std::string> OutputNames;
-
-struct AllOutputs {
-    bool operator < (const AllOutputs & _) const { return false; }
-};
-
-struct DefaultOutputs {
-    bool operator < (const DefaultOutputs & _) const { return false; }
-};
-
-typedef std::variant<DefaultOutputs, AllOutputs, OutputNames> OutputsSpec;
-
-/* Parse a string of the form 'prefix^output1,...outputN' or
-   'prefix^*', returning the prefix and the outputs spec. */
-std::pair<std::string, OutputsSpec> parseOutputsSpec(const std::string & s);
-
-std::string printOutputsSpec(const OutputsSpec & outputsSpec);
-
-void to_json(nlohmann::json &, const OutputsSpec &);
-void from_json(const nlohmann::json &, OutputsSpec &);
-
 }
@@ -8,8 +8,10 @@ static void checkName(std::string_view path, std::string_view name)
 {
     if (name.empty())
         throw BadStorePath("store path '%s' has an empty name", path);
-    if (name.size() > 211)
-        throw BadStorePath("store path '%s' has a name longer than 211 characters", path);
+    if (name.size() > StorePath::MaxPathLen)
+        throw BadStorePath("store path '%s' has a name longer than '%d characters",
+            StorePath::MaxPathLen, path);
+    // See nameRegexStr for the definition
     for (auto c : name)
         if (!((c >= '0' && c <= '9')
             || (c >= 'a' && c <= 'z')
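For reference, the same name check as a standalone snippet. This is an illustration only: checkNameDemo is a hypothetical name, and the 211-character bound and character class are copied from StorePath::MaxPathLen and nameRegexStr in this diff.

#include <iostream>
#include <regex>
#include <stdexcept>
#include <string>

// Same character class as nameRegexStr in path-regex.hh and the same
// 211-character bound that path.cc enforces via StorePath::MaxPathLen.
static void checkNameDemo(const std::string & name)
{
    static const size_t maxPathLen = 211;
    static const std::regex nameRegex(R"([0-9a-zA-Z\+\-\._\?=]+)");
    if (name.empty())
        throw std::invalid_argument("store path name is empty");
    if (name.size() > maxPathLen)
        throw std::invalid_argument("store path name is longer than 211 characters");
    if (!std::regex_match(name, nameRegex))
        throw std::invalid_argument("store path name contains a forbidden character");
}

int main()
{
    checkNameDemo("firefox-107.0");      // accepted
    try {
        checkNameDemo("foo!o");          // '!' is rejected, matching the bad_bang test below
    } catch (const std::invalid_argument & e) {
        std::cout << e.what() << "\n";
    }
}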
@@ -5,7 +5,6 @@
 namespace nix {

-class Store;
 struct Hash;

 class StorePath

@@ -17,6 +16,8 @@ public:
     /* Size of the hash part of store paths, in base-32 characters. */
     constexpr static size_t HashLen = 32; // i.e. 160 bits

+    constexpr static size_t MaxPathLen = 211;
+
     StorePath() = delete;

     StorePath(std::string_view baseName);

@@ -64,7 +65,6 @@ public:
 typedef std::set<StorePath> StorePathSet;
 typedef std::vector<StorePath> StorePaths;
-typedef std::map<std::string, StorePath> OutputPathMap;

 typedef std::map<StorePath, std::optional<ContentAddress>> StorePathCAMap;
@@ -7,6 +7,8 @@
 namespace nix {

+class Store;
+
 struct DrvOutput {
     // The hash modulo of the derivation
     Hash drvHash;

@@ -93,4 +95,14 @@ struct RealisedPath {
     GENERATE_CMP(RealisedPath, me->raw);
 };

+class MissingRealisation : public Error
+{
+public:
+    MissingRealisation(DrvOutput & outputId)
+        : Error( "cannot operate on an output of the "
+                "unbuilt derivation '%s'",
+                outputId.to_string())
+    {}
+};
+
 }
@@ -867,8 +867,8 @@ std::vector<BuildResult> RemoteStore::buildPathsWithResults(
     OutputPathMap outputs;
     auto drv = evalStore->readDerivation(bfd.drvPath);
     const auto outputHashes = staticOutputHashes(*evalStore, drv); // FIXME: expensive
-    const auto drvOutputs = drv.outputsAndOptPaths(*this);
-    for (auto & output : bfd.outputs) {
+    auto built = resolveDerivedPath(*this, bfd, &*evalStore);
+    for (auto & [output, outputPath] : built) {
         auto outputHash = get(outputHashes, output);
         if (!outputHash)
             throw Error(

@@ -879,22 +879,14 @@ std::vector<BuildResult> RemoteStore::buildPathsWithResults(
     auto realisation =
         queryRealisation(outputId);
     if (!realisation)
-        throw Error(
-            "cannot operate on an output of unbuilt "
-            "content-addressed derivation '%s'",
-            outputId.to_string());
+        throw MissingRealisation(outputId);
     res.builtOutputs.emplace(realisation->id, *realisation);
 } else {
-    // If ca-derivations isn't enabled, assume that
-    // the output path is statically known.
-    const auto drvOutput = get(drvOutputs, output);
-    assert(drvOutput);
-    assert(drvOutput->second);
     res.builtOutputs.emplace(
         outputId,
         Realisation {
             .id = outputId,
-            .outPath = *drvOutput->second,
+            .outPath = outputPath,
         });
 }
 }

@@ -918,7 +910,12 @@ BuildResult RemoteStore::buildDerivation(const StorePath & drvPath, const BasicD
     writeDerivation(conn->to, *this, drv);
     conn->to << buildMode;
     conn.processStderr();
-    BuildResult res { .path = DerivedPath::Built { .drvPath = drvPath } };
+    BuildResult res {
+        .path = DerivedPath::Built {
+            .drvPath = drvPath,
+            .outputs = OutputsSpec::All { },
+        },
+    };
     res.status = (BuildResult::Status) readInt(conn->from);
     conn->from >> res.errorMsg;
     if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 29) {
@@ -53,8 +53,8 @@ public:
     { return false; }

     // FIXME extend daemon protocol, move implementation to RemoteStore
-    std::optional<std::string> getBuildLog(const StorePath & path) override
-    { unsupported("getBuildLog"); }
+    std::optional<std::string> getBuildLogExact(const StorePath & path) override
+    { unsupported("getBuildLogExact"); }

 private:
@@ -1210,79 +1210,6 @@ std::string showPaths(const PathSet & paths)
 }

-
-std::string ValidPathInfo::fingerprint(const Store & store) const
-{
-    if (narSize == 0)
-        throw Error("cannot calculate fingerprint of path '%s' because its size is not known",
-            store.printStorePath(path));
-    return
-        "1;" + store.printStorePath(path) + ";"
-        + narHash.to_string(Base32, true) + ";"
-        + std::to_string(narSize) + ";"
-        + concatStringsSep(",", store.printStorePathSet(references));
-}
-
-
-void ValidPathInfo::sign(const Store & store, const SecretKey & secretKey)
-{
-    sigs.insert(secretKey.signDetached(fingerprint(store)));
-}
-
-bool ValidPathInfo::isContentAddressed(const Store & store) const
-{
-    if (! ca) return false;
-
-    auto caPath = std::visit(overloaded {
-        [&](const TextHash & th) {
-            return store.makeTextPath(path.name(), th.hash, references);
-        },
-        [&](const FixedOutputHash & fsh) {
-            auto refs = references;
-            bool hasSelfReference = false;
-            if (refs.count(path)) {
-                hasSelfReference = true;
-                refs.erase(path);
-            }
-            return store.makeFixedOutputPath(fsh.method, fsh.hash, path.name(), refs, hasSelfReference);
-        }
-    }, *ca);
-
-    bool res = caPath == path;
-
-    if (!res)
-        printError("warning: path '%s' claims to be content-addressed but isn't", store.printStorePath(path));
-
-    return res;
-}
-
-
-size_t ValidPathInfo::checkSignatures(const Store & store, const PublicKeys & publicKeys) const
-{
-    if (isContentAddressed(store)) return maxSigs;
-
-    size_t good = 0;
-    for (auto & sig : sigs)
-        if (checkSignature(store, publicKeys, sig))
-            good++;
-    return good;
-}
-
-
-bool ValidPathInfo::checkSignature(const Store & store, const PublicKeys & publicKeys, const std::string & sig) const
-{
-    return verifyDetached(fingerprint(store), sig, publicKeys);
-}
-
-
-Strings ValidPathInfo::shortRefs() const
-{
-    Strings refs;
-    for (auto & r : references)
-        refs.push_back(std::string(r.to_string()));
-    return refs;
-}
-
-
 Derivation Store::derivationFromPath(const StorePath & drvPath)
 {
     ensurePath(drvPath);

@@ -1301,6 +1228,34 @@ Derivation readDerivationCommon(Store& store, const StorePath& drvPath, bool req
     }
 }

+std::optional<StorePath> Store::getBuildDerivationPath(const StorePath & path)
+{
+
+    if (!path.isDerivation()) {
+        try {
+            auto info = queryPathInfo(path);
+            if (!info->deriver) return std::nullopt;
+            return *info->deriver;
+        } catch (InvalidPath &) {
+            return std::nullopt;
+        }
+    }
+
+    if (!settings.isExperimentalFeatureEnabled(Xp::CaDerivations) || !isValidPath(path))
+        return path;
+
+    auto drv = readDerivation(path);
+    if (!drv.type().hasKnownOutputPaths()) {
+        // The build log is actually attached to the corresponding
+        // resolved derivation, so we need to get it first
+        auto resolvedDrv = drv.tryResolve(*this);
+        if (resolvedDrv)
+            return writeDerivation(*this, *resolvedDrv, NoRepair, true);
+    }
+
+    return path;
+}
+
 Derivation Store::readDerivation(const StorePath & drvPath)
 { return readDerivationCommon(*this, drvPath, true); }
@@ -71,6 +71,9 @@ class NarInfoDiskCache;
 class Store;

+typedef std::map<std::string, StorePath> OutputPathMap;
+
+
 enum CheckSigsFlag : bool { NoCheckSigs = false, CheckSigs = true };
 enum SubstituteFlag : bool { NoSubstitute = false, Substitute = true };
 enum AllowInvalidFlag : bool { DisallowInvalid = false, AllowInvalid = true };

@@ -120,6 +123,8 @@ public:
     typedef std::map<std::string, std::string> Params;

+
+
 protected:

     struct PathInfoCacheValue {

@@ -618,6 +623,13 @@ public:
     */
     StorePathSet exportReferences(const StorePathSet & storePaths, const StorePathSet & inputPaths);

+    /**
+     * Given a store path, return the realisation actually used in the realisation of this path:
+     * - If the path is a content-addressed derivation, try to resolve it
+     * - Otherwise, find one of its derivers
+     */
+    std::optional<StorePath> getBuildDerivationPath(const StorePath &);
+
     /* Hack to allow long-running processes like hydra-queue-runner to
        occasionally flush their path info cache. */
     void clearPathInfoCache()

@@ -719,6 +731,11 @@ void copyClosure(
 void removeTempRoots();

+
+/* Resolve the derived path completely, failing if any derivation output
+   is unknown. */
+OutputPathMap resolveDerivedPath(Store &, const DerivedPath::Built &, Store * evalStore = nullptr);
+
 /* Return a Store object to access the Nix store denoted by
    ‘uri’ (slight misnomer...). Supported values are:
23  src/libstore/tests/libstoretests.hh  Normal file

@@ -0,0 +1,23 @@
#include <gtest/gtest.h>
#include <gmock/gmock.h>

#include "store-api.hh"

namespace nix {

class LibStoreTest : public ::testing::Test {
    public:
        static void SetUpTestSuite() {
            initLibStore();
        }

    protected:
        LibStoreTest()
            : store(openStore("dummy://"))
        { }

        ref<Store> store;
};


} /* namespace nix */
@@ -12,4 +12,4 @@ libstore-tests_CXXFLAGS += -I src/libstore -I src/libutil
 libstore-tests_LIBS = libstore libutil

-libstore-tests_LDFLAGS := $(GTEST_LIBS)
+libstore-tests_LDFLAGS := -lrapidcheck $(GTEST_LIBS)
201  src/libstore/tests/outputs-spec.cc  Normal file

@@ -0,0 +1,201 @@
#include "outputs-spec.hh"

#include <nlohmann/json.hpp>
#include <gtest/gtest.h>

namespace nix {

#ifndef NDEBUG
TEST(OutputsSpec, no_empty_names) {
    ASSERT_DEATH(OutputsSpec::Names { std::set<std::string> { } }, "");
}
#endif

#define TEST_DONT_PARSE(NAME, STR)              \
    TEST(OutputsSpec, bad_ ## NAME) {           \
        std::optional OutputsSpecOpt =          \
            OutputsSpec::parseOpt(STR);         \
        ASSERT_FALSE(OutputsSpecOpt);           \
    }

TEST_DONT_PARSE(empty, "")
TEST_DONT_PARSE(garbage, "&*()")
TEST_DONT_PARSE(double_star, "**")
TEST_DONT_PARSE(star_first, "*,foo")
TEST_DONT_PARSE(star_second, "foo,*")

#undef TEST_DONT_PARSE

TEST(OutputsSpec, all) {
    std::string_view str = "*";
    OutputsSpec expected = OutputsSpec::All { };
    ASSERT_EQ(OutputsSpec::parse(str), expected);
    ASSERT_EQ(expected.to_string(), str);
}

TEST(OutputsSpec, names_out) {
    std::string_view str = "out";
    OutputsSpec expected = OutputsSpec::Names { "out" };
    ASSERT_EQ(OutputsSpec::parse(str), expected);
    ASSERT_EQ(expected.to_string(), str);
}

TEST(OutputsSpec, names_underscore) {
    std::string_view str = "a_b";
    OutputsSpec expected = OutputsSpec::Names { "a_b" };
    ASSERT_EQ(OutputsSpec::parse(str), expected);
    ASSERT_EQ(expected.to_string(), str);
}

TEST(OutputsSpec, names_numberic) {
    std::string_view str = "01";
    OutputsSpec expected = OutputsSpec::Names { "01" };
    ASSERT_EQ(OutputsSpec::parse(str), expected);
    ASSERT_EQ(expected.to_string(), str);
}

TEST(OutputsSpec, names_out_bin) {
    OutputsSpec expected = OutputsSpec::Names { "out", "bin" };
    ASSERT_EQ(OutputsSpec::parse("out,bin"), expected);
    // N.B. This normalization is OK.
    ASSERT_EQ(expected.to_string(), "bin,out");
}

#define TEST_SUBSET(X, THIS, THAT) \
    X((OutputsSpec { THIS }).isSubsetOf(THAT));

TEST(OutputsSpec, subsets_all_all) {
    TEST_SUBSET(ASSERT_TRUE, OutputsSpec::All { }, OutputsSpec::All { });
}

TEST(OutputsSpec, subsets_names_all) {
    TEST_SUBSET(ASSERT_TRUE, OutputsSpec::Names { "a" }, OutputsSpec::All { });
}

TEST(OutputsSpec, subsets_names_names_eq) {
    TEST_SUBSET(ASSERT_TRUE, OutputsSpec::Names { "a" }, OutputsSpec::Names { "a" });
}

TEST(OutputsSpec, subsets_names_names_noneq) {
    TEST_SUBSET(ASSERT_TRUE, OutputsSpec::Names { "a" }, (OutputsSpec::Names { "a", "b" }));
}

TEST(OutputsSpec, not_subsets_all_names) {
    TEST_SUBSET(ASSERT_FALSE, OutputsSpec::All { }, OutputsSpec::Names { "a" });
}

TEST(OutputsSpec, not_subsets_names_names) {
    TEST_SUBSET(ASSERT_FALSE, (OutputsSpec::Names { "a", "b" }), (OutputsSpec::Names { "a" }));
}

#undef TEST_SUBSET

#define TEST_UNION(RES, THIS, THAT) \
    ASSERT_EQ(OutputsSpec { RES }, (OutputsSpec { THIS }).union_(THAT));

TEST(OutputsSpec, union_all_all) {
    TEST_UNION(OutputsSpec::All { }, OutputsSpec::All { }, OutputsSpec::All { });
}

TEST(OutputsSpec, union_all_names) {
    TEST_UNION(OutputsSpec::All { }, OutputsSpec::All { }, OutputsSpec::Names { "a" });
}

TEST(OutputsSpec, union_names_all) {
    TEST_UNION(OutputsSpec::All { }, OutputsSpec::Names { "a" }, OutputsSpec::All { });
}

TEST(OutputsSpec, union_names_names) {
    TEST_UNION((OutputsSpec::Names { "a", "b" }), OutputsSpec::Names { "a" }, OutputsSpec::Names { "b" });
}

#undef TEST_UNION

#define TEST_DONT_PARSE(NAME, STR)                      \
    TEST(ExtendedOutputsSpec, bad_ ## NAME) {           \
        std::optional extendedOutputsSpecOpt =          \
            ExtendedOutputsSpec::parseOpt(STR);         \
        ASSERT_FALSE(extendedOutputsSpecOpt);           \
    }

TEST_DONT_PARSE(carot_empty, "^")
TEST_DONT_PARSE(prefix_carot_empty, "foo^")
TEST_DONT_PARSE(garbage, "^&*()")
TEST_DONT_PARSE(double_star, "^**")
TEST_DONT_PARSE(star_first, "^*,foo")
TEST_DONT_PARSE(star_second, "^foo,*")

#undef TEST_DONT_PARSE

TEST(ExtendedOutputsSpec, defeault) {
    std::string_view str = "foo";
    auto [prefix, extendedOutputsSpec] = ExtendedOutputsSpec::parse(str);
    ASSERT_EQ(prefix, "foo");
    ExtendedOutputsSpec expected = ExtendedOutputsSpec::Default { };
    ASSERT_EQ(extendedOutputsSpec, expected);
    ASSERT_EQ(std::string { prefix } + expected.to_string(), str);
}

TEST(ExtendedOutputsSpec, all) {
    std::string_view str = "foo^*";
    auto [prefix, extendedOutputsSpec] = ExtendedOutputsSpec::parse(str);
    ASSERT_EQ(prefix, "foo");
    ExtendedOutputsSpec expected = OutputsSpec::All { };
    ASSERT_EQ(extendedOutputsSpec, expected);
    ASSERT_EQ(std::string { prefix } + expected.to_string(), str);
}

TEST(ExtendedOutputsSpec, out) {
    std::string_view str = "foo^out";
    auto [prefix, extendedOutputsSpec] = ExtendedOutputsSpec::parse(str);
    ASSERT_EQ(prefix, "foo");
    ExtendedOutputsSpec expected = OutputsSpec::Names { "out" };
    ASSERT_EQ(extendedOutputsSpec, expected);
    ASSERT_EQ(std::string { prefix } + expected.to_string(), str);
}

TEST(ExtendedOutputsSpec, out_bin) {
    auto [prefix, extendedOutputsSpec] = ExtendedOutputsSpec::parse("foo^out,bin");
    ASSERT_EQ(prefix, "foo");
    ExtendedOutputsSpec expected = OutputsSpec::Names { "out", "bin" };
    ASSERT_EQ(extendedOutputsSpec, expected);
    ASSERT_EQ(std::string { prefix } + expected.to_string(), "foo^bin,out");
}

TEST(ExtendedOutputsSpec, many_carrot) {
    auto [prefix, extendedOutputsSpec] = ExtendedOutputsSpec::parse("foo^bar^out,bin");
    ASSERT_EQ(prefix, "foo^bar");
    ExtendedOutputsSpec expected = OutputsSpec::Names { "out", "bin" };
    ASSERT_EQ(extendedOutputsSpec, expected);
    ASSERT_EQ(std::string { prefix } + expected.to_string(), "foo^bar^bin,out");
}


#define TEST_JSON(TYPE, NAME, STR, VAL)                     \
                                                            \
    TEST(TYPE, NAME ## _to_json) {                          \
        using nlohmann::literals::operator "" _json;        \
        ASSERT_EQ(                                          \
            STR ## _json,                                   \
            ((nlohmann::json) TYPE { VAL }));               \
    }                                                       \
                                                            \
    TEST(TYPE, NAME ## _from_json) {                        \
        using nlohmann::literals::operator "" _json;        \
        ASSERT_EQ(                                          \
            TYPE { VAL },                                   \
            (STR ## _json).get<TYPE>());                    \
    }

TEST_JSON(OutputsSpec, all, R"(["*"])", OutputsSpec::All { })
TEST_JSON(OutputsSpec, name, R"(["a"])", OutputsSpec::Names { "a" })
TEST_JSON(OutputsSpec, names, R"(["a","b"])", (OutputsSpec::Names { "a", "b" }))

TEST_JSON(ExtendedOutputsSpec, def, R"(null)", ExtendedOutputsSpec::Default { })
TEST_JSON(ExtendedOutputsSpec, all, R"(["*"])", ExtendedOutputsSpec::Explicit { OutputsSpec::All { } })
TEST_JSON(ExtendedOutputsSpec, name, R"(["a"])", ExtendedOutputsSpec::Explicit { OutputsSpec::Names { "a" } })
TEST_JSON(ExtendedOutputsSpec, names, R"(["a","b"])", (ExtendedOutputsSpec::Explicit { OutputsSpec::Names { "a", "b" } }))

#undef TEST_JSON

}
@@ -1,46 +0,0 @@
-#include "path-with-outputs.hh"
-
-#include <gtest/gtest.h>
-
-namespace nix {
-
-TEST(parseOutputsSpec, basic)
-{
-    {
-        auto [prefix, outputsSpec] = parseOutputsSpec("foo");
-        ASSERT_EQ(prefix, "foo");
-        ASSERT_TRUE(std::get_if<DefaultOutputs>(&outputsSpec));
-    }
-
-    {
-        auto [prefix, outputsSpec] = parseOutputsSpec("foo^*");
-        ASSERT_EQ(prefix, "foo");
-        ASSERT_TRUE(std::get_if<AllOutputs>(&outputsSpec));
-    }
-
-    {
-        auto [prefix, outputsSpec] = parseOutputsSpec("foo^out");
-        ASSERT_EQ(prefix, "foo");
-        ASSERT_TRUE(std::get<OutputNames>(outputsSpec) == OutputNames({"out"}));
-    }
-
-    {
-        auto [prefix, outputsSpec] = parseOutputsSpec("foo^out,bin");
-        ASSERT_EQ(prefix, "foo");
-        ASSERT_TRUE(std::get<OutputNames>(outputsSpec) == OutputNames({"out", "bin"}));
-    }
-
-    {
-        auto [prefix, outputsSpec] = parseOutputsSpec("foo^bar^out,bin");
-        ASSERT_EQ(prefix, "foo^bar");
-        ASSERT_TRUE(std::get<OutputNames>(outputsSpec) == OutputNames({"out", "bin"}));
-    }
-
-    {
-        auto [prefix, outputsSpec] = parseOutputsSpec("foo^&*()");
-        ASSERT_EQ(prefix, "foo^&*()");
-        ASSERT_TRUE(std::get_if<DefaultOutputs>(&outputsSpec));
-    }
-}
-
-}
144  src/libstore/tests/path.cc  Normal file

@@ -0,0 +1,144 @@
#include <regex>

#include <nlohmann/json.hpp>
#include <gtest/gtest.h>
#include <rapidcheck/gtest.h>

#include "path-regex.hh"
#include "store-api.hh"

#include "libstoretests.hh"

namespace nix {

#define STORE_DIR "/nix/store/"
#define HASH_PART "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q"

class StorePathTest : public LibStoreTest
{
};

static std::regex nameRegex { std::string { nameRegexStr } };

#define TEST_DONT_PARSE(NAME, STR)                          \
    TEST_F(StorePathTest, bad_ ## NAME) {                   \
        std::string_view str =                              \
            STORE_DIR HASH_PART "-" STR;                    \
        ASSERT_THROW(                                       \
            store->parseStorePath(str),                     \
            BadStorePath);                                  \
        std::string name { STR };                           \
        EXPECT_FALSE(std::regex_match(name, nameRegex));    \
    }

TEST_DONT_PARSE(empty, "")
TEST_DONT_PARSE(garbage, "&*()")
TEST_DONT_PARSE(double_star, "**")
TEST_DONT_PARSE(star_first, "*,foo")
TEST_DONT_PARSE(star_second, "foo,*")
TEST_DONT_PARSE(bang, "foo!o")

#undef TEST_DONT_PARSE

#define TEST_DO_PARSE(NAME, STR)                            \
    TEST_F(StorePathTest, good_ ## NAME) {                  \
        std::string_view str =                              \
            STORE_DIR HASH_PART "-" STR;                    \
        auto p = store->parseStorePath(str);                \
        std::string name { p.name() };                      \
        EXPECT_TRUE(std::regex_match(name, nameRegex));     \
    }

// 0-9 a-z A-Z + - . _ ? =

TEST_DO_PARSE(numbers, "02345")
TEST_DO_PARSE(lower_case, "foo")
TEST_DO_PARSE(upper_case, "FOO")
TEST_DO_PARSE(plus, "foo+bar")
TEST_DO_PARSE(dash, "foo-dev")
TEST_DO_PARSE(underscore, "foo_bar")
TEST_DO_PARSE(period, "foo.txt")
TEST_DO_PARSE(question_mark, "foo?why")
TEST_DO_PARSE(equals_sign, "foo=foo")

#undef TEST_DO_PARSE

// For rapidcheck
void showValue(const StorePath & p, std::ostream & os) {
    os << p.to_string();
}

}

namespace rc {
using namespace nix;

template<>
struct Arbitrary<StorePath> {
    static Gen<StorePath> arbitrary();
};

Gen<StorePath> Arbitrary<StorePath>::arbitrary()
{
    auto len = *gen::inRange<size_t>(1, StorePath::MaxPathLen);

    std::string pre { HASH_PART "-" };
    pre.reserve(pre.size() + len);

    for (size_t c = 0; c < len; ++c) {
        switch (auto i = *gen::inRange<uint8_t>(0, 10 + 2 * 26 + 6)) {
        case 0 ... 9:
            pre += '0' + i;
        case 10 ... 35:
            pre += 'A' + (i - 10);
            break;
        case 36 ... 61:
            pre += 'a' + (i - 36);
            break;
        case 62:
            pre += '+';
            break;
        case 63:
            pre += '-';
            break;
        case 64:
            pre += '.';
            break;
        case 65:
            pre += '_';
            break;
        case 66:
            pre += '?';
            break;
        case 67:
            pre += '=';
            break;
        default:
            assert(false);
        }
    }

    return gen::just(StorePath { pre });
}

} // namespace rc

namespace nix {

RC_GTEST_FIXTURE_PROP(
    StorePathTest,
    prop_regex_accept,
    (const StorePath & p))
{
    RC_ASSERT(std::regex_match(std::string { p.name() }, nameRegex));
}

RC_GTEST_FIXTURE_PROP(
    StorePathTest,
    prop_round_rip,
    (const StorePath & p))
{
    RC_ASSERT(p == store->parseStorePath(store->printStorePath(p)));
}

}
@@ -74,6 +74,8 @@ struct AbstractPos
     virtual void print(std::ostream & out) const = 0;

     std::optional<LinesOfCode> getCodeLines() const;
+
+    virtual ~AbstractPos() = default;
 };

 std::ostream & operator << (std::ostream & str, const AbstractPos & pos);
14  src/libutil/json-impls.hh  Normal file

@@ -0,0 +1,14 @@
#pragma once

#include "nlohmann/json_fwd.hpp"

// Following https://github.com/nlohmann/json#how-can-i-use-get-for-non-default-constructiblenon-copyable-types
#define JSON_IMPL(TYPE)                                       \
    namespace nlohmann {                                      \
        using namespace nix;                                  \
        template <>                                           \
        struct adl_serializer<TYPE> {                         \
            static TYPE from_json(const json & json);         \
            static void to_json(json & json, TYPE t);         \
        };                                                    \
    }
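The macro above only stamps out the adl_serializer specialization that the linked nlohmann/json documentation recommends for non-default-constructible types. A self-contained sketch of the expanded pattern, using a hypothetical Interval type that is not part of this diff:

#include <iostream>
#include <nlohmann/json.hpp>

// A type with no default constructor, like OutputsSpec.
struct Interval {
    int lo, hi;
    Interval(int lo, int hi) : lo(lo), hi(hi) { }
};

// What JSON_IMPL(Interval) would declare, together with the definitions.
namespace nlohmann {
template<>
struct adl_serializer<Interval> {
    static Interval from_json(const json & j) {
        return Interval { j.at(0).get<int>(), j.at(1).get<int>() };
    }
    static void to_json(json & j, Interval t) {
        j = json::array({ t.lo, t.hi });
    }
};
}

int main()
{
    nlohmann::json j = Interval { 3, 7 };   // uses to_json
    auto i = j.get<Interval>();             // uses from_json
    std::cout << j.dump() << " -> " << i.lo << ".." << i.hi << "\n";
}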
@@ -22,27 +22,38 @@ public:
     {
         thread = std::thread([fd]() {
             while (true) {
                 /* Wait indefinitely until a POLLHUP occurs. */
                 struct pollfd fds[1];
                 fds[0].fd = fd;
-                /* This shouldn't be necessary, but macOS doesn't seem to
-                   like a zeroed out events field.
-                   See rdar://37537852.
-                */
-                fds[0].events = POLLHUP;
-                auto count = poll(fds, 1, -1);
-                if (count == -1) abort(); // can't happen
-                /* This shouldn't happen, but can on macOS due to a bug.
-                   See rdar://37550628.
+                /* Polling for no specific events (i.e. just waiting
+                   for an error/hangup) doesn't work on macOS
+                   anymore. So wait for read events and ignore
+                   them. */
+                fds[0].events =
+                    #ifdef __APPLE__
+                    POLLRDNORM
+                    #else
+                    0
+                    #endif
+                    ;
+                auto count = poll(fds, 1, -1);
+                if (count == -1) abort(); // can't happen
+                /* This shouldn't happen, but can on macOS due to a bug.
+                   See rdar://37550628.

                    This may eventually need a delay or further
                    coordination with the main thread if spinning proves
                    too harmful.
                 */
                 if (count == 0) continue;
-                assert(fds[0].revents & POLLHUP);
+                if (fds[0].revents & POLLHUP) {
                     triggerInterrupt();
                     break;
+                }
+                /* This will only happen on macOS. We sleep a bit to
+                   avoid waking up too often if the client is sending
+                   input. */
+                sleep(1);
             }
         });
     };
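The hunk above changes poll() usage: an empty events mask no longer reports hangups reliably on macOS, so the thread now requests POLLRDNORM there and tolerates spurious wakeups. A standalone, POSIX-only sketch of the same hangup-detection pattern on a pipe (illustrative; the behaviour of events = 0 varies by platform):

#include <cassert>
#include <iostream>
#include <poll.h>
#include <unistd.h>

int main()
{
    int fds[2];
    assert(pipe(fds) == 0);
    close(fds[1]);                     // closing the write end produces a hangup on the read end

    struct pollfd pfd;
    pfd.fd = fds[0];
    // Waiting for "no event" (events = 0) only reports POLLHUP/POLLERR on
    // some platforms; asking for POLLRDNORM as well is the variant the
    // diff switches to on macOS.
#ifdef __APPLE__
    pfd.events = POLLRDNORM;
#else
    pfd.events = 0;
#endif

    int count = poll(&pfd, 1, -1);
    assert(count == 1);
    if (pfd.revents & POLLHUP)
        std::cout << "peer closed the pipe\n";

    close(fds[0]);
}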
Some files were not shown because too many files have changed in this diff.