Merge branch 'master' into referenceablePaths
commit 4aaf0ee52e
.github/PULL_REQUEST_TEMPLATE.md (vendored, new file, 28 lines)

@@ -0,0 +1,28 @@
# Motivation
<!-- Briefly explain what the change is about and why it is desirable. -->

# Context
<!-- Provide context. Reference open issues if available. -->

<!-- Non-trivial change: Briefly outline the implementation strategy. -->

<!-- Invasive change: Discuss alternative designs or approaches you considered. -->

<!-- Large change: Provide instructions to reviewers how to read the diff. -->

# Checklist for maintainers

<!-- Contributors: please leave this as is -->

Maintainers: tick if completed or explain if not relevant

- [ ] agreed on idea
- [ ] agreed on implementation strategy
- [ ] tests, as appropriate
  - functional tests - `tests/**.sh`
  - unit tests - `src/*/tests`
  - integration tests - `tests/nixos/*`
- [ ] documentation in the manual
- [ ] code and comments are self-explanatory
- [ ] commit message explains why the change was made
- [ ] new feature or bug fix: updated release notes
.github/workflows/backport.yml (vendored, 2 lines changed)

@@ -21,7 +21,7 @@ jobs:
fetch-depth: 0
- name: Create backport PRs
# should be kept in sync with `version`
uses: zeebe-io/backport-action@v1.0.1
uses: zeebe-io/backport-action@v1.1.0
with:
# Config README: https://github.com/zeebe-io/backport-action#backport-action
github_token: ${{ secrets.GITHUB_TOKEN }}
boehmgc-coroutine-sp-fallback.diff (new file, 77 lines)

@@ -0,0 +1,77 @@
diff --git a/darwin_stop_world.c b/darwin_stop_world.c
index 3dbaa3fb..36a1d1f7 100644
--- a/darwin_stop_world.c
+++ b/darwin_stop_world.c
@@ -352,6 +352,7 @@ GC_INNER void GC_push_all_stacks(void)
int nthreads = 0;
word total_size = 0;
mach_msg_type_number_t listcount = (mach_msg_type_number_t)THREAD_TABLE_SZ;
+ size_t stack_limit;
if (!EXPECT(GC_thr_initialized, TRUE))
GC_thr_init();

@@ -407,6 +408,19 @@ GC_INNER void GC_push_all_stacks(void)
GC_push_all_stack_sections(lo, hi, p->traced_stack_sect);
}
if (altstack_lo) {
+ // When a thread goes into a coroutine, we lose its original sp until
+ // control flow returns to the thread.
+ // While in the coroutine, the sp points outside the thread stack,
+ // so we can detect this and push the entire thread stack instead,
+ // as an approximation.
+ // We assume that the coroutine has similarly added its entire stack.
+ // This could be made accurate by cooperating with the application
+ // via new functions and/or callbacks.
+ stack_limit = pthread_get_stacksize_np(p->id);
+ if (altstack_lo >= altstack_hi || altstack_lo < altstack_hi - stack_limit) { // sp outside stack
+ altstack_lo = altstack_hi - stack_limit;
+ }
+
total_size += altstack_hi - altstack_lo;
GC_push_all_stack(altstack_lo, altstack_hi);
}
diff --git a/pthread_stop_world.c b/pthread_stop_world.c
index b5d71e62..aed7b0bf 100644
--- a/pthread_stop_world.c
+++ b/pthread_stop_world.c
@@ -768,6 +768,8 @@ STATIC void GC_restart_handler(int sig)
/* world is stopped. Should not fail if it isn't. */
GC_INNER void GC_push_all_stacks(void)
{
+ size_t stack_limit;
+ pthread_attr_t pattr;
GC_bool found_me = FALSE;
size_t nthreads = 0;
int i;
@@ -851,6 +853,31 @@ GC_INNER void GC_push_all_stacks(void)
hi = p->altstack + p->altstack_size;
/* FIXME: Need to scan the normal stack too, but how ? */
/* FIXME: Assume stack grows down */
+ } else {
+ if (pthread_getattr_np(p->id, &pattr)) {
+ ABORT("GC_push_all_stacks: pthread_getattr_np failed!");
+ }
+ if (pthread_attr_getstacksize(&pattr, &stack_limit)) {
+ ABORT("GC_push_all_stacks: pthread_attr_getstacksize failed!");
+ }
+ if (pthread_attr_destroy(&pattr)) {
+ ABORT("GC_push_all_stacks: pthread_attr_destroy failed!");
+ }
+ // When a thread goes into a coroutine, we lose its original sp until
+ // control flow returns to the thread.
+ // While in the coroutine, the sp points outside the thread stack,
+ // so we can detect this and push the entire thread stack instead,
+ // as an approximation.
+ // We assume that the coroutine has similarly added its entire stack.
+ // This could be made accurate by cooperating with the application
+ // via new functions and/or callbacks.
+ #ifndef STACK_GROWS_UP
+ if (lo >= hi || lo < hi - stack_limit) { // sp outside stack
+ lo = hi - stack_limit;
+ }
+ #else
+ #error "STACK_GROWS_UP not supported in boost_coroutine2 (as of june 2021), so we don't support it in Nix."
+ #endif
}
GC_push_all_stack_sections(lo, hi, traced_stack_sect);
# ifdef STACK_GROWS_UP
@@ -274,6 +274,12 @@ fi
PKG_CHECK_MODULES([GTEST], [gtest_main])


# Look for rapidcheck.
# No pkg-config yet, https://github.com/emil-e/rapidcheck/issues/302
AC_CHECK_HEADERS([rapidcheck/gtest.h], [], [], [#include <gtest/gtest.h>])
AC_CHECK_LIB([rapidcheck], [])


# Look for nlohmann/json.
PKG_CHECK_MODULES([NLOHMANN_JSON], [nlohmann_json >= 3.9])
@@ -10,3 +10,12 @@ git-repository-url = "https://github.com/NixOS/nix"
[preprocessor.anchors]
renderers = ["html"]
command = "jq --from-file doc/manual/anchors.jq"

[output.linkcheck]
# no Internet during the build (in the sandbox)
follow-web-links = false

# mdbook-linkcheck does not understand [foo]{#bar} style links, resulting in
# excessive "Potential incomplete link" warnings. No other kind of warning was
# produced at the time of writing.
warning-policy = "ignore"
@@ -50,11 +50,16 @@ $(d)/src/SUMMARY.md: $(d)/src/SUMMARY.md.in $(d)/src/command-ref/new-cli

$(d)/src/command-ref/new-cli: $(d)/nix.json $(d)/generate-manpage.nix $(bindir)/nix
@rm -rf $@
$(trace-gen) $(nix-eval) --write-to $@ --expr 'import doc/manual/generate-manpage.nix { toplevel = builtins.readFile $<; }'
$(trace-gen) $(nix-eval) --write-to $@.tmp --expr 'import doc/manual/generate-manpage.nix { toplevel = builtins.readFile $<; }'
# @docroot@: https://nixos.org/manual/nix/unstable/contributing/hacking.html#docroot-variable
$(trace-gen) sed -i $@.tmp/*.md -e 's^@docroot@^../..^g'
@mv $@.tmp $@

$(d)/src/command-ref/conf-file.md: $(d)/conf-file.json $(d)/generate-options.nix $(d)/src/command-ref/conf-file-prefix.md $(bindir)/nix
@cat doc/manual/src/command-ref/conf-file-prefix.md > $@.tmp
$(trace-gen) $(nix-eval) --expr 'import doc/manual/generate-options.nix (builtins.fromJSON (builtins.readFile $<))' >> $@.tmp
# @docroot@: https://nixos.org/manual/nix/unstable/contributing/hacking.html#docroot-variable
$(trace-gen) $(nix-eval) --expr 'import doc/manual/generate-options.nix (builtins.fromJSON (builtins.readFile $<))' \
| sed -e 's^@docroot@^..^g'>> $@.tmp
@mv $@.tmp $@

$(d)/nix.json: $(bindir)/nix

@@ -67,7 +72,9 @@ $(d)/conf-file.json: $(bindir)/nix

$(d)/src/language/builtins.md: $(d)/builtins.json $(d)/generate-builtins.nix $(d)/src/language/builtins-prefix.md $(bindir)/nix
@cat doc/manual/src/language/builtins-prefix.md > $@.tmp
$(trace-gen) $(nix-eval) --expr 'import doc/manual/generate-builtins.nix (builtins.fromJSON (builtins.readFile $<))' >> $@.tmp
# @docroot@: https://nixos.org/manual/nix/unstable/contributing/hacking.html#docroot-variable
$(trace-gen) $(nix-eval) --expr 'import doc/manual/generate-builtins.nix (builtins.fromJSON (builtins.readFile $<))' \
| sed -e 's^@docroot@^..^g' >> $@.tmp
@cat doc/manual/src/language/builtins-suffix.md >> $@.tmp
@mv $@.tmp $@

@@ -102,6 +109,12 @@ doc/manual/generated/man1/nix3-manpages: $(d)/src/command-ref/new-cli
@touch $@

$(docdir)/manual/index.html: $(MANUAL_SRCS) $(d)/book.toml $(d)/anchors.jq $(d)/custom.css $(d)/src/SUMMARY.md $(d)/src/command-ref/new-cli $(d)/src/command-ref/conf-file.md $(d)/src/language/builtins.md
$(trace-gen) RUST_LOG=warn mdbook build doc/manual -d $(DESTDIR)$(docdir)/manual
$(trace-gen) \
set -euo pipefail; \
RUST_LOG=warn mdbook build doc/manual -d $(DESTDIR)$(docdir)/manual.tmp 2>&1 \
| { grep -Fv "because fragment resolution isn't implemented" || :; }
@rm -rf $(DESTDIR)$(docdir)/manual
@mv $(DESTDIR)$(docdir)/manual.tmp/html $(DESTDIR)$(docdir)/manual
@rm -rf $(DESTDIR)$(docdir)/manual.tmp

endif
@@ -67,6 +67,7 @@
- [CLI guideline](contributing/cli-guideline.md)
- [Release Notes](release-notes/release-notes.md)
- [Release X.Y (202?-??-??)](release-notes/rl-next.md)
- [Release 2.13 (2023-01-17)](release-notes/rl-2.13.md)
- [Release 2.12 (2022-12-06)](release-notes/rl-2.12.md)
- [Release 2.11 (2022-08-25)](release-notes/rl-2.11.md)
- [Release 2.10 (2022-07-11)](release-notes/rl-2.10.md)
@@ -68,7 +68,7 @@ It can also execute build plans to produce new data, which are made available to
A build plan itself is a series of *build tasks*, together with their build inputs.

> **Important**
> A build task in Nix is called [derivation](../glossary#gloss-derivation).
> A build task in Nix is called [derivation](../glossary.md#gloss-derivation).

Each build task has a special build input executed as *build instructions* in order to perform the build.
The result of a build task can be input to another build task.
@@ -11,7 +11,7 @@ Most Nix commands interpret the following environment variables:
expressions using [paths](../language/values.md#type-path)
enclosed in angle brackets (i.e., `<path>`),
e.g. `/home/eelco/Dev:/etc/nixos`. It can be extended using the
[`-I` option](./opt-common#opt-I).
[`-I` option](./opt-common.md#opt-I).

- [`NIX_IGNORE_SYMLINK_STORE`]{#env-NIX_IGNORE_SYMLINK_STORE}\
Normally, the Nix store directory (typically `/nix/store`) is not
@@ -49,7 +49,7 @@ authentication, you can avoid typing the passphrase with `ssh-agent`.
- `--include-outputs`\
Also copy the outputs of [store derivation]s included in the closure.

[store derivation]: ../../glossary.md#gloss-store-derivation
[store derivation]: ../glossary.md#gloss-store-derivation

- `--use-substitutes` / `-s`\
Attempt to download missing paths on the target machine using Nix’s
@@ -66,11 +66,11 @@ The operation `--realise` essentially “builds” the specified store
paths. Realisation is a somewhat overloaded term:

- If the store path is a *derivation*, realisation ensures that the
output paths of the derivation are [valid](../glossary.md) (i.e.,
output paths of the derivation are [valid] (i.e.,
the output path and its closure exist in the file system). This
can be done in several ways. First, it is possible that the
outputs are already valid, in which case we are done
immediately. Otherwise, there may be [substitutes](../glossary.md)
immediately. Otherwise, there may be [substitutes]
that produce the outputs (e.g., by downloading them). Finally, the
outputs can be produced by running the build task described
by the derivation.

@@ -82,6 +82,9 @@ paths. Realisation is a somewhat overloaded term:
produced through substitutes. If there are no (successful)
substitutes, realisation fails.

[valid]: ../glossary.md#gloss-validity
[substitutes]: ../glossary.md#gloss-substitute

The output path of each derivation is printed on standard output. (For
non-derivation arguments, the argument itself is printed.)

@@ -155,6 +158,12 @@ To test whether a previously-built derivation is deterministic:
$ nix-build '<nixpkgs>' -A hello --check -K
```

Use [`--read-log`](#operation---read-log) to show the stderr and stdout of a build:

```console
$ nix-store --read-log $(nix-instantiate ./test.nix)
```

# Operation `--serve`

## Synopsis

@@ -289,8 +298,8 @@ error: cannot delete path `/nix/store/zq0h41l75vlb4z45kzgjjmsjxvcv1qk7-mesa-6.4'

## Description

The operation `--query` displays various bits of information about the
store paths. The queries are described below. At most one query can be
The operation `--query` displays information about [store path]s.
The queries are described below. At most one query can be
specified. The default query is `--outputs`.

The paths *paths* may also be symlinks from outside of the Nix store, to

@@ -310,12 +319,12 @@ symlink.
## Queries

- `--outputs`\
Prints out the [output paths](../glossary.md) of the store
Prints out the [output path]s of the store
derivations *paths*. These are the paths that will be produced when
the derivation is built.

- `--requisites`; `-R`\
Prints out the [closure](../glossary.md) of the store path *paths*.
Prints out the [closure] of the given *paths*.

This query has one option:

@@ -332,10 +341,12 @@ symlink.
derivation and specifying the option `--include-outputs`.

- `--references`\
Prints the set of [references](../glossary.md) of the store paths
Prints the set of [reference]s of the store paths
*paths*, that is, their immediate dependencies. (For *all*
dependencies, use `--requisites`.)

[reference]: ../glossary.md#gloss-reference

- `--referrers`\
Prints the set of *referrers* of the store paths *paths*, that is,
the store paths currently existing in the Nix store that refer to

@@ -350,11 +361,13 @@ symlink.
in the Nix store that are dependent on *paths*.

- `--deriver`; `-d`\
Prints the [deriver](../glossary.md) of the store paths *paths*. If
Prints the [deriver] of the store paths *paths*. If
the path has no deriver (e.g., if it is a source file), or if the
deriver is not known (e.g., in the case of a binary-only
deployment), the string `unknown-deriver` is printed.

[deriver]: ../glossary.md#gloss-deriver

- `--graph`\
Prints the references graph of the store paths *paths* in the format
of the `dot` tool of AT\&T's [Graphviz
@@ -92,7 +92,8 @@ $ nix develop

The unit-tests for each Nix library (`libexpr`, `libstore`, etc.) are defined
under `src/{library_name}/tests` using the
[googletest](https://google.github.io/googletest/) framework.
[googletest](https://google.github.io/googletest/) and
[rapidcheck](https://github.com/emil-e/rapidcheck) frameworks.

You can run the whole testsuite with `make check`, or the tests for a specific component with `make libfoo-tests_RUN`. Finer-grained filtering is also possible using the [--gtest_filter](https://google.github.io/googletest/advanced.html#running-a-subset-of-the-tests) command-line option.

@@ -249,3 +250,36 @@ search/replaced in it for each new build.
The installer now supports a `--tarball-url-prefix` flag which _may_ have
solved this need?
-->

### Checking links in the manual

The build checks for broken internal links.
This happens late in the process, so `nix build` is not suitable for iterating.
To build the manual incrementally, run:

```console
make html -j $NIX_BUILD_CORES
```

In order to reflect changes to the [Makefile], clear all generated files before re-building:

[Makefile]: https://github.com/NixOS/nix/blob/master/doc/manual/local.mk

```console
rm $(git ls-files doc/manual/ -o | grep -F '.md') && rmdir doc/manual/src/command-ref/new-cli && make html -j $NIX_BUILD_CORES
```

[`mdbook-linkcheck`] does not implement checking [URI fragments] yet.

[`mdbook-linkcheck`]: https://github.com/Michael-F-Bryan/mdbook-linkcheck
[URI fragments]: https://en.m.wikipedia.org/wiki/URI_fragment

#### `@docroot@` variable

`@docroot@` provides a base path for links that occur in reusable snippets or other documentation that doesn't have a base path of its own.

If a broken link occurs in a snippet that was inserted into multiple generated files in different directories, use `@docroot@` to reference the `doc/manual/src` directory.

If the `@docroot@` literal appears in an error message from the `mdbook-linkcheck` tool, the `@docroot@` replacement needs to be applied to the generated source file that mentions it.
See existing `@docroot@` logic in the [Makefile].
Regular markdown files used for the manual have a base path of their own and they can use relative paths instead of `@docroot@`.
@@ -19,6 +19,17 @@

[store derivation]: #gloss-store-derivation

- [realise]{#gloss-realise}, realisation\
Ensure a [store path] is [valid][validity].

This means either running the `builder` executable as specified in the corresponding [derivation] or fetching a pre-built [store object] from a [substituter].

See [`nix-build`](./command-ref/nix-build.md) and [`nix-store --realise`](./command-ref/nix-store.md#operation---realise).

See [`nix build`](./command-ref/new-cli/nix3-build.md) (experimental).

[realise]: #gloss-realise

- [content-addressed derivation]{#gloss-content-addressed-derivation}\
A derivation which has the
[`__contentAddressed`](./language/advanced-attributes.md#adv-attr-__contentAddressed)

@@ -101,6 +112,8 @@
copy store objects it doesn't have. For details, see the
[`substituters` option](./command-ref/conf-file.md#conf-substituters).

[substituter]: #gloss-substituter

- [purity]{#gloss-purity}\
The assumption that equal Nix derivations when run always produce
the same output. This cannot be guaranteed in general (e.g., a

@@ -143,19 +156,25 @@
to path `Q`, then `Q` is in the closure of `P`. Further, if `Q`
references `R` then `R` is also in the closure of `P`.

[closure]: #gloss-closure

- [output path]{#gloss-output-path}\
A [store path] produced by a [derivation].

[output path]: #gloss-output-path

- [deriver]{#gloss-deriver}\
The deriver of an *output path* is the store
derivation that built it.
The [store derivation] that produced an [output path].

- [validity]{#gloss-validity}\
A store path is considered *valid* if it exists in the file system,
is listed in the Nix database as being valid, and if all paths in
its closure are also valid.
A store path is valid if all [store object]s in its [closure] can be read from the [store].

For a local store, this means:
  - The store path leads to an existing [store object] in that [store].
  - The store path is listed in the Nix database as being valid.
  - All paths in the store path's [closure] are valid.

[validity]: #gloss-validity

- [user environment]{#gloss-user-env}\
An automatically generated store object that consists of a set of
@@ -120,10 +120,10 @@ sudo rm -rf /nix /etc/nix /etc/profile/nix.sh ~root/.nix-profile ~root/.nix-defe
Remove build users and their group:

```console
for i in $(seq 30001 30032); do
sudo userdel $i
for i in $(seq 1 32); do
sudo userdel nixbld$i
done
sudo groupdel 30000
sudo groupdel nixbld
```

There may also be references to Nix in
@@ -191,12 +191,12 @@ This is an incomplete overview of language features, by example.
<tr>
<td>

<nixpkgs>
`<nixpkgs>`

</td>
<td>

Search path. Value determined by [`$NIX_PATH` environment variable](../command-ref/env-common.md#env-NIX_PATH).
Search path for Nix files. Value determined by [`$NIX_PATH` environment variable](../command-ref/env-common.md#env-NIX_PATH).

</td>
</tr>
@@ -1,28 +1,167 @@
# Operators

The table below lists the operators in the Nix language, in
order of precedence (from strongest to weakest binding).

| Name | Syntax | Associativity | Precedence |
|------|--------|---------------|------------|
| [Attribute selection] | *attrset* `.` *attrpath* \[ `or` *expr* \] | none | 1 |
| Function application | *func* *expr* | left | 2 |
| [Arithmetic negation][arithmetic] | `-` *number* | none | 3 |
| [Has attribute] | *attrset* `?` *attrpath* | none | 4 |
| List concatenation | *list* `++` *list* | right | 5 |
| [Multiplication][arithmetic] | *number* `*` *number* | left | 6 |
| [Division][arithmetic] | *number* `/` *number* | left | 6 |
| [Subtraction][arithmetic] | *number* `-` *number* | left | 7 |
| [Addition][arithmetic] | *number* `+` *number* | left | 7 |
| [String concatenation] | *string* `+` *string* | left | 7 |
| [Path concatenation] | *path* `+` *path* | left | 7 |
| [Path and string concatenation] | *path* `+` *string* | left | 7 |
| [String and path concatenation] | *string* `+` *path* | left | 7 |
| Logical negation (`NOT`) | `!` *bool* | none | 8 |
| [Update] | *attrset* `//` *attrset* | right | 9 |
| [Less than][Comparison] | *expr* `<` *expr* | none | 10 |
| [Less than or equal to][Comparison] | *expr* `<=` *expr* | none | 10 |
| [Greater than][Comparison] | *expr* `>` *expr* | none | 10 |
| [Greater than or equal to][Comparison] | *expr* `>=` *expr* | none | 10 |
| [Equality] | *expr* `==` *expr* | none | 11 |
| Inequality | *expr* `!=` *expr* | none | 11 |
| Logical conjunction (`AND`) | *bool* `&&` *bool* | left | 12 |
| Logical disjunction (`OR`) | *bool* `\|\|` *bool* | left | 13 |
| [Logical implication] | *bool* `->` *bool* | none | 14 |

[string]: ./values.md#type-string
[path]: ./values.md#type-path
[number]: ./values.md#type-number
[list]: ./values.md#list
[attribute set]: ./values.md#attribute-set

## Attribute selection

Select the attribute denoted by attribute path *attrpath* from [attribute set] *attrset*.
If the attribute doesn’t exist, return *expr* if provided, otherwise abort evaluation.

<!-- FIXME: the following should go into its own language syntax section, but that needs more work to fit in well -->

An attribute path is a dot-separated list of attribute names.
An attribute name can be an identifier or a string.

> *attrpath* = *name* [ `.` *name* ]...
> *name* = *identifier* | *string*
> *identifier* ~ `[a-zA-Z_][a-zA-Z0-9_'-]*`

[Attribute selection]: #attribute-selection

## Has attribute

> *attrset* `?` *attrpath*

Test whether [attribute set] *attrset* contains the attribute denoted by *attrpath*.
The result is a [Boolean] value.

[Boolean]: ./values.md#type-boolean

[Has attribute]: #has-attribute
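
For illustration, a minimal sketch of both operators on an arbitrary attribute set (the names and values are made up):

```nix
let attrset = { a = { b = 1; }; }; in
[
  attrset.a.b          # 1
  (attrset.a.c or 42)  # 42, because attribute `c` does not exist
  (attrset ? a.b)      # true
  (attrset ? a.c)      # false
]
```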

## Arithmetic

Numbers are type-compatible:
Pure integer operations will always return integers, whereas any operation involving at least one floating point number returns a floating point number.

See also [Comparison] and [Equality].

The `+` operator is overloaded to also work on strings and paths.

[arithmetic]: #arithmetic
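
A small sketch of how the numeric types combine (results as produced by ordinary Nix evaluation):

```nix
[
  (1 + 2)    # 3, an integer
  (1 + 2.0)  # 3.0, a float, because one operand is a float
  (6 / 2)    # 3; note the spaces: `6/2` without spaces is parsed as a path
  (-4 * 2)   # -8; negation binds tighter than multiplication
]
```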

## String concatenation

> *string* `+` *string*

Concatenate two [string]s and merge their string contexts.

[String concatenation]: #string-concatenation

## Path concatenation

> *path* `+` *path*

Concatenate two [path]s.
The result is a path.

[Path concatenation]: #path-concatenation

## Path and string concatenation

> *path* + *string*

Concatenate *[path]* with *[string]*.
The result is a path.

> **Note**
>
> The string must not have a string context that refers to a [store path].

[Path and string concatenation]: #path-and-string-concatenation

## String and path concatenation

> *string* + *path*

Concatenate *[string]* with *[path]*.
The result is a string.

> **Important**
>
> The file or directory at *path* must exist and is copied to the [store].
> The path appears in the result as the corresponding [store path].

[store path]: ../glossary.md#gloss-store-path
[store]: ../glossary.md#gloss-store

[String and path concatenation]: #string-and-path-concatenation
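
A minimal sketch of the string and path cases (`./doc` is an arbitrary example path that is assumed to exist):

```nix
let dir = ./doc; in
[
  ("nix" + "-manual")  # "nix-manual", a string
  (dir + "/manual")    # a path ending in /doc/manual; nothing is copied to the store
  ("docs in " + dir)   # a string; ./doc is copied to the store and replaced by its store path
]
```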

## Update

> *attrset1* // *attrset2*

Update [attribute set] *attrset1* with names and values from *attrset2*.

The returned attribute set will have all the attributes in *attrset1* and *attrset2*.
If an attribute name is present in both, the attribute value from the latter is taken.

[Update]: #update
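
For example (arbitrary attribute names):

```nix
{ a = 1; b = 2; } // { b = 3; c = 4; }
# evaluates to { a = 1; b = 3; c = 4; }
```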

## Comparison

Comparison is

- [arithmetic] for [number]s
- lexicographic for [string]s and [path]s
- item-wise lexicographic for [list]s:
  elements at the same index in both lists are compared according to their type and skipped if they are equal.

All comparison operators are implemented in terms of `<`, and the following equivalencies hold:

| comparison | implementation |
|--------------|-----------------------|
| *a* `<=` *b* | `! (` *b* `<` *a* `)` |
| *a* `>` *b* | *b* `<` *a* |
| *a* `>=` *b* | `! (` *a* `<` *b* `)` |

[Comparison]: #comparison-operators
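
A few sketch comparisons, one per operand type:

```nix
[
  (1 < 2.5)              # true: numbers compare arithmetically
  ("abc" < "abd")        # true: strings compare lexicographically
  ([ 1 2 ] < [ 1 3 ])    # true: lists compare item-wise
  ([ 1 2 ] < [ 1 2 3 ])  # true: a strict prefix is smaller
]
```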

## Equality

- [Attribute sets][attribute set] and [list]s are compared recursively, and therefore are fully evaluated.
- Comparison of [function]s always returns `false`.
- Numbers are type-compatible, see [arithmetic] operators.
- Floating point numbers only differ up to a limited precision.

[function]: ./constructs.md#functions

[Equality]: #equality
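
A short sketch of these rules:

```nix
[
  (1 == 1.0)                              # true: numbers are type-compatible
  ({ a = [ 1 2 ]; } == { a = [ 1 2 ]; })  # true: compared recursively
  ((x: x) == (x: x))                      # false: functions never compare equal
]
```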

## Logical implication

Equivalent to `!`*b1* `||` *b2*.

[Logical implication]: #logical-implication
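
For example:

```nix
[
  (true -> false)   # false
  (false -> false)  # true, the same as `!false || false`
]
```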

| Name | Syntax | Associativity | Description | Precedence |
| ------------------------ | ----------------------------------- | ------------- | ----------- | ---------- |
| Select | *e* `.` *attrpath* \[ `or` *def* \] | none | Select attribute denoted by the attribute path *attrpath* from set *e*. (An attribute path is a dot-separated list of attribute names.) If the attribute doesn’t exist, return *def* if provided, otherwise abort evaluation. | 1 |
| Application | *e1* *e2* | left | Call function *e1* with argument *e2*. | 2 |
| Arithmetic Negation | `-` *e* | none | Arithmetic negation. | 3 |
| Has Attribute | *e* `?` *attrpath* | none | Test whether set *e* contains the attribute denoted by *attrpath*; return `true` or `false`. | 4 |
| List Concatenation | *e1* `++` *e2* | right | List concatenation. | 5 |
| Multiplication | *e1* `*` *e2*, | left | Arithmetic multiplication. | 6 |
| Division | *e1* `/` *e2* | left | Arithmetic division. | 6 |
| Addition | *e1* `+` *e2* | left | Arithmetic addition. | 7 |
| Subtraction | *e1* `-` *e2* | left | Arithmetic subtraction. | 7 |
| String Concatenation | *string1* `+` *string2* | left | String concatenation. | 7 |
| Not | `!` *e* | none | Boolean negation. | 8 |
| Update | *e1* `//` *e2* | right | Return a set consisting of the attributes in *e1* and *e2* (with the latter taking precedence over the former in case of equally named attributes). | 9 |
| Less Than | *e1* `<` *e2*, | none | Arithmetic/lexicographic comparison. | 10 |
| Less Than or Equal To | *e1* `<=` *e2* | none | Arithmetic/lexicographic comparison. | 10 |
| Greater Than | *e1* `>` *e2* | none | Arithmetic/lexicographic comparison. | 10 |
| Greater Than or Equal To | *e1* `>=` *e2* | none | Arithmetic/lexicographic comparison. | 10 |
| Equality | *e1* `==` *e2* | none | Equality. | 11 |
| Inequality | *e1* `!=` *e2* | none | Inequality. | 11 |
| Logical AND | *e1* `&&` *e2* | left | Logical AND. | 12 |
| Logical OR | *e1* <code>||</code> *e2* | left | Logical OR. | 13 |
| Logical Implication | *e1* `->` *e2* | none | Logical implication (equivalent to <code>!e1 || e2</code>). | 14 |
@@ -85,9 +85,10 @@
Numbers, which can be *integers* (like `123`) or *floating point*
(like `123.43` or `.27e13`).

Numbers are type-compatible: pure integer operations will always
return integers, whereas any operation involving at least one
floating point number will have a floating point number as a result.
See [arithmetic] and [comparison] operators for semantics.

[arithmetic]: ./operators.md#arithmetic
[comparison]: ./operators.md#comparison

- <a id="type-path" href="#type-path">Path</a>
@@ -32,13 +32,13 @@ which should print something like:
Priority: 30

On the client side, you can tell Nix to use your binary cache using
`--option extra-binary-caches`, e.g.:
`--substituters`, e.g.:

```console
$ nix-env -iA nixpkgs.firefox --option extra-binary-caches http://avalon:8080/
$ nix-env -iA nixpkgs.firefox --substituters http://avalon:8080/
```

The option `extra-binary-caches` tells Nix to use this binary cache in
The option `substituters` tells Nix to use this binary cache in
addition to your default caches, such as <https://cache.nixos.org>.
Thus, for any path in the closure of Firefox, Nix will first check if
the path is available on the server `avalon` or other binary caches.

@@ -47,4 +47,4 @@ If not, it will fall back to building from source.
You can also tell Nix to always use your binary cache by adding a line
to the `nix.conf` configuration file like this:

binary-caches = http://avalon:8080/ https://cache.nixos.org/
substituters = http://avalon:8080/ https://cache.nixos.org/
doc/manual/src/release-notes/rl-2.13.md (new file, 44 lines)

@@ -0,0 +1,44 @@
# Release 2.13 (2023-01-17)

* The `repeat` and `enforce-determinism` options have been removed
since they had been broken under many circumstances for a long time.

* You can now use [flake references] in the [old command line interface], e.g.

[flake references]: ../command-ref/new-cli/nix3-flake.md#flake-references
[old command line interface]: ../command-ref/main-commands.md

```shell-session
# nix-build flake:nixpkgs -A hello
# nix-build -I nixpkgs=flake:github:NixOS/nixpkgs/nixos-22.05 \
'<nixpkgs>' -A hello
# NIX_PATH=nixpkgs=flake:nixpkgs nix-build '<nixpkgs>' -A hello
```

* Instead of "antiquotation", the more common term [string interpolation](../language/string-interpolation.md) is now used consistently.
Historical release notes were not changed.

* Error traces have been reworked to provide detailed explanations and more
accurate error locations. A short excerpt of the trace is now shown by
default when an error occurs.

* Allow explicitly selecting outputs in a store derivation installable, just like we can do with other sorts of installables.
For example,
```shell-session
# nix build /nix/store/gzaflydcr6sb3567hap9q6srzx8ggdgg-glibc-2.33-78.drv^dev
```
now works just as
```shell-session
# nix build nixpkgs#glibc^dev
```
does already.

* On Linux, `nix develop` now sets the
[*personality*](https://man7.org/linux/man-pages/man2/personality.2.html)
for the development shell in the same way as the actual build of the
derivation. This makes shells for `i686-linux` derivations work
correctly on `x86_64-linux`.

* You can now disable the global flake registry by setting the `flake-registry`
configuration option to an empty string. The same can be achieved at runtime with
`--flake-registry ""`.
@@ -1,26 +1,11 @@
# Release X.Y (202?-??-??)

* A new function `builtins.readFileType` is available. It is similar to
`builtins.readDir` but acts on a single file or directory.
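
A brief sketch of how it might be used (the paths are hypothetical; the possible results are the same file types `builtins.readDir` reports):

```nix
[
  (builtins.readFileType ./default.nix)  # "regular"
  (builtins.readFileType ./src)          # "directory"; other results are "symlink" and "unknown"
]
```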

* The `repeat` and `enforce-determinism` options have been removed
since they had been broken under many circumstances for a long time.

* You can now use [flake references] in the [old command line interface], e.g.

[flake references]: ../command-ref/new-cli/nix3-flake.md#flake-references
[old command line interface]: ../command-ref/main-commands.md

```
# nix-build flake:nixpkgs -A hello
# nix-build -I nixpkgs=flake:github:NixOS/nixpkgs/nixos-22.05 \
'<nixpkgs>' -A hello
# NIX_PATH=nixpkgs=flake:nixpkgs nix-build '<nixpkgs>' -A hello
```

* Instead of "antiquotation", the more common term [string interpolation](../language/string-interpolation.md) is now used consistently.
Historical release notes were not changed.

* Error traces have been reworked to provide detailed explanations and more
accurate error locations. A short excerpt of the trace is now shown by
default when an error occurs.
* The `builtins.readDir` function has been optimized when encountering not-yet-known
file types from POSIX's `readdir`. In such cases the type of each file is/was
discovered by making multiple syscalls. This change makes these operations
lazy such that these lookups will only be performed if the attribute is used.
This optimization affects a minority of filesystems and operating systems.

* In derivations that use structured attributes, you can now use `unsafeDiscardReferences`
to disable scanning a given output for runtime dependencies:
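
A minimal sketch of the relevant attributes in such a derivation (everything else about the derivation is omitted; the attribute names follow the structured-attributes mechanism):

```nix
{
  # ... name, system, builder, and so on ...
  __structuredAttrs = true;
  unsafeDiscardReferences.out = true;
}
```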
flake.nix (86 lines changed)

@@ -82,7 +82,9 @@
});

configureFlags =
lib.optionals stdenv.isLinux [
[
"CXXFLAGS=-I${lib.getDev rapidcheck}/extras/gtest/include"
] ++ lib.optionals stdenv.isLinux [
"--with-boost=${boost}/lib"
"--with-sandbox-shell=${sh}/bin/busybox"
]

@@ -96,6 +98,7 @@
buildPackages.flex
(lib.getBin buildPackages.lowdown-nix)
buildPackages.mdbook
buildPackages.mdbook-linkcheck
buildPackages.autoconf-archive
buildPackages.autoreconfHook
buildPackages.pkg-config

@@ -115,6 +118,7 @@
boost
lowdown-nix
gtest
rapidcheck
]
++ lib.optionals stdenv.isLinux [libseccomp]
++ lib.optional (stdenv.isLinux || stdenv.isDarwin) libsodium

@@ -127,9 +131,14 @@
});

propagatedDeps =
[ (boehmgc.override {
[ ((boehmgc.override {
enableLargeConfig = true;
}).overrideAttrs(o: {
patches = (o.patches or []) ++ [
./boehmgc-coroutine-sp-fallback.diff
];
})
)
nlohmann_json
];
};

@@ -400,6 +409,18 @@
};
};

nixos-lib = import (nixpkgs + "/nixos/lib") { };

# https://nixos.org/manual/nixos/unstable/index.html#sec-calling-nixos-tests
runNixOSTestFor = system: test: nixos-lib.runTest {
imports = [ test ];
hostPkgs = nixpkgsFor.${system};
defaults = {
nixpkgs.pkgs = nixpkgsFor.${system};
};
_module.args.nixpkgs = nixpkgs;
};

in {

# A Nixpkgs overlay that overrides the 'nix' and

@@ -456,6 +477,10 @@

src = self;

configureFlags = [
"CXXFLAGS=-I${lib.getDev pkgs.rapidcheck}/extras/gtest/include"
];

enableParallelBuilding = true;

nativeBuildInputs = nativeBuildDeps;

@@ -474,49 +499,22 @@
};

# System tests.
tests.remoteBuilds = import ./tests/remote-builds.nix {
system = "x86_64-linux";
inherit nixpkgs;
overlay = self.overlays.default;
};
tests.remoteBuilds = runNixOSTestFor "x86_64-linux" ./tests/nixos/remote-builds.nix;

tests.nix-copy-closure = import ./tests/nix-copy-closure.nix {
system = "x86_64-linux";
inherit nixpkgs;
overlay = self.overlays.default;
};
tests.nix-copy-closure = runNixOSTestFor "x86_64-linux" ./tests/nixos/nix-copy-closure.nix;

tests.nssPreload = (import ./tests/nss-preload.nix rec {
system = "x86_64-linux";
inherit nixpkgs;
overlay = self.overlays.default;
});
tests.nssPreload = runNixOSTestFor "x86_64-linux" ./tests/nixos/nss-preload.nix;

tests.githubFlakes = (import ./tests/github-flakes.nix rec {
system = "x86_64-linux";
inherit nixpkgs;
overlay = self.overlays.default;
});
tests.githubFlakes = runNixOSTestFor "x86_64-linux" ./tests/nixos/github-flakes.nix;

tests.sourcehutFlakes = (import ./tests/sourcehut-flakes.nix rec {
system = "x86_64-linux";
inherit nixpkgs;
overlay = self.overlays.default;
});
tests.sourcehutFlakes = runNixOSTestFor "x86_64-linux" ./tests/nixos/sourcehut-flakes.nix;

tests.containers = (import ./tests/containers.nix rec {
system = "x86_64-linux";
inherit nixpkgs;
overlay = self.overlays.default;
});
tests.containers = runNixOSTestFor "x86_64-linux" ./tests/nixos/containers/containers.nix;

tests.setuid = nixpkgs.lib.genAttrs
["i686-linux" "x86_64-linux"]
(system:
import ./tests/setuid.nix rec {
inherit nixpkgs system;
overlay = self.overlays.default;
});
(system: runNixOSTestFor system ./tests/nixos/setuid.nix);


# Make sure that nix-env still produces the exact same result
# on a particular version of Nixpkgs.

@@ -531,6 +529,12 @@
mkdir $out
'';

tests.nixpkgsLibTests =
nixpkgs.lib.genAttrs systems (system:
import (nixpkgs + "/lib/tests/release.nix")
{ pkgs = nixpkgsFor.${system}; }
);

metrics.nixpkgs = import "${nixpkgs-regression}/pkgs/top-level/metrics.nix" {
pkgs = nixpkgsFor.x86_64-linux;
nixpkgs = nixpkgs-regression;

@@ -561,6 +565,7 @@
binaryTarball = self.hydraJobs.binaryTarball.${system};
perlBindings = self.hydraJobs.perlBindings.${system};
installTests = self.hydraJobs.installTests.${system};
nixpkgsLibTests = self.hydraJobs.tests.nixpkgsLibTests.${system};
} // (nixpkgs.lib.optionalAttrs (builtins.elem system linux64BitSystems)) {
dockerImage = self.hydraJobs.dockerImage.${system};
});

@@ -642,6 +647,7 @@
inherit system crossSystem;
overlays = [ self.overlays.default ];
};
inherit (nixpkgsCross) lib;
in with commonDeps { pkgs = nixpkgsCross; }; nixpkgsCross.stdenv.mkDerivation {
name = "nix-${version}";

@@ -654,7 +660,11 @@
nativeBuildInputs = nativeBuildDeps;
buildInputs = buildDeps ++ propagatedDeps;

configureFlags = [ "--sysconfdir=/etc" "--disable-doc-gen" ];
configureFlags = [
"CXXFLAGS=-I${lib.getDev nixpkgsCross.rapidcheck}/extras/gtest/include"
"--sysconfdir=/etc"
"--disable-doc-gen"
];

enableParallelBuilding = true;
@@ -36,17 +36,45 @@ Issues on the board progress through the following states:

- No Status

Team members can add pull requests or issues to discuss or review together.

During the discussion meeting, the team triages new items.
To be considered, issues and pull requests must have a high-level description to provide the whole team with the necessary context at a glance.

At every meeting, at least one item from each of the following categories is inspected:

1. [critical](https://github.com/NixOS/nix/labels/critical)
2. [security](https://github.com/NixOS/nix/labels/security)
3. [regression](https://github.com/NixOS/nix/labels/regression)
4. [bug](https://github.com/NixOS/nix/issues?q=is%3Aopen+label%3Abug+sort%3Areactions-%2B1-desc)

- [oldest pull requests](https://github.com/NixOS/nix/pulls?q=is%3Apr+is%3Aopen+sort%3Acreated-asc)
- [most popular pull requests](https://github.com/NixOS/nix/pulls?q=is%3Apr+is%3Aopen+sort%3Areactions-%2B1-desc)
- [oldest issues](https://github.com/NixOS/nix/issues?q=is%3Aissue+is%3Aopen+sort%3Acreated-asc)
- [most popular issues](https://github.com/NixOS/nix/issues?q=is%3Aissue+is%3Aopen+sort%3Areactions-%2B1-desc)

Team members can also add pull requests or issues they would like the whole team to consider.

If there is disagreement on the general idea behind an issue or pull request, it is moved to _To discuss_, otherwise to _In review_.

- To discuss

Pull requests and issues that are important and controversial are discussed by the team during discussion meetings.
Pull requests and issues that are deemed important and controversial are discussed by the team during discussion meetings.

This may be where the merit of the change itself or the implementation strategy is contested by a team member.

As a general guideline, the order of items is determined as follows:

- Prioritise pull requests over issues

Contributors who took the time to implement concrete change proposals should not wait indefinitely.

- Prioritise fixing bugs over documentation, improvements or new features

The team values stability and accessibility higher than raw functionality.

- Interleave issues and PRs

This way issues without attempts at a solution get a chance to get addressed.

- In review

Pull requests in this column are reviewed together during work meetings.
@@ -24,12 +24,17 @@ $1
EOF
}

escape_systemd_env() {
temp_var="${1//\'/\\\'}"
echo "${temp_var//\%/%%}"
}

# Gather all non-empty proxy environment variables into a string
create_systemd_proxy_env() {
vars="http_proxy https_proxy ftp_proxy no_proxy HTTP_PROXY HTTPS_PROXY FTP_PROXY NO_PROXY"
for v in $vars; do
if [ "x${!v:-}" != "x" ]; then
echo "Environment=${v}=${!v}"
echo "Environment=${v}=$(escape_systemd_env ${!v})"
fi
done
}
@@ -1,3 +1,15 @@
function add_path --argument-names new_path
if type -q fish_add_path
# fish 3.2.0 or newer
fish_add_path --prepend --global $new_path
else
# older versions of fish
if not contains $new_path $fish_user_paths
set --global fish_user_paths $new_path $fish_user_paths
end
end
end

# Only execute this file once per shell.
if test -n "$__ETC_PROFILE_NIX_SOURCED"
exit

@@ -31,5 +43,7 @@ else
end
end

fish_add_path --prepend --global "@localstatedir@/nix/profiles/default/bin"
fish_add_path --prepend --global "$HOME/.nix-profile/bin"
add_path "@localstatedir@/nix/profiles/default/bin"
add_path "$HOME/.nix-profile/bin"

functions -e add_path
@@ -1,3 +1,15 @@
function add_path --argument-names new_path
if type -q fish_add_path
# fish 3.2.0 or newer
fish_add_path --prepend --global $new_path
else
# older versions of fish
if not contains $new_path $fish_user_paths
set --global fish_user_paths $new_path $fish_user_paths
end
end
end

if test -n "$HOME" && test -n "$USER"

# Set up the per-user profile.

@@ -32,6 +44,8 @@ if test -n "$HOME" && test -n "$USER"
set --export --prepend --path MANPATH "$NIX_LINK/share/man"
end

fish_add_path --prepend --global "$NIX_LINK/bin"
add_path "$NIX_LINK/bin"
set --erase NIX_LINK
end

functions -e add_path
@@ -34,8 +34,8 @@ MixEvalArgs::MixEvalArgs()
.shortName = 'I',
.description = R"(
Add *path* to the Nix search path. The Nix search path is
initialized from the colon-separated [`NIX_PATH`](./env-common.md#env-NIX_PATH) environment
variable, and is used to look up the location of Nix expressions using [paths](../language/values.md#type-path) enclosed in angle
initialized from the colon-separated [`NIX_PATH`](@docroot@/command-ref/env-common.md#env-NIX_PATH) environment
variable, and is used to look up the location of Nix expressions using [paths](@docroot@/language/values.md#type-path) enclosed in angle
brackets (i.e., `<nixpkgs>`).

For instance, passing
@ -1,5 +1,6 @@
|
|||
#include "globals.hh"
|
||||
#include "installables.hh"
|
||||
#include "outputs-spec.hh"
|
||||
#include "util.hh"
|
||||
#include "command.hh"
|
||||
#include "attr-path.hh"
|
||||
|
@ -358,7 +359,7 @@ void completeFlakeRef(ref<Store> store, std::string_view prefix)
|
|||
}
|
||||
}
|
||||
|
||||
DerivedPath Installable::toDerivedPath()
|
||||
DerivedPathWithInfo Installable::toDerivedPath()
|
||||
{
|
||||
auto buildables = toDerivedPaths();
|
||||
if (buildables.size() != 1)
|
||||
|
@ -401,18 +402,6 @@ struct InstallableStorePath : Installable
|
|||
ref<Store> store;
|
||||
DerivedPath req;
|
||||
|
||||
InstallableStorePath(ref<Store> store, StorePath && storePath)
|
||||
: store(store),
|
||||
req(storePath.isDerivation()
|
||||
? (DerivedPath) DerivedPath::Built {
|
||||
.drvPath = std::move(storePath),
|
||||
.outputs = {},
|
||||
}
|
||||
: (DerivedPath) DerivedPath::Opaque {
|
||||
.path = std::move(storePath),
|
||||
})
|
||||
{ }
|
||||
|
||||
InstallableStorePath(ref<Store> store, DerivedPath && req)
|
||||
: store(store), req(std::move(req))
|
||||
{ }
|
||||
|
@ -422,21 +411,9 @@ struct InstallableStorePath : Installable
|
|||
return req.to_string(*store);
|
||||
}
|
||||
|
||||
DerivedPaths toDerivedPaths() override
|
||||
DerivedPathsWithInfo toDerivedPaths() override
|
||||
{
|
||||
return { req };
|
||||
}
|
||||
|
||||
StorePathSet toDrvPaths(ref<Store> store) override
|
||||
{
|
||||
return std::visit(overloaded {
|
||||
[&](const DerivedPath::Built & bfd) -> StorePathSet {
|
||||
return { bfd.drvPath };
|
||||
},
|
||||
[&](const DerivedPath::Opaque & bo) -> StorePathSet {
|
||||
return { getDeriver(store, *this, bo.path) };
|
||||
},
|
||||
}, req.raw());
|
||||
return {{.path = req, .info = {} }};
|
||||
}
|
||||
|
||||
std::optional<StorePath> getStorePath() override
|
||||
|
@ -452,52 +429,24 @@ struct InstallableStorePath : Installable
|
|||
}
|
||||
};
|
||||
|
||||
DerivedPaths InstallableValue::toDerivedPaths()
|
||||
{
|
||||
DerivedPaths res;
|
||||
|
||||
std::map<StorePath, std::set<std::string>> drvsToOutputs;
|
||||
RealisedPath::Set drvsToCopy;
|
||||
|
||||
// Group by derivation, helps with .all in particular
|
||||
for (auto & drv : toDerivations()) {
|
||||
for (auto & outputName : drv.outputsToInstall)
|
||||
drvsToOutputs[drv.drvPath].insert(outputName);
|
||||
drvsToCopy.insert(drv.drvPath);
|
||||
}
|
||||
|
||||
for (auto & i : drvsToOutputs)
|
||||
res.push_back(DerivedPath::Built { i.first, i.second });
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
StorePathSet InstallableValue::toDrvPaths(ref<Store> store)
|
||||
{
|
||||
StorePathSet res;
|
||||
for (auto & drv : toDerivations())
|
||||
res.insert(drv.drvPath);
|
||||
return res;
|
||||
}
|
||||
|
||||
struct InstallableAttrPath : InstallableValue
|
||||
{
|
||||
SourceExprCommand & cmd;
|
||||
RootValue v;
|
||||
std::string attrPath;
|
||||
OutputsSpec outputsSpec;
|
||||
ExtendedOutputsSpec extendedOutputsSpec;
|
||||
|
||||
InstallableAttrPath(
|
||||
ref<EvalState> state,
|
||||
SourceExprCommand & cmd,
|
||||
Value * v,
|
||||
const std::string & attrPath,
|
||||
OutputsSpec outputsSpec)
|
||||
ExtendedOutputsSpec extendedOutputsSpec)
|
||||
: InstallableValue(state)
|
||||
, cmd(cmd)
|
||||
, v(allocRootValue(v))
|
||||
, attrPath(attrPath)
|
||||
, outputsSpec(std::move(outputsSpec))
|
||||
, extendedOutputsSpec(std::move(extendedOutputsSpec))
|
||||
{ }
|
||||
|
||||
std::string what() const override { return attrPath; }
|
||||
|
@ -509,11 +458,8 @@ struct InstallableAttrPath : InstallableValue
|
|||
return {vRes, pos};
|
||||
}
|
||||
|
||||
virtual std::vector<InstallableValue::DerivationInfo> toDerivations() override;
|
||||
};
|
||||
|
||||
std::vector<InstallableValue::DerivationInfo> InstallableAttrPath::toDerivations()
|
||||
{
|
||||
DerivedPathsWithInfo toDerivedPaths() override
|
||||
{
|
||||
auto v = toValue(*state).first;
|
||||
|
||||
Bindings & autoArgs = *cmd.getAutoArgs(*state);
|
||||
|
@ -521,28 +467,45 @@ std::vector<InstallableValue::DerivationInfo> InstallableAttrPath::toDerivations
|
|||
DrvInfos drvInfos;
|
||||
getDerivations(*state, *v, "", autoArgs, drvInfos, false);
|
||||
|
||||
std::vector<DerivationInfo> res;
|
||||
// Backward compatibility hack: group results by drvPath. This
|
||||
// helps keep .all output together.
|
||||
std::map<StorePath, OutputsSpec> byDrvPath;
|
||||
|
||||
for (auto & drvInfo : drvInfos) {
|
||||
auto drvPath = drvInfo.queryDrvPath();
|
||||
if (!drvPath)
|
||||
throw Error("'%s' is not a derivation", what());
|
||||
|
||||
auto newOutputs = std::visit(overloaded {
|
||||
[&](const ExtendedOutputsSpec::Default & d) -> OutputsSpec {
|
||||
std::set<std::string> outputsToInstall;
|
||||
|
||||
if (auto outputNames = std::get_if<OutputNames>(&outputsSpec))
|
||||
outputsToInstall = *outputNames;
|
||||
else
|
||||
for (auto & output : drvInfo.queryOutputs(false, std::get_if<DefaultOutputs>(&outputsSpec)))
|
||||
for (auto & output : drvInfo.queryOutputs(false, true))
|
||||
outputsToInstall.insert(output.first);
|
||||
return OutputsSpec::Names { std::move(outputsToInstall) };
|
||||
},
|
||||
[&](const ExtendedOutputsSpec::Explicit & e) -> OutputsSpec {
|
||||
return e;
|
||||
},
|
||||
}, extendedOutputsSpec.raw());
|
||||
|
||||
res.push_back(DerivationInfo {
|
||||
.drvPath = *drvPath,
|
||||
.outputsToInstall = std::move(outputsToInstall)
|
||||
});
|
||||
auto [iter, didInsert] = byDrvPath.emplace(*drvPath, newOutputs);
|
||||
|
||||
if (!didInsert)
|
||||
iter->second = iter->second.union_(newOutputs);
|
||||
}
|
||||
|
||||
DerivedPathsWithInfo res;
|
||||
for (auto & [drvPath, outputs] : byDrvPath)
|
||||
res.push_back({
|
||||
.path = DerivedPath::Built {
|
||||
.drvPath = drvPath,
|
||||
.outputs = outputs,
|
||||
},
|
||||
});
|
||||
|
||||
return res;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
std::vector<std::string> InstallableFlake::getActualAttrPaths()
|
||||
{
|
||||
|
@ -615,7 +578,7 @@ InstallableFlake::InstallableFlake(
|
|||
ref<EvalState> state,
|
||||
FlakeRef && flakeRef,
|
||||
std::string_view fragment,
|
||||
OutputsSpec outputsSpec,
|
||||
ExtendedOutputsSpec extendedOutputsSpec,
|
||||
Strings attrPaths,
|
||||
Strings prefixes,
|
||||
const flake::LockFlags & lockFlags)
|
||||
|
@ -623,14 +586,14 @@ InstallableFlake::InstallableFlake(
|
|||
flakeRef(flakeRef),
|
||||
attrPaths(fragment == "" ? attrPaths : Strings{(std::string) fragment}),
|
||||
prefixes(fragment == "" ? Strings{} : prefixes),
|
||||
outputsSpec(std::move(outputsSpec)),
|
||||
extendedOutputsSpec(std::move(extendedOutputsSpec)),
|
||||
lockFlags(lockFlags)
|
||||
{
|
||||
if (cmd && cmd->getAutoArgs(*state)->size())
|
||||
throw UsageError("'--arg' and '--argstr' are incompatible with flakes");
|
||||
}
|
||||
|
||||
std::tuple<std::string, FlakeRef, InstallableValue::DerivationInfo> InstallableFlake::toDerivation()
|
||||
DerivedPathsWithInfo InstallableFlake::toDerivedPaths()
|
||||
{
|
||||
Activity act(*logger, lvlTalkative, actUnknown, fmt("evaluating derivation '%s'", what()));
|
||||
|
||||
|
@ -638,56 +601,84 @@ std::tuple<std::string, FlakeRef, InstallableValue::DerivationInfo> InstallableF
|
|||
|
||||
auto attrPath = attr->getAttrPathStr();
|
||||
|
||||
if (!attr->isDerivation())
|
||||
throw Error("flake output attribute '%s' is not a derivation", attrPath);
|
||||
if (!attr->isDerivation()) {
|
||||
|
||||
// FIXME: use eval cache?
|
||||
auto v = attr->forceValue();
|
||||
|
||||
if (v.type() == nPath) {
|
||||
PathSet context;
|
||||
auto storePath = state->copyPathToStore(context, Path(v.path));
|
||||
return {{
|
||||
.path = DerivedPath::Opaque {
|
||||
.path = std::move(storePath),
|
||||
}
|
||||
}};
|
||||
}
|
||||
|
||||
else if (v.type() == nString) {
|
||||
PathSet context;
|
||||
auto s = state->forceString(v, context, noPos, fmt("while evaluating the flake output attribute '%s'", attrPath));
|
||||
auto storePath = state->store->maybeParseStorePath(s);
|
||||
if (storePath && context.count(std::string(s))) {
|
||||
return {{
|
||||
.path = DerivedPath::Opaque {
|
||||
.path = std::move(*storePath),
|
||||
}
|
||||
}};
|
||||
} else
|
||||
throw Error("flake output attribute '%s' evaluates to the string '%s' which is not a store path", attrPath, s);
|
||||
}
|
||||
|
||||
else
|
||||
throw Error("flake output attribute '%s' is not a derivation or path", attrPath);
|
||||
}
|
||||
|
||||
auto drvPath = attr->forceDerivation();
|
||||
|
||||
std::set<std::string> outputsToInstall;
|
||||
std::optional<NixInt> priority;
|
||||
|
||||
if (attr->maybeGetAttr(state->sOutputSpecified)) {
|
||||
} else if (auto aMeta = attr->maybeGetAttr(state->sMeta)) {
|
||||
if (auto aPriority = aMeta->maybeGetAttr("priority"))
|
||||
priority = aPriority->getInt();
|
||||
}
|
||||
|
||||
return {{
|
||||
.path = DerivedPath::Built {
|
||||
.drvPath = std::move(drvPath),
|
||||
.outputs = std::visit(overloaded {
|
||||
[&](const ExtendedOutputsSpec::Default & d) -> OutputsSpec {
|
||||
std::set<std::string> outputsToInstall;
|
||||
if (auto aOutputSpecified = attr->maybeGetAttr(state->sOutputSpecified)) {
|
||||
if (aOutputSpecified->getBool()) {
|
||||
if (auto aOutputName = attr->maybeGetAttr("outputName"))
|
||||
outputsToInstall = { aOutputName->getString() };
|
||||
}
|
||||
}
|
||||
|
||||
else if (auto aMeta = attr->maybeGetAttr(state->sMeta)) {
|
||||
} else if (auto aMeta = attr->maybeGetAttr(state->sMeta)) {
|
||||
if (auto aOutputsToInstall = aMeta->maybeGetAttr("outputsToInstall"))
|
||||
for (auto & s : aOutputsToInstall->getListOfStrings())
|
||||
outputsToInstall.insert(s);
|
||||
if (auto aPriority = aMeta->maybeGetAttr("priority"))
|
||||
priority = aPriority->getInt();
|
||||
}
|
||||
|
||||
if (outputsToInstall.empty() || std::get_if<AllOutputs>(&outputsSpec)) {
|
||||
outputsToInstall.clear();
|
||||
if (auto aOutputs = attr->maybeGetAttr(state->sOutputs))
|
||||
for (auto & s : aOutputs->getListOfStrings())
|
||||
outputsToInstall.insert(s);
|
||||
}
|
||||
|
||||
if (outputsToInstall.empty())
|
||||
outputsToInstall.insert("out");
|
||||
|
||||
if (auto outputNames = std::get_if<OutputNames>(&outputsSpec))
|
||||
outputsToInstall = *outputNames;
|
||||
|
||||
auto drvInfo = DerivationInfo {
|
||||
.drvPath = std::move(drvPath),
|
||||
.outputsToInstall = std::move(outputsToInstall),
|
||||
return OutputsSpec::Names { std::move(outputsToInstall) };
|
||||
},
|
||||
[&](const ExtendedOutputsSpec::Explicit & e) -> OutputsSpec {
|
||||
return e;
|
||||
},
|
||||
}, extendedOutputsSpec.raw()),
|
||||
},
|
||||
.info = {
|
||||
.priority = priority,
|
||||
};
|
||||
|
||||
return {attrPath, getLockedFlake()->flake.lockedRef, std::move(drvInfo)};
|
||||
}
|
||||
|
||||
std::vector<InstallableValue::DerivationInfo> InstallableFlake::toDerivations()
|
||||
{
|
||||
std::vector<DerivationInfo> res;
|
||||
res.push_back(std::get<2>(toDerivation()));
|
||||
return res;
|
||||
.originalRef = flakeRef,
|
||||
.resolvedRef = getLockedFlake()->flake.lockedRef,
|
||||
.attrPath = attrPath,
|
||||
.extendedOutputsSpec = extendedOutputsSpec,
|
||||
}
|
||||
}};
|
||||
}
|
||||
|
||||
std::pair<Value *, PosIdx> InstallableFlake::toValue(EvalState & state)
|
||||
|
@@ -802,12 +793,12 @@ std::vector<std::shared_ptr<Installable>> SourceExprCommand::parseInstallables(
|
|||
}
|
||||
|
||||
for (auto & s : ss) {
|
||||
auto [prefix, outputsSpec] = parseOutputsSpec(s);
|
||||
auto [prefix, extendedOutputsSpec] = ExtendedOutputsSpec::parse(s);
|
||||
result.push_back(
|
||||
std::make_shared<InstallableAttrPath>(
|
||||
state, *this, vFile,
|
||||
prefix == "." ? "" : prefix,
|
||||
outputsSpec));
|
||||
prefix == "." ? "" : std::string { prefix },
|
||||
extendedOutputsSpec));
|
||||
}
|
||||
|
||||
} else {
|
||||
|
@@ -815,24 +806,46 @@ std::vector<std::shared_ptr<Installable>> SourceExprCommand::parseInstallables(
|
|||
for (auto & s : ss) {
|
||||
std::exception_ptr ex;
|
||||
|
||||
auto found = s.rfind('^');
|
||||
if (found != std::string::npos) {
|
||||
try {
|
||||
result.push_back(std::make_shared<InstallableStorePath>(
|
||||
store,
|
||||
DerivedPath::Built::parse(*store, s.substr(0, found), s.substr(found + 1))));
|
||||
continue;
|
||||
} catch (BadStorePath &) {
|
||||
} catch (...) {
|
||||
if (!ex)
|
||||
ex = std::current_exception();
|
||||
}
|
||||
}
|
||||
auto [prefix_, extendedOutputsSpec_] = ExtendedOutputsSpec::parse(s);
|
||||
// To avoid clang's pedantry
|
||||
auto prefix = std::move(prefix_);
|
||||
auto extendedOutputsSpec = std::move(extendedOutputsSpec_);
|
||||
|
||||
found = s.find('/');
|
||||
auto found = prefix.find('/');
|
||||
if (found != std::string::npos) {
|
||||
try {
|
||||
result.push_back(std::make_shared<InstallableStorePath>(store, store->followLinksToStorePath(s)));
|
||||
auto derivedPath = std::visit(overloaded {
|
||||
// If the user did not use ^, we treat the output more liberally.
|
||||
[&](const ExtendedOutputsSpec::Default &) -> DerivedPath {
|
||||
// First, we accept a symlink chain or an actual store path.
|
||||
auto storePath = store->followLinksToStorePath(prefix);
|
||||
// Second, we see if the store path ends in `.drv` to decide what sort
|
||||
// of derived path they want.
|
||||
//
|
||||
// This handling predates the `^` syntax. The `^*` in
|
||||
// `/nix/store/hash-foo.drv^*` unambiguously means "do the
|
||||
// `DerivedPath::Built` case", so plain `/nix/store/hash-foo.drv` could
|
||||
// also unambiguously mean "do the DerivedPath::Opaque` case".
|
||||
//
|
||||
// Issue #7261 tracks reconsidering this `.drv` dispatching.
|
||||
return storePath.isDerivation()
|
||||
? (DerivedPath) DerivedPath::Built {
|
||||
.drvPath = std::move(storePath),
|
||||
.outputs = OutputsSpec::All {},
|
||||
}
|
||||
: (DerivedPath) DerivedPath::Opaque {
|
||||
.path = std::move(storePath),
|
||||
};
|
||||
},
|
||||
// If the user did use ^, we just do exactly what is written.
|
||||
[&](const ExtendedOutputsSpec::Explicit & outputSpec) -> DerivedPath {
|
||||
return DerivedPath::Built {
|
||||
.drvPath = store->parseStorePath(prefix),
|
||||
.outputs = outputSpec,
|
||||
};
|
||||
},
|
||||
}, extendedOutputsSpec.raw());
|
||||
result.push_back(std::make_shared<InstallableStorePath>(store, std::move(derivedPath)));
|
||||
continue;
|
||||
} catch (BadStorePath &) {
|
||||
} catch (...) {
|
||||
|
@@ -842,13 +855,13 @@ std::vector<std::shared_ptr<Installable>> SourceExprCommand::parseInstallables(
|
|||
}
|
||||
|
||||
try {
|
||||
auto [flakeRef, fragment, outputsSpec] = parseFlakeRefWithFragmentAndOutputsSpec(s, absPath("."));
|
||||
auto [flakeRef, fragment] = parseFlakeRefWithFragment(std::string { prefix }, absPath("."));
|
||||
result.push_back(std::make_shared<InstallableFlake>(
|
||||
this,
|
||||
getEvalState(),
|
||||
std::move(flakeRef),
|
||||
fragment,
|
||||
outputsSpec,
|
||||
extendedOutputsSpec,
|
||||
getDefaultFlakeAttrPaths(),
|
||||
getDefaultFlakeAttrPathPrefixes(),
|
||||
lockFlags));
|
||||
|
@@ -895,13 +908,19 @@ std::vector<std::pair<std::shared_ptr<Installable>, BuiltPathWithResult>> Instal
|
|||
if (mode == Realise::Nothing)
|
||||
settings.readOnlyMode = true;
|
||||
|
||||
struct Aux
|
||||
{
|
||||
ExtraPathInfo info;
|
||||
std::shared_ptr<Installable> installable;
|
||||
};
|
||||
|
||||
std::vector<DerivedPath> pathsToBuild;
|
||||
std::map<DerivedPath, std::vector<std::shared_ptr<Installable>>> backmap;
|
||||
std::map<DerivedPath, std::vector<Aux>> backmap;
|
||||
|
||||
for (auto & i : installables) {
|
||||
for (auto b : i->toDerivedPaths()) {
|
||||
pathsToBuild.push_back(b);
|
||||
backmap[b].push_back(i);
|
||||
pathsToBuild.push_back(b.path);
|
||||
backmap[b.path].push_back({.info = b.info, .installable = i});
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -914,42 +933,18 @@ std::vector<std::pair<std::shared_ptr<Installable>, BuiltPathWithResult>> Instal
|
|||
printMissing(store, pathsToBuild, lvlError);
|
||||
|
||||
for (auto & path : pathsToBuild) {
|
||||
for (auto & installable : backmap[path]) {
|
||||
for (auto & aux : backmap[path]) {
|
||||
std::visit(overloaded {
|
||||
[&](const DerivedPath::Built & bfd) {
|
||||
OutputPathMap outputs;
|
||||
auto drv = evalStore->readDerivation(bfd.drvPath);
|
||||
auto outputHashes = staticOutputHashes(*evalStore, drv); // FIXME: expensive
|
||||
auto drvOutputs = drv.outputsAndOptPaths(*store);
|
||||
for (auto & output : bfd.outputs) {
|
||||
auto outputHash = get(outputHashes, output);
|
||||
if (!outputHash)
|
||||
throw Error(
|
||||
"the derivation '%s' doesn't have an output named '%s'",
|
||||
store->printStorePath(bfd.drvPath), output);
|
||||
if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations)) {
|
||||
DrvOutput outputId { *outputHash, output };
|
||||
auto realisation = store->queryRealisation(outputId);
|
||||
if (!realisation)
|
||||
throw Error(
|
||||
"cannot operate on an output of the "
|
||||
"unbuilt derivation '%s'",
|
||||
outputId.to_string());
|
||||
outputs.insert_or_assign(output, realisation->outPath);
|
||||
} else {
|
||||
// If ca-derivations isn't enabled, assume that
|
||||
// the output path is statically known.
|
||||
auto drvOutput = get(drvOutputs, output);
|
||||
assert(drvOutput);
|
||||
assert(drvOutput->second);
|
||||
outputs.insert_or_assign(
|
||||
output, *drvOutput->second);
|
||||
}
|
||||
}
|
||||
res.push_back({installable, {.path = BuiltPath::Built { bfd.drvPath, outputs }}});
|
||||
auto outputs = resolveDerivedPath(*store, bfd, &*evalStore);
|
||||
res.push_back({aux.installable, {
|
||||
.path = BuiltPath::Built { bfd.drvPath, outputs },
|
||||
.info = aux.info}});
|
||||
},
|
||||
[&](const DerivedPath::Opaque & bo) {
|
||||
res.push_back({installable, {.path = BuiltPath::Opaque { bo.path }}});
|
||||
res.push_back({aux.installable, {
|
||||
.path = BuiltPath::Opaque { bo.path },
|
||||
.info = aux.info}});
|
||||
},
|
||||
}, path.raw());
|
||||
}
|
||||
|
@@ -965,16 +960,22 @@ std::vector<std::pair<std::shared_ptr<Installable>, BuiltPathWithResult>> Instal
|
|||
if (!buildResult.success())
|
||||
buildResult.rethrow();
|
||||
|
||||
for (auto & installable : backmap[buildResult.path]) {
|
||||
for (auto & aux : backmap[buildResult.path]) {
|
||||
std::visit(overloaded {
|
||||
[&](const DerivedPath::Built & bfd) {
|
||||
std::map<std::string, StorePath> outputs;
|
||||
for (auto & path : buildResult.builtOutputs)
|
||||
outputs.emplace(path.first.outputName, path.second.outPath);
|
||||
res.push_back({installable, {.path = BuiltPath::Built { bfd.drvPath, outputs }, .result = buildResult}});
|
||||
res.push_back({aux.installable, {
|
||||
.path = BuiltPath::Built { bfd.drvPath, outputs },
|
||||
.info = aux.info,
|
||||
.result = buildResult}});
|
||||
},
|
||||
[&](const DerivedPath::Opaque & bo) {
|
||||
res.push_back({installable, {.path = BuiltPath::Opaque { bo.path }, .result = buildResult}});
|
||||
res.push_back({aux.installable, {
|
||||
.path = BuiltPath::Opaque { bo.path },
|
||||
.info = aux.info,
|
||||
.result = buildResult}});
|
||||
},
|
||||
}, buildResult.path.raw());
|
||||
}
|
||||
|
@@ -1059,7 +1060,7 @@ StorePathSet Installable::toDerivations(
|
|||
[&](const DerivedPath::Built & bfd) {
|
||||
drvPaths.insert(bfd.drvPath);
|
||||
},
|
||||
}, b.raw());
|
||||
}, b.path.raw());
|
||||
|
||||
return drvPaths;
|
||||
}
@@ -2,7 +2,7 @@

#include "util.hh"
#include "path.hh"
#include "path-with-outputs.hh"
#include "outputs-spec.hh"
#include "derived-path.hh"
#include "eval.hh"
#include "store-api.hh"

@@ -20,7 +20,7 @@ namespace eval_cache { class EvalCache; class AttrCursor; }

struct App
{
std::vector<StorePathWithOutputs> context;
std::vector<DerivedPath> context;
Path program;
// FIXME: add args, sandbox settings, metadata, ...
};

@@ -52,26 +52,42 @@ enum class OperateOn {
Derivation
};

struct ExtraPathInfo
{
std::optional<NixInt> priority;
std::optional<FlakeRef> originalRef;
std::optional<FlakeRef> resolvedRef;
std::optional<std::string> attrPath;
// FIXME: merge with DerivedPath's 'outputs' field?
std::optional<ExtendedOutputsSpec> extendedOutputsSpec;
};

/* A derived path with any additional info that commands might
need from the derivation. */
struct DerivedPathWithInfo
{
DerivedPath path;
ExtraPathInfo info;
};

struct BuiltPathWithResult
{
BuiltPath path;
ExtraPathInfo info;
std::optional<BuildResult> result;
};

typedef std::vector<DerivedPathWithInfo> DerivedPathsWithInfo;

struct Installable
{
virtual ~Installable() { }

virtual std::string what() const = 0;

virtual DerivedPaths toDerivedPaths() = 0;
virtual DerivedPathsWithInfo toDerivedPaths() = 0;

virtual StorePathSet toDrvPaths(ref<Store> store)
{
throw Error("'%s' cannot be converted to a derivation path", what());
}

DerivedPath toDerivedPath();
DerivedPathWithInfo toDerivedPath();

UnresolvedApp toApp(EvalState & state);

@@ -146,19 +162,6 @@ struct InstallableValue : Installable
ref<EvalState> state;

InstallableValue(ref<EvalState> state) : state(state) {}

struct DerivationInfo
{
StorePath drvPath;
std::set<std::string> outputsToInstall;
std::optional<NixInt> priority;
};

virtual std::vector<DerivationInfo> toDerivations() = 0;

DerivedPaths toDerivedPaths() override;

StorePathSet toDrvPaths(ref<Store> store) override;
};

struct InstallableFlake : InstallableValue

@@ -166,7 +169,7 @@ struct InstallableFlake : InstallableValue
FlakeRef flakeRef;
Strings attrPaths;
Strings prefixes;
OutputsSpec outputsSpec;
ExtendedOutputsSpec extendedOutputsSpec;
const flake::LockFlags & lockFlags;
mutable std::shared_ptr<flake::LockedFlake> _lockedFlake;

@@ -175,7 +178,7 @@ struct InstallableFlake : InstallableValue
ref<EvalState> state,
FlakeRef && flakeRef,
std::string_view fragment,
OutputsSpec outputsSpec,
ExtendedOutputsSpec extendedOutputsSpec,
Strings attrPaths,
Strings prefixes,
const flake::LockFlags & lockFlags);

@@ -186,9 +189,7 @@ struct InstallableFlake : InstallableValue

Value * getFlakeOutputs(EvalState & state, const flake::LockedFlake & lockedFlake);

std::tuple<std::string, FlakeRef, DerivationInfo> toDerivation();

std::vector<DerivationInfo> toDerivations() override;
DerivedPathsWithInfo toDerivedPaths() override;

std::pair<Value *, PosIdx> toValue(EvalState & state) override;
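As an aside, a minimal sketch (not part of this commit) of how command-side code might consume the DerivedPathsWithInfo introduced above. The collectInfo helper and the installables vector are hypothetical; Installable::toDerivedPaths(), DerivedPathWithInfo and ExtraPathInfo are the declarations from the hunks above.

// Illustrative sketch only: gather each installable's derived paths together
// with the per-path metadata that now travels with them.
std::vector<std::pair<DerivedPath, ExtraPathInfo>> collectInfo(
    const std::vector<std::shared_ptr<Installable>> & installables)
{
    std::vector<std::pair<DerivedPath, ExtraPathInfo>> res;
    for (auto & installable : installables)
        for (auto & b : installable->toDerivedPaths())
            // b.path is what gets built; b.info carries priority, originalRef,
            // resolvedRef and attrPath when the installable knows them.
            res.emplace_back(b.path, b.info);
    return res;
}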
|
@@ -397,7 +397,7 @@ StringSet NixRepl::completePrefix(const std::string & prefix)
Expr * e = parseString(expr);
Value v;
e->eval(*state, *env, v);
state->forceAttrs(v, noPos, "nevermind, it is ignored anyway");
state->forceAttrs(v, noPos, "while evaluating an attrset for the purpose of completion (this error should not be displayed; file an issue?)");

for (auto & i : *v.attrs) {
std::string_view name = state->symbols[i.name];

@@ -641,7 +641,12 @@ bool NixRepl::processLine(std::string line)
Path drvPathRaw = state->store->printStorePath(drvPath);

if (command == ":b" || command == ":bl") {
state->store->buildPaths({DerivedPath::Built{drvPath}});
state->store->buildPaths({
DerivedPath::Built {
.drvPath = drvPath,
.outputs = OutputsSpec::All { },
},
});
auto drv = state->store->readDerivation(drvPath);
logger->cout("\nThis derivation produced the following outputs:");
for (auto & [outputName, outputPath] : state->store->queryDerivationOutputMap(drvPath)) {
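As an aside, a minimal sketch (not part of this commit) contrasting the two OutputsSpec forms used in this diff: OutputsSpec::All{} asks for every output (as :b does above), while OutputsSpec::Names selects specific ones. drvPath and store are assumed to be in scope, mirroring the REPL code.

// Illustrative sketch only: request two named outputs instead of all of them.
std::set<std::string> wanted = {"out", "dev"};
state->store->buildPaths({
    DerivedPath::Built {
        .drvPath = drvPath,
        .outputs = OutputsSpec::Names { std::move(wanted) },
    },
});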
|
@@ -300,7 +300,7 @@ struct AttrDb
NixStringContext context;
if (!queryAttribute.isNull(3))
for (auto & s : tokenizeString<std::vector<std::string>>(queryAttribute.getStr(3), ";"))
context.push_back(decodeContext(cfg, s));
context.push_back(NixStringContextElem::parse(cfg, s));
return {{rowId, string_t{queryAttribute.getStr(2), context}}};
}
case AttrType::Bool:

@@ -592,7 +592,18 @@ string_t AttrCursor::getStringWithContext()
if (auto s = std::get_if<string_t>(&cachedValue->second)) {
bool valid = true;
for (auto & c : s->second) {
if (!root->state.store->isValidPath(c.first)) {
const StorePath & path = std::visit(overloaded {
[&](const NixStringContextElem::DrvDeep & d) -> const StorePath & {
return d.drvPath;
},
[&](const NixStringContextElem::Built & b) -> const StorePath & {
return b.drvPath;
},
[&](const NixStringContextElem::Opaque & o) -> const StorePath & {
return o.path;
},
}, c.raw());
if (!root->state.store->isValidPath(path)) {
valid = false;
break;
}
|
@@ -11,7 +11,9 @@
|
|||
|
||||
#include <algorithm>
|
||||
#include <chrono>
|
||||
#include <iostream>
|
||||
#include <cstring>
|
||||
#include <optional>
|
||||
#include <unistd.h>
|
||||
#include <sys/time.h>
|
||||
#include <sys/resource.h>
|
||||
|
@@ -517,6 +519,7 @@ EvalState::EvalState(
|
|||
static_assert(sizeof(Env) <= 16, "environment must be <= 16 bytes");
|
||||
|
||||
/* Initialise the Nix expression search path. */
|
||||
evalSettings.nixPath.setDefault(evalSettings.getDefaultNixPath());
|
||||
if (!evalSettings.pureEval) {
|
||||
for (auto & i : _searchPath) addToSearchPath(i);
|
||||
for (auto & i : evalSettings.nixPath.get()) addToSearchPath(i);
|
||||
|
@@ -1927,7 +1930,9 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v)
|
|||
/* skip canonization of first path, which would only be not
|
||||
canonized in the first place if it's coming from a ./${foo} type
|
||||
path */
|
||||
auto part = state.coerceToString(i_pos, vTmp, context, false, firstType == nString, !first, "while evaluating a path segment");
|
||||
auto part = state.coerceToString(i_pos, vTmp, context,
|
||||
"while evaluating a path segment",
|
||||
false, firstType == nString, !first);
|
||||
sSize += part->size();
|
||||
s.emplace_back(std::move(part));
|
||||
}
|
||||
|
@@ -2068,27 +2073,6 @@ std::string_view EvalState::forceString(Value & v, const PosIdx pos, std::string
|
|||
}
|
||||
|
||||
|
||||
/* Decode a context string ‘!<name>!<path>’ into a pair <path,
|
||||
name>. */
|
||||
NixStringContextElem decodeContext(const Store & store, std::string_view s)
|
||||
{
|
||||
if (s.at(0) == '!') {
|
||||
size_t index = s.find("!", 1);
|
||||
return {
|
||||
store.parseStorePath(s.substr(index + 1)),
|
||||
std::string(s.substr(1, index - 1)),
|
||||
};
|
||||
} else
|
||||
return {
|
||||
store.parseStorePath(
|
||||
s.at(0) == '/'
|
||||
? s
|
||||
: s.substr(1)),
|
||||
"",
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
void copyContext(const Value & v, PathSet & context)
|
||||
{
|
||||
if (v.string.context)
|
||||
|
@@ -2103,7 +2087,7 @@ NixStringContext Value::getContext(const Store & store)
|
|||
assert(internalType == tString);
|
||||
if (string.context)
|
||||
for (const char * * p = string.context; *p; ++p)
|
||||
res.push_back(decodeContext(store, *p));
|
||||
res.push_back(NixStringContextElem::parse(store, *p));
|
||||
return res;
|
||||
}
|
||||
|
||||
|
@@ -2144,15 +2128,16 @@ std::optional<std::string> EvalState::tryAttrsToString(const PosIdx pos, Value &
|
|||
if (i != v.attrs->end()) {
|
||||
Value v1;
|
||||
callFunction(*i->value, v, v1, pos);
|
||||
return coerceToString(pos, v1, context, coerceMore, copyToStore,
|
||||
"while evaluating the result of the `toString` attribute").toOwned();
|
||||
return coerceToString(pos, v1, context,
|
||||
"while evaluating the result of the `__toString` attribute",
|
||||
coerceMore, copyToStore).toOwned();
|
||||
}
|
||||
|
||||
return {};
|
||||
}
|
||||
|
||||
BackedStringView EvalState::coerceToString(const PosIdx pos, Value & v, PathSet & context,
|
||||
bool coerceMore, bool copyToStore, bool canonicalizePath, std::string_view errorCtx)
|
||||
BackedStringView EvalState::coerceToString(const PosIdx pos, Value &v, PathSet &context,
|
||||
std::string_view errorCtx, bool coerceMore, bool copyToStore, bool canonicalizePath)
|
||||
{
|
||||
forceValue(v, pos);
|
||||
|
||||
|
@@ -2166,7 +2151,7 @@ BackedStringView EvalState::coerceToString(const PosIdx pos, Value & v, PathSet
|
|||
if (canonicalizePath)
|
||||
path = canonPath(*path);
|
||||
if (copyToStore)
|
||||
path = copyPathToStore(context, std::move(path).toOwned());
|
||||
path = store->printStorePath(copyPathToStore(context, std::move(path).toOwned()));
|
||||
return path;
|
||||
}
|
||||
|
||||
|
@@ -2175,13 +2160,23 @@ BackedStringView EvalState::coerceToString(const PosIdx pos, Value & v, PathSet
|
|||
if (maybeString)
|
||||
return std::move(*maybeString);
|
||||
auto i = v.attrs->find(sOutPath);
|
||||
if (i == v.attrs->end())
|
||||
error("cannot coerce a set to a string", showType(v)).withTrace(pos, errorCtx).debugThrow<TypeError>();
|
||||
return coerceToString(pos, *i->value, context, coerceMore, copyToStore, canonicalizePath, errorCtx);
|
||||
if (i == v.attrs->end()) {
|
||||
error("cannot coerce %1% to a string", showType(v))
|
||||
.withTrace(pos, errorCtx)
|
||||
.debugThrow<TypeError>();
|
||||
}
|
||||
return coerceToString(pos, *i->value, context, errorCtx,
|
||||
coerceMore, copyToStore, canonicalizePath);
|
||||
}
|
||||
|
||||
if (v.type() == nExternal)
|
||||
return v.external->coerceToString(positions[pos], context, coerceMore, copyToStore, errorCtx);
|
||||
if (v.type() == nExternal) {
|
||||
try {
|
||||
return v.external->coerceToString(positions[pos], context, coerceMore, copyToStore);
|
||||
} catch (Error & e) {
|
||||
e.addTrace(nullptr, errorCtx);
|
||||
throw;
|
||||
}
|
||||
}
|
||||
|
||||
if (coerceMore) {
|
||||
/* Note that `false' is represented as an empty string for
|
||||
|
@@ -2196,8 +2191,9 @@ BackedStringView EvalState::coerceToString(const PosIdx pos, Value & v, PathSet
|
|||
std::string result;
|
||||
for (auto [n, v2] : enumerate(v.listItems())) {
|
||||
try {
|
||||
result += *coerceToString(noPos, *v2, context, coerceMore, copyToStore, canonicalizePath,
|
||||
"while evaluating one element of the list");
|
||||
result += *coerceToString(noPos, *v2, context,
|
||||
"while evaluating one element of the list",
|
||||
coerceMore, copyToStore, canonicalizePath);
|
||||
} catch (Error & e) {
|
||||
e.addTrace(positions[pos], errorCtx);
|
||||
throw;
|
||||
|
@@ -2211,37 +2207,39 @@ BackedStringView EvalState::coerceToString(const PosIdx pos, Value & v, PathSet
|
|||
}
|
||||
}
|
||||
|
||||
error("cannot coerce %1% to a string", showType(v)).withTrace(pos, errorCtx).debugThrow<TypeError>();
|
||||
error("cannot coerce %1% to a string", showType(v))
|
||||
.withTrace(pos, errorCtx)
|
||||
.debugThrow<TypeError>();
|
||||
}
|
||||
|
||||
|
||||
std::string EvalState::copyPathToStore(PathSet & context, const Path & path)
|
||||
StorePath EvalState::copyPathToStore(PathSet & context, const Path & path)
|
||||
{
|
||||
if (nix::isDerivation(path))
|
||||
error("file names are not allowed to end in '%1%'", drvExtension).debugThrow<EvalError>();
|
||||
|
||||
Path dstPath;
|
||||
auto dstPath = [&]() -> StorePath
|
||||
{
|
||||
auto i = srcToStore.find(path);
|
||||
if (i != srcToStore.end())
|
||||
dstPath = store->printStorePath(i->second);
|
||||
else {
|
||||
auto p = settings.readOnlyMode
|
||||
if (i != srcToStore.end()) return i->second;
|
||||
|
||||
auto dstPath = settings.readOnlyMode
|
||||
? store->computeStorePathForPath(std::string(baseNameOf(path)), checkSourcePath(path)).first
|
||||
: store->addToStore(std::string(baseNameOf(path)), checkSourcePath(path), FileIngestionMethod::Recursive, htSHA256, defaultPathFilter, repair);
|
||||
dstPath = store->printStorePath(p);
|
||||
allowPath(p);
|
||||
srcToStore.insert_or_assign(path, std::move(p));
|
||||
printMsg(lvlChatty, "copied source '%1%' -> '%2%'", path, dstPath);
|
||||
}
|
||||
allowPath(dstPath);
|
||||
srcToStore.insert_or_assign(path, dstPath);
|
||||
printMsg(lvlChatty, "copied source '%1%' -> '%2%'", path, store->printStorePath(dstPath));
|
||||
return dstPath;
|
||||
}();
|
||||
|
||||
context.insert(dstPath);
|
||||
context.insert(store->printStorePath(dstPath));
|
||||
return dstPath;
|
||||
}
|
||||
|
||||
|
||||
Path EvalState::coerceToPath(const PosIdx pos, Value & v, PathSet & context, std::string_view errorCtx)
|
||||
{
|
||||
auto path = coerceToString(pos, v, context, false, false, true, errorCtx).toOwned();
|
||||
auto path = coerceToString(pos, v, context, errorCtx, false, false, true).toOwned();
|
||||
if (path == "" || path[0] != '/')
|
||||
error("string '%1%' doesn't represent an absolute path", path).withTrace(pos, errorCtx).debugThrow<EvalError>();
|
||||
return path;
|
||||
|
@@ -2250,7 +2248,7 @@ Path EvalState::coerceToPath(const PosIdx pos, Value & v, PathSet & context, std
|
|||
|
||||
StorePath EvalState::coerceToStorePath(const PosIdx pos, Value & v, PathSet & context, std::string_view errorCtx)
|
||||
{
|
||||
auto path = coerceToString(pos, v, context, false, false, true, errorCtx).toOwned();
|
||||
auto path = coerceToString(pos, v, context, errorCtx, false, false, true).toOwned();
|
||||
if (auto storePath = store->maybeParseStorePath(path))
|
||||
return *storePath;
|
||||
error("path '%1%' is not in the Nix store", path).withTrace(pos, errorCtx).debugThrow<EvalError>();
|
||||
|
@@ -2454,13 +2452,11 @@ void EvalState::printStats()
|
|||
}
|
||||
|
||||
|
||||
std::string ExternalValueBase::coerceToString(const Pos & pos, PathSet & context, bool copyMore, bool copyToStore, std::string_view errorCtx) const
|
||||
std::string ExternalValueBase::coerceToString(const Pos & pos, PathSet & context, bool copyMore, bool copyToStore) const
|
||||
{
|
||||
auto e = TypeError({
|
||||
throw TypeError({
|
||||
.msg = hintfmt("cannot coerce %1% to a string", showType())
|
||||
});
|
||||
e.addTrace(pos, errorCtx);
|
||||
throw e;
|
||||
}
|
||||
|
||||
|
||||
|
@@ -2477,30 +2473,35 @@ std::ostream & operator << (std::ostream & str, const ExternalValueBase & v) {
|
|||
|
||||
EvalSettings::EvalSettings()
|
||||
{
|
||||
auto var = getEnv("NIX_PATH");
|
||||
if (var) nixPath = parseNixPath(*var);
|
||||
}
|
||||
|
||||
/* impure => NIX_PATH or a default path
|
||||
* restrict-eval => NIX_PATH
|
||||
* pure-eval => empty
|
||||
*/
|
||||
Strings EvalSettings::getDefaultNixPath()
|
||||
{
|
||||
Strings res;
|
||||
auto add = [&](const Path & p, const std::string & s = std::string()) {
|
||||
if (pathExists(p)) {
|
||||
if (s.empty()) {
|
||||
res.push_back(p);
|
||||
if (pureEval)
|
||||
return {};
|
||||
|
||||
auto var = getEnv("NIX_PATH");
|
||||
if (var) {
|
||||
return parseNixPath(*var);
|
||||
} else if (restrictEval) {
|
||||
return {};
|
||||
} else {
|
||||
res.push_back(s + "=" + p);
|
||||
}
|
||||
}
|
||||
Strings res;
|
||||
auto add = [&](const Path & p, const std::optional<std::string> & s = std::nullopt) {
|
||||
if (pathExists(p))
|
||||
res.push_back(s ? *s + "=" + p : p);
|
||||
};
|
||||
|
||||
if (!evalSettings.restrictEval && !evalSettings.pureEval) {
|
||||
add(getHome() + "/.nix-defexpr/channels");
|
||||
add(settings.nixStateDir + "/profiles/per-user/root/channels/nixpkgs", "nixpkgs");
|
||||
add(settings.nixStateDir + "/profiles/per-user/root/channels");
|
||||
}
|
||||
|
||||
return res;
|
||||
}
|
||||
}
|
||||
|
||||
bool EvalSettings::isPseudoUrl(std::string_view s)
@@ -203,6 +203,9 @@ public:
throw std::move(error);
}

// This is dangerous, but gets in line with the idea that error creation and
// throwing should not allocate on the stack of hot functions.
// as long as errors are immediately thrown, it works.
ErrorBuilder * errorBuilder;

template<typename... Args>

@@ -375,11 +378,11 @@ public:
booleans and lists to a string. If `copyToStore' is set,
referenced paths are copied to the Nix store as a side effect. */
BackedStringView coerceToString(const PosIdx pos, Value & v, PathSet & context,
std::string_view errorCtx,
bool coerceMore = false, bool copyToStore = true,
bool canonicalizePath = true,
std::string_view errorCtx = "");
bool canonicalizePath = true);

std::string copyPathToStore(PathSet & context, const Path & path);
StorePath copyPathToStore(PathSet & context, const Path & path);

/* Path coercion. Converts strings, paths and derivations to a
path. The result is guaranteed to be a canonicalised, absolute

@@ -551,10 +554,6 @@ struct DebugTraceStacker {
std::string_view showType(ValueType type);
std::string showType(const Value & v);

/* Decode a context string ‘!<name>!<path>’ into a pair <path,
name>. */
NixStringContextElem decodeContext(const Store & store, std::string_view s);

/* If `path' refers to a directory, then append "/default.nix". */
Path resolveExprPath(Path path);

@@ -571,7 +570,7 @@ struct EvalSettings : Config
{
EvalSettings();

static Strings getDefaultNixPath();
Strings getDefaultNixPath();

static bool isPseudoUrl(std::string_view s);

@@ -581,8 +580,15 @@ struct EvalSettings : Config
"Whether builtin functions that allow executing native code should be enabled."};

Setting<Strings> nixPath{
this, getDefaultNixPath(), "nix-path",
"List of directories to be searched for `<...>` file references."};
this, {}, "nix-path",
R"(
List of directories to be searched for `<...>` file references.

If [pure evaluation](#conf-pure-eval) is disabled,
this is initialised using the [`NIX_PATH`](@docroot@/command-ref/env-common.md#env-NIX_PATH)
environment variable, or, if it is unset and [restricted evaluation](#conf-restrict-eval)
is disabled, a default search path including the user's and `root`'s channels.
)"};

Setting<bool> restrictEval{
this, false, "restrict-eval",
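As an aside, a minimal sketch (not part of this commit) of a call under the reordered coerceToString signature above: the error-context string is now the fourth argument and the boolean flags follow it. state, pos, v and context are assumed to be in scope, as at the call sites changed elsewhere in this diff.

// Illustrative sketch only: coerce without list flattening and without copying
// to the store, while still attaching an error context for traces.
auto s = state.coerceToString(pos, v, context,
    "while evaluating an illustrative value",
    /* coerceMore */ false, /* copyToStore */ false).toOwned();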
|
@@ -264,7 +264,7 @@ static Flake getFlake(
PathSet emptyContext = {};
flake.config.settings.emplace(
state.symbols[setting.name],
state.coerceToString(setting.pos, *setting.value, emptyContext, false, true, true, "") .toOwned());
state.coerceToString(setting.pos, *setting.value, emptyContext, "", false, true, true) .toOwned());
}
else if (setting.value->type() == nInt)
flake.config.settings.emplace(
|
@@ -238,15 +238,15 @@ std::pair<fetchers::Tree, FlakeRef> FlakeRef::fetchTree(ref<Store> store) const
return {std::move(tree), FlakeRef(std::move(lockedInput), subdir)};
}

std::tuple<FlakeRef, std::string, OutputsSpec> parseFlakeRefWithFragmentAndOutputsSpec(
std::tuple<FlakeRef, std::string, ExtendedOutputsSpec> parseFlakeRefWithFragmentAndExtendedOutputsSpec(
const std::string & url,
const std::optional<Path> & baseDir,
bool allowMissing,
bool isFlake)
{
auto [prefix, outputsSpec] = parseOutputsSpec(url);
auto [flakeRef, fragment] = parseFlakeRefWithFragment(prefix, baseDir, allowMissing, isFlake);
return {std::move(flakeRef), fragment, outputsSpec};
auto [prefix, extendedOutputsSpec] = ExtendedOutputsSpec::parse(url);
auto [flakeRef, fragment] = parseFlakeRefWithFragment(std::string { prefix }, baseDir, allowMissing, isFlake);
return {std::move(flakeRef), fragment, extendedOutputsSpec};
}

}
|
@@ -3,7 +3,7 @@
#include "types.hh"
#include "hash.hh"
#include "fetchers.hh"
#include "path-with-outputs.hh"
#include "outputs-spec.hh"

#include <variant>

@@ -80,7 +80,7 @@ std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
std::optional<std::pair<FlakeRef, std::string>> maybeParseFlakeRefWithFragment(
const std::string & url, const std::optional<Path> & baseDir = {});

std::tuple<FlakeRef, std::string, OutputsSpec> parseFlakeRefWithFragmentAndOutputsSpec(
std::tuple<FlakeRef, std::string, ExtendedOutputsSpec> parseFlakeRefWithFragmentAndExtendedOutputsSpec(
const std::string & url,
const std::optional<Path> & baseDir = {},
bool allowMissing = false,
|
@@ -6,6 +6,7 @@ libexpr_DIR := $(d)

libexpr_SOURCES := \
$(wildcard $(d)/*.cc) \
$(wildcard $(d)/value/*.cc) \
$(wildcard $(d)/primops/*.cc) \
$(wildcard $(d)/flake/*.cc) \
$(d)/lexer-tab.cc \

@@ -37,6 +38,8 @@ clean-files += $(d)/parser-tab.cc $(d)/parser-tab.hh $(d)/lexer-tab.cc $(d)/lexe

$(eval $(call install-file-in, $(d)/nix-expr.pc, $(libdir)/pkgconfig, 0644))

$(foreach i, $(wildcard src/libexpr/value/*.hh), \
$(eval $(call install-file-in, $(i), $(includedir)/nix/value, 0644)))
$(foreach i, $(wildcard src/libexpr/flake/*.hh), \
$(eval $(call install-file-in, $(i), $(includedir)/nix/flake, 0644)))
|
@@ -43,16 +43,32 @@ StringMap EvalState::realiseContext(const PathSet & context)
std::vector<DerivedPath::Built> drvs;
StringMap res;

for (auto & i : context) {
auto [ctx, outputName] = decodeContext(*store, i);
auto ctxS = store->printStorePath(ctx);
if (!store->isValidPath(ctx))
debugThrowLastTrace(InvalidPathError(store->printStorePath(ctx)));
if (!outputName.empty() && ctx.isDerivation()) {
drvs.push_back({ctx, {outputName}});
} else {
for (auto & c_ : context) {
auto ensureValid = [&](const StorePath & p) {
if (!store->isValidPath(p))
debugThrowLastTrace(InvalidPathError(store->printStorePath(p)));
};
auto c = NixStringContextElem::parse(*store, c_);
std::visit(overloaded {
[&](const NixStringContextElem::Built & b) {
drvs.push_back(DerivedPath::Built {
.drvPath = b.drvPath,
.outputs = OutputsSpec::Names { b.output },
});
ensureValid(b.drvPath);
},
[&](const NixStringContextElem::Opaque & o) {
auto ctxS = store->printStorePath(o.path);
res.insert_or_assign(ctxS, ctxS);
}
ensureValid(o.path);
},
[&](const NixStringContextElem::DrvDeep & d) {
/* Treat same as Opaque */
auto ctxS = store->printStorePath(d.drvPath);
res.insert_or_assign(ctxS, ctxS);
ensureValid(d.drvPath);
},
}, c.raw());
}

if (drvs.empty()) return {};

@@ -68,16 +84,12 @@ StringMap EvalState::realiseContext(const PathSet & context)
store->buildPaths(buildReqs);

/* Get all the output paths corresponding to the placeholders we had */
for (auto & [drvPath, outputs] : drvs) {
const auto outputPaths = store->queryDerivationOutputMap(drvPath);
for (auto & outputName : outputs) {
auto outputPath = get(outputPaths, outputName);
if (!outputPath)
debugThrowLastTrace(Error("derivation '%s' does not have an output named '%s'",
store->printStorePath(drvPath), outputName));
for (auto & drv : drvs) {
auto outputs = resolveDerivedPath(*store, drv);
for (auto & [outputName, outputPath] : outputs) {
res.insert_or_assign(
downstreamPlaceholder(*store, drvPath, outputName),
store->printStorePath(*outputPath)
downstreamPlaceholder(*store, drv.drvPath, outputName),
store->printStorePath(outputPath)
);
}
}
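As an aside, a minimal sketch (not part of this commit) of the three cases NixStringContextElem::parse can yield, using the same overloaded/std::visit pattern and raw() accessor as the hunk above; describeContextElem is a hypothetical helper.

// Illustrative sketch only: render a parsed context element back to a short
// human-readable description of what it referenced in the legacy encoding.
std::string describeContextElem(const Store & store, const NixStringContextElem & c)
{
    return std::visit(overloaded {
        [&](const NixStringContextElem::Built & b) {
            // formerly encoded as "!<output>!<drvPath>"
            return "output '" + b.output + "' of " + store.printStorePath(b.drvPath);
        },
        [&](const NixStringContextElem::DrvDeep & d) {
            // formerly encoded as "=<drvPath>"
            return "whole closure of " + store.printStorePath(d.drvPath);
        },
        [&](const NixStringContextElem::Opaque & o) {
            // a plain store path
            return "path " + store.printStorePath(o.path);
        },
    }, c.raw());
}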
|
@@ -240,6 +252,7 @@ static RegisterPrimOp primop_scopedImport(RegisterPrimOp::Info {
|
|||
static RegisterPrimOp primop_import({
|
||||
.name = "import",
|
||||
.args = {"path"},
|
||||
// TODO turn "normal path values" into link below
|
||||
.doc = R"(
|
||||
Load, parse and return the Nix expression in the file *path*. If
|
||||
*path* is a directory, the file ` default.nix ` in that directory
|
||||
|
@@ -253,7 +266,7 @@ static RegisterPrimOp primop_import({
|
|||
>
|
||||
> Unlike some languages, `import` is a regular function in Nix.
|
||||
> Paths using the angle bracket syntax (e.g., `import` *\<foo\>*)
|
||||
> are [normal path values](language-values.md).
|
||||
> are normal [path values](@docroot@/language/values.md#type-path).
|
||||
|
||||
A Nix expression loaded by `import` must not contain any *free
|
||||
variables* (identifiers that are not defined in the Nix expression
|
||||
|
@@ -337,26 +350,22 @@ void prim_exec(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
|||
auto elems = args[0]->listElems();
|
||||
auto count = args[0]->listSize();
|
||||
if (count == 0)
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt("at least one argument to 'exec' required"),
|
||||
.errPos = state.positions[pos]
|
||||
}));
|
||||
state.error("at least one argument to 'exec' required").atPos(pos).debugThrow<EvalError>();
|
||||
PathSet context;
|
||||
auto program = state.coerceToString(pos, *elems[0], context, false, false,
|
||||
"while evaluating the first element of the argument passed to builtins.exec").toOwned();
|
||||
auto program = state.coerceToString(pos, *elems[0], context,
|
||||
"while evaluating the first element of the argument passed to builtins.exec",
|
||||
false, false).toOwned();
|
||||
Strings commandArgs;
|
||||
for (unsigned int i = 1; i < args[0]->listSize(); ++i) {
|
||||
commandArgs.push_back(state.coerceToString(pos, *elems[i], context, false, false,
|
||||
"while evaluating an element of the argument passed to builtins.exec").toOwned());
|
||||
commandArgs.push_back(
|
||||
state.coerceToString(pos, *elems[i], context,
|
||||
"while evaluating an element of the argument passed to builtins.exec",
|
||||
false, false).toOwned());
|
||||
}
|
||||
try {
|
||||
auto _ = state.realiseContext(context); // FIXME: Handle CA derivations
|
||||
} catch (InvalidPathError & e) {
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt("cannot execute '%1%', since path '%2%' is not valid",
|
||||
program, e.path),
|
||||
.errPos = state.positions[pos]
|
||||
}));
|
||||
state.error("cannot execute '%1%', since path '%2%' is not valid", program, e.path).atPos(pos).debugThrow<EvalError>();
|
||||
}
|
||||
|
||||
auto output = runProgram(program, true, commandArgs);
|
||||
|
@@ -585,6 +594,7 @@ struct CompareValues
|
|||
state.error("cannot compare %s with %s; values of that type are incomparable", showType(*v1), showType(*v2)).debugThrow<EvalError>();
|
||||
}
|
||||
} catch (Error & e) {
|
||||
if (!errorCtx.empty())
|
||||
e.addTrace(nullptr, errorCtx);
|
||||
throw;
|
||||
}
|
||||
|
@@ -607,15 +617,7 @@ static Bindings::iterator getAttr(
|
|||
{
|
||||
Bindings::iterator value = attrSet->find(attrSym);
|
||||
if (value == attrSet->end()) {
|
||||
throw TypeError({
|
||||
.msg = hintfmt("attribute '%s' missing %s", state.symbols[attrSym], normaltxt(errorCtx)),
|
||||
.errPos = state.positions[attrSet->pos],
|
||||
});
|
||||
// TODO XXX
|
||||
// Adding another trace for the function name to make it clear
|
||||
// which call received wrong arguments.
|
||||
//e.addTrace(state.positions[pos], hintfmt("while invoking '%s'", funcName));
|
||||
//state.debugThrowLastTrace(e);
|
||||
state.error("attribute '%s' missing", state.symbols[attrSym]).withTrace(noPos, errorCtx).debugThrow<TypeError>();
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
@@ -788,8 +790,10 @@ static void prim_addErrorContext(EvalState & state, const PosIdx pos, Value * *
|
|||
v = *args[1];
|
||||
} catch (Error & e) {
|
||||
PathSet context;
|
||||
e.addTrace(nullptr, state.coerceToString(pos, *args[0], context,
|
||||
"while evaluating the error message passed to builtins.addErrorContext").toOwned());
|
||||
auto message = state.coerceToString(pos, *args[0], context,
|
||||
"while evaluating the error message passed to builtins.addErrorContext",
|
||||
false, false).toOwned();
|
||||
e.addTrace(nullptr, message, true);
|
||||
throw;
|
||||
}
|
||||
}
|
||||
|
@@ -993,6 +997,7 @@ static void prim_second(EvalState & state, const PosIdx pos, Value * * args, Val
|
|||
* Derivations
|
||||
*************************************************************/
|
||||
|
||||
static void derivationStrictInternal(EvalState & state, const std::string & name, Bindings * attrs, Value & v);
|
||||
|
||||
/* Construct (as a unobservable side effect) a Nix derivation
|
||||
expression that performs the derivation described by the argument
|
||||
|
@@ -1003,32 +1008,68 @@ static void prim_second(EvalState & state, const PosIdx pos, Value * * args, Val
|
|||
derivation. */
|
||||
static void prim_derivationStrict(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
using nlohmann::json;
|
||||
state.forceAttrs(*args[0], pos, "while evaluating the argument passed to builtins.derivationStrict");
|
||||
|
||||
Bindings * attrs = args[0]->attrs;
|
||||
|
||||
/* Figure out the name first (for stack backtraces). */
|
||||
Bindings::iterator attr = getAttr(state, state.sName, args[0]->attrs, "in the attrset passed as argument to builtins.derivationStrict");
|
||||
Bindings::iterator nameAttr = getAttr(state, state.sName, attrs, "in the attrset passed as argument to builtins.derivationStrict");
|
||||
|
||||
std::string drvName;
|
||||
const auto posDrvName = attr->pos;
|
||||
try {
|
||||
drvName = state.forceStringNoCtx(*attr->value, pos, "while evaluating the `name` attribute passed to builtins.derivationStrict");
|
||||
drvName = state.forceStringNoCtx(*nameAttr->value, pos, "while evaluating the `name` attribute passed to builtins.derivationStrict");
|
||||
} catch (Error & e) {
|
||||
e.addTrace(state.positions[posDrvName], "while evaluating the derivation attribute 'name'");
|
||||
e.addTrace(state.positions[nameAttr->pos], "while evaluating the derivation attribute 'name'");
|
||||
throw;
|
||||
}
|
||||
|
||||
try {
|
||||
derivationStrictInternal(state, drvName, attrs, v);
|
||||
} catch (Error & e) {
|
||||
Pos pos = state.positions[nameAttr->pos];
|
||||
/*
|
||||
* Here we make two abuses of the error system
|
||||
*
|
||||
* 1. We print the location as a string to avoid a code snippet being
|
||||
* printed. While the location of the name attribute is a good hint, the
|
||||
* exact code there is irrelevant.
|
||||
*
|
||||
* 2. We mark this trace as a frame trace, meaning that we stop printing
|
||||
* less important traces from now on. In particular, this prevents the
|
||||
* display of the automatic "while calling builtins.derivationStrict"
|
||||
* trace, which is of little use for the public we target here.
|
||||
*
|
||||
* Please keep in mind that error reporting is done on a best-effort
|
||||
* basis in nix. There is no accurate location for a derivation, as it
|
||||
* often results from the composition of several functions
|
||||
* (derivationStrict, derivation, mkDerivation, mkPythonModule, etc.)
|
||||
*/
|
||||
e.addTrace(nullptr, hintfmt(
|
||||
"while evaluating derivation '%s'\n"
|
||||
" whose name attribute is located at %s",
|
||||
drvName, pos), true);
|
||||
throw;
|
||||
}
|
||||
}
|
||||
|
||||
static void derivationStrictInternal(EvalState & state, const std::string &
|
||||
drvName, Bindings * attrs, Value & v)
|
||||
{
|
||||
/* Check whether attributes should be passed as a JSON file. */
|
||||
using nlohmann::json;
|
||||
std::optional<json> jsonObject;
|
||||
attr = args[0]->attrs->find(state.sStructuredAttrs);
|
||||
if (attr != args[0]->attrs->end() && state.forceBool(*attr->value, pos, "while evaluating the `__structuredAttrs` attribute passed to builtins.derivationStrict"))
|
||||
auto attr = attrs->find(state.sStructuredAttrs);
|
||||
if (attr != attrs->end() &&
|
||||
state.forceBool(*attr->value, noPos,
|
||||
"while evaluating the `__structuredAttrs` "
|
||||
"attribute passed to builtins.derivationStrict"))
|
||||
jsonObject = json::object();
|
||||
|
||||
/* Check whether null attributes should be ignored. */
|
||||
bool ignoreNulls = false;
|
||||
attr = args[0]->attrs->find(state.sIgnoreNulls);
|
||||
if (attr != args[0]->attrs->end())
|
||||
ignoreNulls = state.forceBool(*attr->value, pos, "while evaluating the `__ignoreNulls` attribute passed to builtins.derivationStrict");
|
||||
attr = attrs->find(state.sIgnoreNulls);
|
||||
if (attr != attrs->end())
|
||||
ignoreNulls = state.forceBool(*attr->value, noPos, "while evaluating the `__ignoreNulls` attribute " "passed to builtins.derivationStrict");
|
||||
|
||||
/* Build the derivation expression by processing the attributes. */
|
||||
Derivation drv;
|
||||
|
@@ -1045,7 +1086,7 @@ static void prim_derivationStrict(EvalState & state, const PosIdx pos, Value * *
|
|||
StringSet outputs;
|
||||
outputs.insert("out");
|
||||
|
||||
for (auto & i : args[0]->attrs->lexicographicOrder(state.symbols)) {
|
||||
for (auto & i : attrs->lexicographicOrder(state.symbols)) {
|
||||
if (i->name == state.sIgnoreNulls) continue;
|
||||
const std::string & key = state.symbols[i->name];
|
||||
vomit("processing attribute '%1%'", key);
|
||||
|
@@ -1056,7 +1097,7 @@ static void prim_derivationStrict(EvalState & state, const PosIdx pos, Value * *
|
|||
else
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt("invalid value '%s' for 'outputHashMode' attribute", s),
|
||||
.errPos = state.positions[posDrvName]
|
||||
.errPos = state.positions[noPos]
|
||||
}));
|
||||
};
|
||||
|
||||
|
@@ -1066,7 +1107,7 @@ static void prim_derivationStrict(EvalState & state, const PosIdx pos, Value * *
|
|||
if (outputs.find(j) != outputs.end())
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt("duplicate derivation output '%1%'", j),
|
||||
.errPos = state.positions[posDrvName]
|
||||
.errPos = state.positions[noPos]
|
||||
}));
|
||||
/* !!! Check whether j is a valid attribute
|
||||
name. */
|
||||
|
@@ -1076,34 +1117,35 @@ static void prim_derivationStrict(EvalState & state, const PosIdx pos, Value * *
|
|||
if (j == "drv")
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt("invalid derivation output name 'drv'" ),
|
||||
.errPos = state.positions[posDrvName]
|
||||
.errPos = state.positions[noPos]
|
||||
}));
|
||||
outputs.insert(j);
|
||||
}
|
||||
if (outputs.empty())
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt("derivation cannot have an empty set of outputs"),
|
||||
.errPos = state.positions[posDrvName]
|
||||
.errPos = state.positions[noPos]
|
||||
}));
|
||||
};
|
||||
|
||||
try {
|
||||
// This try-catch block adds context for most errors.
|
||||
// Use this empty error context to signify that we defer to it.
|
||||
const std::string_view context_below("");
|
||||
|
||||
if (ignoreNulls) {
|
||||
state.forceValue(*i->value, pos);
|
||||
state.forceValue(*i->value, noPos);
|
||||
if (i->value->type() == nNull) continue;
|
||||
}
|
||||
|
||||
if (i->name == state.sContentAddressed) {
|
||||
contentAddressed = state.forceBool(*i->value, pos,
|
||||
"while evaluating the `__contentAddressed` attribute passed to builtins.derivationStrict");
|
||||
contentAddressed = state.forceBool(*i->value, noPos, context_below);
|
||||
if (contentAddressed)
|
||||
settings.requireExperimentalFeature(Xp::CaDerivations);
|
||||
}
|
||||
|
||||
else if (i->name == state.sImpure) {
|
||||
isImpure = state.forceBool(*i->value, pos,
|
||||
"while evaluating the 'impure' attribute passed to builtins.derivationStrict");
|
||||
isImpure = state.forceBool(*i->value, noPos, context_below);
|
||||
if (isImpure)
|
||||
settings.requireExperimentalFeature(Xp::ImpureDerivations);
|
||||
}
|
||||
|
@@ -1111,11 +1153,11 @@ static void prim_derivationStrict(EvalState & state, const PosIdx pos, Value * *
|
|||
/* The `args' attribute is special: it supplies the
|
||||
command-line arguments to the builder. */
|
||||
else if (i->name == state.sArgs) {
|
||||
state.forceList(*i->value, pos,
|
||||
"while evaluating the `args` attribute passed to builtins.derivationStrict");
|
||||
state.forceList(*i->value, noPos, context_below);
|
||||
for (auto elem : i->value->listItems()) {
|
||||
auto s = state.coerceToString(posDrvName, *elem, context, true,
|
||||
"while evaluating an element of the `args` argument passed to builtins.derivationStrict").toOwned();
|
||||
auto s = state.coerceToString(noPos, *elem, context,
|
||||
"while evaluating an element of the argument list",
|
||||
true).toOwned();
|
||||
drv.args.push_back(s);
|
||||
}
|
||||
}
|
||||
|
@@ -1128,29 +1170,29 @@ static void prim_derivationStrict(EvalState & state, const PosIdx pos, Value * *
|
|||
|
||||
if (i->name == state.sStructuredAttrs) continue;
|
||||
|
||||
(*jsonObject)[key] = printValueAsJSON(state, true, *i->value, pos, context);
|
||||
(*jsonObject)[key] = printValueAsJSON(state, true, *i->value, noPos, context);
|
||||
|
||||
if (i->name == state.sBuilder)
|
||||
drv.builder = state.forceString(*i->value, context, posDrvName, "while evaluating the `builder` attribute passed to builtins.derivationStrict");
|
||||
drv.builder = state.forceString(*i->value, context, noPos, context_below);
|
||||
else if (i->name == state.sSystem)
|
||||
drv.platform = state.forceStringNoCtx(*i->value, posDrvName, "while evaluating the `system` attribute passed to builtins.derivationStrict");
|
||||
drv.platform = state.forceStringNoCtx(*i->value, noPos, context_below);
|
||||
else if (i->name == state.sOutputHash)
|
||||
outputHash = state.forceStringNoCtx(*i->value, posDrvName, "while evaluating the `outputHash` attribute passed to builtins.derivationStrict");
|
||||
outputHash = state.forceStringNoCtx(*i->value, noPos, context_below);
|
||||
else if (i->name == state.sOutputHashAlgo)
|
||||
outputHashAlgo = state.forceStringNoCtx(*i->value, posDrvName, "while evaluating the `outputHashAlgo` attribute passed to builtins.derivationStrict");
|
||||
outputHashAlgo = state.forceStringNoCtx(*i->value, noPos, context_below);
|
||||
else if (i->name == state.sOutputHashMode)
|
||||
handleHashMode(state.forceStringNoCtx(*i->value, posDrvName, "while evaluating the `outputHashMode` attribute passed to builtins.derivationStrict"));
|
||||
handleHashMode(state.forceStringNoCtx(*i->value, noPos, context_below));
|
||||
else if (i->name == state.sOutputs) {
|
||||
/* Require ‘outputs’ to be a list of strings. */
|
||||
state.forceList(*i->value, posDrvName, "while evaluating the `outputs` attribute passed to builtins.derivationStrict");
|
||||
state.forceList(*i->value, noPos, context_below);
|
||||
Strings ss;
|
||||
for (auto elem : i->value->listItems())
|
||||
ss.emplace_back(state.forceStringNoCtx(*elem, posDrvName, "while evaluating an element of the `outputs` attribute passed to builtins.derivationStrict"));
|
||||
ss.emplace_back(state.forceStringNoCtx(*elem, noPos, context_below));
|
||||
handleOutputs(ss);
|
||||
}
|
||||
|
||||
} else {
|
||||
auto s = state.coerceToString(i->pos, *i->value, context, true, "while evaluating an attribute passed to builtins.derivationStrict").toOwned();
|
||||
auto s = state.coerceToString(noPos, *i->value, context, context_below, true).toOwned();
|
||||
drv.env.emplace(key, s);
|
||||
if (i->name == state.sBuilder) drv.builder = std::move(s);
|
||||
else if (i->name == state.sSystem) drv.platform = std::move(s);
|
||||
|
@@ -1164,8 +1206,8 @@ static void prim_derivationStrict(EvalState & state, const PosIdx pos, Value * *
|
|||
}
|
||||
|
||||
} catch (Error & e) {
|
||||
e.addTrace(nullptr,
|
||||
hintfmt("while evaluating the attribute '%1%' of the derivation '%2%'", key, drvName),
|
||||
e.addTrace(state.positions[i->pos],
|
||||
hintfmt("while evaluating attribute '%1%' of derivation '%2%'", key, drvName),
|
||||
true);
|
||||
throw;
|
||||
}
|
||||
|
@@ -1179,55 +1221,51 @@ static void prim_derivationStrict(EvalState & state, const PosIdx pos, Value * *
|
|||
/* Everything in the context of the strings in the derivation
|
||||
attributes should be added as dependencies of the resulting
|
||||
derivation. */
|
||||
for (auto & path : context) {
|
||||
|
||||
/* Paths marked with `=' denote that the path of a derivation
|
||||
is explicitly passed to the builder. Since that allows the
|
||||
builder to gain access to every path in the dependency
|
||||
graph of the derivation (including all outputs), all paths
|
||||
in the graph must be added to this derivation's list of
|
||||
inputs to ensure that they are available when the builder
|
||||
runs. */
|
||||
if (path.at(0) == '=') {
|
||||
for (auto & c_ : context) {
|
||||
auto c = NixStringContextElem::parse(*state.store, c_);
|
||||
std::visit(overloaded {
|
||||
/* Since this allows the builder to gain access to every
|
||||
path in the dependency graph of the derivation (including
|
||||
all outputs), all paths in the graph must be added to
|
||||
this derivation's list of inputs to ensure that they are
|
||||
available when the builder runs. */
|
||||
[&](const NixStringContextElem::DrvDeep & d) {
|
||||
/* !!! This doesn't work if readOnlyMode is set. */
|
||||
StorePathSet refs;
|
||||
state.store->computeFSClosure(state.store->parseStorePath(std::string_view(path).substr(1)), refs);
|
||||
state.store->computeFSClosure(d.drvPath, refs);
|
||||
for (auto & j : refs) {
|
||||
drv.inputSrcs.insert(j);
|
||||
if (j.isDerivation())
|
||||
drv.inputDrvs[j] = state.store->readDerivation(j).outputNames();
|
||||
}
|
||||
}
|
||||
|
||||
/* Handle derivation outputs of the form ‘!<name>!<path>’. */
|
||||
else if (path.at(0) == '!') {
|
||||
auto ctx = decodeContext(*state.store, path);
|
||||
drv.inputDrvs[ctx.first].insert(ctx.second);
|
||||
}
|
||||
|
||||
/* Otherwise it's a source file. */
|
||||
else
|
||||
drv.inputSrcs.insert(state.store->parseStorePath(path));
|
||||
},
|
||||
[&](const NixStringContextElem::Built & b) {
|
||||
drv.inputDrvs[b.drvPath].insert(b.output);
|
||||
},
|
||||
[&](const NixStringContextElem::Opaque & o) {
|
||||
drv.inputSrcs.insert(o.path);
|
||||
},
|
||||
}, c.raw());
|
||||
}
|
||||
|
||||
/* Do we have all required attributes? */
|
||||
if (drv.builder == "")
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt("required attribute 'builder' missing"),
|
||||
.errPos = state.positions[posDrvName]
|
||||
.errPos = state.positions[noPos]
|
||||
}));
|
||||
|
||||
if (drv.platform == "")
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt("required attribute 'system' missing"),
|
||||
.errPos = state.positions[posDrvName]
|
||||
.errPos = state.positions[noPos]
|
||||
}));
|
||||
|
||||
/* Check whether the derivation name is valid. */
|
||||
if (isDerivation(drvName))
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt("derivation names are not allowed to end in '%s'", drvExtension),
|
||||
.errPos = state.positions[posDrvName]
|
||||
.errPos = state.positions[noPos]
|
||||
}));
|
||||
|
||||
if (outputHash) {
|
||||
|
@@ -1238,7 +1276,7 @@ static void prim_derivationStrict(EvalState & state, const PosIdx pos, Value * *
|
|||
if (outputs.size() != 1 || *(outputs.begin()) != "out")
|
||||
state.debugThrowLastTrace(Error({
|
||||
.msg = hintfmt("multiple outputs are not supported in fixed-output derivations"),
|
||||
.errPos = state.positions[posDrvName]
|
||||
.errPos = state.positions[noPos]
|
||||
}));
|
||||
|
||||
auto h = newHashAllowEmpty(*outputHash, parseHashTypeOpt(outputHashAlgo));
|
||||
|
@@ -1259,7 +1297,7 @@ static void prim_derivationStrict(EvalState & state, const PosIdx pos, Value * *
|
|||
if (contentAddressed && isImpure)
|
||||
throw EvalError({
|
||||
.msg = hintfmt("derivation cannot be both content-addressed and impure"),
|
||||
.errPos = state.positions[posDrvName]
|
||||
.errPos = state.positions[noPos]
|
||||
});
|
||||
|
||||
auto ht = parseHashTypeOpt(outputHashAlgo).value_or(htSHA256);
|
||||
|
@@ -1303,7 +1341,7 @@ static void prim_derivationStrict(EvalState & state, const PosIdx pos, Value * *
|
|||
if (!h)
|
||||
throw AssertionError({
|
||||
.msg = hintfmt("derivation produced no hash for output '%s'", i),
|
||||
.errPos = state.positions[posDrvName],
|
||||
.errPos = state.positions[noPos],
|
||||
});
|
||||
auto outPath = state.store->makeOutputPath(i, *h, drvName);
|
||||
drv.env[i] = state.store->printStorePath(outPath);
|
||||
|
@@ -1336,11 +1374,12 @@ static void prim_derivationStrict(EvalState & state, const PosIdx pos, Value * *
|
|||
drvHashes.lock()->insert_or_assign(drvPath, h);
|
||||
}
|
||||
|
||||
auto attrs = state.buildBindings(1 + drv.outputs.size());
|
||||
attrs.alloc(state.sDrvPath).mkString(drvPathS, {"=" + drvPathS});
|
||||
auto result = state.buildBindings(1 + drv.outputs.size());
|
||||
result.alloc(state.sDrvPath).mkString(drvPathS, {"=" + drvPathS});
|
||||
for (auto & i : drv.outputs)
|
||||
mkOutputString(state, attrs, drvPath, drv, i);
|
||||
v.mkAttrs(attrs);
|
||||
mkOutputString(state, result, drvPath, drv, i);
|
||||
|
||||
v.mkAttrs(result);
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_derivationStrict(RegisterPrimOp::Info {
|
||||
|
@@ -1483,7 +1522,9 @@ static RegisterPrimOp primop_pathExists({
|
|||
static void prim_baseNameOf(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
PathSet context;
|
||||
v.mkString(baseNameOf(*state.coerceToString(pos, *args[0], context, false, false, "while evaluating the first argument passed to builtins.baseNameOf")), context);
|
||||
v.mkString(baseNameOf(*state.coerceToString(pos, *args[0], context,
|
||||
"while evaluating the first argument passed to builtins.baseNameOf",
|
||||
false, false)), context);
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_baseNameOf({
|
||||
|
@ -1503,7 +1544,9 @@ static RegisterPrimOp primop_baseNameOf({
|
|||
static void prim_dirOf(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
PathSet context;
|
||||
auto path = state.coerceToString(pos, *args[0], context, false, false, "while evaluating the first argument passed to builtins.dirOf");
|
||||
auto path = state.coerceToString(pos, *args[0], context,
|
||||
"while evaluating the first argument passed to builtins.dirOf",
|
||||
false, false);
|
||||
auto dir = dirOf(*path);
|
||||
if (args[0]->type() == nPath) v.mkPath(dir); else v.mkString(dir, context);
|
||||
}
|
||||
|
@ -1569,8 +1612,9 @@ static void prim_findFile(EvalState & state, const PosIdx pos, Value * * args, V
|
|||
i = getAttr(state, state.sPath, v2->attrs, "in an element of the __nixPath");
|
||||
|
||||
PathSet context;
|
||||
auto path = state.coerceToString(pos, *i->value, context, false, false,
|
||||
"while evaluating the `path` attribute of an element of the list passed to builtins.findFile").toOwned();
|
||||
auto path = state.coerceToString(pos, *i->value, context,
|
||||
"while evaluating the `path` attribute of an element of the list passed to builtins.findFile",
|
||||
false, false).toOwned();
|
||||
|
||||
try {
|
||||
auto rewrites = state.realiseContext(context);
|
||||
|
@@ -1623,23 +1667,73 @@ static RegisterPrimOp primop_hashFile({
     .fun = prim_hashFile,
 });
 
+
+/* Stringize a directory entry enum. Used by `readFileType' and `readDir'. */
+static const char * dirEntTypeToString(unsigned char dtType)
+{
+    /* Enum DT_(DIR|LNK|REG|UNKNOWN) */
+    switch(dtType) {
+        case DT_REG: return "regular"; break;
+        case DT_DIR: return "directory"; break;
+        case DT_LNK: return "symlink"; break;
+        default: return "unknown"; break;
+    }
+    return "unknown"; /* Unreachable */
+}
+
+
+static void prim_readFileType(EvalState & state, const PosIdx pos, Value * * args, Value & v)
+{
+    auto path = realisePath(state, pos, *args[0]);
+    /* Retrieve the directory entry type and stringize it. */
+    v.mkString(dirEntTypeToString(getFileType(path)));
+}
+
+static RegisterPrimOp primop_readFileType({
+    .name = "__readFileType",
+    .args = {"p"},
+    .doc = R"(
+      Determine the directory entry type of a filesystem node, being
+      one of "directory", "regular", "symlink", or "unknown".
+    )",
+    .fun = prim_readFileType,
+});
+
 /* Read a directory (without . or ..) */
 static void prim_readDir(EvalState & state, const PosIdx pos, Value * * args, Value & v)
 {
     auto path = realisePath(state, pos, *args[0]);
 
+    // Retrieve directory entries for all nodes in a directory.
+    // This is similar to `getFileType` but is optimized to reduce system calls
+    // on many systems.
     DirEntries entries = readDirectory(path);
 
     auto attrs = state.buildBindings(entries.size());
 
+    // If we hit unknown directory entry types we may need to fallback to
+    // using `getFileType` on some systems.
+    // In order to reduce system calls we make each lookup lazy by using
+    // `builtins.readFileType` application.
+    Value * readFileType = nullptr;
+
     for (auto & ent : entries) {
-        if (ent.type == DT_UNKNOWN)
-            ent.type = getFileType(path + "/" + ent.name);
-        attrs.alloc(ent.name).mkString(
-            ent.type == DT_REG ? "regular" :
-            ent.type == DT_DIR ? "directory" :
-            ent.type == DT_LNK ? "symlink" :
-            "unknown");
+        auto & attr = attrs.alloc(ent.name);
+        if (ent.type == DT_UNKNOWN) {
+            // Some filesystems or operating systems may not be able to return
+            // detailed node info quickly in this case we produce a thunk to
+            // query the file type lazily.
+            auto epath = state.allocValue();
+            Path path2 = path + "/" + ent.name;
+            epath->mkString(path2);
+            if (!readFileType)
+                readFileType = &state.getBuiltin("readFileType");
+            attr.mkApp(readFileType, epath);
+        } else {
+            // This branch of the conditional is much more likely.
+            // Here we just stringize the directory entry type.
+            attr.mkString(dirEntTypeToString(ent.type));
+        }
     }
 
     v.mkAttrs(attrs);
@ -1872,8 +1966,7 @@ static RegisterPrimOp primop_toFile({
|
|||
path. The file has suffix *name*. This file can be used as an
|
||||
input to derivations. One application is to write builders
|
||||
“inline”. For instance, the following Nix expression combines the
|
||||
[Nix expression for GNU Hello](expression-syntax.md) and its
|
||||
[build script](build-script.md) into one file:
|
||||
Nix expression for GNU Hello and its build script into one file:
|
||||
|
||||
```nix
|
||||
{ stdenv, fetchurl, perl }:
|
||||
|
@ -1917,7 +2010,7 @@ static RegisterPrimOp primop_toFile({
|
|||
```
|
||||
|
||||
Note that `${configFile}` is a
|
||||
[string interpolation](language/values.md#type-string), so the result of the
|
||||
[string interpolation](@docroot@/language/values.md#type-string), so the result of the
|
||||
expression `configFile`
|
||||
(i.e., a path like `/nix/store/m7p7jfny445k...-foo.conf`) will be
|
||||
spliced into the resulting string.
|
||||
|
@ -2614,14 +2707,9 @@ static void prim_zipAttrsWith(EvalState & state, const PosIdx pos, Value * * arg
|
|||
|
||||
for (unsigned int n = 0; n < listSize; ++n) {
|
||||
Value * vElem = listElems[n];
|
||||
try {
|
||||
state.forceAttrs(*vElem, noPos, "while evaluating a value of the list passed as second argument to builtins.zipAttrsWith");
|
||||
for (auto & attr : *vElem->attrs)
|
||||
attrsSeen[attr.name].first++;
|
||||
} catch (TypeError & e) {
|
||||
e.addTrace(state.positions[pos], hintfmt("while invoking '%s'", "zipAttrsWith"));
|
||||
state.debugThrowLastTrace(e);
|
||||
}
|
||||
}
|
||||
|
||||
auto attrs = state.buildBindings(attrsSeen.size());
|
||||
|
@ -2807,7 +2895,7 @@ static RegisterPrimOp primop_map({
|
|||
example,
|
||||
|
||||
```nix
|
||||
map (x"foo" + x) [ "bar" "bla" "abc" ]
|
||||
map (x: "foo" + x) [ "bar" "bla" "abc" ]
|
||||
```
|
||||
|
||||
evaluates to `[ "foobar" "foobla" "fooabc" ]`.
|
||||
|
@ -3005,13 +3093,13 @@ static void prim_genList(EvalState & state, const PosIdx pos, Value * * args, Va
|
|||
auto len = state.forceInt(*args[1], pos, "while evaluating the second argument passed to builtins.genList");
|
||||
|
||||
if (len < 0)
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt("cannot create list of size %1%", len),
|
||||
.errPos = state.positions[pos]
|
||||
}));
|
||||
state.error("cannot create list of size %1%", len).debugThrow<EvalError>();
|
||||
|
||||
// More strict than striclty (!) necessary, but acceptable
|
||||
// as evaluating map without accessing any values makes little sense.
|
||||
state.forceFunction(*args[0], noPos, "while evaluating the first argument passed to builtins.genList");
|
||||
|
||||
state.mkList(v, len);
|
||||
|
||||
for (unsigned int n = 0; n < (unsigned int) len; ++n) {
|
||||
auto arg = state.allocValue();
|
||||
arg->mkInt(n);
|
||||
|
@ -3059,6 +3147,8 @@ static void prim_sort(EvalState & state, const PosIdx pos, Value * * args, Value
|
|||
auto comparator = [&](Value * a, Value * b) {
|
||||
/* Optimization: if the comparator is lessThan, bypass
|
||||
callFunction. */
|
||||
/* TODO: (layus) this is absurd. An optimisation like this
|
||||
should be outside the lambda creation */
|
||||
if (args[0]->isPrimOp() && args[0]->primOp->fun == prim_lessThan)
|
||||
return CompareValues(state, noPos, "while evaluating the ordering function passed to builtins.sort")(a, b);
|
||||
|
||||
|
@ -3219,12 +3309,7 @@ static void prim_concatMap(EvalState & state, const PosIdx pos, Value * * args,
|
|||
for (unsigned int n = 0; n < nrLists; ++n) {
|
||||
Value * vElem = args[1]->listElems()[n];
|
||||
state.callFunction(*args[0], *vElem, lists[n], pos);
|
||||
try {
|
||||
state.forceList(lists[n], lists[n].determinePos(args[0]->determinePos(pos)), "while evaluating the return value of the function passed to buitlins.concatMap");
|
||||
} catch (TypeError &e) {
|
||||
e.addTrace(state.positions[pos], hintfmt("while invoking '%s'", "concatMap"));
|
||||
state.debugThrowLastTrace(e);
|
||||
}
|
||||
len += lists[n].listSize();
|
||||
}
|
||||
|
||||
|
@ -3404,7 +3489,7 @@ static void prim_lessThan(EvalState & state, const PosIdx pos, Value * * args, V
|
|||
state.forceValue(*args[0], pos);
|
||||
state.forceValue(*args[1], pos);
|
||||
// pos is exact here, no need for a message.
|
||||
CompareValues comp(state, pos, "");
|
||||
CompareValues comp(state, noPos, "");
|
||||
v.mkBool(comp(args[0], args[1]));
|
||||
}
|
||||
|
||||
|
@ -3431,7 +3516,9 @@ static RegisterPrimOp primop_lessThan({
|
|||
static void prim_toString(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
PathSet context;
|
||||
auto s = state.coerceToString(pos, *args[0], context, true, false, "while evaluating the first argument passed to builtins.toString");
|
||||
auto s = state.coerceToString(pos, *args[0], context,
|
||||
"while evaluating the first argument passed to builtins.toString",
|
||||
true, false);
|
||||
v.mkString(*s, context);
|
||||
}
|
||||
|
||||
|
@ -3783,21 +3870,18 @@ static void prim_replaceStrings(EvalState & state, const PosIdx pos, Value * * a
|
|||
state.forceList(*args[0], pos, "while evaluating the first argument passed to builtins.replaceStrings");
|
||||
state.forceList(*args[1], pos, "while evaluating the second argument passed to builtins.replaceStrings");
|
||||
if (args[0]->listSize() != args[1]->listSize())
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt("'from' and 'to' arguments to 'replaceStrings' have different lengths"),
|
||||
.errPos = state.positions[pos]
|
||||
}));
|
||||
state.error("'from' and 'to' arguments passed to builtins.replaceStrings have different lengths").atPos(pos).debugThrow<EvalError>();
|
||||
|
||||
std::vector<std::string> from;
|
||||
from.reserve(args[0]->listSize());
|
||||
for (auto elem : args[0]->listItems())
|
||||
from.emplace_back(state.forceString(*elem, pos, "while evaluating one of the strings to replace in builtins.replaceStrings"));
|
||||
from.emplace_back(state.forceString(*elem, pos, "while evaluating one of the strings to replace passed to builtins.replaceStrings"));
|
||||
|
||||
std::vector<std::pair<std::string, PathSet>> to;
|
||||
to.reserve(args[1]->listSize());
|
||||
for (auto elem : args[1]->listItems()) {
|
||||
PathSet ctx;
|
||||
auto s = state.forceString(*elem, ctx, pos, "while evaluating one of the replacement strings of builtins.replaceStrings");
|
||||
auto s = state.forceString(*elem, ctx, pos, "while evaluating one of the replacement strings passed to builtins.replaceStrings");
|
||||
to.emplace_back(s, std::move(ctx));
|
||||
}
|
||||
|
||||
|
|
|
@ -37,8 +37,15 @@ static void prim_unsafeDiscardOutputDependency(EvalState & state, const PosIdx p
|
|||
auto s = state.coerceToString(pos, *args[0], context, "while evaluating the argument passed to builtins.unsafeDiscardOutputDependency");
|
||||
|
||||
PathSet context2;
|
||||
for (auto & p : context)
|
||||
context2.insert(p.at(0) == '=' ? std::string(p, 1) : p);
|
||||
for (auto && p : context) {
|
||||
auto c = NixStringContextElem::parse(*state.store, p);
|
||||
if (auto * ptr = std::get_if<NixStringContextElem::DrvDeep>(&c)) {
|
||||
context2.emplace(state.store->printStorePath(ptr->drvPath));
|
||||
} else {
|
||||
/* Can reuse original item */
|
||||
context2.emplace(std::move(p));
|
||||
}
|
||||
}
|
||||
|
||||
v.mkString(*s, context2);
|
||||
}
|
||||
|
@ -74,34 +81,20 @@ static void prim_getContext(EvalState & state, const PosIdx pos, Value * * args,
|
|||
};
|
||||
PathSet context;
|
||||
state.forceString(*args[0], context, pos, "while evaluating the argument passed to builtins.getContext");
|
||||
auto contextInfos = std::map<Path, ContextInfo>();
|
||||
auto contextInfos = std::map<StorePath, ContextInfo>();
|
||||
for (const auto & p : context) {
|
||||
Path drv;
|
||||
std::string output;
|
||||
const Path * path = &p;
|
||||
if (p.at(0) == '=') {
|
||||
drv = std::string(p, 1);
|
||||
path = &drv;
|
||||
} else if (p.at(0) == '!') {
|
||||
NixStringContextElem ctx = decodeContext(*state.store, p);
|
||||
drv = state.store->printStorePath(ctx.first);
|
||||
output = ctx.second;
|
||||
path = &drv;
|
||||
}
|
||||
auto isPath = drv.empty();
|
||||
auto isAllOutputs = (!drv.empty()) && output.empty();
|
||||
|
||||
auto iter = contextInfos.find(*path);
|
||||
if (iter == contextInfos.end()) {
|
||||
contextInfos.emplace(*path, ContextInfo{isPath, isAllOutputs, output.empty() ? Strings{} : Strings{std::move(output)}});
|
||||
} else {
|
||||
if (isPath)
|
||||
iter->second.path = true;
|
||||
else if (isAllOutputs)
|
||||
iter->second.allOutputs = true;
|
||||
else
|
||||
iter->second.outputs.emplace_back(std::move(output));
|
||||
}
|
||||
NixStringContextElem ctx = NixStringContextElem::parse(*state.store, p);
|
||||
std::visit(overloaded {
|
||||
[&](NixStringContextElem::DrvDeep & d) {
|
||||
contextInfos[d.drvPath].allOutputs = true;
|
||||
},
|
||||
[&](NixStringContextElem::Built & b) {
|
||||
contextInfos[b.drvPath].outputs.emplace_back(std::move(b.output));
|
||||
},
|
||||
[&](NixStringContextElem::Opaque & o) {
|
||||
contextInfos[o.path].path = true;
|
||||
},
|
||||
}, ctx.raw());
|
||||
}
|
||||
|
||||
auto attrs = state.buildBindings(contextInfos.size());
|
||||
|
@ -120,7 +113,7 @@ static void prim_getContext(EvalState & state, const PosIdx pos, Value * * args,
|
|||
for (const auto & [i, output] : enumerate(info.second.outputs))
|
||||
(outputsVal.listElems()[i] = state.allocValue())->mkString(output);
|
||||
}
|
||||
attrs.alloc(info.first).mkAttrs(infoAttrs);
|
||||
attrs.alloc(state.store->printStorePath(info.first)).mkAttrs(infoAttrs);
|
||||
}
|
||||
|
||||
v.mkAttrs(attrs);
|
||||
|
|
|
@ -22,7 +22,9 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a
|
|||
for (auto & attr : *args[0]->attrs) {
|
||||
std::string_view n(state.symbols[attr.name]);
|
||||
if (n == "url")
|
||||
url = state.coerceToString(attr.pos, *attr.value, context, false, false, "while evaluating the `url` attribute passed to builtins.fetchMercurial").toOwned();
|
||||
url = state.coerceToString(attr.pos, *attr.value, context,
|
||||
"while evaluating the `url` attribute passed to builtins.fetchMercurial",
|
||||
false, false).toOwned();
|
||||
else if (n == "rev") {
|
||||
// Ugly: unlike fetchGit, here the "rev" attribute can
|
||||
// be both a revision or a branch/tag name.
|
||||
|
@ -48,7 +50,9 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a
|
|||
});
|
||||
|
||||
} else
|
||||
url = state.coerceToString(pos, *args[0], context, false, false, "while evaluating the first argument passed to builtins.fetchMercurial").toOwned();
|
||||
url = state.coerceToString(pos, *args[0], context,
|
||||
"while evaluating the first argument passed to builtins.fetchMercurial",
|
||||
false, false).toOwned();
|
||||
|
||||
// FIXME: git externals probably can be used to bypass the URI
|
||||
// whitelist. Ah well.
|
||||
|
|
|
@ -125,7 +125,7 @@ static void fetchTree(
|
|||
if (attr.name == state.sType) continue;
|
||||
state.forceValue(*attr.value, attr.pos);
|
||||
if (attr.value->type() == nPath || attr.value->type() == nString) {
|
||||
auto s = state.coerceToString(attr.pos, *attr.value, context, false, false, "").toOwned();
|
||||
auto s = state.coerceToString(attr.pos, *attr.value, context, "", false, false).toOwned();
|
||||
attrs.emplace(state.symbols[attr.name],
|
||||
state.symbols[attr.name] == "url"
|
||||
? type == "git"
|
||||
|
@ -151,7 +151,9 @@ static void fetchTree(
|
|||
|
||||
input = fetchers::Input::fromAttrs(std::move(attrs));
|
||||
} else {
|
||||
auto url = state.coerceToString(pos, *args[0], context, false, false, "while evaluating the first argument passed to the fetcher").toOwned();
|
||||
auto url = state.coerceToString(pos, *args[0], context,
|
||||
"while evaluating the first argument passed to the fetcher",
|
||||
false, false).toOwned();
|
||||
|
||||
if (type == "git") {
|
||||
fetchers::Attrs attrs;
|
||||
|
@ -218,6 +220,9 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
|
|||
} else
|
||||
url = state.forceStringNoCtx(*args[0], pos, "while evaluating the url we should fetch");
|
||||
|
||||
if (who == "fetchTarball")
|
||||
url = evalSettings.resolvePseudoUrl(*url);
|
||||
|
||||
state.checkURI(*url);
|
||||
|
||||
if (name == "")
|
||||
|
|
File diff suppressed because it is too large
|
@ -6,7 +6,9 @@ libexpr-tests_DIR := $(d)
|
|||
|
||||
libexpr-tests_INSTALL_DIR :=
|
||||
|
||||
libexpr-tests_SOURCES := $(wildcard $(d)/*.cc)
|
||||
libexpr-tests_SOURCES := \
|
||||
$(wildcard $(d)/*.cc) \
|
||||
$(wildcard $(d)/value/*.cc)
|
||||
|
||||
libexpr-tests_CXXFLAGS += -I src/libexpr -I src/libutil -I src/libstore -I src/libexpr/tests
|
||||
|
||||
|
|
72
src/libexpr/tests/value/context.cc
Normal file

@@ -0,0 +1,72 @@
#include "value/context.hh"

#include "libexprtests.hh"

namespace nix {

// Testing of trivial expressions
struct NixStringContextElemTest : public LibExprTest {
    const Store & store() const {
        return *LibExprTest::store;
    }
};

TEST_F(NixStringContextElemTest, empty_invalid) {
    EXPECT_THROW(
        NixStringContextElem::parse(store(), ""),
        BadNixStringContextElem);
}

TEST_F(NixStringContextElemTest, single_bang_invalid) {
    EXPECT_THROW(
        NixStringContextElem::parse(store(), "!"),
        BadNixStringContextElem);
}

TEST_F(NixStringContextElemTest, double_bang_invalid) {
    EXPECT_THROW(
        NixStringContextElem::parse(store(), "!!/"),
        BadStorePath);
}

TEST_F(NixStringContextElemTest, eq_slash_invalid) {
    EXPECT_THROW(
        NixStringContextElem::parse(store(), "=/"),
        BadStorePath);
}

TEST_F(NixStringContextElemTest, slash_invalid) {
    EXPECT_THROW(
        NixStringContextElem::parse(store(), "/"),
        BadStorePath);
}

TEST_F(NixStringContextElemTest, opaque) {
    std::string_view opaque = "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x";
    auto elem = NixStringContextElem::parse(store(), opaque);
    auto * p = std::get_if<NixStringContextElem::Opaque>(&elem);
    ASSERT_TRUE(p);
    ASSERT_EQ(p->path, store().parseStorePath(opaque));
    ASSERT_EQ(elem.to_string(store()), opaque);
}

TEST_F(NixStringContextElemTest, drvDeep) {
    std::string_view drvDeep = "=/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv";
    auto elem = NixStringContextElem::parse(store(), drvDeep);
    auto * p = std::get_if<NixStringContextElem::DrvDeep>(&elem);
    ASSERT_TRUE(p);
    ASSERT_EQ(p->drvPath, store().parseStorePath(drvDeep.substr(1)));
    ASSERT_EQ(elem.to_string(store()), drvDeep);
}

TEST_F(NixStringContextElemTest, built) {
    std::string_view built = "!foo!/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv";
    auto elem = NixStringContextElem::parse(store(), built);
    auto * p = std::get_if<NixStringContextElem::Built>(&elem);
    ASSERT_TRUE(p);
    ASSERT_EQ(p->output, "foo");
    ASSERT_EQ(p->drvPath, store().parseStorePath(built.substr(5)));
    ASSERT_EQ(elem.to_string(store()), built);
}

}
|
@ -1,6 +1,7 @@
|
|||
#include "value-to-json.hh"
|
||||
#include "eval-inline.hh"
|
||||
#include "util.hh"
|
||||
#include "store-api.hh"
|
||||
|
||||
#include <cstdlib>
|
||||
#include <iomanip>
|
||||
|
@ -35,7 +36,7 @@ json printValueAsJSON(EvalState & state, bool strict,
|
|||
|
||||
case nPath:
|
||||
if (copyToStore)
|
||||
out = state.copyPathToStore(context, v.path);
|
||||
out = state.store->printStorePath(state.copyPathToStore(context, v.path));
|
||||
else
|
||||
out = v.path;
|
||||
break;
|
||||
|
|
|
@ -3,6 +3,7 @@
|
|||
#include <cassert>
|
||||
|
||||
#include "symbol-table.hh"
|
||||
#include "value/context.hh"
|
||||
|
||||
#if HAVE_BOEHMGC
|
||||
#include <gc/gc_allocator.h>
|
||||
|
@ -67,8 +68,6 @@ class XMLWriter;
|
|||
|
||||
typedef int64_t NixInt;
|
||||
typedef double NixFloat;
|
||||
typedef std::pair<StorePath, std::string> NixStringContextElem;
|
||||
typedef std::vector<NixStringContextElem> NixStringContext;
|
||||
|
||||
/* External values must descend from ExternalValueBase, so that
|
||||
* type-agnostic nix functions (e.g. showType) can be implemented
|
||||
|
@ -90,7 +89,7 @@ class ExternalValueBase
|
|||
/* Coerce the value to a string. Defaults to uncoercable, i.e. throws an
|
||||
* error.
|
||||
*/
|
||||
virtual std::string coerceToString(const Pos & pos, PathSet & context, bool copyMore, bool copyToStore, std::string_view errorCtx) const;
|
||||
virtual std::string coerceToString(const Pos & pos, PathSet & context, bool copyMore, bool copyToStore) const;
|
||||
|
||||
/* Compare to another value of the same type. Defaults to uncomparable,
|
||||
* i.e. always false.
|
||||
|
|
67
src/libexpr/value/context.cc
Normal file

@@ -0,0 +1,67 @@
#include "value/context.hh"
#include "store-api.hh"

#include <optional>

namespace nix {

NixStringContextElem NixStringContextElem::parse(const Store & store, std::string_view s0)
{
    std::string_view s = s0;

    if (s.size() == 0) {
        throw BadNixStringContextElem(s0,
            "String context element should never be an empty string");
    }
    switch (s.at(0)) {
    case '!': {
        s = s.substr(1); // advance string to parse after first !
        size_t index = s.find("!");
        // This makes index + 1 safe. Index can be the length (one after index
        // of last character), so given any valid character index --- a
        // successful find --- we can add one.
        if (index == std::string_view::npos) {
            throw BadNixStringContextElem(s0,
                "String content element beginning with '!' should have a second '!'");
        }
        return NixStringContextElem::Built {
            .drvPath = store.parseStorePath(s.substr(index + 1)),
            .output = std::string(s.substr(0, index)),
        };
    }
    case '=': {
        return NixStringContextElem::DrvDeep {
            .drvPath = store.parseStorePath(s.substr(1)),
        };
    }
    default: {
        return NixStringContextElem::Opaque {
            .path = store.parseStorePath(s),
        };
    }
    }
}

std::string NixStringContextElem::to_string(const Store & store) const {
    return std::visit(overloaded {
        [&](const NixStringContextElem::Built & b) {
            std::string res;
            res += '!';
            res += b.output;
            res += '!';
            res += store.printStorePath(b.drvPath);
            return res;
        },
        [&](const NixStringContextElem::DrvDeep & d) {
            std::string res;
            res += '=';
            res += store.printStorePath(d.drvPath);
            return res;
        },
        [&](const NixStringContextElem::Opaque & o) {
            return store.printStorePath(o.path);
        },
    }, raw());
}

}
90
src/libexpr/value/context.hh
Normal file

@@ -0,0 +1,90 @@
#pragma once

#include "util.hh"
#include "path.hh"

#include <optional>

#include <nlohmann/json_fwd.hpp>

namespace nix {

class BadNixStringContextElem : public Error
{
public:
    std::string_view raw;

    template<typename... Args>
    BadNixStringContextElem(std::string_view raw_, const Args & ... args)
        : Error("")
    {
        raw = raw_;
        auto hf = hintfmt(args...);
        err.msg = hintfmt("Bad String Context element: %1%: %2%", normaltxt(hf.str()), raw);
    }
};

class Store;

/* Plain opaque path to some store object.

   Encoded as just the path: ‘<path>’.
*/
struct NixStringContextElem_Opaque {
    StorePath path;
};

/* Path to a derivation and its entire build closure.

   The path doesn't just refer to derivation itself and its closure, but
   also all outputs of all derivations in that closure (including the
   root derivation).

   Encoded in the form ‘=<drvPath>’.
*/
struct NixStringContextElem_DrvDeep {
    StorePath drvPath;
};

/* Derivation output.

   Encoded in the form ‘!<output>!<drvPath>’.
*/
struct NixStringContextElem_Built {
    StorePath drvPath;
    std::string output;
};

using _NixStringContextElem_Raw = std::variant<
    NixStringContextElem_Opaque,
    NixStringContextElem_DrvDeep,
    NixStringContextElem_Built
>;

struct NixStringContextElem : _NixStringContextElem_Raw {
    using Raw = _NixStringContextElem_Raw;
    using Raw::Raw;

    using Opaque = NixStringContextElem_Opaque;
    using DrvDeep = NixStringContextElem_DrvDeep;
    using Built = NixStringContextElem_Built;

    inline const Raw & raw() const {
        return static_cast<const Raw &>(*this);
    }
    inline Raw & raw() {
        return static_cast<Raw &>(*this);
    }

    /* Decode a context string, one of:
       - ‘<path>’
       - ‘=<path>’
       - ‘!<name>!<path>’
    */
    static NixStringContextElem parse(const Store & store, std::string_view s);
    std::string to_string(const Store & store) const;
};

typedef std::vector<NixStringContextElem> NixStringContext;

}
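The three encodings documented above are exercised by the unit tests added earlier in this diff (`src/libexpr/tests/value/context.cc`). As a quick illustration of how the new API fits together, here is a minimal round-trip sketch; it assumes you already have a `nix::Store` reference from elsewhere (for example via `nix::openStore()`), which is not part of this patch.

```cpp
#include "value/context.hh"
#include "store-api.hh"

#include <cassert>
#include <string>
#include <string_view>

// Sketch: parse each of the three context-string encodings and convert them
// back. `store` has to come from elsewhere (for example nix::openStore()).
void roundTripExamples(const nix::Store & store)
{
    using nix::NixStringContextElem;

    for (std::string_view s : {
        // opaque store path
        "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x",
        // whole derivation closure ("DrvDeep")
        "=/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv",
        // single derivation output ("Built")
        "!foo!/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv",
    }) {
        auto elem = NixStringContextElem::parse(store, s);
        // to_string() is the inverse of parse() for well-formed input,
        // exactly as the new unit tests assert.
        assert(elem.to_string(store) == std::string(s));
    }
}
```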
|
@ -363,6 +363,7 @@ void printVersion(const std::string & programName)
|
|||
<< "\n";
|
||||
std::cout << "Store directory: " << settings.nixStore << "\n";
|
||||
std::cout << "State directory: " << settings.nixStateDir << "\n";
|
||||
std::cout << "Data directory: " << settings.nixDataDir << "\n";
|
||||
}
|
||||
throw Exit();
|
||||
}
|
||||
|
|
|
@ -39,7 +39,6 @@ void printVersion(const std::string & programName);
|
|||
void printGCWarning();
|
||||
|
||||
class Store;
|
||||
struct StorePathWithOutputs;
|
||||
|
||||
void printMissing(
|
||||
ref<Store> store,
|
||||
|
|
|
@ -502,22 +502,9 @@ void BinaryCacheStore::addSignatures(const StorePath & storePath, const StringSe
|
|||
writeNarInfo(narInfo);
|
||||
}
|
||||
|
||||
std::optional<std::string> BinaryCacheStore::getBuildLog(const StorePath & path)
|
||||
std::optional<std::string> BinaryCacheStore::getBuildLogExact(const StorePath & path)
|
||||
{
|
||||
auto drvPath = path;
|
||||
|
||||
if (!path.isDerivation()) {
|
||||
try {
|
||||
auto info = queryPathInfo(path);
|
||||
// FIXME: add a "Log" field to .narinfo
|
||||
if (!info->deriver) return std::nullopt;
|
||||
drvPath = *info->deriver;
|
||||
} catch (InvalidPath &) {
|
||||
return std::nullopt;
|
||||
}
|
||||
}
|
||||
|
||||
auto logPath = "log/" + std::string(baseNameOf(printStorePath(drvPath)));
|
||||
auto logPath = "log/" + std::string(baseNameOf(printStorePath(path)));
|
||||
|
||||
debug("fetching build log from binary cache '%s/%s'", getUri(), logPath);
|
||||
|
||||
|
|
|
@ -129,7 +129,7 @@ public:
|
|||
|
||||
void addSignatures(const StorePath & storePath, const StringSet & sigs) override;
|
||||
|
||||
std::optional<std::string> getBuildLog(const StorePath & path) override;
|
||||
std::optional<std::string> getBuildLogExact(const StorePath & path) override;
|
||||
|
||||
void addBuildLog(const StorePath & drvPath, std::string_view log) override;
|
||||
|
||||
|
|
|
@ -63,7 +63,7 @@
|
|||
namespace nix {
|
||||
|
||||
DerivationGoal::DerivationGoal(const StorePath & drvPath,
|
||||
const StringSet & wantedOutputs, Worker & worker, BuildMode buildMode)
|
||||
const OutputsSpec & wantedOutputs, Worker & worker, BuildMode buildMode)
|
||||
: Goal(worker, DerivedPath::Built { .drvPath = drvPath, .outputs = wantedOutputs })
|
||||
, useDerivation(true)
|
||||
, drvPath(drvPath)
|
||||
|
@ -82,7 +82,7 @@ DerivationGoal::DerivationGoal(const StorePath & drvPath,
|
|||
|
||||
|
||||
DerivationGoal::DerivationGoal(const StorePath & drvPath, const BasicDerivation & drv,
|
||||
const StringSet & wantedOutputs, Worker & worker, BuildMode buildMode)
|
||||
const OutputsSpec & wantedOutputs, Worker & worker, BuildMode buildMode)
|
||||
: Goal(worker, DerivedPath::Built { .drvPath = drvPath, .outputs = wantedOutputs })
|
||||
, useDerivation(false)
|
||||
, drvPath(drvPath)
|
||||
|
@@ -142,18 +142,12 @@ void DerivationGoal::work()
     (this->*state)();
 }
 
-void DerivationGoal::addWantedOutputs(const StringSet & outputs)
+void DerivationGoal::addWantedOutputs(const OutputsSpec & outputs)
 {
-    /* If we already want all outputs, there is nothing to do. */
-    if (wantedOutputs.empty()) return;
-
-    if (outputs.empty()) {
-        wantedOutputs.clear();
-        needRestart = true;
-    } else
-        for (auto & i : outputs)
-            if (wantedOutputs.insert(i).second)
+    auto newWanted = wantedOutputs.union_(outputs);
+    if (!newWanted.isSubsetOf(wantedOutputs))
         needRestart = true;
+    wantedOutputs = newWanted;
 }
 
 
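`OutputsSpec` itself (with `union_`, `isSubsetOf` and the `All`/`Names` alternatives used throughout these hunks) is defined in `outputs-spec.hh`/`outputs-spec.cc`, which are not visible in this part of the diff. Judging purely from how the methods are used here, their semantics are presumably along the following lines; this is a sketch with simplified stand-in types, not the actual implementation.

```cpp
#include <algorithm>
#include <set>
#include <string>
#include <variant>

// Simplified stand-ins for OutputsSpec::All / OutputsSpec::Names.
struct AllOutputs {};
using OutputNames = std::set<std::string>;
using Spec = std::variant<AllOutputs, OutputNames>;

Spec unionSpec(const Spec & a, const Spec & b)
{
    // "All" absorbs anything; otherwise take the set union of the names.
    if (std::holds_alternative<AllOutputs>(a) || std::holds_alternative<AllOutputs>(b))
        return AllOutputs {};
    OutputNames res = std::get<OutputNames>(a);
    auto & other = std::get<OutputNames>(b);
    res.insert(other.begin(), other.end());
    return res;
}

bool isSubsetOf(const Spec & a, const Spec & b)
{
    // Everything is a subset of "All"; "All" is only a subset of "All".
    if (std::holds_alternative<AllOutputs>(b)) return true;
    if (std::holds_alternative<AllOutputs>(a)) return false;
    auto & na = std::get<OutputNames>(a);
    auto & nb = std::get<OutputNames>(b);
    return std::includes(nb.begin(), nb.end(), na.begin(), na.end());
}
```

With these presumed semantics, `!newWanted.isSubsetOf(wantedOutputs)` is true exactly when the caller asked for an output (or for all outputs) that the goal was not already going to build, which is when a restart is needed.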
@ -390,7 +384,7 @@ void DerivationGoal::repairClosure()
|
|||
auto outputs = queryDerivationOutputMap();
|
||||
StorePathSet outputClosure;
|
||||
for (auto & i : outputs) {
|
||||
if (!wantOutput(i.first, wantedOutputs)) continue;
|
||||
if (!wantedOutputs.contains(i.first)) continue;
|
||||
worker.store.computeFSClosure(i.second, outputClosure);
|
||||
}
|
||||
|
||||
|
@ -422,7 +416,7 @@ void DerivationGoal::repairClosure()
|
|||
if (drvPath2 == outputsToDrv.end())
|
||||
addWaitee(upcast_goal(worker.makePathSubstitutionGoal(i, Repair)));
|
||||
else
|
||||
addWaitee(worker.makeDerivationGoal(drvPath2->second, StringSet(), bmRepair));
|
||||
addWaitee(worker.makeDerivationGoal(drvPath2->second, OutputsSpec::All(), bmRepair));
|
||||
}
|
||||
|
||||
if (waitees.empty()) {
|
||||
|
@ -544,7 +538,8 @@ void DerivationGoal::inputsRealised()
|
|||
However, the impure derivations feature still relies on this
|
||||
fragile way of doing things, because its builds do not have
|
||||
a representation in the store, which is a usability problem
|
||||
in itself */
|
||||
in itself. When implementing this logic entirely with lookups
|
||||
make sure that they're cached. */
|
||||
if (auto outPath = get(inputDrvOutputs, { depDrvPath, j })) {
|
||||
worker.store.computeFSClosure(*outPath, inputPaths);
|
||||
}
|
||||
|
@ -990,10 +985,15 @@ void DerivationGoal::resolvedFinished()
|
|||
|
||||
StorePathSet outputPaths;
|
||||
|
||||
// `wantedOutputs` might be empty, which means “all the outputs”
|
||||
auto realWantedOutputs = wantedOutputs;
|
||||
if (realWantedOutputs.empty())
|
||||
realWantedOutputs = resolvedDrv.outputNames();
|
||||
// `wantedOutputs` might merely indicate “all the outputs”
|
||||
auto realWantedOutputs = std::visit(overloaded {
|
||||
[&](const OutputsSpec::All &) {
|
||||
return resolvedDrv.outputNames();
|
||||
},
|
||||
[&](const OutputsSpec::Names & names) {
|
||||
return static_cast<std::set<std::string>>(names);
|
||||
},
|
||||
}, wantedOutputs.raw());
|
||||
|
||||
for (auto & wantedOutput : realWantedOutputs) {
|
||||
auto initialOutput = get(initialOutputs, wantedOutput);
|
||||
|
@ -1321,7 +1321,14 @@ std::pair<bool, DrvOutputs> DerivationGoal::checkPathValidity()
|
|||
if (!drv->type().isPure()) return { false, {} };
|
||||
|
||||
bool checkHash = buildMode == bmRepair;
|
||||
auto wantedOutputsLeft = wantedOutputs;
|
||||
auto wantedOutputsLeft = std::visit(overloaded {
|
||||
[&](const OutputsSpec::All &) {
|
||||
return StringSet {};
|
||||
},
|
||||
[&](const OutputsSpec::Names & names) {
|
||||
return static_cast<StringSet>(names);
|
||||
},
|
||||
}, wantedOutputs.raw());
|
||||
DrvOutputs validOutputs;
|
||||
|
||||
for (auto & i : queryPartialDerivationOutputMap()) {
|
||||
|
@ -1330,7 +1337,7 @@ std::pair<bool, DrvOutputs> DerivationGoal::checkPathValidity()
|
|||
// this is an invalid output, gets catched with (!wantedOutputsLeft.empty())
|
||||
continue;
|
||||
auto & info = *initialOutput;
|
||||
info.wanted = wantOutput(i.first, wantedOutputs);
|
||||
info.wanted = wantedOutputs.contains(i.first);
|
||||
if (info.wanted)
|
||||
wantedOutputsLeft.erase(i.first);
|
||||
if (i.second) {
|
||||
|
@ -1368,7 +1375,7 @@ std::pair<bool, DrvOutputs> DerivationGoal::checkPathValidity()
|
|||
validOutputs.emplace(drvOutput, Realisation { drvOutput, info.known->path });
|
||||
}
|
||||
|
||||
// If we requested all the outputs via the empty set, we are always fine.
|
||||
// If we requested all the outputs, we are always fine.
|
||||
// If we requested specific elements, the loop above removes all the valid
|
||||
// ones, so any that are left must be invalid.
|
||||
if (!wantedOutputsLeft.empty())
|
||||
|
|
|
@ -2,6 +2,7 @@
|
|||
|
||||
#include "parsed-derivations.hh"
|
||||
#include "lock.hh"
|
||||
#include "outputs-spec.hh"
|
||||
#include "store-api.hh"
|
||||
#include "pathlocks.hh"
|
||||
#include "goal.hh"
|
||||
|
@ -55,7 +56,7 @@ struct DerivationGoal : public Goal
|
|||
|
||||
/* The specific outputs that we need to build. Empty means all of
|
||||
them. */
|
||||
StringSet wantedOutputs;
|
||||
OutputsSpec wantedOutputs;
|
||||
|
||||
/* Mapping from input derivations + output names to actual store
|
||||
paths. This is filled in by waiteeDone() as each dependency
|
||||
|
@ -128,10 +129,10 @@ struct DerivationGoal : public Goal
|
|||
std::string machineName;
|
||||
|
||||
DerivationGoal(const StorePath & drvPath,
|
||||
const StringSet & wantedOutputs, Worker & worker,
|
||||
const OutputsSpec & wantedOutputs, Worker & worker,
|
||||
BuildMode buildMode = bmNormal);
|
||||
DerivationGoal(const StorePath & drvPath, const BasicDerivation & drv,
|
||||
const StringSet & wantedOutputs, Worker & worker,
|
||||
const OutputsSpec & wantedOutputs, Worker & worker,
|
||||
BuildMode buildMode = bmNormal);
|
||||
virtual ~DerivationGoal();
|
||||
|
||||
|
@ -142,7 +143,7 @@ struct DerivationGoal : public Goal
|
|||
void work() override;
|
||||
|
||||
/* Add wanted outputs to an already existing derivation goal. */
|
||||
void addWantedOutputs(const StringSet & outputs);
|
||||
void addWantedOutputs(const OutputsSpec & outputs);
|
||||
|
||||
/* The states. */
|
||||
void getDerivation();
|
||||
|
|
|
@ -80,7 +80,7 @@ BuildResult Store::buildDerivation(const StorePath & drvPath, const BasicDerivat
|
|||
BuildMode buildMode)
|
||||
{
|
||||
Worker worker(*this, *this);
|
||||
auto goal = worker.makeBasicDerivationGoal(drvPath, drv, {}, buildMode);
|
||||
auto goal = worker.makeBasicDerivationGoal(drvPath, drv, OutputsSpec::All {}, buildMode);
|
||||
|
||||
try {
|
||||
worker.run(Goals{goal});
|
||||
|
@ -89,7 +89,10 @@ BuildResult Store::buildDerivation(const StorePath & drvPath, const BasicDerivat
|
|||
return BuildResult {
|
||||
.status = BuildResult::MiscFailure,
|
||||
.errorMsg = e.msg(),
|
||||
.path = DerivedPath::Built { .drvPath = drvPath },
|
||||
.path = DerivedPath::Built {
|
||||
.drvPath = drvPath,
|
||||
.outputs = OutputsSpec::All { },
|
||||
},
|
||||
};
|
||||
};
|
||||
}
|
||||
|
@ -130,7 +133,8 @@ void LocalStore::repairPath(const StorePath & path)
|
|||
auto info = queryPathInfo(path);
|
||||
if (info->deriver && isValidPath(*info->deriver)) {
|
||||
goals.clear();
|
||||
goals.insert(worker.makeDerivationGoal(*info->deriver, StringSet(), bmRepair));
|
||||
// FIXME: Should just build the specific output we need.
|
||||
goals.insert(worker.makeDerivationGoal(*info->deriver, OutputsSpec::All { }, bmRepair));
|
||||
worker.run(goals);
|
||||
} else
|
||||
throw Error(worker.exitStatus(), "cannot repair path '%s'", printStorePath(path));
|
||||
|
|
|
@ -1459,7 +1459,7 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual Lo
|
|||
unknown, downloadSize, narSize);
|
||||
}
|
||||
|
||||
virtual std::optional<std::string> getBuildLog(const StorePath & path) override
|
||||
virtual std::optional<std::string> getBuildLogExact(const StorePath & path) override
|
||||
{ return std::nullopt; }
|
||||
|
||||
virtual void addBuildLog(const StorePath & path, std::string_view log) override
|
||||
|
@ -2050,10 +2050,14 @@ void LocalDerivationGoal::runChild()
|
|||
sandboxProfile += "(deny default (with no-log))\n";
|
||||
}
|
||||
|
||||
sandboxProfile += "(import \"sandbox-defaults.sb\")\n";
|
||||
sandboxProfile +=
|
||||
#include "sandbox-defaults.sb"
|
||||
;
|
||||
|
||||
if (!derivationType.isSandboxed())
|
||||
sandboxProfile += "(import \"sandbox-network.sb\")\n";
|
||||
sandboxProfile +=
|
||||
#include "sandbox-network.sb"
|
||||
;
|
||||
|
||||
/* Add the output paths we'll use at build-time to the chroot */
|
||||
sandboxProfile += "(allow file-read* file-write* process-exec\n";
|
||||
|
@ -2096,7 +2100,9 @@ void LocalDerivationGoal::runChild()
|
|||
|
||||
sandboxProfile += additionalSandboxProfile;
|
||||
} else
|
||||
sandboxProfile += "(import \"sandbox-minimal.sb\")\n";
|
||||
sandboxProfile +=
|
||||
#include "sandbox-minimal.sb"
|
||||
;
|
||||
|
||||
debug("Generated sandbox profile:");
|
||||
debug(sandboxProfile);
|
||||
|
@ -2121,8 +2127,6 @@ void LocalDerivationGoal::runChild()
|
|||
args.push_back(sandboxFile);
|
||||
args.push_back("-D");
|
||||
args.push_back("_GLOBAL_TMP_DIR=" + globalTmpDir);
|
||||
args.push_back("-D");
|
||||
args.push_back("IMPORT_DIR=" + settings.nixDataDir + "/nix/sandbox/");
|
||||
if (allowLocalNetworking) {
|
||||
args.push_back("-D");
|
||||
args.push_back(std::string("_ALLOW_LOCAL_NETWORKING=1"));
|
||||
|
@ -2748,7 +2752,7 @@ DrvOutputs LocalDerivationGoal::registerOutputs()
|
|||
signRealisation(thisRealisation);
|
||||
worker.store.registerDrvOutput(thisRealisation);
|
||||
}
|
||||
if (wantOutput(outputName, wantedOutputs))
|
||||
if (wantedOutputs.contains(outputName))
|
||||
builtOutputs.emplace(thisRealisation.id, thisRealisation);
|
||||
}
|
||||
|
||||
|
|
|
@ -1,3 +1,5 @@
|
|||
R""(
|
||||
|
||||
(define TMPDIR (param "_GLOBAL_TMP_DIR"))
|
||||
|
||||
(deny default)
|
||||
|
@ -104,3 +106,5 @@
|
|||
(subpath "/System/Library/Apple/usr/libexec/oah")
|
||||
(subpath "/System/Library/LaunchDaemons/com.apple.oahd.plist")
|
||||
(subpath "/Library/Apple/System/Library/LaunchDaemons/com.apple.oahd.plist"))
|
||||
|
||||
)""
|
|
@ -1,5 +1,9 @@
|
|||
R""(
|
||||
|
||||
(allow default)
|
||||
|
||||
; Disallow creating setuid/setgid binaries, since that
|
||||
; would allow breaking build user isolation.
|
||||
(deny file-write-setugid)
|
||||
|
||||
)""
|
|
@ -1,3 +1,5 @@
|
|||
R""(
|
||||
|
||||
; Allow local and remote network traffic.
|
||||
(allow network* (local ip) (remote ip))
|
||||
|
||||
|
@ -18,3 +20,5 @@
|
|||
; Allow access to trustd.
|
||||
(allow mach-lookup (global-name "com.apple.trustd"))
|
||||
(allow mach-lookup (global-name "com.apple.trustd.agent"))
|
||||
|
||||
)""
|
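The hunks above work together: each `.sb` policy file is now wrapped in `R""(` … `)""`, so `#include`-ing it in the middle of an expression splices its whole contents into the program as a C++ raw string literal, instead of requiring the policy files to be installed and imported by the sandbox at run time. A minimal self-contained illustration of the trick (the `policy.sb` file name here is made up for the example):

```cpp
// policy.sb — the included file wraps its real contents in a raw string:
//
//   R""(
//   (allow default)
//   )""

// main.cc — at the point of inclusion, the preprocessor pastes the file's
// text, which the compiler then sees as a single string-literal expression.
#include <iostream>
#include <string>

int main()
{
    std::string profile;
    profile +=
#include "policy.sb"
        ;
    std::cout << profile;   // prints the sandbox policy text
}
```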
|
@ -42,7 +42,7 @@ Worker::~Worker()
|
|||
|
||||
std::shared_ptr<DerivationGoal> Worker::makeDerivationGoalCommon(
|
||||
const StorePath & drvPath,
|
||||
const StringSet & wantedOutputs,
|
||||
const OutputsSpec & wantedOutputs,
|
||||
std::function<std::shared_ptr<DerivationGoal>()> mkDrvGoal)
|
||||
{
|
||||
std::weak_ptr<DerivationGoal> & goal_weak = derivationGoals[drvPath];
|
||||
|
@ -59,7 +59,7 @@ std::shared_ptr<DerivationGoal> Worker::makeDerivationGoalCommon(
|
|||
|
||||
|
||||
std::shared_ptr<DerivationGoal> Worker::makeDerivationGoal(const StorePath & drvPath,
|
||||
const StringSet & wantedOutputs, BuildMode buildMode)
|
||||
const OutputsSpec & wantedOutputs, BuildMode buildMode)
|
||||
{
|
||||
return makeDerivationGoalCommon(drvPath, wantedOutputs, [&]() -> std::shared_ptr<DerivationGoal> {
|
||||
return !dynamic_cast<LocalStore *>(&store)
|
||||
|
@ -70,7 +70,7 @@ std::shared_ptr<DerivationGoal> Worker::makeDerivationGoal(const StorePath & drv
|
|||
|
||||
|
||||
std::shared_ptr<DerivationGoal> Worker::makeBasicDerivationGoal(const StorePath & drvPath,
|
||||
const BasicDerivation & drv, const StringSet & wantedOutputs, BuildMode buildMode)
|
||||
const BasicDerivation & drv, const OutputsSpec & wantedOutputs, BuildMode buildMode)
|
||||
{
|
||||
return makeDerivationGoalCommon(drvPath, wantedOutputs, [&]() -> std::shared_ptr<DerivationGoal> {
|
||||
return !dynamic_cast<LocalStore *>(&store)
|
||||
|
|
|
@ -140,15 +140,15 @@ public:
|
|||
/* derivation goal */
|
||||
private:
|
||||
std::shared_ptr<DerivationGoal> makeDerivationGoalCommon(
|
||||
const StorePath & drvPath, const StringSet & wantedOutputs,
|
||||
const StorePath & drvPath, const OutputsSpec & wantedOutputs,
|
||||
std::function<std::shared_ptr<DerivationGoal>()> mkDrvGoal);
|
||||
public:
|
||||
std::shared_ptr<DerivationGoal> makeDerivationGoal(
|
||||
const StorePath & drvPath,
|
||||
const StringSet & wantedOutputs, BuildMode buildMode = bmNormal);
|
||||
const OutputsSpec & wantedOutputs, BuildMode buildMode = bmNormal);
|
||||
std::shared_ptr<DerivationGoal> makeBasicDerivationGoal(
|
||||
const StorePath & drvPath, const BasicDerivation & drv,
|
||||
const StringSet & wantedOutputs, BuildMode buildMode = bmNormal);
|
||||
const OutputsSpec & wantedOutputs, BuildMode buildMode = bmNormal);
|
||||
|
||||
/* substitution goal */
|
||||
std::shared_ptr<PathSubstitutionGoal> makePathSubstitutionGoal(const StorePath & storePath, RepairFlag repair = NoRepair, std::optional<ContentAddress> ca = std::nullopt);
|
||||
|
|
|
@ -222,7 +222,8 @@ struct ClientSettings
|
|||
else if (!hasSuffix(s, "/") && trusted.count(s + "/"))
|
||||
subs.push_back(s + "/");
|
||||
else
|
||||
warn("ignoring untrusted substituter '%s'", s);
|
||||
warn("ignoring untrusted substituter '%s', you are not a trusted user.\n"
|
||||
"Run `man nix.conf` for more information on the `substituters` configuration option.", s);
|
||||
res = subs;
|
||||
return true;
|
||||
};
|
||||
|
|
|
@ -688,12 +688,6 @@ std::map<std::string, Hash> staticOutputHashes(Store & store, const Derivation &
|
|||
}
|
||||
|
||||
|
||||
bool wantOutput(const std::string & output, const std::set<std::string> & wanted)
|
||||
{
|
||||
return wanted.empty() || wanted.find(output) != wanted.end();
|
||||
}
|
||||
|
||||
|
||||
static DerivationOutput readDerivationOutput(Source & in, const Store & store)
|
||||
{
|
||||
const auto pathS = readString(in);
|
||||
|
|
|
@ -13,6 +13,7 @@
|
|||
|
||||
namespace nix {
|
||||
|
||||
class Store;
|
||||
|
||||
/* Abstract syntax of derivations. */
|
||||
|
||||
|
@ -294,8 +295,6 @@ typedef std::map<StorePath, DrvHash> DrvHashes;
|
|||
// FIXME: global, though at least thread-safe.
|
||||
extern Sync<DrvHashes> drvHashes;
|
||||
|
||||
bool wantOutput(const std::string & output, const std::set<std::string> & wanted);
|
||||
|
||||
struct Source;
|
||||
struct Sink;
|
||||
|
||||
|
|
|
@ -19,11 +19,11 @@ nlohmann::json DerivedPath::Built::toJSON(ref<Store> store) const {
|
|||
res["drvPath"] = store->printStorePath(drvPath);
|
||||
// Fallback for the input-addressed derivation case: We expect to always be
|
||||
// able to print the output paths, so let’s do it
|
||||
const auto knownOutputs = store->queryPartialDerivationOutputMap(drvPath);
|
||||
for (const auto & output : outputs) {
|
||||
auto knownOutput = get(knownOutputs, output);
|
||||
if (knownOutput && *knownOutput)
|
||||
res["outputs"][output] = store->printStorePath(**knownOutput);
|
||||
const auto outputMap = store->queryPartialDerivationOutputMap(drvPath);
|
||||
for (const auto & [output, outputPathOpt] : outputMap) {
|
||||
if (!outputs.contains(output)) continue;
|
||||
if (outputPathOpt)
|
||||
res["outputs"][output] = store->printStorePath(*outputPathOpt);
|
||||
else
|
||||
res["outputs"][output] = nullptr;
|
||||
}
|
||||
|
@@ -63,7 +63,7 @@ std::string DerivedPath::Built::to_string(const Store & store) const
 {
     return store.printStorePath(drvPath)
         + "!"
-        + (outputs.empty() ? std::string { "*" } : concatStringsSep(",", outputs));
+        + outputs.to_string();
 }
 
 std::string DerivedPath::to_string(const Store & store) const

@@ -81,15 +81,10 @@ DerivedPath::Opaque DerivedPath::Opaque::parse(const Store & store, std::string_
 
 DerivedPath::Built DerivedPath::Built::parse(const Store & store, std::string_view drvS, std::string_view outputsS)
 {
-    auto drvPath = store.parseStorePath(drvS);
-    std::set<std::string> outputs;
-    if (outputsS != "*") {
-        outputs = tokenizeString<std::set<std::string>>(outputsS, ",");
-        if (outputs.empty())
-            throw Error(
-                "Explicit list of wanted outputs '%s' must not be empty. Consider using '*' as a wildcard meaning all outputs if no output in particular is wanted.", outputsS);
-    }
-    return {drvPath, outputs};
+    return {
+        .drvPath = store.parseStorePath(drvS),
+        .outputs = OutputsSpec::parse(outputsS),
+    };
 }
 
 DerivedPath DerivedPath::parse(const Store & store, std::string_view s)
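`OutputsSpec::parse` takes over the string handling that `Built::parse` used to do inline. Its real implementation is not visible in this part of the diff, but the code it replaces implies the format: `*` selects every output, anything else is a non-empty, comma-separated list of output names. A rough sketch under that assumption, using the same kind of simplified stand-ins as the earlier `OutputsSpec` sketch:

```cpp
#include <set>
#include <stdexcept>
#include <string>
#include <string_view>
#include <variant>

// Simplified stand-ins for OutputsSpec::All / OutputsSpec::Names.
struct AllOutputs {};
using OutputNames = std::set<std::string>;
using Spec = std::variant<AllOutputs, OutputNames>;

// Sketch of the parser implied by the old Built::parse code: "*" selects
// every output, otherwise a comma-separated, non-empty list of names.
Spec parseOutputsSpec(std::string_view s)
{
    if (s == "*")
        return AllOutputs {};
    OutputNames names;
    size_t start = 0;
    while (start <= s.size()) {
        auto end = s.find(',', start);
        if (end == std::string_view::npos) end = s.size();
        if (end > start)
            names.insert(std::string(s.substr(start, end - start)));
        start = end + 1;
    }
    if (names.empty())
        throw std::invalid_argument(
            "explicit list of wanted outputs must not be empty; use '*' for all outputs");
    return names;
}
```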
|
@ -3,6 +3,7 @@
|
|||
#include "util.hh"
|
||||
#include "path.hh"
|
||||
#include "realisation.hh"
|
||||
#include "outputs-spec.hh"
|
||||
|
||||
#include <optional>
|
||||
|
||||
|
@ -44,7 +45,7 @@ struct DerivedPathOpaque {
|
|||
*/
|
||||
struct DerivedPathBuilt {
|
||||
StorePath drvPath;
|
||||
std::set<std::string> outputs;
|
||||
OutputsSpec outputs;
|
||||
|
||||
std::string to_string(const Store & store) const;
|
||||
static DerivedPathBuilt parse(const Store & store, std::string_view, std::string_view);
|
||||
|
|
|
@ -77,12 +77,12 @@ Path LocalFSStore::addPermRoot(const StorePath & storePath, const Path & _gcRoot
|
|||
}
|
||||
|
||||
|
||||
void LocalStore::addTempRoot(const StorePath & path)
|
||||
void LocalStore::createTempRootsFile()
|
||||
{
|
||||
auto state(_state.lock());
|
||||
auto fdTempRoots(_fdTempRoots.lock());
|
||||
|
||||
/* Create the temporary roots file for this process. */
|
||||
if (!state->fdTempRoots) {
|
||||
if (*fdTempRoots) return;
|
||||
|
||||
while (1) {
|
||||
if (pathExists(fnTempRoots))
|
||||
|
@ -90,47 +90,60 @@ void LocalStore::addTempRoot(const StorePath & path)
|
|||
processes with the same pid. */
|
||||
unlink(fnTempRoots.c_str());
|
||||
|
||||
state->fdTempRoots = openLockFile(fnTempRoots, true);
|
||||
*fdTempRoots = openLockFile(fnTempRoots, true);
|
||||
|
||||
debug("acquiring write lock on '%s'", fnTempRoots);
|
||||
lockFile(state->fdTempRoots.get(), ltWrite, true);
|
||||
lockFile(fdTempRoots->get(), ltWrite, true);
|
||||
|
||||
/* Check whether the garbage collector didn't get in our
|
||||
way. */
|
||||
struct stat st;
|
||||
if (fstat(state->fdTempRoots.get(), &st) == -1)
|
||||
if (fstat(fdTempRoots->get(), &st) == -1)
|
||||
throw SysError("statting '%1%'", fnTempRoots);
|
||||
if (st.st_size == 0) break;
|
||||
|
||||
/* The garbage collector deleted this file before we could
|
||||
get a lock. (It won't delete the file after we get a
|
||||
lock.) Try again. */
|
||||
/* The garbage collector deleted this file before we could get
|
||||
a lock. (It won't delete the file after we get a lock.)
|
||||
Try again. */
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
void LocalStore::addTempRoot(const StorePath & path)
|
||||
{
|
||||
createTempRootsFile();
|
||||
|
||||
/* Open/create the global GC lock file. */
|
||||
{
|
||||
auto fdGCLock(_fdGCLock.lock());
|
||||
if (!*fdGCLock)
|
||||
*fdGCLock = openGCLock();
|
||||
}
|
||||
|
||||
if (!state->fdGCLock)
|
||||
state->fdGCLock = openGCLock();
|
||||
|
||||
restart:
|
||||
FdLock gcLock(state->fdGCLock.get(), ltRead, false, "");
|
||||
/* Try to acquire a shared global GC lock (non-blocking). This
|
||||
only succeeds if the garbage collector is not currently
|
||||
running. */
|
||||
FdLock gcLock(_fdGCLock.lock()->get(), ltRead, false, "");
|
||||
|
||||
if (!gcLock.acquired) {
|
||||
/* We couldn't get a shared global GC lock, so the garbage
|
||||
collector is running. So we have to connect to the garbage
|
||||
collector and inform it about our root. */
|
||||
if (!state->fdRootsSocket) {
|
||||
auto fdRootsSocket(_fdRootsSocket.lock());
|
||||
|
||||
if (!*fdRootsSocket) {
|
||||
auto socketPath = stateDir.get() + gcSocketPath;
|
||||
debug("connecting to '%s'", socketPath);
|
||||
state->fdRootsSocket = createUnixDomainSocket();
|
||||
*fdRootsSocket = createUnixDomainSocket();
|
||||
try {
|
||||
nix::connect(state->fdRootsSocket.get(), socketPath);
|
||||
nix::connect(fdRootsSocket->get(), socketPath);
|
||||
} catch (SysError & e) {
|
||||
/* The garbage collector may have exited, so we need to
|
||||
restart. */
|
||||
if (e.errNo == ECONNREFUSED) {
|
||||
debug("GC socket connection refused");
|
||||
state->fdRootsSocket.close();
|
||||
fdRootsSocket->close();
|
||||
goto restart;
|
||||
}
|
||||
throw;
|
||||
|
@ -139,9 +152,9 @@ void LocalStore::addTempRoot(const StorePath & path)
|
|||
|
||||
try {
|
||||
debug("sending GC root '%s'", printStorePath(path));
|
||||
writeFull(state->fdRootsSocket.get(), printStorePath(path) + "\n", false);
|
||||
writeFull(fdRootsSocket->get(), printStorePath(path) + "\n", false);
|
||||
char c;
|
||||
readFull(state->fdRootsSocket.get(), &c, 1);
|
||||
readFull(fdRootsSocket->get(), &c, 1);
|
||||
assert(c == '1');
|
||||
debug("got ack for GC root '%s'", printStorePath(path));
|
||||
} catch (SysError & e) {
|
||||
|
@ -149,20 +162,21 @@ void LocalStore::addTempRoot(const StorePath & path)
|
|||
restart. */
|
||||
if (e.errNo == EPIPE || e.errNo == ECONNRESET) {
|
||||
debug("GC socket disconnected");
|
||||
state->fdRootsSocket.close();
|
||||
fdRootsSocket->close();
|
||||
goto restart;
|
||||
}
|
||||
throw;
|
||||
} catch (EndOfFile & e) {
|
||||
debug("GC socket disconnected");
|
||||
state->fdRootsSocket.close();
|
||||
fdRootsSocket->close();
|
||||
goto restart;
|
||||
}
|
||||
}
|
||||
|
||||
/* Append the store path to the temporary roots file. */
|
||||
/* Record the store path in the temporary roots file so it will be
|
||||
seen by a future run of the garbage collector. */
|
||||
auto s = printStorePath(path) + '\0';
|
||||
writeFull(state->fdTempRoots.get(), s);
|
||||
writeFull(_fdTempRoots.lock()->get(), s);
|
||||
}
|
||||
|
||||
|
||||
|
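The hunk above only shows the client side of the GC-root handshake: `addTempRoot` writes the store path followed by a newline to the collector's socket and waits for a single `'1'` byte as an acknowledgement. For orientation, here is a hedged sketch of what the collector's end of that exchange presumably looks like; everything beyond the one-line-per-root, one-byte-ack framing visible above is an assumption.

```cpp
#include <string>
#include <unistd.h>

// Sketch of the collector's side of the protocol used in addTempRoot above:
// read newline-terminated store paths, register each one as a temporary
// root, and acknowledge with a single '1' byte.
template<typename F>
void serveGCRootConnectionSketch(int fd, F && registerTempRoot)
{
    std::string buf;
    char c;
    while (read(fd, &c, 1) == 1) {
        if (c != '\n') { buf.push_back(c); continue; }
        registerTempRoot(buf);          // remember this path as a live root
        buf.clear();
        char ack = '1';
        if (write(fd, &ack, 1) != 1)    // the client blocks on this ack
            break;
    }
}
```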
|
|
@@ -570,11 +570,15 @@ public:
        {"cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY="},
        "trusted-public-keys",
        R"(
          A whitespace-separated list of public keys. When paths are copied
          from another Nix store (such as a binary cache), they must be
          signed with one of these keys. For example:
          `cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY=
          hydra.nixos.org-1:CNHJZBh9K4tP3EKF6FkkgeVYsS3ohTl+oS0Qa8bezVs=`.
          A whitespace-separated list of public keys.

          At least one of the following conditions must be met
          for Nix to accept copying a store object from another
          Nix store (such as a substituter):

          - the store object has been signed using a key in the trusted keys list
          - the [`require-sigs`](#conf-require-sigs) option has been set to `false`
          - the store object is [output-addressed](@docroot@/glossary.md#gloss-output-addressed-store-object)
        )",
        {"binary-cache-public-keys"}};

@@ -670,13 +674,14 @@ public:
          independently. Lower value means higher priority.
          The default is `https://cache.nixos.org`, with a Priority of 40.

          Nix will copy a store path from a remote store only if one
          of the following is true:
          At least one of the following conditions must be met for Nix to use
          a substituter:

          - the store object is signed by one of the [`trusted-public-keys`](#conf-trusted-public-keys)
          - the substituter is in the [`trusted-substituters`](#conf-trusted-substituters) list
          - the [`require-sigs`](#conf-require-sigs) option has been set to `false`
          - the store object is [output-addressed](glossary.md#gloss-output-addressed-store-object)
          - the user calling Nix is in the [`trusted-users`](#conf-trusted-users) list

          In addition, each store path should be trusted as described
          in [`trusted-public-keys`](#conf-trusted-public-keys)
        )",
        {"binary-caches"}};
|
|
@ -279,7 +279,12 @@ public:
|
|||
|
||||
conn->to.flush();
|
||||
|
||||
BuildResult status { .path = DerivedPath::Built { .drvPath = drvPath } };
|
||||
BuildResult status {
|
||||
.path = DerivedPath::Built {
|
||||
.drvPath = drvPath,
|
||||
.outputs = OutputsSpec::All { },
|
||||
},
|
||||
};
|
||||
status.status = (BuildResult::Status) readInt(conn->from);
|
||||
conn->from >> status.errorMsg;
|
||||
|
||||
|
|
|
@ -87,20 +87,8 @@ void LocalFSStore::narFromPath(const StorePath & path, Sink & sink)
|
|||
|
||||
const std::string LocalFSStore::drvsLogDir = "drvs";
|
||||
|
||||
std::optional<std::string> LocalFSStore::getBuildLog(const StorePath & path_)
|
||||
std::optional<std::string> LocalFSStore::getBuildLogExact(const StorePath & path)
|
||||
{
|
||||
auto path = path_;
|
||||
|
||||
if (!path.isDerivation()) {
|
||||
try {
|
||||
auto info = queryPathInfo(path);
|
||||
if (!info->deriver) return std::nullopt;
|
||||
path = *info->deriver;
|
||||
} catch (InvalidPath &) {
|
||||
return std::nullopt;
|
||||
}
|
||||
}
|
||||
|
||||
auto baseName = path.to_string();
|
||||
|
||||
for (int j = 0; j < 2; j++) {
|
||||
|
|
|
@ -50,7 +50,7 @@ public:
|
|||
return getRealStoreDir() + "/" + std::string(storePath, storeDir.size() + 1);
|
||||
}
|
||||
|
||||
std::optional<std::string> getBuildLog(const StorePath & path) override;
|
||||
std::optional<std::string> getBuildLogExact(const StorePath & path) override;
|
||||
|
||||
};
|
||||
|
||||
|
|
|
@ -441,9 +441,9 @@ LocalStore::~LocalStore()
|
|||
}
|
||||
|
||||
try {
|
||||
auto state(_state.lock());
|
||||
if (state->fdTempRoots) {
|
||||
state->fdTempRoots = -1;
|
||||
auto fdTempRoots(_fdTempRoots.lock());
|
||||
if (*fdTempRoots) {
|
||||
*fdTempRoots = -1;
|
||||
unlink(fnTempRoots.c_str());
|
||||
}
|
||||
} catch (...) {
|
||||
|
|
|
@ -59,15 +59,6 @@ private:
|
|||
struct Stmts;
|
||||
std::unique_ptr<Stmts> stmts;
|
||||
|
||||
/* The global GC lock */
|
||||
AutoCloseFD fdGCLock;
|
||||
|
||||
/* The file to which we write our temporary roots. */
|
||||
AutoCloseFD fdTempRoots;
|
||||
|
||||
/* Connection to the garbage collector. */
|
||||
AutoCloseFD fdRootsSocket;
|
||||
|
||||
/* The last time we checked whether to do an auto-GC, or an
|
||||
auto-GC finished. */
|
||||
std::chrono::time_point<std::chrono::steady_clock> lastGCCheck;
|
||||
|
@@ -156,6 +147,21 @@ public:
 
     void addTempRoot(const StorePath & path) override;
 
+private:
+
+    void createTempRootsFile();
+
+    /* The file to which we write our temporary roots. */
+    Sync<AutoCloseFD> _fdTempRoots;
+
+    /* The global GC lock. */
+    Sync<AutoCloseFD> _fdGCLock;
+
+    /* Connection to the garbage collector. */
+    Sync<AutoCloseFD> _fdRootsSocket;
+
+public:
+
     void addIndirectRoot(const Path & path) override;
 
 private:
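These members replace fields that previously lived behind the single `_state` lock. `Sync<T>` is Nix's small mutex-plus-value wrapper (from `sync.hh`): `lock()` returns a guard that both holds the mutex and dereferences to the wrapped value, which is why the `gc.cc` hunks above can write `*_fdTempRoots.lock()` or `_fdGCLock.lock()->get()`. A minimal sketch of that usage pattern, with a simplified stand-in for `Sync`:

```cpp
#include <mutex>

// Simplified stand-in for nix::Sync<T> (src/libutil/sync.hh): the value can
// only be reached through lock(), so every access is serialized.
template<typename T>
class SyncSketch {
    std::mutex mtx;
    T data{};
public:
    class Lock {
        SyncSketch * s;
        std::unique_lock<std::mutex> held;
    public:
        explicit Lock(SyncSketch * s_) : s(s_), held(s_->mtx) {}
        T & operator*() { return s->data; }
        T * operator->() { return &s->data; }
    };
    Lock lock() { return Lock(this); }
};

// Usage mirroring the gc.cc changes: grab the guard, poke the value, and the
// mutex is released when the guard goes out of scope.
void example(SyncSketch<int> & fd)
{
    auto guard(fd.lock());
    if (*guard == -1)
        *guard = 42;   // e.g. lazily opening a file descriptor
}
```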
|
@ -13,10 +13,6 @@ ifdef HOST_LINUX
|
|||
libstore_LDFLAGS += -ldl
|
||||
endif
|
||||
|
||||
ifdef HOST_DARWIN
|
||||
libstore_FILES = sandbox-defaults.sb sandbox-minimal.sb sandbox-network.sb
|
||||
endif
|
||||
|
||||
$(foreach file,$(libstore_FILES),$(eval $(call install-data-in,$(d)/$(file),$(datadir)/nix/sandbox)))
|
||||
|
||||
ifeq ($(ENABLE_S3), 1)
|
||||
|
|
|
@ -123,8 +123,12 @@ struct AutoUserLock : UserLock
|
|||
|
||||
std::vector<gid_t> getSupplementaryGIDs() override { return {}; }
|
||||
|
||||
static std::unique_ptr<UserLock> acquire(uid_t nrIds, bool useChroot)
|
||||
static std::unique_ptr<UserLock> acquire(uid_t nrIds, bool useUserNamespace)
|
||||
{
|
||||
#if !defined(__linux__)
|
||||
useUserNamespace = false;
|
||||
#endif
|
||||
|
||||
settings.requireExperimentalFeature(Xp::AutoAllocateUids);
|
||||
assert(settings.startId > 0);
|
||||
assert(settings.uidCount % maxIdsPerBuild == 0);
|
||||
|
@ -157,7 +161,7 @@ struct AutoUserLock : UserLock
|
|||
auto lock = std::make_unique<AutoUserLock>();
|
||||
lock->fdUserLock = std::move(fd);
|
||||
lock->firstUid = firstUid;
|
||||
if (useChroot)
|
||||
if (useUserNamespace)
|
||||
lock->firstGid = firstUid;
|
||||
else {
|
||||
struct group * gr = getgrnam(settings.buildUsersGroup.get().c_str());
|
||||
|
@ -174,10 +178,10 @@ struct AutoUserLock : UserLock
|
|||
}
|
||||
};
|
||||
|
||||
std::unique_ptr<UserLock> acquireUserLock(uid_t nrIds, bool useChroot)
|
||||
std::unique_ptr<UserLock> acquireUserLock(uid_t nrIds, bool useUserNamespace)
|
||||
{
|
||||
if (settings.autoAllocateUids)
|
||||
return AutoUserLock::acquire(nrIds, useChroot);
|
||||
return AutoUserLock::acquire(nrIds, useUserNamespace);
|
||||
else
|
||||
return SimpleUserLock::acquire();
|
||||
}
|
||||
|
|
|
@ -31,7 +31,7 @@ struct UserLock
|
|||
|
||||
/* Acquire a user lock for a UID range of size `nrIds`. Note that this
|
||||
may return nullptr if no user is available. */
|
||||
std::unique_ptr<UserLock> acquireUserLock(uid_t nrIds, bool useChroot);
|
||||
std::unique_ptr<UserLock> acquireUserLock(uid_t nrIds, bool useUserNamespace);
|
||||
|
||||
bool useBuildUsers();
|
||||
|
||||
|
|
12  src/libstore/log-store.cc  Normal file
@@ -0,0 +1,12 @@
#include "log-store.hh"

namespace nix {

std::optional<std::string> LogStore::getBuildLog(const StorePath & path) {
    auto maybePath = getBuildDerivationPath(path);
    if (!maybePath)
        return std::nullopt;
    return getBuildLogExact(maybePath.value());
}

}
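The split above means a concrete store only has to provide the exact lookup; the shared getBuildLog wrapper maps an output path back to its deriver. A minimal sketch of a subclass, assuming a hypothetical in-memory log table and ignoring the remaining pure-virtual Store methods (not part of this commit):

struct InMemoryLogStore : virtual LogStore
{
    // Hypothetical storage, keyed by the printed derivation path.
    std::map<std::string, std::string> logs;

    std::optional<std::string> getBuildLogExact(const StorePath & drvPath) override
    {
        auto i = logs.find(printStorePath(drvPath));
        if (i == logs.end()) return std::nullopt;
        return i->second;
    }

    void addBuildLog(const StorePath & drvPath, std::string_view log) override
    {
        logs[printStorePath(drvPath)] = std::string { log };
    }
};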
@@ -11,7 +11,9 @@ struct LogStore : public virtual Store

    /* Return the build log of the specified store path, if available,
       or null otherwise. */
    virtual std::optional<std::string> getBuildLog(const StorePath & path) = 0;
    std::optional<std::string> getBuildLog(const StorePath & path);

    virtual std::optional<std::string> getBuildLogExact(const StorePath & path) = 0;

    virtual void addBuildLog(const StorePath & path, std::string_view log) = 0;

@@ -185,7 +185,7 @@ void Store::queryMissing(const std::vector<DerivedPath> & targets,
                knownOutputPaths = false;
                break;
            }
            if (wantOutput(outputName, bfd.outputs) && !isValidPath(*pathOpt))
            if (bfd.outputs.contains(outputName) && !isValidPath(*pathOpt))
                invalid.insert(*pathOpt);
        }
        if (knownOutputPaths && invalid.empty()) return;
@@ -301,4 +301,47 @@ std::map<DrvOutput, StorePath> drvOutputReferences(
    return drvOutputReferences(Realisation::closure(store, inputRealisations), info->references);
}

OutputPathMap resolveDerivedPath(Store & store, const DerivedPath::Built & bfd, Store * evalStore_)
{
    auto & evalStore = evalStore_ ? *evalStore_ : store;

    OutputPathMap outputs;
    auto drv = evalStore.readDerivation(bfd.drvPath);
    auto outputHashes = staticOutputHashes(store, drv);
    auto drvOutputs = drv.outputsAndOptPaths(store);
    auto outputNames = std::visit(overloaded {
        [&](const OutputsSpec::All &) {
            StringSet names;
            for (auto & [outputName, _] : drv.outputs)
                names.insert(outputName);
            return names;
        },
        [&](const OutputsSpec::Names & names) {
            return static_cast<std::set<std::string>>(names);
        },
    }, bfd.outputs.raw());
    for (auto & output : outputNames) {
        auto outputHash = get(outputHashes, output);
        if (!outputHash)
            throw Error(
                "the derivation '%s' doesn't have an output named '%s'",
                store.printStorePath(bfd.drvPath), output);
        if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations)) {
            DrvOutput outputId { *outputHash, output };
            auto realisation = store.queryRealisation(outputId);
            if (!realisation)
                throw MissingRealisation(outputId);
            outputs.insert_or_assign(output, realisation->outPath);
        } else {
            // If ca-derivations isn't enabled, assume that
            // the output path is statically known.
            auto drvOutput = get(drvOutputs, output);
            assert(drvOutput);
            assert(drvOutput->second);
            outputs.insert_or_assign(output, *drvOutput->second);
        }
    }
    return outputs;
}

}
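As a rough usage sketch (not part of the diff), the new helper lets callers go from a derivation plus an outputs spec to concrete output paths; `store`, `drvPath`, and the `<iostream>` include are assumed from the caller's context:

DerivedPath::Built want {
    .drvPath = drvPath,
    .outputs = OutputsSpec::Names { "out", "dev" },
};
for (auto & [outputName, outPath] : resolveDerivedPath(store, want))
    std::cout << outputName << " -> " << store.printStorePath(outPath) << "\n";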
195  src/libstore/outputs-spec.cc  Normal file
@@ -0,0 +1,195 @@
#include <regex>
#include <nlohmann/json.hpp>

#include "util.hh"
#include "regex-combinators.hh"
#include "outputs-spec.hh"
#include "path-regex.hh"

namespace nix {

bool OutputsSpec::contains(const std::string & outputName) const
{
    return std::visit(overloaded {
        [&](const OutputsSpec::All &) {
            return true;
        },
        [&](const OutputsSpec::Names & outputNames) {
            return outputNames.count(outputName) > 0;
        },
    }, raw());
}

static std::string outputSpecRegexStr =
    regex::either(
        regex::group(R"(\*)"),
        regex::group(regex::list(nameRegexStr)));

std::optional<OutputsSpec> OutputsSpec::parseOpt(std::string_view s)
{
    static std::regex regex(std::string { outputSpecRegexStr });

    std::smatch match;
    std::string s2 { s }; // until some improves std::regex
    if (!std::regex_match(s2, match, regex))
        return std::nullopt;

    if (match[1].matched)
        return { OutputsSpec::All {} };

    if (match[2].matched)
        return OutputsSpec::Names { tokenizeString<StringSet>(match[2].str(), ",") };

    assert(false);
}


OutputsSpec OutputsSpec::parse(std::string_view s)
{
    std::optional spec = parseOpt(s);
    if (!spec)
        throw Error("invalid outputs specifier '%s'", s);
    return *spec;
}


std::optional<std::pair<std::string_view, ExtendedOutputsSpec>> ExtendedOutputsSpec::parseOpt(std::string_view s)
{
    auto found = s.rfind('^');

    if (found == std::string::npos)
        return std::pair { s, ExtendedOutputsSpec::Default {} };

    auto specOpt = OutputsSpec::parseOpt(s.substr(found + 1));
    if (!specOpt)
        return std::nullopt;
    return std::pair { s.substr(0, found), ExtendedOutputsSpec::Explicit { *std::move(specOpt) } };
}


std::pair<std::string_view, ExtendedOutputsSpec> ExtendedOutputsSpec::parse(std::string_view s)
{
    std::optional spec = parseOpt(s);
    if (!spec)
        throw Error("invalid extended outputs specifier '%s'", s);
    return *spec;
}


std::string OutputsSpec::to_string() const
{
    return std::visit(overloaded {
        [&](const OutputsSpec::All &) -> std::string {
            return "*";
        },
        [&](const OutputsSpec::Names & outputNames) -> std::string {
            return concatStringsSep(",", outputNames);
        },
    }, raw());
}


std::string ExtendedOutputsSpec::to_string() const
{
    return std::visit(overloaded {
        [&](const ExtendedOutputsSpec::Default &) -> std::string {
            return "";
        },
        [&](const ExtendedOutputsSpec::Explicit & outputSpec) -> std::string {
            return "^" + outputSpec.to_string();
        },
    }, raw());
}


OutputsSpec OutputsSpec::union_(const OutputsSpec & that) const
{
    return std::visit(overloaded {
        [&](const OutputsSpec::All &) -> OutputsSpec {
            return OutputsSpec::All { };
        },
        [&](const OutputsSpec::Names & theseNames) -> OutputsSpec {
            return std::visit(overloaded {
                [&](const OutputsSpec::All &) -> OutputsSpec {
                    return OutputsSpec::All {};
                },
                [&](const OutputsSpec::Names & thoseNames) -> OutputsSpec {
                    OutputsSpec::Names ret = theseNames;
                    ret.insert(thoseNames.begin(), thoseNames.end());
                    return ret;
                },
            }, that.raw());
        },
    }, raw());
}


bool OutputsSpec::isSubsetOf(const OutputsSpec & that) const
{
    return std::visit(overloaded {
        [&](const OutputsSpec::All &) {
            return true;
        },
        [&](const OutputsSpec::Names & thoseNames) {
            return std::visit(overloaded {
                [&](const OutputsSpec::All &) {
                    return false;
                },
                [&](const OutputsSpec::Names & theseNames) {
                    bool ret = true;
                    for (auto & o : theseNames)
                        if (thoseNames.count(o) == 0)
                            ret = false;
                    return ret;
                },
            }, raw());
        },
    }, that.raw());
}

}

namespace nlohmann {

using namespace nix;

OutputsSpec adl_serializer<OutputsSpec>::from_json(const json & json) {
    auto names = json.get<StringSet>();
    if (names == StringSet({"*"}))
        return OutputsSpec::All {};
    else
        return OutputsSpec::Names { std::move(names) };
}

void adl_serializer<OutputsSpec>::to_json(json & json, OutputsSpec t) {
    std::visit(overloaded {
        [&](const OutputsSpec::All &) {
            json = std::vector<std::string>({"*"});
        },
        [&](const OutputsSpec::Names & names) {
            json = names;
        },
    }, t.raw());
}


ExtendedOutputsSpec adl_serializer<ExtendedOutputsSpec>::from_json(const json & json) {
    if (json.is_null())
        return ExtendedOutputsSpec::Default {};
    else {
        return ExtendedOutputsSpec::Explicit { json.get<OutputsSpec>() };
    }
}

void adl_serializer<ExtendedOutputsSpec>::to_json(json & json, ExtendedOutputsSpec t) {
    std::visit(overloaded {
        [&](const ExtendedOutputsSpec::Default &) {
            json = nullptr;
        },
        [&](const ExtendedOutputsSpec::Explicit & e) {
            adl_serializer<OutputsSpec>::to_json(json, e);
        },
    }, t.raw());
}

}
95  src/libstore/outputs-spec.hh  Normal file
@@ -0,0 +1,95 @@
#pragma once

#include <cassert>
#include <optional>
#include <set>
#include <variant>

#include "json-impls.hh"

namespace nix {

struct OutputNames : std::set<std::string> {
    using std::set<std::string>::set;

    /* These need to be "inherited manually" */

    OutputNames(const std::set<std::string> & s)
        : std::set<std::string>(s)
    { assert(!empty()); }

    OutputNames(std::set<std::string> && s)
        : std::set<std::string>(s)
    { assert(!empty()); }

    /* This set should always be non-empty, so we delete this
       constructor in order to make creating empty ones by mistake harder.
    */
    OutputNames() = delete;
};

struct AllOutputs : std::monostate { };

typedef std::variant<AllOutputs, OutputNames> _OutputsSpecRaw;

struct OutputsSpec : _OutputsSpecRaw {
    using Raw = _OutputsSpecRaw;
    using Raw::Raw;

    /* Force choosing a variant */
    OutputsSpec() = delete;

    using Names = OutputNames;
    using All = AllOutputs;

    inline const Raw & raw() const {
        return static_cast<const Raw &>(*this);
    }

    inline Raw & raw() {
        return static_cast<Raw &>(*this);
    }

    bool contains(const std::string & output) const;

    /* Create a new OutputsSpec which is the union of this and that. */
    OutputsSpec union_(const OutputsSpec & that) const;

    /* Whether this OutputsSpec is a subset of that. */
    bool isSubsetOf(const OutputsSpec & outputs) const;

    /* Parse a string of the form 'output1,...outputN' or
       '*', returning the outputs spec. */
    static OutputsSpec parse(std::string_view s);
    static std::optional<OutputsSpec> parseOpt(std::string_view s);

    std::string to_string() const;
};

struct DefaultOutputs : std::monostate { };

typedef std::variant<DefaultOutputs, OutputsSpec> _ExtendedOutputsSpecRaw;

struct ExtendedOutputsSpec : _ExtendedOutputsSpecRaw {
    using Raw = _ExtendedOutputsSpecRaw;
    using Raw::Raw;

    using Default = DefaultOutputs;
    using Explicit = OutputsSpec;

    inline const Raw & raw() const {
        return static_cast<const Raw &>(*this);
    }

    /* Parse a string of the form 'prefix^output1,...outputN' or
       'prefix^*', returning the prefix and the extended outputs spec. */
    static std::pair<std::string_view, ExtendedOutputsSpec> parse(std::string_view s);
    static std::optional<std::pair<std::string_view, ExtendedOutputsSpec>> parseOpt(std::string_view s);

    std::string to_string() const;
};

}

JSON_IMPL(OutputsSpec)
JSON_IMPL(ExtendedOutputsSpec)
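For orientation, a hedged example of how the two parsers introduced here behave on an installable-style string (the installable value is illustrative; the behaviour mirrors the unit tests added later in this diff):

auto [prefix, extended] = ExtendedOutputsSpec::parse("nixpkgs#hello^out,dev");
// prefix == "nixpkgs#hello"; extended holds OutputsSpec::Names { "dev", "out" }

OutputsSpec all = OutputsSpec::parse("*");
assert(all.contains("doc"));                        // All matches any output name
assert(!OutputsSpec::parse("out").contains("doc")); // Names is an exact set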
@@ -3,6 +3,80 @@

namespace nix {

std::string ValidPathInfo::fingerprint(const Store & store) const
{
    if (narSize == 0)
        throw Error("cannot calculate fingerprint of path '%s' because its size is not known",
            store.printStorePath(path));
    return
        "1;" + store.printStorePath(path) + ";"
        + narHash.to_string(Base32, true) + ";"
        + std::to_string(narSize) + ";"
        + concatStringsSep(",", store.printStorePathSet(references));
}


void ValidPathInfo::sign(const Store & store, const SecretKey & secretKey)
{
    sigs.insert(secretKey.signDetached(fingerprint(store)));
}


bool ValidPathInfo::isContentAddressed(const Store & store) const
{
    if (! ca) return false;

    auto caPath = std::visit(overloaded {
        [&](const TextHash & th) {
            return store.makeTextPath(path.name(), th.hash, references);
        },
        [&](const FixedOutputHash & fsh) {
            auto refs = references;
            bool hasSelfReference = false;
            if (refs.count(path)) {
                hasSelfReference = true;
                refs.erase(path);
            }
            return store.makeFixedOutputPath(fsh.method, fsh.hash, path.name(), refs, hasSelfReference);
        }
    }, *ca);

    bool res = caPath == path;

    if (!res)
        printError("warning: path '%s' claims to be content-addressed but isn't", store.printStorePath(path));

    return res;
}


size_t ValidPathInfo::checkSignatures(const Store & store, const PublicKeys & publicKeys) const
{
    if (isContentAddressed(store)) return maxSigs;

    size_t good = 0;
    for (auto & sig : sigs)
        if (checkSignature(store, publicKeys, sig))
            good++;
    return good;
}


bool ValidPathInfo::checkSignature(const Store & store, const PublicKeys & publicKeys, const std::string & sig) const
{
    return verifyDetached(fingerprint(store), sig, publicKeys);
}


Strings ValidPathInfo::shortRefs() const
{
    Strings refs;
    for (auto & r : references)
        refs.push_back(std::string(r.to_string()));
    return refs;
}


ValidPathInfo ValidPathInfo::read(Source & source, const Store & store, unsigned int format)
{
    return read(source, store, format, store.parseStorePath(readString(source)));

@@ -24,6 +98,7 @@ ValidPathInfo ValidPathInfo::read(Source & source, const Store & store, unsigned
    return info;
}


void ValidPathInfo::write(
    Sink & sink,
    const Store & store,

7  src/libstore/path-regex.hh  Normal file
@@ -0,0 +1,7 @@
#pragma once

namespace nix {

static constexpr std::string_view nameRegexStr = R"([0-9a-zA-Z\+\-\._\?=]+)";

}

@@ -1,6 +1,5 @@
#include "path-with-outputs.hh"
#include "store-api.hh"
#include "nlohmann/json.hpp"

#include <regex>
@@ -16,10 +15,14 @@ std::string StorePathWithOutputs::to_string(const Store & store) const

DerivedPath StorePathWithOutputs::toDerivedPath() const
{
    if (!outputs.empty() || path.isDerivation())
        return DerivedPath::Built { path, outputs };
    else
    if (!outputs.empty()) {
        return DerivedPath::Built { path, OutputsSpec::Names { outputs } };
    } else if (path.isDerivation()) {
        assert(outputs.empty());
        return DerivedPath::Built { path, OutputsSpec::All { } };
    } else {
        return DerivedPath::Opaque { path };
    }
}
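In other words (an illustration, not part of the diff), the legacy wildcard encoding — a derivation path with an empty output set — now maps to an explicit OutputsSpec::All, while a non-empty set maps to OutputsSpec::Names; `drvPath` is an assumed StorePath with isDerivation() == true and `outputPath` an assumed non-derivation path:

auto all  = StorePathWithOutputs { drvPath, {} }.toDerivedPath();         // Built + OutputsSpec::All
auto some = StorePathWithOutputs { drvPath, { "out" } }.toDerivedPath();  // Built + OutputsSpec::Names { "out" }
auto data = StorePathWithOutputs { outputPath, {} }.toDerivedPath();      // Opaque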
@@ -42,7 +45,18 @@ std::variant<StorePathWithOutputs, StorePath> StorePathWithOutputs::tryFromDeriv
        return StorePathWithOutputs { bo.path };
    },
    [&](const DerivedPath::Built & bfd) -> std::variant<StorePathWithOutputs, StorePath> {
        return StorePathWithOutputs { bfd.drvPath, bfd.outputs };
        return StorePathWithOutputs {
            .path = bfd.drvPath,
            // Use legacy encoding of wildcard as empty set
            .outputs = std::visit(overloaded {
                [&](const OutputsSpec::All &) -> StringSet {
                    return {};
                },
                [&](const OutputsSpec::Names & outputs) {
                    return static_cast<StringSet>(outputs);
                },
            }, bfd.outputs.raw()),
        };
    },
}, p.raw());
}

@@ -53,8 +67,8 @@ std::pair<std::string_view, StringSet> parsePathWithOutputs(std::string_view s)
    size_t n = s.find("!");
    return n == s.npos
        ? std::make_pair(s, std::set<std::string>())
        : std::make_pair(((std::string_view) s).substr(0, n),
            tokenizeString<std::set<std::string>>(((std::string_view) s).substr(n + 1), ","));
        : std::make_pair(s.substr(0, n),
            tokenizeString<std::set<std::string>>(s.substr(n + 1), ","));
}

@@ -71,57 +85,4 @@ StorePathWithOutputs followLinksToStorePathWithOutputs(const Store & store, std:
    return StorePathWithOutputs { store.followLinksToStorePath(path), std::move(outputs) };
}

std::pair<std::string, OutputsSpec> parseOutputsSpec(const std::string & s)
{
    static std::regex regex(R"((.*)\^((\*)|([a-z]+(,[a-z]+)*)))");

    std::smatch match;
    if (!std::regex_match(s, match, regex))
        return {s, DefaultOutputs()};

    if (match[3].matched)
        return {match[1], AllOutputs()};

    return {match[1], tokenizeString<OutputNames>(match[4].str(), ",")};
}

std::string printOutputsSpec(const OutputsSpec & outputsSpec)
{
    if (std::get_if<DefaultOutputs>(&outputsSpec))
        return "";

    if (std::get_if<AllOutputs>(&outputsSpec))
        return "^*";

    if (auto outputNames = std::get_if<OutputNames>(&outputsSpec))
        return "^" + concatStringsSep(",", *outputNames);

    assert(false);
}

void to_json(nlohmann::json & json, const OutputsSpec & outputsSpec)
{
    if (std::get_if<DefaultOutputs>(&outputsSpec))
        json = nullptr;

    else if (std::get_if<AllOutputs>(&outputsSpec))
        json = std::vector<std::string>({"*"});

    else if (auto outputNames = std::get_if<OutputNames>(&outputsSpec))
        json = *outputNames;
}

void from_json(const nlohmann::json & json, OutputsSpec & outputsSpec)
{
    if (json.is_null())
        outputsSpec = DefaultOutputs();
    else {
        auto names = json.get<OutputNames>();
        if (names == OutputNames({"*"}))
            outputsSpec = AllOutputs();
        else
            outputsSpec = names;
    }
}

}

@@ -1,13 +1,17 @@
#pragma once

#include <variant>

#include "path.hh"
#include "derived-path.hh"
#include "nlohmann/json_fwd.hpp"

namespace nix {

/* This is a deprecated old type just for use by the old CLI, and older
   versions of the RPC protocols. In new code don't use it; you want
   `DerivedPath` instead.

   `DerivedPath` is better because it handles more cases, and does so more
   explicitly without devious punning tricks.
*/
struct StorePathWithOutputs
{
    StorePath path;

@@ -33,25 +37,4 @@ StorePathWithOutputs parsePathWithOutputs(const Store & store, std::string_view

StorePathWithOutputs followLinksToStorePathWithOutputs(const Store & store, std::string_view pathWithOutputs);

typedef std::set<std::string> OutputNames;

struct AllOutputs {
    bool operator < (const AllOutputs & _) const { return false; }
};

struct DefaultOutputs {
    bool operator < (const DefaultOutputs & _) const { return false; }
};

typedef std::variant<DefaultOutputs, AllOutputs, OutputNames> OutputsSpec;

/* Parse a string of the form 'prefix^output1,...outputN' or
   'prefix^*', returning the prefix and the outputs spec. */
std::pair<std::string, OutputsSpec> parseOutputsSpec(const std::string & s);

std::string printOutputsSpec(const OutputsSpec & outputsSpec);

void to_json(nlohmann::json &, const OutputsSpec &);
void from_json(const nlohmann::json &, OutputsSpec &);

}
@@ -8,8 +8,10 @@ static void checkName(std::string_view path, std::string_view name)
{
    if (name.empty())
        throw BadStorePath("store path '%s' has an empty name", path);
    if (name.size() > 211)
        throw BadStorePath("store path '%s' has a name longer than 211 characters", path);
    if (name.size() > StorePath::MaxPathLen)
        throw BadStorePath("store path '%s' has a name longer than %d characters",
            path, StorePath::MaxPathLen);
    // See nameRegexStr for the definition
    for (auto c : name)
        if (!((c >= '0' && c <= '9')
            || (c >= 'a' && c <= 'z')
@@ -5,7 +5,6 @@

namespace nix {

class Store;
struct Hash;

class StorePath

@@ -17,6 +16,8 @@ public:
    /* Size of the hash part of store paths, in base-32 characters. */
    constexpr static size_t HashLen = 32; // i.e. 160 bits

    constexpr static size_t MaxPathLen = 211;

    StorePath() = delete;

    StorePath(std::string_view baseName);

@@ -64,7 +65,6 @@ public:

typedef std::set<StorePath> StorePathSet;
typedef std::vector<StorePath> StorePaths;
typedef std::map<std::string, StorePath> OutputPathMap;

typedef std::map<StorePath, std::optional<ContentAddress>> StorePathCAMap;

@@ -7,6 +7,8 @@

namespace nix {

class Store;

struct DrvOutput {
    // The hash modulo of the derivation
    Hash drvHash;

@@ -93,4 +95,14 @@ struct RealisedPath {
    GENERATE_CMP(RealisedPath, me->raw);
};

class MissingRealisation : public Error
{
public:
    MissingRealisation(DrvOutput & outputId)
        : Error( "cannot operate on an output of the "
                "unbuilt derivation '%s'",
                outputId.to_string())
    {}
};

}

@@ -867,8 +867,8 @@ std::vector<BuildResult> RemoteStore::buildPathsWithResults(
    OutputPathMap outputs;
    auto drv = evalStore->readDerivation(bfd.drvPath);
    const auto outputHashes = staticOutputHashes(*evalStore, drv); // FIXME: expensive
    const auto drvOutputs = drv.outputsAndOptPaths(*this);
    for (auto & output : bfd.outputs) {
    auto built = resolveDerivedPath(*this, bfd, &*evalStore);
    for (auto & [output, outputPath] : built) {
        auto outputHash = get(outputHashes, output);
        if (!outputHash)
            throw Error(

@@ -879,22 +879,14 @@ std::vector<BuildResult> RemoteStore::buildPathsWithResults(
        auto realisation =
            queryRealisation(outputId);
        if (!realisation)
            throw Error(
                "cannot operate on an output of unbuilt "
                "content-addressed derivation '%s'",
                outputId.to_string());
            throw MissingRealisation(outputId);
        res.builtOutputs.emplace(realisation->id, *realisation);
    } else {
        // If ca-derivations isn't enabled, assume that
        // the output path is statically known.
        const auto drvOutput = get(drvOutputs, output);
        assert(drvOutput);
        assert(drvOutput->second);
        res.builtOutputs.emplace(
            outputId,
            Realisation {
                .id = outputId,
                .outPath = *drvOutput->second,
                .outPath = outputPath,
            });
    }
}

@@ -918,7 +910,12 @@ BuildResult RemoteStore::buildDerivation(const StorePath & drvPath, const BasicD
    writeDerivation(conn->to, *this, drv);
    conn->to << buildMode;
    conn.processStderr();
    BuildResult res { .path = DerivedPath::Built { .drvPath = drvPath } };
    BuildResult res {
        .path = DerivedPath::Built {
            .drvPath = drvPath,
            .outputs = OutputsSpec::All { },
        },
    };
    res.status = (BuildResult::Status) readInt(conn->from);
    conn->from >> res.errorMsg;
    if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 29) {

@@ -53,8 +53,8 @@ public:
    { return false; }

    // FIXME extend daemon protocol, move implementation to RemoteStore
    std::optional<std::string> getBuildLog(const StorePath & path) override
    { unsupported("getBuildLog"); }
    std::optional<std::string> getBuildLogExact(const StorePath & path) override
    { unsupported("getBuildLogExact"); }

private:

@@ -1210,79 +1210,6 @@ std::string showPaths(const PathSet & paths)
}


std::string ValidPathInfo::fingerprint(const Store & store) const
{
    if (narSize == 0)
        throw Error("cannot calculate fingerprint of path '%s' because its size is not known",
            store.printStorePath(path));
    return
        "1;" + store.printStorePath(path) + ";"
        + narHash.to_string(Base32, true) + ";"
        + std::to_string(narSize) + ";"
        + concatStringsSep(",", store.printStorePathSet(references));
}


void ValidPathInfo::sign(const Store & store, const SecretKey & secretKey)
{
    sigs.insert(secretKey.signDetached(fingerprint(store)));
}

bool ValidPathInfo::isContentAddressed(const Store & store) const
{
    if (! ca) return false;

    auto caPath = std::visit(overloaded {
        [&](const TextHash & th) {
            return store.makeTextPath(path.name(), th.hash, references);
        },
        [&](const FixedOutputHash & fsh) {
            auto refs = references;
            bool hasSelfReference = false;
            if (refs.count(path)) {
                hasSelfReference = true;
                refs.erase(path);
            }
            return store.makeFixedOutputPath(fsh.method, fsh.hash, path.name(), refs, hasSelfReference);
        }
    }, *ca);

    bool res = caPath == path;

    if (!res)
        printError("warning: path '%s' claims to be content-addressed but isn't", store.printStorePath(path));

    return res;
}


size_t ValidPathInfo::checkSignatures(const Store & store, const PublicKeys & publicKeys) const
{
    if (isContentAddressed(store)) return maxSigs;

    size_t good = 0;
    for (auto & sig : sigs)
        if (checkSignature(store, publicKeys, sig))
            good++;
    return good;
}


bool ValidPathInfo::checkSignature(const Store & store, const PublicKeys & publicKeys, const std::string & sig) const
{
    return verifyDetached(fingerprint(store), sig, publicKeys);
}


Strings ValidPathInfo::shortRefs() const
{
    Strings refs;
    for (auto & r : references)
        refs.push_back(std::string(r.to_string()));
    return refs;
}


Derivation Store::derivationFromPath(const StorePath & drvPath)
{
    ensurePath(drvPath);
@@ -1301,6 +1228,34 @@ Derivation readDerivationCommon(Store& store, const StorePath& drvPath, bool req
    }
}

std::optional<StorePath> Store::getBuildDerivationPath(const StorePath & path)
{

    if (!path.isDerivation()) {
        try {
            auto info = queryPathInfo(path);
            if (!info->deriver) return std::nullopt;
            return *info->deriver;
        } catch (InvalidPath &) {
            return std::nullopt;
        }
    }

    if (!settings.isExperimentalFeatureEnabled(Xp::CaDerivations) || !isValidPath(path))
        return path;

    auto drv = readDerivation(path);
    if (!drv.type().hasKnownOutputPaths()) {
        // The build log is actually attached to the corresponding
        // resolved derivation, so we need to get it first
        auto resolvedDrv = drv.tryResolve(*this);
        if (resolvedDrv)
            return writeDerivation(*this, *resolvedDrv, NoRepair, true);
    }

    return path;
}

Derivation Store::readDerivation(const StorePath & drvPath)
{ return readDerivationCommon(*this, drvPath, true); }
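A hedged sketch of the intended call chain (this is essentially what LogStore::getBuildLog above composes; it is not new code in the diff): map an output path back to the derivation whose log should be shown, then do the exact lookup.

std::optional<std::string> logFor(LogStore & store, const StorePath & path)
{
    // getBuildDerivationPath handles both plain output paths (via the deriver)
    // and content-addressed derivations (via resolution).
    if (auto drvPath = store.getBuildDerivationPath(path))
        return store.getBuildLogExact(*drvPath);
    return std::nullopt;
}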
@@ -71,6 +71,9 @@ class NarInfoDiskCache;
class Store;


typedef std::map<std::string, StorePath> OutputPathMap;


enum CheckSigsFlag : bool { NoCheckSigs = false, CheckSigs = true };
enum SubstituteFlag : bool { NoSubstitute = false, Substitute = true };
enum AllowInvalidFlag : bool { DisallowInvalid = false, AllowInvalid = true };

@@ -120,6 +123,8 @@ public:

    typedef std::map<std::string, std::string> Params;



protected:

    struct PathInfoCacheValue {

@@ -618,6 +623,13 @@ public:
    */
    StorePathSet exportReferences(const StorePathSet & storePaths, const StorePathSet & inputPaths);

    /**
     * Given a store path, return the realisation actually used in the realisation of this path:
     * - If the path is a content-addressed derivation, try to resolve it
     * - Otherwise, find one of its derivers
     */
    std::optional<StorePath> getBuildDerivationPath(const StorePath &);

    /* Hack to allow long-running processes like hydra-queue-runner to
       occasionally flush their path info cache. */
    void clearPathInfoCache()

@@ -719,6 +731,11 @@ void copyClosure(

void removeTempRoots();


/* Resolve the derived path completely, failing if any derivation output
   is unknown. */
OutputPathMap resolveDerivedPath(Store &, const DerivedPath::Built &, Store * evalStore = nullptr);


/* Return a Store object to access the Nix store denoted by
   ‘uri’ (slight misnomer...). Supported values are:

23  src/libstore/tests/libstoretests.hh  Normal file
@@ -0,0 +1,23 @@
#include <gtest/gtest.h>
#include <gmock/gmock.h>

#include "store-api.hh"

namespace nix {

class LibStoreTest : public ::testing::Test {
    public:
        static void SetUpTestSuite() {
            initLibStore();
        }

    protected:
        LibStoreTest()
            : store(openStore("dummy://"))
        { }

        ref<Store> store;
};


} /* namespace nix */
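A hedged sketch of how the fixture is meant to be used by the new unit tests (tests/path.cc below follows the same pattern); the fixture subclass and test name are illustrative only:

class MyStoreTest : public LibStoreTest { };

TEST_F(MyStoreTest, print_round_trip) {
    // `store` comes from the LibStoreTest fixture (a dummy:// store).
    auto p = store->parseStorePath(
        "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-example-1.0");
    ASSERT_EQ(store->printStorePath(p),
        "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-example-1.0");
}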
@@ -12,4 +12,4 @@ libstore-tests_CXXFLAGS += -I src/libstore -I src/libutil

libstore-tests_LIBS = libstore libutil

libstore-tests_LDFLAGS := $(GTEST_LIBS)
libstore-tests_LDFLAGS := -lrapidcheck $(GTEST_LIBS)
201  src/libstore/tests/outputs-spec.cc  Normal file
@@ -0,0 +1,201 @@
#include "outputs-spec.hh"

#include <nlohmann/json.hpp>
#include <gtest/gtest.h>

namespace nix {

#ifndef NDEBUG
TEST(OutputsSpec, no_empty_names) {
    ASSERT_DEATH(OutputsSpec::Names { std::set<std::string> { } }, "");
}
#endif

#define TEST_DONT_PARSE(NAME, STR) \
    TEST(OutputsSpec, bad_ ## NAME) { \
        std::optional OutputsSpecOpt = \
            OutputsSpec::parseOpt(STR); \
        ASSERT_FALSE(OutputsSpecOpt); \
    }

TEST_DONT_PARSE(empty, "")
TEST_DONT_PARSE(garbage, "&*()")
TEST_DONT_PARSE(double_star, "**")
TEST_DONT_PARSE(star_first, "*,foo")
TEST_DONT_PARSE(star_second, "foo,*")

#undef TEST_DONT_PARSE

TEST(OutputsSpec, all) {
    std::string_view str = "*";
    OutputsSpec expected = OutputsSpec::All { };
    ASSERT_EQ(OutputsSpec::parse(str), expected);
    ASSERT_EQ(expected.to_string(), str);
}

TEST(OutputsSpec, names_out) {
    std::string_view str = "out";
    OutputsSpec expected = OutputsSpec::Names { "out" };
    ASSERT_EQ(OutputsSpec::parse(str), expected);
    ASSERT_EQ(expected.to_string(), str);
}

TEST(OutputsSpec, names_underscore) {
    std::string_view str = "a_b";
    OutputsSpec expected = OutputsSpec::Names { "a_b" };
    ASSERT_EQ(OutputsSpec::parse(str), expected);
    ASSERT_EQ(expected.to_string(), str);
}

TEST(OutputsSpec, names_numberic) {
    std::string_view str = "01";
    OutputsSpec expected = OutputsSpec::Names { "01" };
    ASSERT_EQ(OutputsSpec::parse(str), expected);
    ASSERT_EQ(expected.to_string(), str);
}

TEST(OutputsSpec, names_out_bin) {
    OutputsSpec expected = OutputsSpec::Names { "out", "bin" };
    ASSERT_EQ(OutputsSpec::parse("out,bin"), expected);
    // N.B. This normalization is OK.
    ASSERT_EQ(expected.to_string(), "bin,out");
}

#define TEST_SUBSET(X, THIS, THAT) \
    X((OutputsSpec { THIS }).isSubsetOf(THAT));

TEST(OutputsSpec, subsets_all_all) {
    TEST_SUBSET(ASSERT_TRUE, OutputsSpec::All { }, OutputsSpec::All { });
}

TEST(OutputsSpec, subsets_names_all) {
    TEST_SUBSET(ASSERT_TRUE, OutputsSpec::Names { "a" }, OutputsSpec::All { });
}

TEST(OutputsSpec, subsets_names_names_eq) {
    TEST_SUBSET(ASSERT_TRUE, OutputsSpec::Names { "a" }, OutputsSpec::Names { "a" });
}

TEST(OutputsSpec, subsets_names_names_noneq) {
    TEST_SUBSET(ASSERT_TRUE, OutputsSpec::Names { "a" }, (OutputsSpec::Names { "a", "b" }));
}

TEST(OutputsSpec, not_subsets_all_names) {
    TEST_SUBSET(ASSERT_FALSE, OutputsSpec::All { }, OutputsSpec::Names { "a" });
}

TEST(OutputsSpec, not_subsets_names_names) {
    TEST_SUBSET(ASSERT_FALSE, (OutputsSpec::Names { "a", "b" }), (OutputsSpec::Names { "a" }));
}

#undef TEST_SUBSET

#define TEST_UNION(RES, THIS, THAT) \
    ASSERT_EQ(OutputsSpec { RES }, (OutputsSpec { THIS }).union_(THAT));

TEST(OutputsSpec, union_all_all) {
    TEST_UNION(OutputsSpec::All { }, OutputsSpec::All { }, OutputsSpec::All { });
}

TEST(OutputsSpec, union_all_names) {
    TEST_UNION(OutputsSpec::All { }, OutputsSpec::All { }, OutputsSpec::Names { "a" });
}

TEST(OutputsSpec, union_names_all) {
    TEST_UNION(OutputsSpec::All { }, OutputsSpec::Names { "a" }, OutputsSpec::All { });
}

TEST(OutputsSpec, union_names_names) {
    TEST_UNION((OutputsSpec::Names { "a", "b" }), OutputsSpec::Names { "a" }, OutputsSpec::Names { "b" });
}

#undef TEST_UNION

#define TEST_DONT_PARSE(NAME, STR) \
    TEST(ExtendedOutputsSpec, bad_ ## NAME) { \
        std::optional extendedOutputsSpecOpt = \
            ExtendedOutputsSpec::parseOpt(STR); \
        ASSERT_FALSE(extendedOutputsSpecOpt); \
    }

TEST_DONT_PARSE(carot_empty, "^")
TEST_DONT_PARSE(prefix_carot_empty, "foo^")
TEST_DONT_PARSE(garbage, "^&*()")
TEST_DONT_PARSE(double_star, "^**")
TEST_DONT_PARSE(star_first, "^*,foo")
TEST_DONT_PARSE(star_second, "^foo,*")

#undef TEST_DONT_PARSE

TEST(ExtendedOutputsSpec, defeault) {
    std::string_view str = "foo";
    auto [prefix, extendedOutputsSpec] = ExtendedOutputsSpec::parse(str);
    ASSERT_EQ(prefix, "foo");
    ExtendedOutputsSpec expected = ExtendedOutputsSpec::Default { };
    ASSERT_EQ(extendedOutputsSpec, expected);
    ASSERT_EQ(std::string { prefix } + expected.to_string(), str);
}

TEST(ExtendedOutputsSpec, all) {
    std::string_view str = "foo^*";
    auto [prefix, extendedOutputsSpec] = ExtendedOutputsSpec::parse(str);
    ASSERT_EQ(prefix, "foo");
    ExtendedOutputsSpec expected = OutputsSpec::All { };
    ASSERT_EQ(extendedOutputsSpec, expected);
    ASSERT_EQ(std::string { prefix } + expected.to_string(), str);
}

TEST(ExtendedOutputsSpec, out) {
    std::string_view str = "foo^out";
    auto [prefix, extendedOutputsSpec] = ExtendedOutputsSpec::parse(str);
    ASSERT_EQ(prefix, "foo");
    ExtendedOutputsSpec expected = OutputsSpec::Names { "out" };
    ASSERT_EQ(extendedOutputsSpec, expected);
    ASSERT_EQ(std::string { prefix } + expected.to_string(), str);
}

TEST(ExtendedOutputsSpec, out_bin) {
    auto [prefix, extendedOutputsSpec] = ExtendedOutputsSpec::parse("foo^out,bin");
    ASSERT_EQ(prefix, "foo");
    ExtendedOutputsSpec expected = OutputsSpec::Names { "out", "bin" };
    ASSERT_EQ(extendedOutputsSpec, expected);
    ASSERT_EQ(std::string { prefix } + expected.to_string(), "foo^bin,out");
}

TEST(ExtendedOutputsSpec, many_carrot) {
    auto [prefix, extendedOutputsSpec] = ExtendedOutputsSpec::parse("foo^bar^out,bin");
    ASSERT_EQ(prefix, "foo^bar");
    ExtendedOutputsSpec expected = OutputsSpec::Names { "out", "bin" };
    ASSERT_EQ(extendedOutputsSpec, expected);
    ASSERT_EQ(std::string { prefix } + expected.to_string(), "foo^bar^bin,out");
}


#define TEST_JSON(TYPE, NAME, STR, VAL) \
    \
    TEST(TYPE, NAME ## _to_json) { \
        using nlohmann::literals::operator "" _json; \
        ASSERT_EQ( \
            STR ## _json, \
            ((nlohmann::json) TYPE { VAL })); \
    } \
    \
    TEST(TYPE, NAME ## _from_json) { \
        using nlohmann::literals::operator "" _json; \
        ASSERT_EQ( \
            TYPE { VAL }, \
            (STR ## _json).get<TYPE>()); \
    }

TEST_JSON(OutputsSpec, all, R"(["*"])", OutputsSpec::All { })
TEST_JSON(OutputsSpec, name, R"(["a"])", OutputsSpec::Names { "a" })
TEST_JSON(OutputsSpec, names, R"(["a","b"])", (OutputsSpec::Names { "a", "b" }))

TEST_JSON(ExtendedOutputsSpec, def, R"(null)", ExtendedOutputsSpec::Default { })
TEST_JSON(ExtendedOutputsSpec, all, R"(["*"])", ExtendedOutputsSpec::Explicit { OutputsSpec::All { } })
TEST_JSON(ExtendedOutputsSpec, name, R"(["a"])", ExtendedOutputsSpec::Explicit { OutputsSpec::Names { "a" } })
TEST_JSON(ExtendedOutputsSpec, names, R"(["a","b"])", (ExtendedOutputsSpec::Explicit { OutputsSpec::Names { "a", "b" } }))

#undef TEST_JSON

}
@@ -1,46 +0,0 @@
#include "path-with-outputs.hh"

#include <gtest/gtest.h>

namespace nix {

TEST(parseOutputsSpec, basic)
{
    {
        auto [prefix, outputsSpec] = parseOutputsSpec("foo");
        ASSERT_EQ(prefix, "foo");
        ASSERT_TRUE(std::get_if<DefaultOutputs>(&outputsSpec));
    }

    {
        auto [prefix, outputsSpec] = parseOutputsSpec("foo^*");
        ASSERT_EQ(prefix, "foo");
        ASSERT_TRUE(std::get_if<AllOutputs>(&outputsSpec));
    }

    {
        auto [prefix, outputsSpec] = parseOutputsSpec("foo^out");
        ASSERT_EQ(prefix, "foo");
        ASSERT_TRUE(std::get<OutputNames>(outputsSpec) == OutputNames({"out"}));
    }

    {
        auto [prefix, outputsSpec] = parseOutputsSpec("foo^out,bin");
        ASSERT_EQ(prefix, "foo");
        ASSERT_TRUE(std::get<OutputNames>(outputsSpec) == OutputNames({"out", "bin"}));
    }

    {
        auto [prefix, outputsSpec] = parseOutputsSpec("foo^bar^out,bin");
        ASSERT_EQ(prefix, "foo^bar");
        ASSERT_TRUE(std::get<OutputNames>(outputsSpec) == OutputNames({"out", "bin"}));
    }

    {
        auto [prefix, outputsSpec] = parseOutputsSpec("foo^&*()");
        ASSERT_EQ(prefix, "foo^&*()");
        ASSERT_TRUE(std::get_if<DefaultOutputs>(&outputsSpec));
    }
}

}
144  src/libstore/tests/path.cc  Normal file
@@ -0,0 +1,144 @@
#include <regex>

#include <nlohmann/json.hpp>
#include <gtest/gtest.h>
#include <rapidcheck/gtest.h>

#include "path-regex.hh"
#include "store-api.hh"

#include "libstoretests.hh"

namespace nix {

#define STORE_DIR "/nix/store/"
#define HASH_PART "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q"

class StorePathTest : public LibStoreTest
{
};

static std::regex nameRegex { std::string { nameRegexStr } };

#define TEST_DONT_PARSE(NAME, STR) \
    TEST_F(StorePathTest, bad_ ## NAME) { \
        std::string_view str = \
            STORE_DIR HASH_PART "-" STR; \
        ASSERT_THROW( \
            store->parseStorePath(str), \
            BadStorePath); \
        std::string name { STR }; \
        EXPECT_FALSE(std::regex_match(name, nameRegex)); \
    }

TEST_DONT_PARSE(empty, "")
TEST_DONT_PARSE(garbage, "&*()")
TEST_DONT_PARSE(double_star, "**")
TEST_DONT_PARSE(star_first, "*,foo")
TEST_DONT_PARSE(star_second, "foo,*")
TEST_DONT_PARSE(bang, "foo!o")

#undef TEST_DONT_PARSE

#define TEST_DO_PARSE(NAME, STR) \
    TEST_F(StorePathTest, good_ ## NAME) { \
        std::string_view str = \
            STORE_DIR HASH_PART "-" STR; \
        auto p = store->parseStorePath(str); \
        std::string name { p.name() }; \
        EXPECT_TRUE(std::regex_match(name, nameRegex)); \
    }

// 0-9 a-z A-Z + - . _ ? =

TEST_DO_PARSE(numbers, "02345")
TEST_DO_PARSE(lower_case, "foo")
TEST_DO_PARSE(upper_case, "FOO")
TEST_DO_PARSE(plus, "foo+bar")
TEST_DO_PARSE(dash, "foo-dev")
TEST_DO_PARSE(underscore, "foo_bar")
TEST_DO_PARSE(period, "foo.txt")
TEST_DO_PARSE(question_mark, "foo?why")
TEST_DO_PARSE(equals_sign, "foo=foo")

#undef TEST_DO_PARSE

// For rapidcheck
void showValue(const StorePath & p, std::ostream & os) {
    os << p.to_string();
}

}

namespace rc {
using namespace nix;

template<>
struct Arbitrary<StorePath> {
    static Gen<StorePath> arbitrary();
};

Gen<StorePath> Arbitrary<StorePath>::arbitrary()
{
    auto len = *gen::inRange<size_t>(1, StorePath::MaxPathLen);

    std::string pre { HASH_PART "-" };
    pre.reserve(pre.size() + len);

    for (size_t c = 0; c < len; ++c) {
        switch (auto i = *gen::inRange<uint8_t>(0, 10 + 2 * 26 + 6)) {
        case 0 ... 9:
            pre += '0' + i;
            break;
        case 10 ... 35:
            pre += 'A' + (i - 10);
            break;
        case 36 ... 61:
            pre += 'a' + (i - 36);
            break;
        case 62:
            pre += '+';
            break;
        case 63:
            pre += '-';
            break;
        case 64:
            pre += '.';
            break;
        case 65:
            pre += '_';
            break;
        case 66:
            pre += '?';
            break;
        case 67:
            pre += '=';
            break;
        default:
            assert(false);
        }
    }

    return gen::just(StorePath { pre });
}

} // namespace rc

namespace nix {

RC_GTEST_FIXTURE_PROP(
    StorePathTest,
    prop_regex_accept,
    (const StorePath & p))
{
    RC_ASSERT(std::regex_match(std::string { p.name() }, nameRegex));
}

RC_GTEST_FIXTURE_PROP(
    StorePathTest,
    prop_round_rip,
    (const StorePath & p))
{
    RC_ASSERT(p == store->parseStorePath(store->printStorePath(p)));
}

}
@@ -74,6 +74,8 @@ struct AbstractPos
    virtual void print(std::ostream & out) const = 0;

    std::optional<LinesOfCode> getCodeLines() const;

    virtual ~AbstractPos() = default;
};

std::ostream & operator << (std::ostream & str, const AbstractPos & pos);

14  src/libutil/json-impls.hh  Normal file
@@ -0,0 +1,14 @@
#pragma once

#include "nlohmann/json_fwd.hpp"

// Following https://github.com/nlohmann/json#how-can-i-use-get-for-non-default-constructiblenon-copyable-types
#define JSON_IMPL(TYPE) \
    namespace nlohmann { \
        using namespace nix; \
        template <> \
        struct adl_serializer<TYPE> { \
            static TYPE from_json(const json & json); \
            static void to_json(json & json, TYPE t); \
        }; \
    }
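A hedged recap of the pattern this macro enables (it is exactly what outputs-spec.hh/.cc do above): declare the specialization in a header, define the two functions in the .cc, and then nlohmann::json conversions work for a type without a default constructor.

// In a header, after the type definition:
JSON_IMPL(OutputsSpec)

// At a use site (illustrative):
nlohmann::json j = OutputsSpec { OutputsSpec::Names { "out" } };  // serializes to ["out"]
auto spec = j.get<OutputsSpec>();                                 // round-trips via from_json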
@@ -25,11 +25,17 @@ public:
            /* Wait indefinitely until a POLLHUP occurs. */
            struct pollfd fds[1];
            fds[0].fd = fd;
            /* This shouldn't be necessary, but macOS doesn't seem to
               like a zeroed out events field.
               See rdar://37537852.
            */
            fds[0].events = POLLHUP;
            /* Polling for no specific events (i.e. just waiting
               for an error/hangup) doesn't work on macOS
               anymore. So wait for read events and ignore
               them. */
            fds[0].events =
                #ifdef __APPLE__
                POLLRDNORM
                #else
                0
                #endif
                ;
            auto count = poll(fds, 1, -1);
            if (count == -1) abort(); // can't happen
            /* This shouldn't happen, but can on macOS due to a bug.

@@ -40,10 +46,15 @@ public:
               too harmful.
            */
            if (count == 0) continue;
            assert(fds[0].revents & POLLHUP);
            if (fds[0].revents & POLLHUP) {
                triggerInterrupt();
                break;
            }
            /* This will only happen on macOS. We sleep a bit to
               avoid waking up too often if the client is sending
               input. */
            sleep(1);
        }
    });
};
Some files were not shown because too many files have changed in this diff.