forked from lix-project/lix
Compare commits: repl-overl ... main (216 commits)
Author | SHA1 | Date | |
---|---|---|---|
Linus Heckemann | c95b73d8a1 | ||
V. | 56ead73fda | ||
Dusk Banks | 22eb47f0fd | ||
Dusk Banks | 8b2f8d538b | ||
V. | fb1b211037 | ||
Linus Heckemann | 8b0ac51f12 | ||
eldritch horrors | 9c22a4d31b | ||
eldritch horrors | c83b13eafd | ||
eldritch horrors | d65838a900 | ||
eldritch horrors | c68f0cdf00 | ||
eldritch horrors | 14eff10fe4 | ||
eldritch horrors | 923abe347c | ||
eldritch horrors | 64864c3730 | ||
eldritch horrors | 10488f7431 | ||
Justin ! | 8c567c0424 | ||
eldritch horrors | 61146c73ce | ||
Linus Heckemann | f55ed83991 | ||
eldritch horrors | 212a14bb1f | ||
eldritch horrors | 7c716b9716 | ||
eldritch horrors | 2b3bdda027 | ||
eldritch horrors | 97c76c4655 | ||
eldritch horrors | c82407fc1e | ||
eldritch horrors | 982d049d3b | ||
eldritch horrors | 5cd7055044 | ||
eldritch horrors | 6f18e1ebde | ||
eldritch horrors | a839c31e6c | ||
eldritch horrors | 30bec83fa4 | ||
eldritch horrors | d82b212d33 | ||
eldritch horrors | 220251ba51 | ||
raito | 9f682204b5 | ||
raito | f9e7df01f3 | ||
raito | f7edee7c14 | ||
eldritch horrors | 6c2609c5f9 | ||
eldritch horrors | af27d1ecd8 | ||
eldritch horrors | 1e3b45546c | ||
eldritch horrors | ce3e1d1e7a | ||
eldritch horrors | 2d49efaa2e | ||
eldritch horrors | 98b55c3a1d | ||
eldritch horrors | a83bf24281 | ||
eldritch horrors | a8d6577bf0 | ||
eldritch horrors | 59e364c2a8 | ||
b020d1fc27 | |||
alois31 | 2734a9cf94 | ||
eldritch horrors | 5f1344dd8a | ||
eldritch horrors | faee771b30 | ||
eldritch horrors | b8cc54df0a | ||
eldritch horrors | 67f1aafd61 | ||
eldritch horrors | 1d9d40b2a6 | ||
eldritch horrors | 343aca3a27 | ||
Lily Ballard | 5ce1d8463a | ||
b77687945e | |||
Lily Ballard | 4308ec1ae4 | ||
Lily Ballard | 65551175e3 | ||
Lily Ballard | 068f4b147d | ||
eldritch horrors | 0ff8f91325 | ||
eldritch horrors | b0e619b8bd | ||
eldritch horrors | 564d931134 | ||
jade | dccde94369 | ||
Dusk Banks | 60b89c63db | ||
jade | 60578b4d7d | ||
jade | eadce58a90 | ||
jade | ca55060ac6 | ||
jade | 77b6f6734f | ||
piegames | e2d00ac3a8 | ||
piegames | 3ba5ef91bc | ||
piegames | e5de1d13c4 | ||
piegames | 878e181882 | ||
piegames | c852ae60da | ||
piegames | 765771a355 | ||
piegames | 14291856e4 | ||
piegames | e7d6212f77 | ||
piegames | f98ee07573 | ||
piegames | 580df9bdb3 | ||
jade | 52b64662a4 | ||
alois31 | 689eb45630 | ||
alois31 | ece99fee23 | ||
alois31 | 4dbbd721eb | ||
Linus Heckemann | e55cd3beea | ||
Justin ! | 5a06b17b91 | ||
jade | f6077314fa | ||
jade | a020f5f6cb | ||
V. | fbf7a8b440 | ||
V. | 31ff77b3f9 | ||
jade | c1f4c60bc2 | ||
jade | 8497f0fe19 | ||
Maximilian Bosch | 4682e40183 | ||
d726236e27 | |||
jade | 326cbecb61 | ||
Maximilian Bosch | a322fcea4a | ||
jade | a0fb52c0af | ||
jade | 822997bd34 | ||
jade | 4180b84a67 | ||
jade | 3571817e3a | ||
jade | 3caf3e1e08 | ||
jade | 9865ebaaa6 | ||
jade | 7f7a38f278 | ||
Rebecca Turner | 0012887310 | ||
Lulu | 4ea8c9d643 | ||
Lulu | 43e79f4434 | ||
Lulu | 299813f324 | ||
Lulu | d6e1b11d3e | ||
Lulu | 51a5025913 | ||
eldritch horrors | ed9b7f4f84 | ||
eldritch horrors | 649d8cd08f | ||
eldritch horrors | 9adf6f4568 | ||
eldritch horrors | 03cbc0ecb9 | ||
eldritch horrors | 1caf2afb1d | ||
eldritch horrors | 7ff60b7445 | ||
eldritch horrors | fc6291e46d | ||
eldritch horrors | 40f154c0ed | ||
eldritch horrors | f389a54079 | ||
eldritch horrors | 7ef4466018 | ||
eldritch horrors | a9f2aab226 | ||
eldritch horrors | 99edc2ae38 | ||
eldritch horrors | 896a123605 | ||
Rebecca Turner | 0d484aa498 | ||
Rebecca Turner | 86b213e632 | ||
eldritch horrors | a3dd07535c | ||
alois31 | 5df2cccc49 | ||
jade | 345e3d068a | ||
jade | 19edaed81b | ||
eldritch horrors | 5b1715e633 | ||
Rebecca Turner | 0b29859cfe | ||
Olivia Crain | 1bfc37fea5 | ||
Olivia Crain | 8f300fbd82 | ||
Rebecca Turner | 36073781fb | ||
Rebecca Turner | b63d4a0c62 | ||
Robert Hensing | ee0c195eba | ||
eldritch horrors | 7752927660 | ||
eldritch horrors | 3edc272341 | ||
eldritch horrors | 9b05636937 | ||
eldritch horrors | 9889c79fe3 | ||
eldritch horrors | 732de75f67 | ||
eldritch horrors | d5db0b1abc | ||
eldritch horrors | b0c7c1ec66 | ||
eldritch horrors | d31310bf59 | ||
raito | 8e05cc1e6c | ||
Jonas Chevalier | a16ceb9411 | ||
eldritch horrors | aa33c34c9b | ||
eldritch horrors | ccd2862666 | ||
eldritch horrors | 47ddd11933 | ||
eldritch horrors | 7f4f86795c | ||
eldritch horrors | a5240b23ab | ||
eldritch horrors | 8fb642b6e0 | ||
eldritch horrors | 1a52e4f755 | ||
eldritch horrors | 3f7519526f | ||
Maximilian Bosch | 289e7a6b5a | ||
Olivia Crain | f12b60273b | ||
Maximilian Bosch | 04daff94e3 | ||
Olivia Crain | 4780dd6bc4 | ||
Olivia Crain | b86863d935 | ||
Olivia Crain | 624f44bf25 | ||
Olivia Crain | 0e6b3435a1 | ||
eldritch horrors | ae5d8dae1b | ||
eldritch horrors | 852da07b67 | ||
eldritch horrors | bf32085d63 | ||
eldritch horrors | cd1ceffb0e | ||
eldritch horrors | 0478949c72 | ||
Jonas Chevalier | 2265536e85 | ||
jade | 14dc84ed03 | ||
eldritch horrors | 619a93bd54 | ||
jade | 5dc7671d81 | ||
jade | b6038e988d | ||
eldritch horrors | 531d040e8c | ||
eldritch horrors | ca9256a789 | ||
eldritch horrors | 4b66e1e24f | ||
puck | 37b22dae04 | ||
Maximilian Bosch | 31954b5136 | ||
jade | acf963468f | ||
c1631b0a39 | |||
jade | aca19187d0 | ||
jade | 19e0ce2c03 | ||
jade | 8a6b84df14 | ||
Maximilian Bosch | eccbe9586a | ||
Olivia Crain | 2f794733b2 | ||
alois31 | 5f298f74c9 | ||
jade | 79246a3733 | ||
jade | 789b19a0cf | ||
Rebecca Turner | 0943b214c9 | ||
alois31 | 2afdf1ed66 | ||
jade | ed381cd58a | ||
jade | 4046e019ca | ||
Rebecca Turner | 8ab5743904 | ||
Rebecca Turner | 7ae0409989 | ||
Maximilian Bosch | 80202e3ca3 | ||
jade | 727258241f | ||
jade | 5246cea6c8 | ||
jade | 8f88590d13 | ||
alois31 | 3f07c65510 | ||
jade | b7fc37b015 | ||
jade | ca1dc3f70b | ||
alois31 | b2fc007811 | ||
alois31 | 82aa1ccab4 | ||
Rebecca Turner | df0137226d | ||
jade | 81c2e0ac8e | ||
Rebecca Turner | 24db81eaf2 | ||
Rebecca Turner | cc183fdbc1 | ||
Rebecca Turner | f5ae72d445 | ||
Rebecca Turner | 6de6cae3e7 | ||
alois31 | 8f7ab26f96 | ||
eldritch horrors | c14486ae8d | ||
alois31 | e9505dcc5a | ||
eldritch horrors | f2a49032a6 | ||
eldritch horrors | 92eccfbd68 | ||
alois31 | 4715d557ef | ||
alois31 | 991d8ce275 | ||
Rebecca Turner | 72589e7032 | ||
Rebecca Turner | 644176a631 | ||
alois31 | 63ee2cdda3 | ||
alois31 | d7c37324bb | ||
Rebecca Turner | 75c0de3e3c | ||
Rebecca Turner | fc4a160878 | ||
Rebecca Turner | b7b1b9723f | ||
Rebecca Turner | 9d8f433246 | ||
Rebecca Turner | 742303dc3a | ||
alois31 | de552c42cb |

@@ -29,3 +29,7 @@ trim_trailing_whitespace = false
indent_style = space
indent_size = 2
max_line_length = 0

[meson.build]
indent_style = space
indent_size = 2

@@ -2,7 +2,7 @@
name: Missing or incorrect documentation
about: Help us improve the reference manual
title: ''
labels: documentation
labels: docs
assignees: ''

---

@@ -19,10 +19,10 @@ assignees: ''

<!-- make sure this issue is not redundant or obsolete -->

- [ ] checked [latest Lix manual] \([source]\)
- [ ] checked [latest Lix manual] or its [source code]
- [ ] checked [documentation issues] and [recent documentation changes] for possible duplicates

[latest Nix manual]: https://docs.lix.systems/manual/lix/nightly
[source]: https://git.lix.systems/lix-project/lix/src/main/doc/manual/src
[latest Lix manual]: https://docs.lix.systems/manual/lix/nightly
[source code]: https://git.lix.systems/lix-project/lix/src/main/doc/manual/src
[documentation issues]: https://git.lix.systems/lix-project/lix/issues?labels=151&state=all
[recent documentation changes]: https://gerrit.lix.systems/q/p:lix+path:%22%5Edoc/manual/.*%22

.gitignore (vendored, 3 changes)

@@ -36,3 +36,6 @@ buildtime.bin

# Rust build files when using Cargo (not actually supported for building but it spews the files anyway)
/target/

# Python compiled files from the test suite
*.pyc

|
@ -1,4 +1,5 @@
|
|||
#!/usr/bin/env bash
|
||||
#!/usr/bin/env nix-shell
|
||||
#!nix-shell -i bash -p bash -p hyperfine
|
||||
|
||||
set -euo pipefail
|
||||
shopt -s inherit_errexit
|
||||
|
@ -21,16 +22,21 @@ fi
|
|||
_exit=""
|
||||
trap "$_exit" EXIT
|
||||
|
||||
# XXX: yes this is very silly. flakes~!!
|
||||
nix build --impure --expr '(builtins.getFlake "git+file:.").inputs.nixpkgs.outPath' -o bench/nixpkgs
|
||||
flake_args=("--extra-experimental-features" "nix-command flakes")
|
||||
|
||||
# XXX: yes this is very silly. flakes~!!
|
||||
nix build "${flake_args[@]}" --impure --expr '(builtins.getFlake "git+file:.").inputs.nixpkgs.outPath' -o bench/nixpkgs
|
||||
|
||||
# We must ignore the global config, or else NIX_PATH won't work reliably.
|
||||
# See https://github.com/NixOS/nix/issues/9574
|
||||
export NIX_CONF_DIR='/var/empty'
|
||||
export NIX_REMOTE="$(mktemp -d)"
|
||||
_exit='rm -rfv "$NIX_REMOTE"; $_exit'
|
||||
export NIX_PATH="nixpkgs=bench/nixpkgs:nixos-config=bench/configuration.nix"
|
||||
|
||||
builds=("$@")
|
||||
|
||||
flake_args="--extra-experimental-features 'nix-command flakes'"
|
||||
flake_args="${flake_args[*]@Q}"
|
||||
|
||||
hyperfineArgs=(
|
||||
--parameter-list BUILD "$(IFS=,; echo "${builds[*]}")"
|
||||
|
|
|
@ -33,32 +33,7 @@ GENERATE_LATEX = NO
|
|||
# spaces. See also FILE_PATTERNS and EXTENSION_MAPPING
|
||||
# Note: If this tag is empty the current directory is searched.
|
||||
|
||||
# FIXME Make this list more maintainable somehow. We could maybe generate this
|
||||
# in the Makefile, but we would need to change how `.in` files are preprocessed
|
||||
# so they can expand variables despite configure variables.
|
||||
|
||||
INPUT = \
|
||||
src/libcmd \
|
||||
src/libexpr \
|
||||
src/libexpr/flake \
|
||||
tests/unit/libexpr \
|
||||
tests/unit/libexpr/value \
|
||||
tests/unit/libexpr/test \
|
||||
tests/unit/libexpr/test/value \
|
||||
src/libexpr/value \
|
||||
src/libfetchers \
|
||||
src/libmain \
|
||||
src/libstore \
|
||||
src/libstore/build \
|
||||
src/libstore/builtins \
|
||||
tests/unit/libstore \
|
||||
tests/unit/libstore/test \
|
||||
src/libutil \
|
||||
tests/unit/libutil \
|
||||
tests/unit/libutil/test \
|
||||
src/nix \
|
||||
src/nix-env \
|
||||
src/nix-store
|
||||
INPUT = @INPUT_PATHS@
|
||||
|
||||
# If the MACRO_EXPANSION tag is set to YES, doxygen will expand all macro names
|
||||
# in the source code. If set to NO, only conditional compilation will be
|
||||
|
@ -97,3 +72,15 @@ EXPAND_AS_DEFINED = \
|
|||
DECLARE_WORKER_SERIALISER \
|
||||
DECLARE_SERVE_SERIALISER \
|
||||
LENGTH_PREFIXED_PROTO_HELPER
|
||||
|
||||
# The STRIP_FROM_PATH tag can be used to strip a user-defined part of the path.
|
||||
# Stripping is only done if one of the specified strings matches the left-hand
|
||||
# part of the path. The tag can be used to show relative paths in the file list.
|
||||
# If left blank the directory from which doxygen is run is used as the path to
|
||||
# strip.
|
||||
#
|
||||
# Note that you can specify absolute paths here, but also relative paths, which
|
||||
# will be relative from the directory where doxygen is started.
|
||||
# This tag requires that the tag FULL_PATH_NAMES is set to YES.
|
||||
|
||||
STRIP_FROM_PATH = "@PROJECT_SOURCE_ROOT@"
|
||||
|
|
|
@ -1,3 +1,35 @@
|
|||
internal_api_sources = [
|
||||
'src/libcmd',
|
||||
'src/libexpr',
|
||||
'src/libexpr/flake',
|
||||
'tests/unit/libexpr',
|
||||
'tests/unit/libexpr/value',
|
||||
'tests/unit/libexpr/test',
|
||||
'tests/unit/libexpr/test/value',
|
||||
'src/libexpr/value',
|
||||
'src/libfetchers',
|
||||
'src/libmain',
|
||||
'src/libstore',
|
||||
'src/libstore/build',
|
||||
'src/libstore/builtins',
|
||||
'tests/unit/libstore',
|
||||
'tests/unit/libstore/test',
|
||||
'src/libutil',
|
||||
'tests/unit/libutil',
|
||||
'tests/unit/libutil/test',
|
||||
'src/nix',
|
||||
'src/nix-env',
|
||||
'src/nix-store',
|
||||
]
|
||||
|
||||
# We feed Doxygen absolute paths so it can be invoked from any working directory.
|
||||
internal_api_sources_absolute = []
|
||||
foreach src : internal_api_sources
|
||||
internal_api_sources_absolute += '"' + (meson.project_source_root() / src) + '"'
|
||||
endforeach
|
||||
|
||||
internal_api_sources_oneline = ' \\\n '.join(internal_api_sources_absolute)
|
||||
|
||||
doxygen_cfg = configure_file(
|
||||
input : 'doxygen.cfg.in',
|
||||
output : 'doxygen.cfg',
|
||||
|
@ -5,22 +37,16 @@ doxygen_cfg = configure_file(
|
|||
'PACKAGE_VERSION': meson.project_version(),
|
||||
'RAPIDCHECK_HEADERS': rapidcheck_meson.get_variable('includedir'),
|
||||
'docdir' : meson.current_build_dir(),
|
||||
'INPUT_PATHS' : internal_api_sources_oneline,
|
||||
'PROJECT_SOURCE_ROOT' : meson.project_source_root(),
|
||||
},
|
||||
)
|
||||
|
||||
internal_api_docs = custom_target(
|
||||
'internal-api-docs',
|
||||
command : [
|
||||
bash,
|
||||
# Meson can you please just give us a `workdir` argument to custom targets...
|
||||
'-c',
|
||||
# We have to prefix the doxygen_cfg path with the project build root
|
||||
# because of the cd in front.
|
||||
'cd @0@ && @1@ @2@/@INPUT0@'.format(
|
||||
meson.project_source_root(),
|
||||
doxygen.full_path(),
|
||||
meson.project_build_root(),
|
||||
),
|
||||
doxygen.full_path(),
|
||||
'@INPUT0@',
|
||||
],
|
||||
input : [
|
||||
doxygen_cfg,
|
||||
|
|
|
@ -85,6 +85,10 @@ kloenk:
|
|||
forgejo: kloenk
|
||||
github: kloenk
|
||||
|
||||
lheckemann:
|
||||
forgejo: lheckemann
|
||||
github: lheckemann
|
||||
|
||||
lovesegfault:
|
||||
github: lovesegfault
|
||||
|
||||
|
@ -141,9 +145,17 @@ valentin:
|
|||
display_name: Valentin Gagarin
|
||||
github: fricklerhandwerk
|
||||
|
||||
vigress8:
|
||||
display_name: Vigress
|
||||
forgejo: vigress8
|
||||
github: vigress8
|
||||
|
||||
winter:
|
||||
forgejo: winter
|
||||
github: winterqt
|
||||
|
||||
yshui:
|
||||
github: yshui
|
||||
|
||||
zimbatm:
|
||||
github: zimbatm
|
||||
|
|
|
@ -126,20 +126,19 @@ manual = custom_target(
|
|||
'manual',
|
||||
'markdown',
|
||||
],
|
||||
install : true,
|
||||
install_dir : [
|
||||
datadir / 'doc/nix',
|
||||
false,
|
||||
],
|
||||
depfile : 'manual.d',
|
||||
env : {
|
||||
'RUST_LOG': 'info',
|
||||
'MDBOOK_SUBSTITUTE_SEARCH': meson.current_build_dir() / 'src',
|
||||
},
|
||||
)
|
||||
manual_html = manual[0]
|
||||
manual_md = manual[1]
|
||||
|
||||
install_subdir(
|
||||
manual_html.full_path(),
|
||||
install_dir : datadir / 'doc/nix',
|
||||
)
|
||||
|
||||
nix_nested_manpages = [
|
||||
[ 'nix-env',
|
||||
[
|
||||
|
|
doc/manual/rl-next/alt-left-and-alt-right-in-repl.md (new file, +10)

@@ -0,0 +1,10 @@
---
synopsis: "`Alt+Left` and `Alt+Right` go back/forwards by words in `nix repl`"
issues: [fj#501]
cls: [1883]
category: Fixes
credits: 9999years
---

`nix repl` now recognizes `Alt+Left` and `Alt+Right` for navigating by words
when entering input in `nix repl` on more terminals/platforms.
doc/manual/rl-next/content-encodings.md (new file, +15)

@@ -0,0 +1,15 @@
---
synopsis: "Drop support for `xz` and `bzip2` Content-Encoding"
category: Miscellany
cls: [2134]
credits: horrors
---

Lix no longer supports the non-standard HTTP Content-Encoding values `xz` and `bzip2`.
We do not expect this to cause any problems in practice since these encodings *aren't*
standard, and any server delivering them anyway without being asked to is already well
and truly set on the path of causing inexplicable client breakages.

Lix's ability to decompress files compressed with `xz` or `bzip2` is unaffected. We're
only bringing Lix more in line with the HTTP standard; all post-transfer data handling
remains as it was before.
doc/manual/rl-next/ctrl-c-improved.md (new file, +13)

@@ -0,0 +1,13 @@
---
synopsis: Ctrl-C stops Nix commands much more reliably and responsively
issues: [7245, fj#393]
cls: [2016]
prs: [11618]
category: Fixes
credits: [roberth, 9999years]
---

CTRL-C will now stop Nix commands much more reliably and responsively. While
there are still some cases where a Nix command can be slow or unresponsive
following a `SIGINT` (please report these as issues!), the vast majority of
signals will now cause the Nix command to quit quickly and consistently.
doc/manual/rl-next/download-protocols.md (new file, +10)

@@ -0,0 +1,10 @@
---
synopsis: "transfers no longer allow arbitrary url schemas"
category: Breaking Changes
cls: [2106]
credits: horrors
---

Lix no longer allows transfers using arbitrary url schemas. Only `http://`, `https://`, `ftp://`, `ftps://`, and `file://` urls are supported going forward. This affects `builtins.fetchurl`, `<nix/fetchurl.nix>`, transfers to and from binary caches, and all other uses of the internal file transfer code. Flake inputs using multi-protocol schemas (e.g. `git+ssh`) are not affected as those use external utilities to transfer data.

The `s3://` scheme is not affected at all by this change and continues to work if S3 support is built into Lix.
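As a hedged illustration (the URL below is a placeholder, not taken from the diff), fetches over one of the listed schemes keep working, while any other scheme is refused by the internal transfer code:

```nix
# Placeholder URL; https:// is one of the schemes that remains supported.
# A URL using a scheme outside http(s)/ftp(s)/file (plus s3 where built in)
# is now rejected by Lix's file transfer layer.
builtins.fetchurl "https://example.org/source.tar.gz"
```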
doc/manual/rl-next/fetchGit-regression.md (new file, +23)

@@ -0,0 +1,23 @@
---
synopsis: restore backwards-compatibility of `builtins.fetchGit` with Nix 2.3
issues: [5291, 5128]
credits: [ma27]
category: Fixes
---

Compatibility with `builtins.fetchGit` from Nix 2.3 has been restored as follows:

* Until now, each `ref` was prefixed with `refs/heads` unless it starts with `refs/` itself.

  Now, this is not done if the `ref` looks like a commit hash.

* Specifying `builtins.fetchGit { ref = "a-tag"; /* … */ }` was broken because `refs/heads` was appended.

  Now, the fetcher doesn't turn a ref into `refs/heads/ref`, but into `refs/*/ref`. That way,
  the value in `ref` can be either a tag or a branch.

* The ref resolution happens the same way as in git:

  * If `refs/ref` exists, it's used.
  * If a tag `refs/tags/ref` exists, it's used.
  * If a branch `refs/heads/ref` exists, it's used.
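For illustration (the repository URL and tag name are hypothetical, not part of the change above), a call like the following now resolves `ref` against tags as well as branches instead of forcing `refs/heads/`:

```nix
# Hypothetical example: `ref` names a tag here; with the restored Nix 2.3
# behaviour it resolves via refs/tags/v1.2.3 rather than refs/heads/v1.2.3.
builtins.fetchGit {
  url = "https://example.org/some/repo.git";
  ref = "v1.2.3";
}
```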
doc/manual/rl-next/nix-fmt-default-argument.md (new file, +38)

@@ -0,0 +1,38 @@
---
synopsis: Removing the `.` default argument passed to the `nix fmt` formatter
issues: []
prs: [11438]
cls: [1902]
category: Breaking Changes
credits: zimbatm
---

The underlying formatter no longer receives the "." default argument when `nix fmt` is called with no arguments.

This change was necessary as the formatter wasn't able to distinguish between
a user wanting to format the current folder with `nix fmt .` or the generic
`nix fmt`.

The default behaviour is now the responsibility of the formatter itself, and
allows tools such as treefmt to format the whole tree instead of only the
current directory and below.

This may cause issues with some formatters: nixfmt, nixpkgs-fmt and alejandra currently format stdin when no arguments are passed.

Here is a small wrapper example that will restore the previous behaviour for such a formatter:

```nix
{
  outputs = { self, nixpkgs, systems }:
    let
      eachSystem = nixpkgs.lib.genAttrs (import systems) (system: nixpkgs.legacyPackages.${system});
    in
    {
      formatter = eachSystem (pkgs:
        pkgs.writeShellScriptBin "formatter" ''
          if [[ $# = 0 ]]; then set -- .; fi
          exec "${pkgs.nixfmt-rfc-style}/bin/nixfmt" "$@"
        '');
    };
}
```
doc/manual/rl-next/pytest-suite.md (new file, +10)

@@ -0,0 +1,10 @@
---
synopsis: "The beginnings of a new pytest-based functional test suite"
category: Development
cls: [2036, 2037]
credits: jade
---

The existing integration/functional test suite is based on a large volume of shell scripts.
This often makes it somewhat challenging to debug at the best of times.
The goal of the pytest test suite is to make tests have more obvious dependencies on files and to make tests more concise and easier to write, as well as making new testing methods like snapshot testing easy.
doc/manual/rl-next/readline-support-removed.md (new file, +17)

@@ -0,0 +1,17 @@
---
synopsis: readline support removed
cls: [1885]
category: Packaging
credits: [9999years]
---

Support for building Lix with [`readline`][readline] instead of
[`editline`][editline] has been removed. `readline` support hasn't worked for a
long time (attempting to use it would lead to build errors) and would make Lix
subject to the GPL if it did work. In the future, we're hoping to replace
`editline` with [`rustyline`][rustyline] for improved ergonomics in the `nix
repl`.

[readline]: https://en.wikipedia.org/wiki/GNU_Readline
[editline]: https://github.com/troglobit/editline
[rustyline]: https://github.com/kkawakam/rustyline
doc/manual/rl-next/remove-mono-coreutils-dependency.md (new file, +8)

@@ -0,0 +1,8 @@
---
synopsis: "Dependency on monolithic coreutils removed"
category: Development
cls: [2108]
credits: vigress8
---

Previously, the build erroneously depended on a `coreutils` binary, which requires `coreutils` to be built with a specific configuration. This was only used in one test and was not required to be a single binary. This dependency is removed now.
doc/manual/rl-next/report-differing-outputs.md (new file, +22)

@@ -0,0 +1,22 @@
---
synopsis: "Reproducibility check builds now report all differing outputs"
cls: [2069]
category: Improvements
credits: [lheckemann]
---

`nix-build --check` allows rerunning the build of an already-built derivation to check that it produces the same output again.

If a multiple-output derivation with impure behaviour is built with `--check`, only the first output would be shown in the resulting error message (and kept for comparison):

```
error: derivation '/nix/store/4spy3nz1661zm15gkybsy1h5f36aliwx-python3.11-test-1.0.0.drv' may not be deterministic: output '/nix/store/ccqcp01zg18wp9iadzmzimqzdi3ll08d-python3.11-test
-1.0.0-dist' differs from '/nix/store/ccqcp01zg18wp9iadzmzimqzdi3ll08d-python3.11-test-1.0.0-dist.check'
```

Now, all differing outputs are kept and reported:
```
error: derivation '4spy3nz1661zm15gkybsy1h5f36aliwx-python3.11-test-1.0.0.drv' may not be deterministic: outputs differ
output differs: output '/nix/store/ccqcp01zg18wp9iadzmzimqzdi3ll08d-python3.11-test-1.0.0-dist' differs from '/nix/store/ccqcp01zg18wp9iadzmzimqzdi3ll08d-python3.11-test-1.0.0-dist.check'
output differs: output '/nix/store/yl59v08356i841c560alb0zmk7q16klb-python3.11-test-1.0.0' differs from '/nix/store/yl59v08356i841c560alb0zmk7q16klb-python3.11-test-1.0.0.check'
```
doc/manual/rl-next/stack-traces.md (new file, +26)

@@ -0,0 +1,26 @@
---
synopsis: "Some Lix crashes now produce reporting instructions and a stack trace, then abort"
cls: [1854]
category: Improvements
credits: jade
---

Lix, being a C++ program, can crash in a few kinds of ways.
It can obviously do a memory access violation, which will generate a core dump and thus be relatively debuggable.
But, worse, it could throw an unhandled exception, and, in the past, we would just show the message but not where it comes from, in spite of this always being a bug, since we expect all such errors to be translated to a Lix specific error.
Now the latter kind of bug should print reporting instructions, a rudimentary stack trace and (depending on system configuration) generate a core dump.

Sample output:

```
Lix crashed. This is a bug. We would appreciate if you report it along with what caused it at https://git.lix.systems/lix-project/lix/issues with the following information included:

Exception: std::runtime_error: test exception
Stack trace:
0# nix::printStackTrace() in /home/jade/lix/lix3/build/src/nix/../libutil/liblixutil.so
1# 0x000073C9862331F2 in /home/jade/lix/lix3/build/src/nix/../libmain/liblixmain.so
2# 0x000073C985F2E21A in /nix/store/p44qan69linp3ii0xrviypsw2j4qdcp2-gcc-13.2.0-lib/lib/libstdc++.so.6
3# 0x000073C985F2E285 in /nix/store/p44qan69linp3ii0xrviypsw2j4qdcp2-gcc-13.2.0-lib/lib/libstdc++.so.6
4# nix::handleExceptions(std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, std::function<void ()>) in /home/jade/lix/lix3/build/src/nix/../libmain/liblixmain.so
...
```
doc/manual/rl-next/verify-tls.md (new file, +10)

@@ -0,0 +1,10 @@
---
synopsis: "`<nix/fetchurl.nix>` now uses TLS verification"
category: Fixes
prs: [11585]
credits: edolstra
---

Previously `<nix/fetchurl.nix>` did not do TLS verification. This was because the Nix sandbox in the past did not have access to TLS certificates, and Nix checks the hash of the fetched file anyway. However, this can expose authentication data from `netrc` and URLs to man-in-the-middle attackers. In addition, Nix now in some cases (such as when using impure derivations) does *not* check the hash. Therefore we have now enabled TLS verification. This means that downloads by `<nix/fetchurl.nix>` will now fail if you're fetching from an HTTPS server that does not have a valid certificate.

`<nix/fetchurl.nix>` is also known as the builtin derivation builder `builtin:fetchurl`. It's not to be confused with the evaluation-time function `builtins.fetchurl`, which was not affected by this issue.
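For context, a minimal sketch of the builtin fetcher this note refers to (the URL and hash are placeholders, not from the diff); such a call now fails if the server presents an invalid certificate:

```nix
# Placeholder URL and hash; <nix/fetchurl.nix> is the fixed-output
# `builtin:fetchurl` builder, distinct from builtins.fetchurl.
import <nix/fetchurl.nix> {
  url = "https://example.org/source.tar.gz";
  sha256 = "0000000000000000000000000000000000000000000000000000";
}
```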
18
flake.lock
|
@ -19,11 +19,11 @@
|
|||
"nix2container": {
|
||||
"flake": false,
|
||||
"locked": {
|
||||
"lastModified": 1720642556,
|
||||
"narHash": "sha256-qsnqk13UmREKmRT7c8hEnz26X3GFFyIQrqx4EaRc1Is=",
|
||||
"lastModified": 1724996935,
|
||||
"narHash": "sha256-njRK9vvZ1JJsP8oV2OgkBrpJhgQezI03S7gzskCcHos=",
|
||||
"owner": "nlewo",
|
||||
"repo": "nix2container",
|
||||
"rev": "3853e5caf9ad24103b13aa6e0e8bcebb47649fe4",
|
||||
"rev": "fa6bb0a1159f55d071ba99331355955ae30b3401",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
|
@ -34,11 +34,11 @@
|
|||
},
|
||||
"nixpkgs": {
|
||||
"locked": {
|
||||
"lastModified": 1721931987,
|
||||
"narHash": "sha256-1Zg8LY0T5EfXtv0Kf4M6SFnjH7Eto4VV+EKJ/YSnhiI=",
|
||||
"lastModified": 1727184566,
|
||||
"narHash": "sha256-mgdK8BcFsLSNhe780+cHbEUbZ3OruLa1T/xgQlL4Aj4=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "e21630230c77140bc6478a21cd71e8bb73706fce",
|
||||
"rev": "48c3030083c46042584531bc9d931020f1975677",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
|
@ -67,11 +67,11 @@
|
|||
"pre-commit-hooks": {
|
||||
"flake": false,
|
||||
"locked": {
|
||||
"lastModified": 1721042469,
|
||||
"narHash": "sha256-6FPUl7HVtvRHCCBQne7Ylp4p+dpP3P/OYuzjztZ4s70=",
|
||||
"lastModified": 1726745158,
|
||||
"narHash": "sha256-D5AegvGoEjt4rkKedmxlSEmC+nNLMBPWFxvmYnVLhjk=",
|
||||
"owner": "cachix",
|
||||
"repo": "git-hooks.nix",
|
||||
"rev": "f451c19376071a90d8c58ab1a953c6e9840527fd",
|
||||
"rev": "4e743a6920eab45e8ba0fbe49dc459f1423a4b74",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
|
|
17
flake.nix
|
@ -99,9 +99,10 @@
|
|||
];
|
||||
|
||||
stdenvs = [
|
||||
"gccStdenv"
|
||||
# see assertion in package.nix why these two are disabled
|
||||
# "stdenv"
|
||||
# "gccStdenv"
|
||||
"clangStdenv"
|
||||
"stdenv"
|
||||
"libcxxStdenv"
|
||||
"ccacheStdenv"
|
||||
];
|
||||
|
@ -121,7 +122,11 @@
|
|||
name = "${stdenvName}Packages";
|
||||
value = f stdenvName;
|
||||
}) stdenvs
|
||||
);
|
||||
)
|
||||
// {
|
||||
# TODO delete this and reënable gcc stdenvs once gcc compiles kj coros correctly
|
||||
stdenvPackages = f "clangStdenv";
|
||||
};
|
||||
|
||||
# Memoize nixpkgs for different platforms for efficiency.
|
||||
nixpkgsFor = forAllSystems (
|
||||
|
@ -212,7 +217,7 @@
|
|||
|
||||
# A Nixpkgs overlay that overrides the 'nix' and
|
||||
# 'nix.perl-bindings' packages.
|
||||
overlays.default = overlayFor (p: p.stdenv);
|
||||
overlays.default = overlayFor (p: p.clangStdenv);
|
||||
|
||||
hydraJobs = {
|
||||
# Binary package for various platforms.
|
||||
|
@ -264,6 +269,8 @@
|
|||
nix = pkgs.callPackage ./package.nix {
|
||||
inherit versionSuffix officialRelease buildUnreleasedNotes;
|
||||
inherit (pkgs) build-release-notes;
|
||||
# Required since we don't support gcc stdenv
|
||||
stdenv = pkgs.clangStdenv;
|
||||
internalApiDocs = true;
|
||||
busybox-sandbox-shell = pkgs.busybox-sandbox-shell;
|
||||
};
|
||||
|
@ -321,6 +328,8 @@
|
|||
inherit (nixpkgs) pkgs;
|
||||
in
|
||||
pkgs.callPackage ./package.nix {
|
||||
# Required since we don't support gcc stdenv
|
||||
stdenv = pkgs.clangStdenv;
|
||||
versionSuffix = "";
|
||||
lintInsteadOfBuild = true;
|
||||
};
|
||||
|
|
34
meson.build
|
@ -47,12 +47,12 @@
|
|||
# in the build directory.
|
||||
|
||||
project('lix', 'cpp', 'rust',
|
||||
meson_version : '>=1.4.0',
|
||||
version : run_command('bash', '-c', 'echo -n $(jq -r .version < ./version.json)$VERSION_SUFFIX', check : true).stdout().strip(),
|
||||
default_options : [
|
||||
'cpp_std=c++2a',
|
||||
'cpp_std=c++23',
|
||||
'rust_std=2021',
|
||||
# TODO(Qyriad): increase the warning level
|
||||
'warning_level=1',
|
||||
'warning_level=2',
|
||||
'debug=true',
|
||||
'optimization=2',
|
||||
'errorlogs=true', # Please print logs for tests that fail
|
||||
|
@ -167,10 +167,18 @@ endif
|
|||
# frees one would expect when the objects are unique_ptrs. these problems
|
||||
# often show up as memory corruption when nesting generators (since we do
|
||||
# treat generators like owned memory) and will cause inexplicable crashs.
|
||||
#
|
||||
# gcc 13 does not compile capnp coroutine code correctly. a newer version
|
||||
# may fix this. (cf. https://gcc.gnu.org/bugzilla/show_bug.cgi?id=102051)
|
||||
# we allow gcc 13 here anyway because CI uses it for clang-tidy, and when
|
||||
# the compiler crashes outright it won't produce any bad binaries either.
|
||||
assert(
|
||||
cxx.get_id() != 'gcc' or cxx.version().version_compare('>=13'),
|
||||
'GCC 12 and earlier are known to miscompile lix coroutines, use GCC 13 or clang.'
|
||||
'GCC is known to miscompile coroutines, use clang.'
|
||||
)
|
||||
if cxx.get_id() == 'gcc'
|
||||
warning('GCC is known to crash while building coroutines, use clang.')
|
||||
endif
|
||||
|
||||
|
||||
# Translate some historical and Mesony CPU names to Lixy CPU names.
|
||||
|
@ -229,6 +237,7 @@ configdata += {
|
|||
}
|
||||
|
||||
boost = dependency('boost', required : true, modules : ['container'], include_type : 'system')
|
||||
kj = dependency('kj-async', required : true, include_type : 'system')
|
||||
|
||||
# cpuid only makes sense on x86_64
|
||||
cpuid_required = is_x64 ? get_option('cpuid') : false
|
||||
|
@ -349,7 +358,6 @@ endif
|
|||
#
|
||||
# Build-time tools
|
||||
#
|
||||
coreutils = find_program('coreutils', native : true)
|
||||
dot = find_program('dot', required : false, native : true)
|
||||
pymod = import('python')
|
||||
python = pymod.find_installation('python3')
|
||||
|
@ -475,6 +483,7 @@ add_project_arguments(
|
|||
# TODO(Qyriad): Yes this is how the autoconf+Make system did it.
|
||||
# It would be nice for our headers to be idempotent instead.
|
||||
'-include', 'config.h',
|
||||
'-Wno-unused-parameter',
|
||||
'-Wno-deprecated-declarations',
|
||||
'-Wimplicit-fallthrough',
|
||||
'-Werror=switch',
|
||||
|
@ -483,12 +492,6 @@ add_project_arguments(
|
|||
'-Wdeprecated-copy',
|
||||
'-Wignored-qualifiers',
|
||||
'-Werror=suggest-override',
|
||||
# Enable assertions in libstdc++ by default. Harmless on libc++. Benchmarked
|
||||
# at ~1% overhead in `nix search`.
|
||||
#
|
||||
# FIXME: remove when we get meson 1.4.0 which will default this to on for us:
|
||||
# https://mesonbuild.com/Release-notes-for-1-4-0.html#ndebug-setting-now-controls-c-stdlib-assertions
|
||||
'-D_GLIBCXX_ASSERTIONS=1',
|
||||
language : 'cpp',
|
||||
)
|
||||
|
||||
|
@ -584,10 +587,10 @@ run_command(
|
|||
)
|
||||
|
||||
if is_darwin
|
||||
configure_file(
|
||||
input : 'misc/launchd/org.nixos.nix-daemon.plist.in',
|
||||
output : 'org.nixos.nix-daemon.plist',
|
||||
copy : true,
|
||||
fs.copyfile(
|
||||
'misc/launchd/org.nixos.nix-daemon.plist.in',
|
||||
'org.nixos.nix-daemon.plist',
|
||||
install : true,
|
||||
install_dir : prefix / 'Library/LaunchDaemons',
|
||||
)
|
||||
endif
|
||||
|
@ -603,6 +606,7 @@ endif
|
|||
if enable_tests
|
||||
subdir('tests/unit')
|
||||
subdir('tests/functional')
|
||||
subdir('tests/functional2')
|
||||
endif
|
||||
|
||||
subdir('meson/clang-tidy')
|
||||
|
|
|
@ -1,8 +1,7 @@
|
|||
configure_file(
|
||||
input : 'completion.sh',
|
||||
output : 'nix',
|
||||
fs.copyfile(
|
||||
'completion.sh',
|
||||
'nix',
|
||||
install : true,
|
||||
install_dir : datadir / 'bash-completion/completions',
|
||||
install_mode : 'rw-r--r--',
|
||||
copy : true,
|
||||
)
|
||||
|
|
60
misc/capnproto.nix
Normal file
|
@ -0,0 +1,60 @@
|
|||
# FIXME: upstream to nixpkgs (do NOT build with gcc due to gcc coroutine bugs)
|
||||
{
|
||||
lib,
|
||||
stdenv,
|
||||
fetchFromGitHub,
|
||||
cmake,
|
||||
openssl,
|
||||
zlib,
|
||||
}:
|
||||
assert stdenv.cc.isClang;
|
||||
stdenv.mkDerivation rec {
|
||||
pname = "capnproto";
|
||||
version = "1.0.2";
|
||||
|
||||
# release tarballs are missing some ekam rules
|
||||
src = fetchFromGitHub {
|
||||
owner = "capnproto";
|
||||
repo = "capnproto";
|
||||
rev = "v${version}";
|
||||
sha256 = "sha256-LVdkqVBTeh8JZ1McdVNtRcnFVwEJRNjt0JV2l7RkuO8=";
|
||||
};
|
||||
|
||||
nativeBuildInputs = [ cmake ];
|
||||
propagatedBuildInputs = [
|
||||
openssl
|
||||
zlib
|
||||
];
|
||||
|
||||
# FIXME: separate the binaries from the stuff that user systems actually use
|
||||
# This runs into a terrible UX issue in Lix and I just don't want to debug it
|
||||
# right now for the couple MB of closure size:
|
||||
# https://git.lix.systems/lix-project/lix/issues/551
|
||||
# outputs = [ "bin" "dev" "out" ];
|
||||
|
||||
cmakeFlags = [
|
||||
(lib.cmakeBool "BUILD_SHARED_LIBS" true)
|
||||
# Take optimization flags from CXXFLAGS rather than cmake injecting them
|
||||
(lib.cmakeFeature "CMAKE_BUILD_TYPE" "None")
|
||||
];
|
||||
|
||||
env = {
|
||||
# Required to build the coroutine library
|
||||
CXXFLAGS = "-std=c++20";
|
||||
};
|
||||
|
||||
separateDebugInfo = true;
|
||||
|
||||
meta = with lib; {
|
||||
homepage = "https://capnproto.org/";
|
||||
description = "Cap'n Proto cerealization protocol";
|
||||
longDescription = ''
|
||||
Cap’n Proto is an insanely fast data interchange format and
|
||||
capability-based RPC system. Think JSON, except binary. Or think Protocol
|
||||
Buffers, except faster.
|
||||
'';
|
||||
license = licenses.mit;
|
||||
platforms = platforms.all;
|
||||
maintainers = lib.teams.lix.members;
|
||||
};
|
||||
}
|
|
@ -14,7 +14,7 @@ function _nix_complete
|
|||
# But the variable also misses the current token so it cancels out.
|
||||
set -l nix_arg_to_complete (count $nix_args)
|
||||
|
||||
env NIX_GET_COMPLETIONS=$nix_arg_to_complete $nix_args $current_token
|
||||
env NIX_GET_COMPLETIONS=$nix_arg_to_complete $nix_args $current_token 2>/dev/null
|
||||
end
|
||||
|
||||
function _nix_accepts_files
|
||||
|
|
|
@ -1,8 +1,7 @@
|
|||
configure_file(
|
||||
input : 'completion.fish',
|
||||
output : 'nix.fish',
|
||||
fs.copyfile(
|
||||
'completion.fish',
|
||||
'nix.fish',
|
||||
install : true,
|
||||
install_dir : datadir / 'fish/vendor_completions.d',
|
||||
install_mode : 'rw-r--r--',
|
||||
copy : true,
|
||||
)
|
||||
|
|
|
@ -5,8 +5,4 @@ subdir('zsh')
|
|||
subdir('systemd')
|
||||
subdir('flake-registry')
|
||||
|
||||
runinpty = configure_file(
|
||||
copy : true,
|
||||
input : meson.current_source_dir() / 'runinpty.py',
|
||||
output : 'runinpty.py',
|
||||
)
|
||||
runinpty = fs.copyfile('runinpty.py')
|
||||
|
|
|
@ -1,10 +1,9 @@
|
|||
foreach script : [ [ 'completion.zsh', '_nix' ], [ 'run-help-nix' ] ]
|
||||
configure_file(
|
||||
input : script[0],
|
||||
output : script.get(1, script[0]),
|
||||
fs.copyfile(
|
||||
script[0],
|
||||
script.get(1, script[0]),
|
||||
install : true,
|
||||
install_dir : datadir / 'zsh/site-functions',
|
||||
install_mode : 'rw-r--r--',
|
||||
copy : true,
|
||||
)
|
||||
endforeach
|
||||
|
|
106
nix-support/editline.patch
Normal file
|
@ -0,0 +1,106 @@
|
|||
From d0f2a5bc2300b96b2434c7838184c1dfd6a639f5 Mon Sep 17 00:00:00 2001
|
||||
From: Rebecca Turner <rbt@sent.as>
|
||||
Date: Sun, 8 Sep 2024 15:42:42 -0700
|
||||
Subject: [PATCH 1/2] Recognize Meta+Left and Meta+Right
|
||||
|
||||
Recognize `Alt-Left` and `Alt-Right` for navigating by words in more
|
||||
terminals/shells/platforms.
|
||||
|
||||
I'm not sure exactly where to find canonical documentation for these
|
||||
codes, but this seems to match what my terminal produces (macOS + iTerm2
|
||||
+ Fish + Tmux).
|
||||
|
||||
It might also be nice to have some more support for editing the bindings
|
||||
for these characters; sequences of more than one character are not
|
||||
supported by `el_bind_key` and similar.
|
||||
|
||||
Originally from: https://github.com/troglobit/editline/pull/70
|
||||
This patch is applied upstream: https://gerrit.lix.systems/c/lix/+/1883
|
||||
|
||||
---
|
||||
src/editline.c | 29 +++++++++++++++++++++++++++--
|
||||
1 file changed, 27 insertions(+), 2 deletions(-)
|
||||
|
||||
diff --git a/src/editline.c b/src/editline.c
|
||||
index 5ec9afb..d1cfbbc 100644
|
||||
--- a/src/editline.c
|
||||
+++ b/src/editline.c
|
||||
@@ -1034,6 +1034,30 @@ static el_status_t meta(void)
|
||||
return CSeof;
|
||||
|
||||
#ifdef CONFIG_ANSI_ARROWS
|
||||
+ /* See: https://en.wikipedia.org/wiki/ANSI_escape_code */
|
||||
+ /* Recognize ANSI escapes for `Meta+Left` and `Meta+Right`. */
|
||||
+ if (c == '\e') {
|
||||
+ switch (tty_get()) {
|
||||
+ case '[':
|
||||
+ {
|
||||
+ switch (tty_get()) {
|
||||
+ /* \e\e[C = Meta+Left */
|
||||
+ case 'C': return fd_word();
|
||||
+ /* \e\e[D = Meta+Right */
|
||||
+ case 'D': return bk_word();
|
||||
+ default:
|
||||
+ break;
|
||||
+ }
|
||||
+
|
||||
+ return el_ring_bell();
|
||||
+ }
|
||||
+ default:
|
||||
+ break;
|
||||
+ }
|
||||
+
|
||||
+ return el_ring_bell();
|
||||
+ }
|
||||
+
|
||||
/* Also include VT-100 arrows. */
|
||||
if (c == '[' || c == 'O') {
|
||||
switch (tty_get()) {
|
||||
@@ -1043,6 +1067,7 @@ static el_status_t meta(void)
|
||||
char seq[4] = { 0 };
|
||||
seq[0] = tty_get();
|
||||
|
||||
+ /* \e[1~ */
|
||||
if (seq[0] == '~')
|
||||
return beg_line(); /* Home */
|
||||
|
||||
@@ -1050,9 +1075,9 @@ static el_status_t meta(void)
|
||||
seq[c] = tty_get();
|
||||
|
||||
if (!strncmp(seq, ";5C", 3))
|
||||
- return fd_word(); /* Ctrl+Right */
|
||||
+ return fd_word(); /* \e[1;5C = Ctrl+Right */
|
||||
if (!strncmp(seq, ";5D", 3))
|
||||
- return bk_word(); /* Ctrl+Left */
|
||||
+ return bk_word(); /* \e[1;5D = Ctrl+Left */
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
From 4c4455353a0a88bee09d5f27c28f81f747682fed Mon Sep 17 00:00:00 2001
|
||||
From: Rebecca Turner <rbt@sent.as>
|
||||
Date: Mon, 9 Sep 2024 09:44:44 -0700
|
||||
Subject: [PATCH 2/2] Add support for \e[1;3C and \e[1;3D
|
||||
|
||||
---
|
||||
src/editline.c | 6 ++++--
|
||||
1 file changed, 4 insertions(+), 2 deletions(-)
|
||||
|
||||
diff --git a/src/editline.c b/src/editline.c
|
||||
index d1cfbbc..350b5cb 100644
|
||||
--- a/src/editline.c
|
||||
+++ b/src/editline.c
|
||||
@@ -1074,9 +1074,11 @@ static el_status_t meta(void)
|
||||
for (c = 1; c < 3; c++)
|
||||
seq[c] = tty_get();
|
||||
|
||||
- if (!strncmp(seq, ";5C", 3))
|
||||
+ if (!strncmp(seq, ";5C", 3)
|
||||
+ || !strncmp(seq, ";3C", 3))
|
||||
return fd_word(); /* \e[1;5C = Ctrl+Right */
|
||||
- if (!strncmp(seq, ";5D", 3))
|
||||
+ if (!strncmp(seq, ";5D", 3)
|
||||
+ || !strncmp(seq, ";3D", 3))
|
||||
return bk_word(); /* \e[1;5D = Ctrl+Left */
|
||||
|
||||
break;
|
48
package.nix
|
@ -15,6 +15,7 @@
|
|||
brotli,
|
||||
bzip2,
|
||||
callPackage,
|
||||
capnproto-lix ? __forDefaults.capnproto-lix,
|
||||
cmake,
|
||||
curl,
|
||||
doxygen,
|
||||
|
@ -30,12 +31,15 @@
|
|||
lix-clang-tidy ? null,
|
||||
llvmPackages,
|
||||
lsof,
|
||||
# FIXME: remove default after dropping NixOS 24.05
|
||||
lowdown-unsandboxed ? lowdown,
|
||||
lowdown,
|
||||
mdbook,
|
||||
mdbook-linkcheck,
|
||||
mercurial,
|
||||
meson,
|
||||
ninja,
|
||||
ncurses,
|
||||
openssl,
|
||||
pegtl,
|
||||
pkg-config,
|
||||
|
@ -79,12 +83,37 @@
|
|||
boehmgc-nix = boehmgc.override { enableLargeConfig = true; };
|
||||
|
||||
editline-lix = editline.overrideAttrs (prev: {
|
||||
configureFlags = prev.configureFlags or [ ] ++ [ (lib.enableFeature true "sigstop") ];
|
||||
patches = (prev.patches or [ ]) ++ [
|
||||
# Recognize `Alt-Left` and `Alt-Right` for navigating by words in more
|
||||
# terminals/shells/platforms.
|
||||
#
|
||||
# See: https://github.com/troglobit/editline/pull/70
|
||||
./nix-support/editline.patch
|
||||
];
|
||||
|
||||
configureFlags = (prev.configureFlags or [ ]) ++ [
|
||||
# Enable SIGSTOP (Ctrl-Z) behavior.
|
||||
(lib.enableFeature true "sigstop")
|
||||
# Enable ANSI arrow keys.
|
||||
(lib.enableFeature true "arrow-keys")
|
||||
# Use termcap library to query terminal size.
|
||||
(lib.enableFeature (ncurses != null) "termcap")
|
||||
];
|
||||
|
||||
buildInputs = (prev.buildInputs or [ ]) ++ [ ncurses ];
|
||||
});
|
||||
|
||||
build-release-notes = callPackage ./maintainers/build-release-notes.nix { };
|
||||
|
||||
# needs derivation patching to add debuginfo and coroutine library support
|
||||
# !! must build this with clang as it is affected by the gcc coroutine bugs
|
||||
capnproto-lix = callPackage ./misc/capnproto.nix { inherit stdenv; };
|
||||
},
|
||||
}:
|
||||
|
||||
# gcc miscompiles coroutines at least until 13.2, possibly longer
|
||||
assert stdenv.cc.isClang;
|
||||
|
||||
let
|
||||
inherit (__forDefaults) canRunInstalled;
|
||||
inherit (lib) fileset;
|
||||
|
@ -143,6 +172,7 @@ let
|
|||
|
||||
functionalTestFiles = fileset.unions [
|
||||
./tests/functional
|
||||
./tests/functional2
|
||||
./tests/unit
|
||||
(fileset.fileFilter (f: lib.strings.hasPrefix "nix-profile" f.name) ./scripts)
|
||||
];
|
||||
|
@ -216,13 +246,16 @@ stdenv.mkDerivation (finalAttrs: {
|
|||
nativeBuildInputs =
|
||||
[
|
||||
python3
|
||||
python3.pkgs.pytest
|
||||
python3.pkgs.pytest-xdist
|
||||
meson
|
||||
ninja
|
||||
cmake
|
||||
rustc
|
||||
capnproto-lix
|
||||
]
|
||||
++ [
|
||||
(lib.getBin lowdown)
|
||||
(lib.getBin lowdown-unsandboxed)
|
||||
mdbook
|
||||
mdbook-linkcheck
|
||||
]
|
||||
|
@ -241,6 +274,10 @@ stdenv.mkDerivation (finalAttrs: {
|
|||
++ lib.optionals lintInsteadOfBuild [
|
||||
# required for a wrapped clang-tidy
|
||||
llvmPackages.clang-tools
|
||||
# load-bearing order (just as below); the actual stdenv wrapped clang
|
||||
# needs to precede the unwrapped clang in PATH such that calling `clang`
|
||||
# can compile things.
|
||||
stdenv.cc
|
||||
# required for run-clang-tidy
|
||||
llvmPackages.clang-unwrapped
|
||||
];
|
||||
|
@ -260,6 +297,7 @@ stdenv.mkDerivation (finalAttrs: {
|
|||
libsodium
|
||||
toml11
|
||||
pegtl
|
||||
capnproto-lix
|
||||
]
|
||||
++ lib.optionals hostPlatform.isLinux [
|
||||
libseccomp
|
||||
|
@ -411,6 +449,7 @@ stdenv.mkDerivation (finalAttrs: {
|
|||
editline-lix
|
||||
build-release-notes
|
||||
pegtl
|
||||
capnproto-lix
|
||||
;
|
||||
|
||||
# The collection of dependency logic for this derivation is complicated enough that
|
||||
|
@ -445,6 +484,11 @@ stdenv.mkDerivation (finalAttrs: {
|
|||
|
||||
pythonPackages = (
|
||||
p: [
|
||||
# FIXME: these have to be added twice due to the nix shell using a
|
||||
# wrapped python instead of build inputs for its python inputs
|
||||
p.pytest
|
||||
p.pytest-xdist
|
||||
|
||||
p.yapf
|
||||
p.python-frontmatter
|
||||
p.requests
|
||||
|
|
|
@ -8,12 +8,7 @@ configure_file(
|
|||
}
|
||||
)
|
||||
|
||||
# https://github.com/mesonbuild/meson/issues/860
|
||||
configure_file(
|
||||
input : 'nix-profile.sh.in',
|
||||
output : 'nix-profile.sh.in',
|
||||
copy : true,
|
||||
)
|
||||
fs.copyfile('nix-profile.sh.in')
|
||||
|
||||
foreach rc : [ '.sh', '.fish', '-daemon.sh', '-daemon.fish' ]
|
||||
configure_file(
|
||||
|
|
|
@ -19,8 +19,9 @@
|
|||
#include "legacy.hh"
|
||||
#include "experimental-features.hh"
|
||||
#include "hash.hh"
|
||||
#include "build-remote.hh"
|
||||
|
||||
using namespace nix;
|
||||
namespace nix {
|
||||
|
||||
static void handleAlarm(int sig) {
|
||||
}
|
||||
|
@ -388,4 +389,8 @@ connected:
|
|||
}
|
||||
}
|
||||
|
||||
static RegisterLegacyCommand r_build_remote("build-remote", main_build_remote);
|
||||
void registerBuildRemote() {
|
||||
LegacyCommands::add("build-remote", main_build_remote);
|
||||
}
|
||||
|
||||
}
|
8
src/legacy/build-remote.hh
Normal file
|
@ -0,0 +1,8 @@
|
|||
#pragma once
|
||||
/// @file
|
||||
|
||||
namespace nix {
|
||||
|
||||
void registerBuildRemote();
|
||||
|
||||
}
|
35
src/legacy/meson.build
Normal file
|
@ -0,0 +1,35 @@
|
|||
legacy_include_directories = include_directories('.')
|
||||
|
||||
legacy_sources = files(
|
||||
# `build-remote` is not really legacy (it powers all remote builds), but it's
|
||||
# not a `nix3` command.
|
||||
'build-remote.cc',
|
||||
'dotgraph.cc',
|
||||
'graphml.cc',
|
||||
'nix-build.cc',
|
||||
'nix-channel.cc',
|
||||
'nix-collect-garbage.cc',
|
||||
'nix-copy-closure.cc',
|
||||
'nix-env.cc',
|
||||
'nix-env.hh',
|
||||
'nix-instantiate.cc',
|
||||
'nix-store.cc',
|
||||
'user-env.cc',
|
||||
)
|
||||
|
||||
legacy_headers = files(
|
||||
'build-remote.hh',
|
||||
'nix-build.hh',
|
||||
'nix-channel.hh',
|
||||
'nix-collect-garbage.hh',
|
||||
'nix-copy-closure.hh',
|
||||
'nix-instantiate.hh',
|
||||
'nix-store.hh',
|
||||
)
|
||||
|
||||
legacy_generated_headers = [
|
||||
gen_header.process('buildenv.nix', preserve_path_from: meson.current_source_dir()),
|
||||
gen_header.process('unpack-channel.nix', preserve_path_from: meson.current_source_dir()),
|
||||
]
|
||||
|
||||
fs.copyfile('unpack-channel.nix')
|
|
@ -24,12 +24,14 @@
|
|||
#include "attr-path.hh"
|
||||
#include "legacy.hh"
|
||||
#include "shlex.hh"
|
||||
#include "nix-build.hh"
|
||||
|
||||
extern char * * environ __attribute__((weak)); // Man what even is this
|
||||
|
||||
namespace nix {
|
||||
|
||||
using namespace nix;
|
||||
using namespace std::string_literals;
|
||||
|
||||
extern char * * environ __attribute__((weak));
|
||||
|
||||
static void main_nix_build(int argc, char * * argv)
|
||||
{
|
||||
auto dryRun = false;
|
||||
|
@ -488,42 +490,51 @@ static void main_nix_build(int argc, char * * argv)
|
|||
environment variables and shell functions. Also don't
|
||||
lose the current $PATH directories. */
|
||||
auto rcfile = (Path) tmpDir + "/rc";
|
||||
auto tz = getEnv("TZ");
|
||||
std::string rc = fmt(
|
||||
R"(_nix_shell_clean_tmpdir() { command rm -rf %1%; }; )"s +
|
||||
(keepTmp ?
|
||||
"trap _nix_shell_clean_tmpdir EXIT; "
|
||||
"exitHooks+=(_nix_shell_clean_tmpdir); "
|
||||
"failureHooks+=(_nix_shell_clean_tmpdir); ":
|
||||
"_nix_shell_clean_tmpdir; ") +
|
||||
(pure ? "" : "[ -n \"$PS1\" ] && [ -e ~/.bashrc ] && source ~/.bashrc;") +
|
||||
"%2%"
|
||||
// always clear PATH.
|
||||
// when nix-shell is run impure, we rehydrate it with the `p=$PATH` above
|
||||
"unset PATH;"
|
||||
"dontAddDisableDepTrack=1;\n"
|
||||
+ structuredAttrsRC +
|
||||
"\n[ -e $stdenv/setup ] && source $stdenv/setup; "
|
||||
"%3%"
|
||||
"PATH=%4%:\"$PATH\"; "
|
||||
"SHELL=%5%; "
|
||||
"BASH=%5%; "
|
||||
"set +e; "
|
||||
R"s([ -n "$PS1" -a -z "$NIX_SHELL_PRESERVE_PROMPT" ] && )s" +
|
||||
(getuid() == 0 ? R"s(PS1='\n\[\033[1;31m\][nix-shell:\w]\$\[\033[0m\] '; )s"
|
||||
: R"s(PS1='\n\[\033[1;32m\][nix-shell:\w]\$\[\033[0m\] '; )s") +
|
||||
"if [ \"$(type -t runHook)\" = function ]; then runHook shellHook; fi; "
|
||||
"unset NIX_ENFORCE_PURITY; "
|
||||
"shopt -u nullglob; "
|
||||
"unset TZ; %6%"
|
||||
"shopt -s execfail;"
|
||||
"%7%",
|
||||
shellEscape(tmpDir),
|
||||
(pure ? "" : "p=$PATH; "),
|
||||
(pure ? "" : "PATH=$PATH:$p; unset p; "),
|
||||
shellEscape(dirOf(*shell)),
|
||||
shellEscape(*shell),
|
||||
(getenv("TZ") ? (std::string("export TZ=") + shellEscape(getenv("TZ")) + "; ") : ""),
|
||||
envCommand);
|
||||
R"(_nix_shell_clean_tmpdir() { command rm -rf %1%; }; )"
|
||||
"%2%"
|
||||
"%3%"
|
||||
// always clear PATH.
|
||||
// when nix-shell is run impure, we rehydrate it with the `p=$PATH` above
|
||||
"unset PATH;"
|
||||
"dontAddDisableDepTrack=1;\n",
|
||||
shellEscape(tmpDir),
|
||||
(keepTmp
|
||||
? "trap _nix_shell_clean_tmpdir EXIT; "
|
||||
"exitHooks+=(_nix_shell_clean_tmpdir); "
|
||||
"failureHooks+=(_nix_shell_clean_tmpdir); "
|
||||
: "_nix_shell_clean_tmpdir; "),
|
||||
(pure
|
||||
? ""
|
||||
: "[ -n \"$PS1\" ] && [ -e ~/.bashrc ] && source ~/.bashrc; p=$PATH; ")
|
||||
);
|
||||
rc += structuredAttrsRC;
|
||||
rc += fmt(
|
||||
"\n[ -e $stdenv/setup ] && source $stdenv/setup; "
|
||||
"%1%"
|
||||
"PATH=%2%:\"$PATH\"; "
|
||||
"SHELL=%3%; "
|
||||
"BASH=%3%; "
|
||||
"set +e; "
|
||||
R"s([ -n "$PS1" -a -z "$NIX_SHELL_PRESERVE_PROMPT" ] && )s"
|
||||
"%4%"
|
||||
"if [ \"$(type -t runHook)\" = function ]; then runHook shellHook; fi; "
|
||||
"unset NIX_ENFORCE_PURITY; "
|
||||
"shopt -u nullglob; "
|
||||
"unset TZ; %5%"
|
||||
"shopt -s execfail;"
|
||||
"%6%",
|
||||
(pure ? "" : "PATH=$PATH:$p; unset p; "),
|
||||
shellEscape(dirOf(*shell)),
|
||||
shellEscape(*shell),
|
||||
(getuid() == 0 ? R"s(PS1='\n\[\033[1;31m\][nix-shell:\w]\$\[\033[0m\] '; )s"
|
||||
: R"s(PS1='\n\[\033[1;32m\][nix-shell:\w]\$\[\033[0m\] '; )s"),
|
||||
(tz.has_value()
|
||||
? (std::string("export TZ=") + shellEscape(*tz) + "; ")
|
||||
: ""),
|
||||
envCommand
|
||||
);
|
||||
vomit("Sourcing nix-shell with file %s and contents:\n%s", rcfile, rc);
|
||||
writeFile(rcfile, rc);
|
||||
|
||||
|
@ -613,5 +624,9 @@ static void main_nix_build(int argc, char * * argv)
|
|||
}
|
||||
}
|
||||
|
||||
static RegisterLegacyCommand r_nix_build("nix-build", main_nix_build);
|
||||
static RegisterLegacyCommand r_nix_shell("nix-shell", main_nix_build);
|
||||
void registerNixBuildAndNixShell() {
|
||||
LegacyCommands::add("nix-build", main_nix_build);
|
||||
LegacyCommands::add("nix-shell", main_nix_build);
|
||||
}
|
||||
|
||||
}
|
8
src/legacy/nix-build.hh
Normal file
|
@ -0,0 +1,8 @@
|
|||
#pragma once
|
||||
/// @file
|
||||
|
||||
namespace nix {
|
||||
|
||||
void registerNixBuildAndNixShell();
|
||||
|
||||
}
|
|
@ -7,12 +7,13 @@
|
|||
#include "fetchers.hh"
|
||||
#include "eval-settings.hh" // for defexpr
|
||||
#include "users.hh"
|
||||
#include "nix-channel.hh"
|
||||
|
||||
#include <fcntl.h>
|
||||
#include <regex>
|
||||
#include <pwd.h>
|
||||
|
||||
using namespace nix;
|
||||
namespace nix {
|
||||
|
||||
typedef std::map<std::string, std::string> Channels;
|
||||
|
||||
|
@ -264,4 +265,8 @@ static int main_nix_channel(int argc, char ** argv)
|
|||
}
|
||||
}
|
||||
|
||||
static RegisterLegacyCommand r_nix_channel("nix-channel", main_nix_channel);
|
||||
void registerNixChannel() {
|
||||
LegacyCommands::add("nix-channel", main_nix_channel);
|
||||
}
|
||||
|
||||
}
|
8
src/legacy/nix-channel.hh
Normal file
|
@ -0,0 +1,8 @@
|
|||
#pragma once
|
||||
/// @file
|
||||
|
||||
namespace nix {
|
||||
|
||||
void registerNixChannel();
|
||||
|
||||
}
|
|
@ -7,11 +7,12 @@
|
|||
#include "globals.hh"
|
||||
#include "legacy.hh"
|
||||
#include "signals.hh"
|
||||
#include "nix-collect-garbage.hh"
|
||||
|
||||
#include <iostream>
|
||||
#include <cerrno>
|
||||
|
||||
using namespace nix;
|
||||
namespace nix {
|
||||
|
||||
std::string deleteOlderThan;
|
||||
bool dryRun = false;
|
||||
|
@ -110,4 +111,8 @@ static int main_nix_collect_garbage(int argc, char * * argv)
|
|||
}
|
||||
}
|
||||
|
||||
static RegisterLegacyCommand r_nix_collect_garbage("nix-collect-garbage", main_nix_collect_garbage);
|
||||
void registerNixCollectGarbage() {
|
||||
LegacyCommands::add("nix-collect-garbage", main_nix_collect_garbage);
|
||||
}
|
||||
|
||||
}
|
8
src/legacy/nix-collect-garbage.hh
Normal file
|
@ -0,0 +1,8 @@
|
|||
#pragma once
|
||||
/// @file
|
||||
|
||||
namespace nix {
|
||||
|
||||
void registerNixCollectGarbage();
|
||||
|
||||
}
|
|
@ -1,8 +1,9 @@
|
|||
#include "shared.hh"
|
||||
#include "store-api.hh"
|
||||
#include "legacy.hh"
|
||||
#include "nix-copy-closure.hh"
|
||||
|
||||
using namespace nix;
|
||||
namespace nix {
|
||||
|
||||
static int main_nix_copy_closure(int argc, char ** argv)
|
||||
{
|
||||
|
@ -60,4 +61,8 @@ static int main_nix_copy_closure(int argc, char ** argv)
|
|||
}
|
||||
}
|
||||
|
||||
static RegisterLegacyCommand r_nix_copy_closure("nix-copy-closure", main_nix_copy_closure);
|
||||
void registerNixCopyClosure() {
|
||||
LegacyCommands::add("nix-copy-closure", main_nix_copy_closure);
|
||||
}
|
||||
|
||||
}
|
8
src/legacy/nix-copy-closure.hh
Normal file
|
@ -0,0 +1,8 @@
|
|||
#pragma once
|
||||
/// @file
|
||||
|
||||
namespace nix {
|
||||
|
||||
void registerNixCopyClosure();
|
||||
|
||||
}
|
|
@ -17,6 +17,7 @@
|
|||
#include "xml-writer.hh"
|
||||
#include "legacy.hh"
|
||||
#include "eval-settings.hh" // for defexpr
|
||||
#include "nix-env.hh"
|
||||
|
||||
#include <ctime>
|
||||
#include <algorithm>
|
||||
|
@ -28,9 +29,10 @@
|
|||
#include <unistd.h>
|
||||
#include <nlohmann/json.hpp>
|
||||
|
||||
using namespace nix;
|
||||
using std::cout;
|
||||
|
||||
namespace nix {
|
||||
|
||||
|
||||
typedef enum {
|
||||
srcNixExprDrvs,
|
||||
|
@ -1544,4 +1546,8 @@ static int main_nix_env(int argc, char * * argv)
|
|||
}
|
||||
}
|
||||
|
||||
static RegisterLegacyCommand r_nix_env("nix-env", main_nix_env);
|
||||
void registerNixEnv() {
|
||||
LegacyCommands::add("nix-env", main_nix_env);
|
||||
}
|
||||
|
||||
}
|
8
src/legacy/nix-env.hh
Normal file
|
@ -0,0 +1,8 @@
|
|||
#pragma once
|
||||
/// @file
|
||||
|
||||
namespace nix {
|
||||
|
||||
void registerNixEnv();
|
||||
|
||||
}
|
|
@ -11,12 +11,13 @@
|
|||
#include "local-fs-store.hh"
|
||||
#include "common-eval-args.hh"
|
||||
#include "legacy.hh"
|
||||
#include "nix-instantiate.hh"
|
||||
|
||||
#include <map>
|
||||
#include <iostream>
|
||||
|
||||
|
||||
using namespace nix;
|
||||
namespace nix {
|
||||
|
||||
|
||||
static Path gcRoot;
|
||||
|
@ -195,4 +196,8 @@ static int main_nix_instantiate(int argc, char * * argv)
|
|||
}
|
||||
}
|
||||
|
||||
static RegisterLegacyCommand r_nix_instantiate("nix-instantiate", main_nix_instantiate);
|
||||
void registerNixInstantiate() {
|
||||
LegacyCommands::add("nix-instantiate", main_nix_instantiate);
|
||||
}
|
||||
|
||||
}
|
8
src/legacy/nix-instantiate.hh
Normal file
|
@ -0,0 +1,8 @@
|
|||
#pragma once
|
||||
/// @file
|
||||
|
||||
namespace nix {
|
||||
|
||||
void registerNixInstantiate();
|
||||
|
||||
}
|
|
@ -15,6 +15,7 @@
|
|||
#include "graphml.hh"
|
||||
#include "legacy.hh"
|
||||
#include "path-with-outputs.hh"
|
||||
#include "nix-store.hh"
|
||||
|
||||
#include <iostream>
|
||||
#include <algorithm>
|
||||
|
@ -24,10 +25,9 @@
|
|||
#include <fcntl.h>
|
||||
|
||||
|
||||
namespace nix_store {
|
||||
namespace nix {
|
||||
|
||||
|
||||
using namespace nix;
|
||||
using std::cin;
|
||||
using std::cout;
|
||||
|
||||
|
@ -831,12 +831,12 @@ static void opServe(Strings opFlags, Strings opArgs)
|
|||
// FIXME: changing options here doesn't work if we're
|
||||
// building through the daemon.
|
||||
verbosity = lvlError;
|
||||
settings.keepLog = false;
|
||||
settings.useSubstitutes = false;
|
||||
settings.maxSilentTime = readInt(in);
|
||||
settings.buildTimeout = readInt(in);
|
||||
settings.keepLog.override(false);
|
||||
settings.useSubstitutes.override(false);
|
||||
settings.maxSilentTime.override(readInt(in));
|
||||
settings.buildTimeout.override(readInt(in));
|
||||
if (GET_PROTOCOL_MINOR(clientVersion) >= 2)
|
||||
settings.maxLogSize = readNum<unsigned long>(in);
|
||||
settings.maxLogSize.override(readNum<unsigned long>(in));
|
||||
if (GET_PROTOCOL_MINOR(clientVersion) >= 3) {
|
||||
auto nrRepeats = readInt(in);
|
||||
if (nrRepeats != 0) {
|
||||
|
@ -850,10 +850,10 @@ static void opServe(Strings opFlags, Strings opArgs)
|
|||
// asked for.
|
||||
readInt(in);
|
||||
|
||||
settings.runDiffHook = true;
|
||||
settings.runDiffHook.override(true);
|
||||
}
|
||||
if (GET_PROTOCOL_MINOR(clientVersion) >= 7) {
|
||||
settings.keepFailed = (bool) readInt(in);
|
||||
settings.keepFailed.override((bool) readInt(in));
|
||||
}
|
||||
};
|
||||
|
||||
|
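The hunks above swap direct assignment to daemon settings (for example `settings.keepLog = false;`) for `settings.keepLog.override(false)` and friends, and a later hunk uses `nixPath.setDefault(...)`. A minimal sketch of the distinction this relies on, assuming only what the diff shows; the real `Setting` class in Lix carries much more machinery:

```cpp
// Hypothetical sketch of the setDefault-vs-override distinction; not the
// actual Lix Setting class, which is far more involved.
#include <iostream>
#include <utility>

template<typename T>
class Setting
{
    T value;
    bool overridden = false;
public:
    explicit Setting(T def) : value(std::move(def)) {}

    // setDefault only takes effect if nothing has overridden the value yet.
    void setDefault(T v) { if (!overridden) value = std::move(v); }

    // override always wins and marks the setting as explicitly set.
    void override(T v) { value = std::move(v); overridden = true; }

    const T & get() const { return value; }
};

int main()
{
    Setting<bool> useSubstitutes{true};
    useSubstitutes.override(false);   // as in opServe: value pushed by the client
    useSubstitutes.setDefault(true);  // ignored, an override is already present
    std::cout << useSubstitutes.get() << "\n"; // prints 0
}
```

With this split, values pushed by a client through `nix-store --serve` presumably count as explicit overrides, while environment-derived values such as `NIX_PATH` merely adjust the default.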
@ -1176,6 +1176,8 @@ static int main_nix_store(int argc, char * * argv)
|
|||
}
|
||||
}
|
||||
|
||||
static RegisterLegacyCommand r_nix_store("nix-store", main_nix_store);
|
||||
void registerNixStore() {
|
||||
LegacyCommands::add("nix-store", main_nix_store);
|
||||
}
|
||||
|
||||
}
|
src/legacy/nix-store.hh (new file, +8)
|
@ -0,0 +1,8 @@
|
|||
#pragma once
|
||||
/// @file
|
||||
|
||||
namespace nix {
|
||||
|
||||
void registerNixStore();
|
||||
|
||||
}
|
|
@ -9,8 +9,24 @@
|
|||
#include "store-api.hh"
|
||||
#include "command.hh"
|
||||
|
||||
#include <regex>
|
||||
|
||||
namespace nix {
|
||||
|
||||
static std::regex const identifierRegex("^[A-Za-z_][A-Za-z0-9_'-]*$");
|
||||
static void warnInvalidNixIdentifier(const std::string & name)
|
||||
{
|
||||
std::smatch match;
|
||||
if (!std::regex_match(name, match, identifierRegex)) {
|
||||
warn("This Nix invocation specifies a value for argument '%s' which isn't a valid \
|
||||
Nix identifier. The project is considering dropping support for this \
|
||||
or requiring quotes around args that aren't valid Nix identifiers. \
|
||||
If you depend on this behavior, please reach out in \
|
||||
https://git.lix.systems/lix-project/lix/issues/496 so we can discuss \
|
||||
your use-case.", name);
|
||||
}
|
||||
}
|
||||
|
||||
MixEvalArgs::MixEvalArgs()
|
||||
{
|
||||
addFlag({
|
||||
|
@ -18,7 +34,10 @@ MixEvalArgs::MixEvalArgs()
|
|||
.description = "Pass the value *expr* as the argument *name* to Nix functions.",
|
||||
.category = category,
|
||||
.labels = {"name", "expr"},
|
||||
.handler = {[&](std::string name, std::string expr) { autoArgs[name] = 'E' + expr; }}
|
||||
.handler = {[&](std::string name, std::string expr) {
|
||||
warnInvalidNixIdentifier(name);
|
||||
autoArgs[name] = 'E' + expr;
|
||||
}}
|
||||
});
|
||||
|
||||
addFlag({
|
||||
|
@ -26,7 +45,10 @@ MixEvalArgs::MixEvalArgs()
|
|||
.description = "Pass the string *string* as the argument *name* to Nix functions.",
|
||||
.category = category,
|
||||
.labels = {"name", "string"},
|
||||
.handler = {[&](std::string name, std::string s) { autoArgs[name] = 'S' + s; }},
|
||||
.handler = {[&](std::string name, std::string s) {
|
||||
warnInvalidNixIdentifier(name);
|
||||
autoArgs[name] = 'S' + s;
|
||||
}},
|
||||
});
|
||||
|
||||
addFlag({
|
||||
|
@ -115,7 +137,7 @@ MixEvalArgs::MixEvalArgs()
|
|||
.description = "Allow access to mutable paths and repositories.",
|
||||
.category = category,
|
||||
.handler = {[&]() {
|
||||
evalSettings.pureEval = false;
|
||||
evalSettings.pureEval.override(false);
|
||||
}},
|
||||
});
|
||||
|
||||
|
|
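For reference, the identifier check added to `--arg`/`--argstr` above can be exercised on its own. A small standalone sketch using the same regex shown in the diff; the `warn()` plumbing is replaced by plain output here:

```cpp
// Standalone illustration of the --arg/--argstr identifier check.
#include <iostream>
#include <regex>
#include <string>

static const std::regex identifierRegex("^[A-Za-z_][A-Za-z0-9_'-]*$");

// Returns true when `name` would be accepted silently by --arg/--argstr.
static bool isValidNixIdentifier(const std::string & name)
{
    return std::regex_match(name, identifierRegex);
}

int main()
{
    for (std::string name : {"pkgs", "my-arg'", "2fast", "has space"}) {
        std::cout << name << " -> "
                  << (isValidNixIdentifier(name) ? "ok" : "warned") << "\n";
    }
}
```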
|
@ -212,7 +212,7 @@ void SourceExprCommand::completeInstallable(AddCompletions & completions, std::s
|
|||
if (file) {
|
||||
completions.setType(AddCompletions::Type::Attrs);
|
||||
|
||||
evalSettings.pureEval = false;
|
||||
evalSettings.pureEval.override(false);
|
||||
auto state = getEvalState();
|
||||
Expr & e = state->parseExprFromFile(
|
||||
resolveExprPath(state->checkSourcePath(lookupFileArg(*state, *file)))
|
||||
|
@ -435,7 +435,7 @@ Installables SourceExprCommand::parseInstallables(
|
|||
throw UsageError("'--file' and '--expr' are exclusive");
|
||||
|
||||
// FIXME: backward compatibility hack
|
||||
if (file) evalSettings.pureEval = false;
|
||||
if (file) evalSettings.pureEval.override(false);
|
||||
|
||||
auto state = getEvalState();
|
||||
auto vFile = state->allocValue();
|
||||
|
|
|
@ -2,6 +2,6 @@
|
|||
|
||||
namespace nix {
|
||||
|
||||
RegisterLegacyCommand::Commands * RegisterLegacyCommand::commands = 0;
|
||||
LegacyCommands::Commands * LegacyCommands::commands = 0;
|
||||
|
||||
}
|
||||
|
|
|
@ -9,12 +9,12 @@ namespace nix {
|
|||
|
||||
typedef std::function<void(int, char * *)> MainFunction;
|
||||
|
||||
struct RegisterLegacyCommand
|
||||
struct LegacyCommands
|
||||
{
|
||||
typedef std::map<std::string, MainFunction> Commands;
|
||||
static Commands * commands;
|
||||
|
||||
RegisterLegacyCommand(const std::string & name, MainFunction fun)
|
||||
static void add(const std::string & name, MainFunction fun)
|
||||
{
|
||||
if (!commands) commands = new Commands;
|
||||
(*commands)[name] = fun;
|
||||
|
|
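The pattern above replaces self-registering `RegisterLegacyCommand` statics with explicit `registerNix*()` functions that call `LegacyCommands::add`. A compilable sketch of how the pieces fit together, with a hypothetical `main()` doing the wiring; the actual dispatch in Lix lives elsewhere:

```cpp
// Minimal sketch (not the actual Lix sources) of the explicit registration
// scheme after this change. Names follow the diff; main() is illustrative.
#include <functional>
#include <iostream>
#include <map>
#include <string>

using MainFunction = std::function<void(int, char **)>;

struct LegacyCommands
{
    using Commands = std::map<std::string, MainFunction>;
    static Commands * commands;

    // Previously done implicitly by static RegisterLegacyCommand objects;
    // now each command exposes a register function that calls add().
    static void add(const std::string & name, MainFunction fun)
    {
        if (!commands) commands = new Commands;
        (*commands)[name] = fun;
    }
};

LegacyCommands::Commands * LegacyCommands::commands = nullptr;

static void mainNixEnv(int, char **) { std::cout << "nix-env\n"; }

void registerNixEnv() { LegacyCommands::add("nix-env", mainNixEnv); }

int main(int argc, char ** argv)
{
    registerNixEnv();          // explicit, ordered registration at startup
    auto & cmds = *LegacyCommands::commands;
    if (auto it = cmds.find("nix-env"); it != cmds.end())
        it->second(argc, argv);
}
```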
|
@ -8,10 +8,6 @@
|
|||
#include <string_view>
|
||||
#include <cerrno>
|
||||
|
||||
#ifdef READLINE
|
||||
#include <readline/history.h>
|
||||
#include <readline/readline.h>
|
||||
#else
|
||||
// editline versions older than 1.15.2 don't wrap their API for C++ usage
|
||||
// (added in https://github.com/troglobit/editline/commit/91398ceb3427b730995357e9d120539fb9bb7461).
|
||||
// This results in linker errors due to name-mangling of editline C symbols.
|
||||
|
@ -20,7 +16,6 @@
|
|||
extern "C" {
|
||||
#include <editline.h>
|
||||
}
|
||||
#endif
|
||||
|
||||
#include "finally.hh"
|
||||
#include "repl-interacter.hh"
|
||||
|
@ -115,17 +110,13 @@ ReadlineLikeInteracter::Guard ReadlineLikeInteracter::init(detail::ReplCompleter
|
|||
} catch (SysError & e) {
|
||||
logWarning(e.info());
|
||||
}
|
||||
#ifndef READLINE
|
||||
el_hist_size = 1000;
|
||||
#endif
|
||||
read_history(historyFile.c_str());
|
||||
auto oldRepl = curRepl;
|
||||
curRepl = repl;
|
||||
Guard restoreRepl([oldRepl] { curRepl = oldRepl; });
|
||||
#ifndef READLINE
|
||||
rl_set_complete_func(completionCallback);
|
||||
rl_set_list_possib_func(listPossibleCallback);
|
||||
#endif
|
||||
return restoreRepl;
|
||||
}
|
||||
|
||||
|
|
|
@ -817,10 +817,10 @@ ProcessLineResult NixRepl::processLine(std::string line)
|
|||
else if (command == ":te" || command == ":trace-enable") {
|
||||
if (arg == "false" || (arg == "" && loggerSettings.showTrace)) {
|
||||
std::cout << "not showing error traces\n";
|
||||
loggerSettings.showTrace = false;
|
||||
loggerSettings.showTrace.override(false);
|
||||
} else if (arg == "true" || (arg == "" && !loggerSettings.showTrace)) {
|
||||
std::cout << "showing error traces\n";
|
||||
loggerSettings.showTrace = true;
|
||||
loggerSettings.showTrace.override(true);
|
||||
} else {
|
||||
throw Error("unexpected argument '%s' to %s", arg, command);
|
||||
};
|
||||
|
|
|
@ -79,7 +79,7 @@ struct AttrDb
|
|||
state->txn->commit();
|
||||
state->txn.reset();
|
||||
} catch (...) {
|
||||
ignoreException();
|
||||
ignoreExceptionInDestructor();
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -90,7 +90,7 @@ struct AttrDb
|
|||
try {
|
||||
return fun();
|
||||
} catch (SQLiteError &) {
|
||||
ignoreException();
|
||||
ignoreExceptionExceptInterrupt();
|
||||
failed = true;
|
||||
return 0;
|
||||
}
|
||||
|
@ -329,7 +329,7 @@ static std::shared_ptr<AttrDb> makeAttrDb(
|
|||
try {
|
||||
return std::make_shared<AttrDb>(cfg, fingerprint, symbols);
|
||||
} catch (SQLiteError &) {
|
||||
ignoreException();
|
||||
ignoreExceptionExceptInterrupt();
|
||||
return nullptr;
|
||||
}
|
||||
}
|
||||
|
|
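The eval-cache hunks above split the old `ignoreException()` into `ignoreExceptionInDestructor()` and `ignoreExceptionExceptInterrupt()`. The names come from the diff; the bodies below are an illustrative sketch of the intended distinction, not the Lix implementations:

```cpp
// Sketch only: destructors must swallow everything, ordinary error paths
// should let user interruption keep propagating.
#include <exception>
#include <iostream>
#include <stdexcept>

struct Interrupted : std::exception {};

// Safe to call from destructors / noexcept contexts: log and move on.
inline void ignoreExceptionInDestructor()
{
    try { throw; }
    catch (std::exception & e) { std::cerr << "ignored: " << e.what() << "\n"; }
    catch (...) { std::cerr << "ignored unknown exception\n"; }
}

// For ordinary error paths: swallow everything *except* interruption.
inline void ignoreExceptionExceptInterrupt()
{
    try { throw; }
    catch (Interrupted &) { throw; }   // let Ctrl-C style cancellation escape
    catch (std::exception & e) { std::cerr << "ignored: " << e.what() << "\n"; }
}

int main()
{
    try {
        try { throw std::runtime_error("sqlite failure"); }
        catch (...) { ignoreExceptionExceptInterrupt(); }  // swallowed
        try { throw Interrupted{}; }
        catch (...) { ignoreExceptionExceptInterrupt(); }  // rethrown
    } catch (Interrupted &) {
        std::cout << "interrupt propagated\n";
    }
}
```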
|
@ -47,7 +47,7 @@ static Strings parseNixPath(const std::string & s)
|
|||
EvalSettings::EvalSettings()
|
||||
{
|
||||
auto var = getEnv("NIX_PATH");
|
||||
if (var) nixPath = parseNixPath(*var);
|
||||
if (var) nixPath.setDefault(parseNixPath(*var));
|
||||
}
|
||||
|
||||
Strings EvalSettings::getDefaultNixPath()
|
||||
|
|
|
@ -185,6 +185,54 @@ struct EvalSettings : Config
|
|||
else
|
||||
{ }
|
||||
```
|
||||
|
||||
Here's a more elaborate `repl-overlay`, which provides the following
|
||||
variables:
|
||||
- The original, unmodified variables are aliased to `original`.
|
||||
- `legacyPackages.${system}` (if it exists) or `packages.${system}`
|
||||
(otherwise) is aliased to `pkgs`.
|
||||
- All attribute set variables with a `${system}` attribute are
|
||||
abbreviated in the same manner; e.g. `devShells.${system}` is
|
||||
shortened to `devShells`.
|
||||
|
||||
For example, the following `repl-overlay` implements this:
|
||||
|
||||
```nix
|
||||
info: final: attrs: let
|
||||
# Equivalent to nixpkgs `lib.optionalAttrs`.
|
||||
optionalAttrs = predicate: attrs:
|
||||
if predicate
|
||||
then attrs
|
||||
else {};
|
||||
|
||||
# If `attrs.${oldName}.${info.currentSystem}` exists, alias `${newName}` to
|
||||
# it.
|
||||
collapseRenamed = oldName: newName:
|
||||
optionalAttrs (builtins.hasAttr oldName attrs
|
||||
&& builtins.hasAttr info.currentSystem attrs.${oldName})
|
||||
{
|
||||
${newName} = attrs.${oldName}.${info.currentSystem};
|
||||
};
|
||||
|
||||
# Alias `attrs.${oldName}.${info.currentSystem}` to `${newName}`.
|
||||
collapse = name: collapseRenamed name name;
|
||||
|
||||
# Alias all `attrs` keys with an `${info.currentSystem}` attribute.
|
||||
collapseAll =
|
||||
builtins.foldl'
|
||||
(prev: name: prev // collapse name)
|
||||
{}
|
||||
(builtins.attrNames attrs);
|
||||
in
|
||||
# Preserve the original bindings as `original`.
|
||||
(optionalAttrs (! attrs ? original)
|
||||
{
|
||||
original = attrs;
|
||||
})
|
||||
// (collapseRenamed "packages" "pkgs")
|
||||
// (collapseRenamed "legacyPackages" "pkgs")
|
||||
// collapseAll
|
||||
```
|
||||
)"};
|
||||
};
|
||||
|
||||
|
|
|
@ -21,6 +21,14 @@ std::ostream & operator <<(std::ostream & str, const SymbolStr & symbol)
|
|||
return printIdentifier(str, s);
|
||||
}
|
||||
|
||||
AttrName::AttrName(Symbol s) : symbol(s)
|
||||
{
|
||||
}
|
||||
|
||||
AttrName::AttrName(std::unique_ptr<Expr> e) : expr(std::move(e))
|
||||
{
|
||||
}
|
||||
|
||||
void Expr::show(const SymbolTable & symbols, std::ostream & str) const
|
||||
{
|
||||
abort();
|
||||
|
@ -239,9 +247,24 @@ void ExprConcatStrings::show(const SymbolTable & symbols, std::ostream & str) co
|
|||
{
|
||||
bool first = true;
|
||||
str << "(";
|
||||
for (auto & i : es) {
|
||||
if (first) first = false; else str << " + ";
|
||||
i.second->show(symbols, str);
|
||||
for (auto & [_pos, part] : es) {
|
||||
if (first)
|
||||
first = false;
|
||||
else
|
||||
str << " + ";
|
||||
|
||||
if (forceString && !dynamic_cast<ExprString *>(part.get())) {
|
||||
/* Print as a string with an interpolation, to preserve the
|
||||
* semantics of the value having to be a string.
|
||||
* Interpolations are weird and someone should eventually
|
||||
* move them out into their own AST node please.
|
||||
*/
|
||||
str << "\"${";
|
||||
part->show(symbols, str);
|
||||
str << "}\"";
|
||||
} else {
|
||||
part->show(symbols, str);
|
||||
}
|
||||
}
|
||||
str << ")";
|
||||
}
|
||||
|
|
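A toy model of the printing rule introduced in `ExprConcatStrings::show` above: when a string-forcing concatenation contains a non-string part, it is printed as `"${...}"` so the round-tripped expression keeps its string-coercion semantics. Everything below is illustrative scaffolding, not the Lix AST:

```cpp
// Toy expression hierarchy demonstrating the interpolation-printing rule.
#include <iostream>
#include <memory>
#include <string>
#include <vector>

struct Expr { virtual ~Expr() = default; virtual void show(std::ostream &) const = 0; };
struct ExprString : Expr {
    std::string s;
    explicit ExprString(std::string s) : s(std::move(s)) {}
    void show(std::ostream & str) const override { str << '"' << s << '"'; }
};
struct ExprVar : Expr {
    std::string name;
    explicit ExprVar(std::string n) : name(std::move(n)) {}
    void show(std::ostream & str) const override { str << name; }
};

static void showConcat(std::ostream & str, bool forceString,
                       const std::vector<std::unique_ptr<Expr>> & es)
{
    bool first = true;
    str << "(";
    for (auto & part : es) {
        if (first) first = false; else str << " + ";
        // Non-string parts of a forced-string concatenation are wrapped in an
        // interpolation so the printed form still coerces them to a string.
        if (forceString && !dynamic_cast<ExprString *>(part.get())) {
            str << "\"${"; part->show(str); str << "}\"";
        } else {
            part->show(str);
        }
    }
    str << ")";
}

int main()
{
    std::vector<std::unique_ptr<Expr>> es;
    es.push_back(std::make_unique<ExprString>("x = "));
    es.push_back(std::make_unique<ExprVar>("x"));
    showConcat(std::cout, true, es);  // prints ("x = " + "${x}")
    std::cout << "\n";
}
```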
|
@ -30,8 +30,8 @@ struct AttrName
|
|||
{
|
||||
Symbol symbol;
|
||||
std::unique_ptr<Expr> expr;
|
||||
AttrName(Symbol s) : symbol(s) {};
|
||||
AttrName(std::unique_ptr<Expr> e) : expr(std::move(e)) {};
|
||||
AttrName(Symbol s);
|
||||
AttrName(std::unique_ptr<Expr> e);
|
||||
};
|
||||
|
||||
typedef std::vector<AttrName> AttrPath;
|
||||
|
|
|
@ -12,7 +12,7 @@
|
|||
// eolf rules in favor of reproducing the old flex lexer as faithfully as
|
||||
// possible, and deferring calculation of positions to downstream users.
|
||||
|
||||
namespace nix::parser::grammar {
|
||||
namespace nix::parser::grammar::v1 {
|
||||
|
||||
using namespace tao::pegtl;
|
||||
namespace p = tao::pegtl;
|
||||
|
@ -225,7 +225,8 @@ struct string : _string, seq<
|
|||
> {};
|
||||
|
||||
struct _ind_string {
|
||||
template<bool Indented, typename... Inner>
|
||||
struct line_start : semantic, star<one<' '>> {};
|
||||
template<typename... Inner>
|
||||
struct literal : semantic, seq<Inner...> {};
|
||||
struct interpolation : semantic, seq<
|
||||
p::string<'$', '{'>, seps,
|
||||
|
@ -233,34 +234,53 @@ struct _ind_string {
|
|||
must<one<'}'>>
|
||||
> {};
|
||||
struct escape : semantic, must<any> {};
|
||||
/* Marker for non-empty lines */
|
||||
struct has_content : semantic, seq<> {};
|
||||
};
|
||||
struct ind_string : _ind_string, seq<
|
||||
TAO_PEGTL_STRING("''"),
|
||||
// Strip first line completely if empty
|
||||
opt<star<one<' '>>, one<'\n'>>,
|
||||
star<
|
||||
sor<
|
||||
_ind_string::literal<
|
||||
true,
|
||||
list<
|
||||
seq<
|
||||
// Start a line with some indentation
|
||||
// (we always match even the empty string if no indentation, as this creates the line)
|
||||
_ind_string::line_start,
|
||||
// The actual line
|
||||
opt<
|
||||
plus<
|
||||
sor<
|
||||
not_one<'$', '\''>,
|
||||
seq<one<'$'>, not_one<'{', '\''>>,
|
||||
seq<one<'\''>, not_one<'\'', '$'>>
|
||||
>
|
||||
>
|
||||
>,
|
||||
_ind_string::interpolation,
|
||||
_ind_string::literal<false, one<'$'>>,
|
||||
_ind_string::literal<false, one<'\''>, not_at<one<'\''>>>,
|
||||
seq<one<'\''>, _ind_string::literal<false, p::string<'\'', '\''>>>,
|
||||
seq<
|
||||
p::string<'\'', '\''>,
|
||||
sor<
|
||||
_ind_string::literal<false, one<'$'>>,
|
||||
seq<one<'\\'>, _ind_string::escape>
|
||||
_ind_string::literal<
|
||||
plus<
|
||||
sor<
|
||||
not_one<'$', '\'', '\n'>,
|
||||
// TODO probably factor this out like the others for performance
|
||||
seq<one<'$'>, not_one<'{', '\'', '\n'>>,
|
||||
seq<one<'$'>, at<one<'\n'>>>,
|
||||
seq<one<'\''>, not_one<'\'', '$', '\n'>>,
|
||||
seq<one<'\''>, at<one<'\n'>>>
|
||||
>
|
||||
>
|
||||
>,
|
||||
_ind_string::interpolation,
|
||||
_ind_string::literal<one<'$'>>,
|
||||
_ind_string::literal<one<'\''>, not_at<one<'\''>>>,
|
||||
seq<one<'\''>, _ind_string::literal<p::string<'\'', '\''>>>,
|
||||
seq<
|
||||
p::string<'\'', '\''>,
|
||||
sor<
|
||||
_ind_string::literal<one<'$'>>,
|
||||
seq<one<'\\'>, _ind_string::escape>
|
||||
>
|
||||
>
|
||||
>,
|
||||
_ind_string::has_content
|
||||
>
|
||||
>
|
||||
>
|
||||
>,
|
||||
// End of line, LF. CR is just ignored and not treated as ending a line
|
||||
// (for the purpose of indentation stripping)
|
||||
_ind_string::literal<one<'\n'>>
|
||||
>,
|
||||
must<TAO_PEGTL_STRING("''")>
|
||||
> {};
|
||||
|
@ -352,10 +372,10 @@ struct formals : semantic, _formals, seq<
|
|||
|
||||
struct _attr {
|
||||
struct simple : semantic, sor<t::identifier, t::kw_or> {};
|
||||
struct string : semantic, seq<grammar::string> {};
|
||||
struct string : semantic, seq<grammar::v1::string> {};
|
||||
struct expr : semantic, seq<
|
||||
TAO_PEGTL_STRING("${"), seps,
|
||||
must<grammar::expr>, seps,
|
||||
must<grammar::v1::expr>, seps,
|
||||
must<one<'}'>>
|
||||
> {};
|
||||
};
|
||||
|
@ -452,9 +472,9 @@ struct _expr {
|
|||
struct id : semantic, t::identifier {};
|
||||
struct int_ : semantic, t::integer {};
|
||||
struct float_ : semantic, t::floating {};
|
||||
struct string : semantic, seq<grammar::string> {};
|
||||
struct ind_string : semantic, seq<grammar::ind_string> {};
|
||||
struct path : semantic, seq<grammar::path> {};
|
||||
struct string : semantic, seq<grammar::v1::string> {};
|
||||
struct ind_string : semantic, seq<grammar::v1::ind_string> {};
|
||||
struct path : semantic, seq<grammar::v1::path> {};
|
||||
struct uri : semantic, t::uri {};
|
||||
struct ancient_let : semantic, _attrset<must, t::kw_let, seps> {};
|
||||
struct rec_set : semantic, _attrset<must, t::kw_rec, seps> {};
|
||||
|
@ -628,34 +648,34 @@ struct nothing : p::nothing<Rule> {
|
|||
|
||||
template<typename Self, typename OpCtx, typename AttrPathT, typename ExprT>
|
||||
struct operator_semantics {
|
||||
struct has_attr : grammar::op::has_attr {
|
||||
struct has_attr : grammar::v1::op::has_attr {
|
||||
AttrPathT path;
|
||||
};
|
||||
|
||||
struct OpEntry {
|
||||
OpCtx ctx;
|
||||
uint8_t prec;
|
||||
grammar::op::kind assoc;
|
||||
grammar::v1::op::kind assoc;
|
||||
std::variant<
|
||||
grammar::op::not_,
|
||||
grammar::op::unary_minus,
|
||||
grammar::op::implies,
|
||||
grammar::op::or_,
|
||||
grammar::op::and_,
|
||||
grammar::op::equals,
|
||||
grammar::op::not_equals,
|
||||
grammar::op::less_eq,
|
||||
grammar::op::greater_eq,
|
||||
grammar::op::update,
|
||||
grammar::op::concat,
|
||||
grammar::op::less,
|
||||
grammar::op::greater,
|
||||
grammar::op::plus,
|
||||
grammar::op::minus,
|
||||
grammar::op::mul,
|
||||
grammar::op::div,
|
||||
grammar::op::pipe_right,
|
||||
grammar::op::pipe_left,
|
||||
grammar::v1::op::not_,
|
||||
grammar::v1::op::unary_minus,
|
||||
grammar::v1::op::implies,
|
||||
grammar::v1::op::or_,
|
||||
grammar::v1::op::and_,
|
||||
grammar::v1::op::equals,
|
||||
grammar::v1::op::not_equals,
|
||||
grammar::v1::op::less_eq,
|
||||
grammar::v1::op::greater_eq,
|
||||
grammar::v1::op::update,
|
||||
grammar::v1::op::concat,
|
||||
grammar::v1::op::less,
|
||||
grammar::v1::op::greater,
|
||||
grammar::v1::op::plus,
|
||||
grammar::v1::op::minus,
|
||||
grammar::v1::op::mul,
|
||||
grammar::v1::op::div,
|
||||
grammar::v1::op::pipe_right,
|
||||
grammar::v1::op::pipe_left,
|
||||
has_attr
|
||||
> op;
|
||||
};
|
||||
|
@ -676,7 +696,7 @@ struct operator_semantics {
|
|||
auto & [ctx, precedence, kind, op] = ops.back();
|
||||
// NOTE this relies on associativity not being mixed within a precedence level.
|
||||
if ((precedence > toPrecedence)
|
||||
|| (kind != grammar::op::kind::leftAssoc && precedence == toPrecedence))
|
||||
|| (kind != grammar::v1::op::kind::leftAssoc && precedence == toPrecedence))
|
||||
break;
|
||||
std::visit([&, ctx=std::move(ctx)] (auto & op) {
|
||||
exprs.push_back(static_cast<Self &>(*this).applyOp(ctx, op, args...));
|
||||
|
@ -694,9 +714,9 @@ struct operator_semantics {
|
|||
|
||||
void pushOp(OpCtx ctx, auto o, auto &... args)
|
||||
{
|
||||
if (o.kind != grammar::op::kind::unary)
|
||||
if (o.kind != grammar::v1::op::kind::unary)
|
||||
reduce(o.precedence, args...);
|
||||
if (!ops.empty() && o.kind == grammar::op::kind::nonAssoc) {
|
||||
if (!ops.empty() && o.kind == grammar::v1::op::kind::nonAssoc) {
|
||||
auto & [_pos, _prec, _kind, _o] = ops.back();
|
||||
if (_kind == o.kind && _prec == o.precedence)
|
||||
Self::badOperator(ctx, args...);
|
||||
|
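The `reduce`/`pushOp` hunks above implement operator-precedence parsing in which associativity decides whether an equally ranked operator already on the stack is reduced first. A self-contained toy calculator showing the same reduction shape; note it uses the conventional "larger number binds tighter" ordering, which is an assumption and may be inverted relative to the grammar's internal numbering:

```cpp
// Toy precedence-climbing reducer over integers with '+' and '*' only.
// Explanatory sketch, not the Lix parser.
#include <cassert>
#include <vector>

enum class Assoc { leftAssoc, rightAssoc };

struct Op { int prec; Assoc assoc; char sym; };

struct Calc {
    std::vector<long> vals;
    std::vector<Op> ops;

    void apply(Op o) {
        long r = vals.back(); vals.pop_back();
        long l = vals.back(); vals.pop_back();
        vals.push_back(o.sym == '+' ? l + r : l * r);
    }

    // Reduce while the stacked operator binds tighter than the incoming one,
    // or equally tight and left-associative (mirroring the condition above,
    // modulo the direction of the precedence numbering).
    void reduce(int toPrec) {
        while (!ops.empty()) {
            Op o = ops.back();
            if (o.prec > toPrec || (o.assoc == Assoc::leftAssoc && o.prec == toPrec)) {
                ops.pop_back();
                apply(o);
            } else break;
        }
    }

    void pushOp(Op o) { reduce(o.prec); ops.push_back(o); }
    long finish() { reduce(0); return vals.back(); }
};

int main() {
    // 1 + 2 * 3 + 4  ==>  11
    Calc c;
    c.vals.push_back(1); c.pushOp({1, Assoc::leftAssoc, '+'});
    c.vals.push_back(2); c.pushOp({2, Assoc::leftAssoc, '*'});
    c.vals.push_back(3); c.pushOp({1, Assoc::leftAssoc, '+'});
    c.vals.push_back(4);
    assert(c.finish() == 11);
}
```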
|
src/libexpr/parser/parser-impl1.inc.cc (new file, +863)
|
@ -0,0 +1,863 @@
|
|||
// flip this define when doing parser development to enable some grammar checks.
|
||||
#if 0
|
||||
#include <tao/pegtl/contrib/analyze.hpp>
|
||||
#define ANALYZE_GRAMMAR \
|
||||
([] { \
|
||||
const std::size_t issues = tao::pegtl::analyze<grammar::v1::root>(); \
|
||||
assert(issues == 0); \
|
||||
})()
|
||||
#else
|
||||
#define ANALYZE_GRAMMAR ((void) 0)
|
||||
#endif
|
||||
|
||||
namespace p = tao::pegtl;
|
||||
|
||||
namespace nix::parser::v1 {
|
||||
namespace {
|
||||
|
||||
template<typename>
|
||||
inline constexpr const char * error_message = nullptr;
|
||||
|
||||
#define error_message_for(...) \
|
||||
template<> inline constexpr auto error_message<__VA_ARGS__>
|
||||
|
||||
error_message_for(p::one<'{'>) = "expecting '{'";
|
||||
error_message_for(p::one<'}'>) = "expecting '}'";
|
||||
error_message_for(p::one<'"'>) = "expecting '\"'";
|
||||
error_message_for(p::one<';'>) = "expecting ';'";
|
||||
error_message_for(p::one<')'>) = "expecting ')'";
|
||||
error_message_for(p::one<']'>) = "expecting ']'";
|
||||
error_message_for(p::one<':'>) = "expecting ':'";
|
||||
error_message_for(p::string<'\'', '\''>) = "expecting \"''\"";
|
||||
error_message_for(p::any) = "expecting any character";
|
||||
error_message_for(grammar::v1::eof) = "expecting end of file";
|
||||
error_message_for(grammar::v1::seps) = "expecting separators";
|
||||
error_message_for(grammar::v1::path::forbid_prefix_triple_slash) = "too many slashes in path";
|
||||
error_message_for(grammar::v1::path::forbid_prefix_double_slash_no_interp) = "path has a trailing slash";
|
||||
error_message_for(grammar::v1::expr) = "expecting expression";
|
||||
error_message_for(grammar::v1::expr::unary) = "expecting expression";
|
||||
error_message_for(grammar::v1::binding::equal) = "expecting '='";
|
||||
error_message_for(grammar::v1::expr::lambda::arg) = "expecting identifier";
|
||||
error_message_for(grammar::v1::formals) = "expecting formals";
|
||||
error_message_for(grammar::v1::attrpath) = "expecting attribute path";
|
||||
error_message_for(grammar::v1::expr::select) = "expecting selection expression";
|
||||
error_message_for(grammar::v1::t::kw_then) = "expecting 'then'";
|
||||
error_message_for(grammar::v1::t::kw_else) = "expecting 'else'";
|
||||
error_message_for(grammar::v1::t::kw_in) = "expecting 'in'";
|
||||
|
||||
struct SyntaxErrors
|
||||
{
|
||||
template<typename Rule>
|
||||
static constexpr auto message = error_message<Rule>;
|
||||
|
||||
template<typename Rule>
|
||||
static constexpr bool raise_on_failure = false;
|
||||
};
|
||||
|
||||
template<typename Rule>
|
||||
struct Control : p::must_if<SyntaxErrors>::control<Rule>
|
||||
{
|
||||
template<typename ParseInput, typename... States>
|
||||
[[noreturn]] static void raise(const ParseInput & in, States &&... st)
|
||||
{
|
||||
if (in.empty()) {
|
||||
std::string expected;
|
||||
if constexpr (constexpr auto msg = error_message<Rule>)
|
||||
expected = fmt(", %s", msg);
|
||||
throw p::parse_error("unexpected end of file" + expected, in);
|
||||
}
|
||||
p::must_if<SyntaxErrors>::control<Rule>::raise(in, st...);
|
||||
}
|
||||
};
|
||||
|
||||
struct ExprState
|
||||
: grammar::v1::
|
||||
operator_semantics<ExprState, PosIdx, AttrPath, std::pair<PosIdx, std::unique_ptr<Expr>>>
|
||||
{
|
||||
std::unique_ptr<Expr> popExprOnly() {
|
||||
return std::move(popExpr().second);
|
||||
}
|
||||
|
||||
template<typename Op, typename... Args>
|
||||
std::unique_ptr<Expr> applyUnary(Args &&... args) {
|
||||
return std::make_unique<Op>(popExprOnly(), std::forward<Args>(args)...);
|
||||
}
|
||||
|
||||
template<typename Op>
|
||||
std::unique_ptr<Expr> applyBinary(PosIdx pos) {
|
||||
auto right = popExprOnly(), left = popExprOnly();
|
||||
return std::make_unique<Op>(pos, std::move(left), std::move(right));
|
||||
}
|
||||
|
||||
std::unique_ptr<Expr> call(PosIdx pos, Symbol fn, bool flip = false)
|
||||
{
|
||||
std::vector<std::unique_ptr<Expr>> args(2);
|
||||
args[flip ? 0 : 1] = popExprOnly();
|
||||
args[flip ? 1 : 0] = popExprOnly();
|
||||
return std::make_unique<ExprCall>(pos, std::make_unique<ExprVar>(fn), std::move(args));
|
||||
}
|
||||
|
||||
std::unique_ptr<Expr> pipe(PosIdx pos, State & state, bool flip = false)
|
||||
{
|
||||
if (!state.featureSettings.isEnabled(Xp::PipeOperator))
|
||||
throw ParseError({
|
||||
.msg = HintFmt("Pipe operator is disabled"),
|
||||
.pos = state.positions[pos]
|
||||
});
|
||||
|
||||
// Reverse the order compared to normal function application: arg |> fn
|
||||
std::unique_ptr<Expr> fn, arg;
|
||||
if (flip) {
|
||||
fn = popExprOnly();
|
||||
arg = popExprOnly();
|
||||
} else {
|
||||
arg = popExprOnly();
|
||||
fn = popExprOnly();
|
||||
}
|
||||
std::vector<std::unique_ptr<Expr>> args{1};
|
||||
args[0] = std::move(arg);
|
||||
|
||||
return std::make_unique<ExprCall>(pos, std::move(fn), std::move(args));
|
||||
}
|
||||
|
||||
std::unique_ptr<Expr> order(PosIdx pos, bool less, State & state)
|
||||
{
|
||||
return call(pos, state.s.lessThan, !less);
|
||||
}
|
||||
|
||||
std::unique_ptr<Expr> concatStrings(PosIdx pos)
|
||||
{
|
||||
std::vector<std::pair<PosIdx, std::unique_ptr<Expr>>> args(2);
|
||||
args[1] = popExpr();
|
||||
args[0] = popExpr();
|
||||
return std::make_unique<ExprConcatStrings>(pos, false, std::move(args));
|
||||
}
|
||||
|
||||
std::unique_ptr<Expr> negate(PosIdx pos, State & state)
|
||||
{
|
||||
std::vector<std::unique_ptr<Expr>> args(2);
|
||||
args[0] = std::make_unique<ExprInt>(0);
|
||||
args[1] = popExprOnly();
|
||||
return std::make_unique<ExprCall>(pos, std::make_unique<ExprVar>(state.s.sub), std::move(args));
|
||||
}
|
||||
|
||||
std::pair<PosIdx, std::unique_ptr<Expr>> applyOp(PosIdx pos, auto & op, State & state) {
|
||||
using Op = grammar::v1::op;
|
||||
|
||||
auto not_ = [] (auto e) {
|
||||
return std::make_unique<ExprOpNot>(std::move(e));
|
||||
};
|
||||
|
||||
return {
|
||||
pos,
|
||||
(overloaded {
|
||||
[&] (Op::implies) { return applyBinary<ExprOpImpl>(pos); },
|
||||
[&] (Op::or_) { return applyBinary<ExprOpOr>(pos); },
|
||||
[&] (Op::and_) { return applyBinary<ExprOpAnd>(pos); },
|
||||
[&] (Op::equals) { return applyBinary<ExprOpEq>(pos); },
|
||||
[&] (Op::not_equals) { return applyBinary<ExprOpNEq>(pos); },
|
||||
[&] (Op::less) { return order(pos, true, state); },
|
||||
[&] (Op::greater_eq) { return not_(order(pos, true, state)); },
|
||||
[&] (Op::greater) { return order(pos, false, state); },
|
||||
[&] (Op::less_eq) { return not_(order(pos, false, state)); },
|
||||
[&] (Op::update) { return applyBinary<ExprOpUpdate>(pos); },
|
||||
[&] (Op::not_) { return applyUnary<ExprOpNot>(); },
|
||||
[&] (Op::plus) { return concatStrings(pos); },
|
||||
[&] (Op::minus) { return call(pos, state.s.sub); },
|
||||
[&] (Op::mul) { return call(pos, state.s.mul); },
|
||||
[&] (Op::div) { return call(pos, state.s.div); },
|
||||
[&] (Op::concat) { return applyBinary<ExprOpConcatLists>(pos); },
|
||||
[&] (has_attr & a) { return applyUnary<ExprOpHasAttr>(std::move(a.path)); },
|
||||
[&] (Op::unary_minus) { return negate(pos, state); },
|
||||
[&] (Op::pipe_right) { return pipe(pos, state, true); },
|
||||
[&] (Op::pipe_left) { return pipe(pos, state); },
|
||||
})(op)
|
||||
};
|
||||
}
|
||||
|
||||
// always_inline is needed, otherwise pushOp slows down considerably
|
||||
[[noreturn, gnu::always_inline]]
|
||||
static void badOperator(PosIdx pos, State & state)
|
||||
{
|
||||
throw ParseError({
|
||||
.msg = HintFmt("syntax error, unexpected operator"),
|
||||
.pos = state.positions[pos]
|
||||
});
|
||||
}
|
||||
|
||||
template<typename Expr, typename... Args>
|
||||
Expr & pushExpr(PosIdx pos, Args && ... args)
|
||||
{
|
||||
auto p = std::make_unique<Expr>(std::forward<Args>(args)...);
|
||||
auto & result = *p;
|
||||
exprs.emplace_back(pos, std::move(p));
|
||||
return result;
|
||||
}
|
||||
};
|
||||
|
||||
struct SubexprState {
|
||||
private:
|
||||
ExprState * up;
|
||||
|
||||
public:
|
||||
explicit SubexprState(ExprState & up, auto &...) : up(&up) {}
|
||||
operator ExprState &() { return *up; }
|
||||
ExprState * operator->() { return up; }
|
||||
};
|
||||
|
||||
|
||||
|
||||
template<typename Rule>
|
||||
struct BuildAST : grammar::v1::nothing<Rule> {};
|
||||
|
||||
struct LambdaState : SubexprState {
|
||||
using SubexprState::SubexprState;
|
||||
|
||||
Symbol arg;
|
||||
std::unique_ptr<Formals> formals;
|
||||
};
|
||||
|
||||
struct FormalsState : SubexprState {
|
||||
using SubexprState::SubexprState;
|
||||
|
||||
Formals formals{};
|
||||
Formal formal{};
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::v1::formal::name> {
|
||||
static void apply(const auto & in, FormalsState & s, State & ps) {
|
||||
s.formal = {
|
||||
.pos = ps.at(in),
|
||||
.name = ps.symbols.create(in.string_view()),
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::v1::formal> {
|
||||
static void apply0(FormalsState & s, State &) {
|
||||
s.formals.formals.emplace_back(std::move(s.formal));
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::v1::formal::default_value> {
|
||||
static void apply0(FormalsState & s, State & ps) {
|
||||
s.formal.def = s->popExprOnly();
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::v1::formals::ellipsis> {
|
||||
static void apply0(FormalsState & s, State &) {
|
||||
s.formals.ellipsis = true;
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::v1::formals> : change_head<FormalsState> {
|
||||
static void success0(FormalsState & f, LambdaState & s, State &) {
|
||||
s.formals = std::make_unique<Formals>(std::move(f.formals));
|
||||
}
|
||||
};
|
||||
|
||||
struct AttrState : SubexprState {
|
||||
using SubexprState::SubexprState;
|
||||
|
||||
std::vector<AttrName> attrs;
|
||||
|
||||
template <typename T>
|
||||
void pushAttr(T && attr, PosIdx) { attrs.emplace_back(std::forward<T>(attr)); }
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::v1::attr::simple> {
|
||||
static void apply(const auto & in, auto & s, State & ps) {
|
||||
s.pushAttr(ps.symbols.create(in.string_view()), ps.at(in));
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::v1::attr::string> {
|
||||
static void apply(const auto & in, auto & s, State & ps) {
|
||||
auto e = s->popExprOnly();
|
||||
if (auto str = dynamic_cast<ExprString *>(e.get()))
|
||||
s.pushAttr(ps.symbols.create(str->s), ps.at(in));
|
||||
else
|
||||
s.pushAttr(std::move(e), ps.at(in));
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::v1::attr::expr> : BuildAST<grammar::v1::attr::string> {};
|
||||
|
||||
struct BindingsState : SubexprState {
|
||||
using SubexprState::SubexprState;
|
||||
|
||||
ExprAttrs attrs;
|
||||
AttrPath path;
|
||||
std::unique_ptr<Expr> value;
|
||||
};
|
||||
|
||||
struct InheritState : SubexprState {
|
||||
using SubexprState::SubexprState;
|
||||
|
||||
std::vector<std::pair<AttrName, PosIdx>> attrs;
|
||||
std::unique_ptr<Expr> from;
|
||||
PosIdx fromPos;
|
||||
|
||||
template <typename T>
|
||||
void pushAttr(T && attr, PosIdx pos) { attrs.emplace_back(std::forward<T>(attr), pos); }
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::v1::inherit::from> {
|
||||
static void apply(const auto & in, InheritState & s, State & ps) {
|
||||
s.from = s->popExprOnly();
|
||||
s.fromPos = ps.at(in);
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::v1::inherit> : change_head<InheritState> {
|
||||
static void success0(InheritState & s, BindingsState & b, State & ps) {
|
||||
auto & attrs = b.attrs.attrs;
|
||||
// TODO this should not reuse generic attrpath rules.
|
||||
for (auto & [i, iPos] : s.attrs) {
|
||||
if (i.symbol)
|
||||
continue;
|
||||
if (auto str = dynamic_cast<ExprString *>(i.expr.get()))
|
||||
i = AttrName(ps.symbols.create(str->s));
|
||||
else {
|
||||
throw ParseError({
|
||||
.msg = HintFmt("dynamic attributes not allowed in inherit"),
|
||||
.pos = ps.positions[iPos]
|
||||
});
|
||||
}
|
||||
}
|
||||
if (s.from != nullptr) {
|
||||
if (!b.attrs.inheritFromExprs)
|
||||
b.attrs.inheritFromExprs = std::make_unique<std::vector<ref<Expr>>>();
|
||||
auto fromExpr = ref<Expr>(std::move(s.from));
|
||||
b.attrs.inheritFromExprs->push_back(fromExpr);
|
||||
for (auto & [i, iPos] : s.attrs) {
|
||||
if (attrs.find(i.symbol) != attrs.end())
|
||||
ps.dupAttr(i.symbol, iPos, attrs[i.symbol].pos);
|
||||
auto inheritFrom = std::make_unique<ExprInheritFrom>(
|
||||
s.fromPos,
|
||||
b.attrs.inheritFromExprs->size() - 1,
|
||||
fromExpr
|
||||
);
|
||||
attrs.emplace(
|
||||
i.symbol,
|
||||
ExprAttrs::AttrDef(
|
||||
std::make_unique<ExprSelect>(iPos, std::move(inheritFrom), i.symbol),
|
||||
iPos,
|
||||
ExprAttrs::AttrDef::Kind::InheritedFrom));
|
||||
}
|
||||
} else {
|
||||
for (auto & [i, iPos] : s.attrs) {
|
||||
if (attrs.find(i.symbol) != attrs.end())
|
||||
ps.dupAttr(i.symbol, iPos, attrs[i.symbol].pos);
|
||||
attrs.emplace(
|
||||
i.symbol,
|
||||
ExprAttrs::AttrDef(
|
||||
std::make_unique<ExprVar>(iPos, i.symbol),
|
||||
iPos,
|
||||
ExprAttrs::AttrDef::Kind::Inherited));
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::v1::binding::path> : change_head<AttrState> {
|
||||
static void success0(AttrState & a, BindingsState & s, State & ps) {
|
||||
s.path = std::move(a.attrs);
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::v1::binding::value> {
|
||||
static void apply0(BindingsState & s, State & ps) {
|
||||
s.value = s->popExprOnly();
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::v1::binding> {
|
||||
static void apply(const auto & in, BindingsState & s, State & ps) {
|
||||
ps.addAttr(&s.attrs, std::move(s.path), std::move(s.value), ps.at(in));
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::v1::expr::id> {
|
||||
static void apply(const auto & in, ExprState & s, State & ps) {
|
||||
if (in.string_view() == "__curPos")
|
||||
s.pushExpr<ExprPos>(ps.at(in), ps.at(in));
|
||||
else
|
||||
s.pushExpr<ExprVar>(ps.at(in), ps.at(in), ps.symbols.create(in.string_view()));
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::v1::expr::int_> {
|
||||
static void apply(const auto & in, ExprState & s, State & ps) {
|
||||
int64_t v;
|
||||
if (std::from_chars(in.begin(), in.end(), v).ec != std::errc{}) {
|
||||
throw ParseError({
|
||||
.msg = HintFmt("invalid integer '%1%'", in.string_view()),
|
||||
.pos = ps.positions[ps.at(in)],
|
||||
});
|
||||
}
|
||||
s.pushExpr<ExprInt>(noPos, v);
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::v1::expr::float_> {
|
||||
static void apply(const auto & in, ExprState & s, State & ps) {
|
||||
// copy the input into a temporary string so we can call stod.
|
||||
// can't use from_chars because libc++ (thus darwin) does not have it,
|
||||
// and floats are not performance-sensitive anyway. if they were you'd
|
||||
// be in much bigger trouble than this.
|
||||
//
|
||||
// we also get to do a locale-saving dance because stod is locale-aware and
|
||||
// something (a plugin?) may have called setlocale or uselocale.
|
||||
static struct locale_hack {
|
||||
locale_t posix;
|
||||
locale_hack(): posix(newlocale(LC_ALL_MASK, "POSIX", 0))
|
||||
{
|
||||
if (posix == 0)
|
||||
throw SysError("could not get POSIX locale");
|
||||
}
|
||||
} locale;
|
||||
|
||||
auto tmp = in.string();
|
||||
double v = [&] {
|
||||
auto oldLocale = uselocale(locale.posix);
|
||||
Finally resetLocale([=] { uselocale(oldLocale); });
|
||||
try {
|
||||
return std::stod(tmp);
|
||||
} catch (...) {
|
||||
throw ParseError({
|
||||
.msg = HintFmt("invalid float '%1%'", in.string_view()),
|
||||
.pos = ps.positions[ps.at(in)],
|
||||
});
|
||||
}
|
||||
}();
|
||||
s.pushExpr<ExprFloat>(noPos, v);
|
||||
}
|
||||
};
|
||||
|
||||
struct StringState : SubexprState {
|
||||
using SubexprState::SubexprState;
|
||||
|
||||
std::string currentLiteral;
|
||||
PosIdx currentPos;
|
||||
std::vector<std::pair<nix::PosIdx, std::unique_ptr<Expr>>> parts;
|
||||
|
||||
void append(PosIdx pos, std::string_view s)
|
||||
{
|
||||
if (currentLiteral.empty())
|
||||
currentPos = pos;
|
||||
currentLiteral += s;
|
||||
}
|
||||
|
||||
// FIXME this truncates strings on NUL for compat with the old parser. ideally
|
||||
// we should use the decomposition the grammar gives us instead of iterating over
|
||||
// the entire string again.
|
||||
static void unescapeStr(std::string & str)
|
||||
{
|
||||
char * s = str.data();
|
||||
char * t = s;
|
||||
char c;
|
||||
while ((c = *s++)) {
|
||||
if (c == '\\') {
|
||||
c = *s++;
|
||||
if (c == 'n') *t = '\n';
|
||||
else if (c == 'r') *t = '\r';
|
||||
else if (c == 't') *t = '\t';
|
||||
else *t = c;
|
||||
}
|
||||
else if (c == '\r') {
|
||||
/* Normalise CR and CR/LF into LF. */
|
||||
*t = '\n';
|
||||
if (*s == '\n') s++; /* cr/lf */
|
||||
}
|
||||
else *t = c;
|
||||
t++;
|
||||
}
|
||||
str.resize(t - str.data());
|
||||
}
|
||||
|
||||
void endLiteral()
|
||||
{
|
||||
if (!currentLiteral.empty()) {
|
||||
unescapeStr(currentLiteral);
|
||||
parts.emplace_back(currentPos, std::make_unique<ExprString>(std::move(currentLiteral)));
|
||||
}
|
||||
}
|
||||
|
||||
std::unique_ptr<Expr> finish()
|
||||
{
|
||||
if (parts.empty()) {
|
||||
unescapeStr(currentLiteral);
|
||||
return std::make_unique<ExprString>(std::move(currentLiteral));
|
||||
} else {
|
||||
endLiteral();
|
||||
auto pos = parts[0].first;
|
||||
return std::make_unique<ExprConcatStrings>(pos, true, std::move(parts));
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
template<typename... Content> struct BuildAST<grammar::v1::string::literal<Content...>> {
|
||||
static void apply(const auto & in, StringState & s, State & ps) {
|
||||
s.append(ps.at(in), in.string_view());
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::v1::string::cr_lf> {
|
||||
static void apply(const auto & in, StringState & s, State & ps) {
|
||||
s.append(ps.at(in), in.string_view()); // FIXME compat with old parser
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::v1::string::interpolation> {
|
||||
static void apply(const auto & in, StringState & s, State & ps) {
|
||||
s.endLiteral();
|
||||
s.parts.emplace_back(ps.at(in), s->popExprOnly());
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::v1::string::escape> {
|
||||
static void apply(const auto & in, StringState & s, State & ps) {
|
||||
s.append(ps.at(in), "\\"); // FIXME compat with old parser
|
||||
s.append(ps.at(in), in.string_view());
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::v1::string> : change_head<StringState> {
|
||||
static void success0(StringState & s, ExprState & e, State &) {
|
||||
e.exprs.emplace_back(noPos, s.finish());
|
||||
}
|
||||
};
|
||||
|
||||
struct IndStringState : SubexprState {
|
||||
using SubexprState::SubexprState;
|
||||
|
||||
std::vector<IndStringLine> lines;
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::v1::ind_string::line_start> {
|
||||
static void apply(const auto & in, IndStringState & s, State & ps) {
|
||||
s.lines.push_back(IndStringLine { in.string_view(), ps.at(in) });
|
||||
}
|
||||
};
|
||||
|
||||
template<typename... Content>
|
||||
struct BuildAST<grammar::v1::ind_string::literal<Content...>> {
|
||||
static void apply(const auto & in, IndStringState & s, State & ps) {
|
||||
s.lines.back().parts.emplace_back(ps.at(in), in.string_view());
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::v1::ind_string::interpolation> {
|
||||
static void apply(const auto & in, IndStringState & s, State & ps) {
|
||||
s.lines.back().parts.emplace_back(ps.at(in), s->popExprOnly());
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::v1::ind_string::escape> {
|
||||
static void apply(const auto & in, IndStringState & s, State & ps) {
|
||||
switch (*in.begin()) {
|
||||
case 'n': s.lines.back().parts.emplace_back(ps.at(in), "\n"); break;
|
||||
case 'r': s.lines.back().parts.emplace_back(ps.at(in), "\r"); break;
|
||||
case 't': s.lines.back().parts.emplace_back(ps.at(in), "\t"); break;
|
||||
default: s.lines.back().parts.emplace_back(ps.at(in), in.string_view()); break;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::v1::ind_string::has_content> {
|
||||
static void apply(const auto & in, IndStringState & s, State & ps) {
|
||||
s.lines.back().hasContent = true;
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::v1::ind_string> : change_head<IndStringState> {
|
||||
static void success(const auto & in, IndStringState & s, ExprState & e, State & ps) {
|
||||
e.exprs.emplace_back(noPos, ps.stripIndentation(ps.at(in), std::move(s.lines)));
|
||||
}
|
||||
};
|
||||
|
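The `ind_string` handlers above collect one `IndStringLine` per physical line (indentation captured by `line_start`, a `hasContent` marker, and the literal/interpolation parts) and hand them to `State::stripIndentation`. The sketch below illustrates the stripping semantics on plain strings; it is a simplified stand-in, not Lix's `stripIndentation`:

```cpp
// Illustrative indentation stripping: find the smallest indentation among
// lines that have content, then strip that prefix from every line.
#include <algorithm>
#include <iostream>
#include <string>
#include <vector>

struct IndStringLine {
    std::string indentation;   // captured by ind_string::line_start
    std::string body;          // concatenated literal parts
    bool hasContent = false;   // set by ind_string::has_content
};

static std::string stripIndentation(std::vector<IndStringLine> lines)
{
    size_t minIndent = std::string::npos;
    for (auto & l : lines)
        if (l.hasContent)
            minIndent = std::min(minIndent, l.indentation.size());
    if (minIndent == std::string::npos) minIndent = 0;

    std::string out;
    for (auto & l : lines) {
        out += l.indentation.substr(std::min(minIndent, l.indentation.size()));
        out += l.body;
        out += '\n';
    }
    return out;
}

int main()
{
    std::vector<IndStringLine> lines = {
        {"    ", "foo", true},
        {"      ", "bar", true},
        {"", "", false},          // empty line: does not affect the minimum
    };
    std::cout << stripIndentation(lines);
    // prints "foo", then "  bar", then an empty line
}
```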
||||
template<typename... Content> struct BuildAST<grammar::v1::path::literal<Content...>> {
|
||||
static void apply(const auto & in, StringState & s, State & ps) {
|
||||
s.append(ps.at(in), in.string_view());
|
||||
s.endLiteral();
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::v1::path::interpolation> : BuildAST<grammar::v1::string::interpolation> {};
|
||||
|
||||
template<> struct BuildAST<grammar::v1::path::anchor> {
|
||||
static void apply(const auto & in, StringState & s, State & ps) {
|
||||
Path path(absPath(in.string(), ps.basePath.path.abs()));
|
||||
/* add back in the trailing '/' to the first segment */
|
||||
if (in.string_view().ends_with('/') && in.size() > 1)
|
||||
path += "/";
|
||||
s.parts.emplace_back(ps.at(in), new ExprPath(std::move(path)));
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::v1::path::home_anchor> {
|
||||
static void apply(const auto & in, StringState & s, State & ps) {
|
||||
if (evalSettings.pureEval)
|
||||
throw Error("the path '%s' can not be resolved in pure mode", in.string_view());
|
||||
Path path(getHome() + in.string_view().substr(1));
|
||||
s.parts.emplace_back(ps.at(in), new ExprPath(std::move(path)));
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::v1::path::searched_path> {
|
||||
static void apply(const auto & in, StringState & s, State & ps) {
|
||||
std::vector<std::unique_ptr<Expr>> args{2};
|
||||
args[0] = std::make_unique<ExprVar>(ps.s.nixPath);
|
||||
args[1] = std::make_unique<ExprString>(in.string());
|
||||
s.parts.emplace_back(
|
||||
ps.at(in),
|
||||
std::make_unique<ExprCall>(
|
||||
ps.at(in),
|
||||
std::make_unique<ExprVar>(ps.s.findFile),
|
||||
std::move(args)));
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::v1::path> : change_head<StringState> {
|
||||
template<typename E>
|
||||
static void check_slash(PosIdx end, StringState & s, State & ps) {
|
||||
auto e = dynamic_cast<E *>(s.parts.back().second.get());
|
||||
if (!e || !e->s.ends_with('/'))
|
||||
return;
|
||||
if (s.parts.size() > 1 || e->s != "/")
|
||||
throw ParseError({
|
||||
.msg = HintFmt("path has a trailing slash"),
|
||||
.pos = ps.positions[end],
|
||||
});
|
||||
}
|
||||
|
||||
static void success(const auto & in, StringState & s, ExprState & e, State & ps) {
|
||||
s.endLiteral();
|
||||
check_slash<ExprPath>(ps.atEnd(in), s, ps);
|
||||
check_slash<ExprString>(ps.atEnd(in), s, ps);
|
||||
if (s.parts.size() == 1) {
|
||||
e.exprs.emplace_back(noPos, std::move(s.parts.back().second));
|
||||
} else {
|
||||
e.pushExpr<ExprConcatStrings>(ps.at(in), ps.at(in), false, std::move(s.parts));
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// strings and paths are handled fully by the grammar-level rule for now
|
||||
template<> struct BuildAST<grammar::v1::expr::string> : p::maybe_nothing {};
|
||||
template<> struct BuildAST<grammar::v1::expr::ind_string> : p::maybe_nothing {};
|
||||
template<> struct BuildAST<grammar::v1::expr::path> : p::maybe_nothing {};
|
||||
|
||||
template<> struct BuildAST<grammar::v1::expr::uri> {
|
||||
static void apply(const auto & in, ExprState & s, State & ps) {
|
||||
bool URLLiterals = ps.featureSettings.isEnabled(Dep::UrlLiterals);
|
||||
if (!URLLiterals)
|
||||
throw ParseError({
|
||||
.msg = HintFmt("URL literals are deprecated, allow using them with %s", "--extra-deprecated-features url-literals"),
|
||||
.pos = ps.positions[ps.at(in)]
|
||||
});
|
||||
s.pushExpr<ExprString>(ps.at(in), in.string());
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::v1::expr::ancient_let> : change_head<BindingsState> {
|
||||
static void success(const auto & in, BindingsState & b, ExprState & s, State & ps) {
|
||||
// Added 2024-09-18. Turn into an error at some point in the future.
|
||||
// See the documentation on deprecated features for more details.
|
||||
if (!ps.featureSettings.isEnabled(Dep::AncientLet))
|
||||
warn(
|
||||
"%s found at %s. This feature is deprecated and will be removed in the future. Use %s to silence this warning.",
|
||||
"let {",
|
||||
ps.positions[ps.at(in)],
|
||||
"--extra-deprecated-features ancient-let"
|
||||
);
|
||||
|
||||
b.attrs.pos = ps.at(in);
|
||||
b.attrs.recursive = true;
|
||||
s.pushExpr<ExprSelect>(b.attrs.pos, b.attrs.pos, std::make_unique<ExprAttrs>(std::move(b.attrs)), ps.s.body);
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::v1::expr::rec_set> : change_head<BindingsState> {
|
||||
static void success(const auto & in, BindingsState & b, ExprState & s, State & ps) {
|
||||
// Before inserting new attrs, check for __override and throw an error
|
||||
// (the error will initially be a warning to ease migration)
|
||||
if (!featureSettings.isEnabled(Dep::RecSetOverrides) && b.attrs.attrs.contains(ps.s.overrides)) {
|
||||
ps.overridesFound(ps.at(in));
|
||||
}
|
||||
|
||||
b.attrs.pos = ps.at(in);
|
||||
b.attrs.recursive = true;
|
||||
s.pushExpr<ExprAttrs>(b.attrs.pos, std::move(b.attrs));
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::v1::expr::set> : change_head<BindingsState> {
|
||||
static void success(const auto & in, BindingsState & b, ExprState & s, State & ps) {
|
||||
b.attrs.pos = ps.at(in);
|
||||
s.pushExpr<ExprAttrs>(b.attrs.pos, std::move(b.attrs));
|
||||
}
|
||||
};
|
||||
|
||||
using ListState = std::vector<std::unique_ptr<Expr>>;
|
||||
|
||||
template<> struct BuildAST<grammar::v1::expr::list> : change_head<ListState> {
|
||||
static void success(const auto & in, ListState & ls, ExprState & s, State & ps) {
|
||||
auto e = std::make_unique<ExprList>();
|
||||
e->elems = std::move(ls);
|
||||
s.exprs.emplace_back(ps.at(in), std::move(e));
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::v1::expr::list::entry> : change_head<ExprState> {
|
||||
static void success0(ExprState & e, ListState & s, State & ps) {
|
||||
s.emplace_back(e.finish(ps).second);
|
||||
}
|
||||
};
|
||||
|
||||
struct SelectState : SubexprState {
|
||||
using SubexprState::SubexprState;
|
||||
|
||||
PosIdx pos;
|
||||
ExprSelect * e = nullptr;
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::v1::expr::select::head> {
|
||||
static void apply(const auto & in, SelectState & s, State & ps) {
|
||||
s.pos = ps.at(in);
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::v1::expr::select::attr> : change_head<AttrState> {
|
||||
static void success0(AttrState & a, SelectState & s, State &) {
|
||||
s.e = &s->pushExpr<ExprSelect>(s.pos, s.pos, s->popExprOnly(), std::move(a.attrs), nullptr);
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::v1::expr::select::attr_or> {
|
||||
static void apply0(SelectState & s, State &) {
|
||||
s.e->def = s->popExprOnly();
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::v1::expr::select::as_app_or> {
|
||||
static void apply(const auto & in, SelectState & s, State & ps) {
|
||||
std::vector<std::unique_ptr<Expr>> args(1);
|
||||
args[0] = std::make_unique<ExprVar>(ps.at(in), ps.s.or_);
|
||||
s->pushExpr<ExprCall>(s.pos, s.pos, s->popExprOnly(), std::move(args));
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::v1::expr::select> : change_head<SelectState> {
|
||||
static void success0(const auto &...) {}
|
||||
};
|
||||
|
||||
struct AppState : SubexprState {
|
||||
using SubexprState::SubexprState;
|
||||
|
||||
PosIdx pos;
|
||||
ExprCall * e = nullptr;
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::v1::expr::app::select_or_fn> {
|
||||
static void apply(const auto & in, AppState & s, State & ps) {
|
||||
s.pos = ps.at(in);
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::v1::expr::app::first_arg> {
|
||||
static void apply(auto & in, AppState & s, State & ps) {
|
||||
auto arg = s->popExprOnly(), fn = s->popExprOnly();
|
||||
if ((s.e = dynamic_cast<ExprCall *>(fn.get()))) {
|
||||
// TODO remove.
|
||||
// AST compat with old parser, semantics are the same.
|
||||
// this can happen on occasions such as `<p> <p>` or `a or b or`,
|
||||
// neither of which are super worth optimizing.
|
||||
s.e->args.push_back(std::move(arg));
|
||||
s->exprs.emplace_back(noPos, std::move(fn));
|
||||
} else {
|
||||
std::vector<std::unique_ptr<Expr>> args{1};
|
||||
args[0] = std::move(arg);
|
||||
s.e = &s->pushExpr<ExprCall>(s.pos, s.pos, std::move(fn), std::move(args));
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::v1::expr::app::another_arg> {
|
||||
static void apply0(AppState & s, State & ps) {
|
||||
s.e->args.push_back(s->popExprOnly());
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::v1::expr::app> : change_head<AppState> {
|
||||
static void success0(const auto &...) {}
|
||||
};
|
||||
|
||||
template<typename Op> struct BuildAST<grammar::v1::expr::operator_<Op>> {
|
||||
static void apply(const auto & in, ExprState & s, State & ps) {
|
||||
s.pushOp(ps.at(in), Op{}, ps);
|
||||
}
|
||||
};
|
||||
template<> struct BuildAST<grammar::v1::expr::operator_<grammar::v1::op::has_attr>> : change_head<AttrState> {
|
||||
static void success(const auto & in, AttrState & a, ExprState & s, State & ps) {
|
||||
s.pushOp(ps.at(in), ExprState::has_attr{{}, std::move(a.attrs)}, ps);
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::v1::expr::lambda::arg> {
|
||||
static void apply(const auto & in, LambdaState & s, State & ps) {
|
||||
s.arg = ps.symbols.create(in.string_view());
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::v1::expr::lambda> : change_head<LambdaState> {
|
||||
static void success(const auto & in, LambdaState & l, ExprState & s, State & ps) {
|
||||
if (l.formals)
|
||||
l.formals = ps.validateFormals(std::move(l.formals), ps.at(in), l.arg);
|
||||
s.pushExpr<ExprLambda>(ps.at(in), ps.at(in), l.arg, std::move(l.formals), l->popExprOnly());
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::v1::expr::assert_> {
|
||||
static void apply(const auto & in, ExprState & s, State & ps) {
|
||||
auto body = s.popExprOnly(), cond = s.popExprOnly();
|
||||
s.pushExpr<ExprAssert>(ps.at(in), ps.at(in), std::move(cond), std::move(body));
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::v1::expr::with> {
|
||||
static void apply(const auto & in, ExprState & s, State & ps) {
|
||||
auto body = s.popExprOnly(), scope = s.popExprOnly();
|
||||
s.pushExpr<ExprWith>(ps.at(in), ps.at(in), std::move(scope), std::move(body));
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::v1::expr::let> : change_head<BindingsState> {
|
||||
static void success(const auto & in, BindingsState & b, ExprState & s, State & ps) {
|
||||
if (!b.attrs.dynamicAttrs.empty())
|
||||
throw ParseError({
|
||||
.msg = HintFmt("dynamic attributes not allowed in let"),
|
||||
.pos = ps.positions[ps.at(in)]
|
||||
});
|
||||
|
||||
s.pushExpr<ExprLet>(ps.at(in), std::make_unique<ExprAttrs>(std::move(b.attrs)), b->popExprOnly());
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::v1::expr::if_> {
|
||||
static void apply(const auto & in, ExprState & s, State & ps) {
|
||||
auto else_ = s.popExprOnly(), then = s.popExprOnly(), cond = s.popExprOnly();
|
||||
s.pushExpr<ExprIf>(ps.at(in), ps.at(in), std::move(cond), std::move(then), std::move(else_));
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::v1::expr> : change_head<ExprState> {
|
||||
static void success0(ExprState & inner, ExprState & outer, State & ps) {
|
||||
outer.exprs.push_back(inner.finish(ps));
|
||||
}
|
||||
};
|
||||
|
||||
}
|
||||
}
|
|
@ -14,857 +14,11 @@
|
|||
#include <charconv>
|
||||
#include <memory>
|
||||
|
||||
// flip this define when doing parser development to enable some grammar checks.
|
||||
#if 0
|
||||
#include <tao/pegtl/contrib/analyze.hpp>
|
||||
#define ANALYZE_GRAMMAR \
|
||||
([] { \
|
||||
const std::size_t issues = tao::pegtl::analyze<grammar::root>(); \
|
||||
assert(issues == 0); \
|
||||
})()
|
||||
#else
|
||||
#define ANALYZE_GRAMMAR ((void) 0)
|
||||
#endif
|
||||
|
||||
namespace p = tao::pegtl;
|
||||
|
||||
namespace nix::parser {
|
||||
namespace {
|
||||
|
||||
template<typename>
|
||||
inline constexpr const char * error_message = nullptr;
|
||||
|
||||
#define error_message_for(...) \
|
||||
template<> inline constexpr auto error_message<__VA_ARGS__>
|
||||
|
||||
error_message_for(p::one<'{'>) = "expecting '{'";
|
||||
error_message_for(p::one<'}'>) = "expecting '}'";
|
||||
error_message_for(p::one<'"'>) = "expecting '\"'";
|
||||
error_message_for(p::one<';'>) = "expecting ';'";
|
||||
error_message_for(p::one<')'>) = "expecting ')'";
|
||||
error_message_for(p::one<']'>) = "expecting ']'";
|
||||
error_message_for(p::one<':'>) = "expecting ':'";
|
||||
error_message_for(p::string<'\'', '\''>) = "expecting \"''\"";
|
||||
error_message_for(p::any) = "expecting any character";
|
||||
error_message_for(grammar::eof) = "expecting end of file";
|
||||
error_message_for(grammar::seps) = "expecting separators";
|
||||
error_message_for(grammar::path::forbid_prefix_triple_slash) = "too many slashes in path";
|
||||
error_message_for(grammar::path::forbid_prefix_double_slash_no_interp) = "path has a trailing slash";
|
||||
error_message_for(grammar::expr) = "expecting expression";
|
||||
error_message_for(grammar::expr::unary) = "expecting expression";
|
||||
error_message_for(grammar::binding::equal) = "expecting '='";
|
||||
error_message_for(grammar::expr::lambda::arg) = "expecting identifier";
|
||||
error_message_for(grammar::formals) = "expecting formals";
|
||||
error_message_for(grammar::attrpath) = "expecting attribute path";
|
||||
error_message_for(grammar::expr::select) = "expecting selection expression";
|
||||
error_message_for(grammar::t::kw_then) = "expecting 'then'";
|
||||
error_message_for(grammar::t::kw_else) = "expecting 'else'";
|
||||
error_message_for(grammar::t::kw_in) = "expecting 'in'";
|
||||
|
||||
struct SyntaxErrors
|
||||
{
|
||||
template<typename Rule>
|
||||
static constexpr auto message = error_message<Rule>;
|
||||
|
||||
template<typename Rule>
|
||||
static constexpr bool raise_on_failure = false;
|
||||
};
|
||||
|
||||
template<typename Rule>
|
||||
struct Control : p::must_if<SyntaxErrors>::control<Rule>
|
||||
{
|
||||
template<typename ParseInput, typename... States>
|
||||
[[noreturn]] static void raise(const ParseInput & in, States &&... st)
|
||||
{
|
||||
if (in.empty()) {
|
||||
std::string expected;
|
||||
if constexpr (constexpr auto msg = error_message<Rule>)
|
||||
expected = fmt(", %s", msg);
|
||||
throw p::parse_error("unexpected end of file" + expected, in);
|
||||
}
|
||||
p::must_if<SyntaxErrors>::control<Rule>::raise(in, st...);
|
||||
}
|
||||
};
|
||||
|
||||
struct ExprState
|
||||
: grammar::
|
||||
operator_semantics<ExprState, PosIdx, AttrPath, std::pair<PosIdx, std::unique_ptr<Expr>>>
|
||||
{
|
||||
std::unique_ptr<Expr> popExprOnly() {
|
||||
return std::move(popExpr().second);
|
||||
}
|
||||
|
||||
template<typename Op, typename... Args>
|
||||
std::unique_ptr<Expr> applyUnary(Args &&... args) {
|
||||
return std::make_unique<Op>(popExprOnly(), std::forward<Args>(args)...);
|
||||
}
|
||||
|
||||
template<typename Op>
|
||||
std::unique_ptr<Expr> applyBinary(PosIdx pos) {
|
||||
auto right = popExprOnly(), left = popExprOnly();
|
||||
return std::make_unique<Op>(pos, std::move(left), std::move(right));
|
||||
}
|
||||
|
||||
std::unique_ptr<Expr> call(PosIdx pos, Symbol fn, bool flip = false)
|
||||
{
|
||||
std::vector<std::unique_ptr<Expr>> args(2);
|
||||
args[flip ? 0 : 1] = popExprOnly();
|
||||
args[flip ? 1 : 0] = popExprOnly();
|
||||
return std::make_unique<ExprCall>(pos, std::make_unique<ExprVar>(fn), std::move(args));
|
||||
}
|
||||
|
||||
std::unique_ptr<Expr> pipe(PosIdx pos, State & state, bool flip = false)
|
||||
{
|
||||
if (!state.featureSettings.isEnabled(Xp::PipeOperator))
|
||||
throw ParseError({
|
||||
.msg = HintFmt("Pipe operator is disabled"),
|
||||
.pos = state.positions[pos]
|
||||
});
|
||||
|
||||
// Reverse the order compared to normal function application: arg |> fn
|
||||
std::unique_ptr<Expr> fn, arg;
|
||||
if (flip) {
|
||||
fn = popExprOnly();
|
||||
arg = popExprOnly();
|
||||
} else {
|
||||
arg = popExprOnly();
|
||||
fn = popExprOnly();
|
||||
}
|
||||
std::vector<std::unique_ptr<Expr>> args{1};
|
||||
args[0] = std::move(arg);
|
||||
|
||||
return std::make_unique<ExprCall>(pos, std::move(fn), std::move(args));
|
||||
}
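// Illustrative desugaring of the pipe operators handled here: with flip set
// (the `arg |> fn` form) the function is the right-hand operand, so `x |> f`
// becomes the call `f x`; without flip (the `fn <| arg` form) the function is
// on the left, so `f <| x` likewise becomes `f x`. Only the pop order from
// the expression stack differs.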
|
||||
|
||||
std::unique_ptr<Expr> order(PosIdx pos, bool less, State & state)
|
||||
{
|
||||
return call(pos, state.s.lessThan, !less);
|
||||
}
|
||||
|
||||
std::unique_ptr<Expr> concatStrings(PosIdx pos)
|
||||
{
|
||||
std::vector<std::pair<PosIdx, std::unique_ptr<Expr>>> args(2);
|
||||
args[1] = popExpr();
|
||||
args[0] = popExpr();
|
||||
return std::make_unique<ExprConcatStrings>(pos, false, std::move(args));
|
||||
}
|
||||
|
||||
std::unique_ptr<Expr> negate(PosIdx pos, State & state)
|
||||
{
|
||||
std::vector<std::unique_ptr<Expr>> args(2);
|
||||
args[0] = std::make_unique<ExprInt>(0);
|
||||
args[1] = popExprOnly();
|
||||
return std::make_unique<ExprCall>(pos, std::make_unique<ExprVar>(state.s.sub), std::move(args));
|
||||
}
|
||||
|
||||
std::pair<PosIdx, std::unique_ptr<Expr>> applyOp(PosIdx pos, auto & op, State & state) {
|
||||
using Op = grammar::op;
|
||||
|
||||
auto not_ = [] (auto e) {
|
||||
return std::make_unique<ExprOpNot>(std::move(e));
|
||||
};
|
||||
|
||||
return {
|
||||
pos,
|
||||
(overloaded {
|
||||
[&] (Op::implies) { return applyBinary<ExprOpImpl>(pos); },
|
||||
[&] (Op::or_) { return applyBinary<ExprOpOr>(pos); },
|
||||
[&] (Op::and_) { return applyBinary<ExprOpAnd>(pos); },
|
||||
[&] (Op::equals) { return applyBinary<ExprOpEq>(pos); },
|
||||
[&] (Op::not_equals) { return applyBinary<ExprOpNEq>(pos); },
|
||||
[&] (Op::less) { return order(pos, true, state); },
|
||||
[&] (Op::greater_eq) { return not_(order(pos, true, state)); },
|
||||
[&] (Op::greater) { return order(pos, false, state); },
|
||||
[&] (Op::less_eq) { return not_(order(pos, false, state)); },
|
||||
[&] (Op::update) { return applyBinary<ExprOpUpdate>(pos); },
|
||||
[&] (Op::not_) { return applyUnary<ExprOpNot>(); },
|
||||
[&] (Op::plus) { return concatStrings(pos); },
|
||||
[&] (Op::minus) { return call(pos, state.s.sub); },
|
||||
[&] (Op::mul) { return call(pos, state.s.mul); },
|
||||
[&] (Op::div) { return call(pos, state.s.div); },
|
||||
[&] (Op::concat) { return applyBinary<ExprOpConcatLists>(pos); },
|
||||
[&] (has_attr & a) { return applyUnary<ExprOpHasAttr>(std::move(a.path)); },
|
||||
[&] (Op::unary_minus) { return negate(pos, state); },
|
||||
[&] (Op::pipe_right) { return pipe(pos, state, true); },
|
||||
[&] (Op::pipe_left) { return pipe(pos, state); },
|
||||
})(op)
|
||||
};
|
||||
}
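// Rough summary of the lowering above, assuming state.s.sub, state.s.mul,
// state.s.div and state.s.lessThan refer to the usual __sub, __mul, __div and
// __lessThan builtins:
//   a - b   ~>  __sub a b          a < b    ~>  __lessThan a b
//   -a      ~>  __sub 0 a          a > b    ~>  __lessThan b a
//   a * b   ~>  __mul a b          a >= b   ~>  ! (__lessThan a b)
//   a / b   ~>  __div a b          a <= b   ~>  ! (__lessThan b a)
// `+` instead builds an ExprConcatStrings node, which decides between numeric
// addition and string/path concatenation at evaluation time.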
|
||||
|
||||
// always_inline is needed, otherwise pushOp slows down considerably
|
||||
[[noreturn, gnu::always_inline]]
|
||||
static void badOperator(PosIdx pos, State & state)
|
||||
{
|
||||
throw ParseError({
|
||||
.msg = HintFmt("syntax error, unexpected operator"),
|
||||
.pos = state.positions[pos]
|
||||
});
|
||||
}
|
||||
|
||||
template<typename Expr, typename... Args>
|
||||
Expr & pushExpr(PosIdx pos, Args && ... args)
|
||||
{
|
||||
auto p = std::make_unique<Expr>(std::forward<Args>(args)...);
|
||||
auto & result = *p;
|
||||
exprs.emplace_back(pos, std::move(p));
|
||||
return result;
|
||||
}
|
||||
};
|
||||
|
||||
struct SubexprState {
|
||||
private:
|
||||
ExprState * up;
|
||||
|
||||
public:
|
||||
explicit SubexprState(ExprState & up, auto &...) : up(&up) {}
|
||||
operator ExprState &() { return *up; }
|
||||
ExprState * operator->() { return up; }
|
||||
};
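// SubexprState is the glue between the nested parse states below (LambdaState,
// FormalsState, AttrState, ...) and the enclosing expression parser: the
// constructor captures the parent ExprState, the conversion operator lets a
// sub-state stand in wherever an ExprState & is needed, and operator-> is what
// makes calls such as s->popExprOnly() in the BuildAST specializations operate
// on the parent's expression stack.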
|
||||
|
||||
|
||||
|
||||
template<typename Rule>
|
||||
struct BuildAST : grammar::nothing<Rule> {};
|
||||
|
||||
struct LambdaState : SubexprState {
|
||||
using SubexprState::SubexprState;
|
||||
|
||||
Symbol arg;
|
||||
std::unique_ptr<Formals> formals;
|
||||
};
|
||||
|
||||
struct FormalsState : SubexprState {
|
||||
using SubexprState::SubexprState;
|
||||
|
||||
Formals formals{};
|
||||
Formal formal{};
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::formal::name> {
|
||||
static void apply(const auto & in, FormalsState & s, State & ps) {
|
||||
s.formal = {
|
||||
.pos = ps.at(in),
|
||||
.name = ps.symbols.create(in.string_view()),
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::formal> {
|
||||
static void apply0(FormalsState & s, State &) {
|
||||
s.formals.formals.emplace_back(std::move(s.formal));
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::formal::default_value> {
|
||||
static void apply0(FormalsState & s, State & ps) {
|
||||
s.formal.def = s->popExprOnly();
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::formals::ellipsis> {
|
||||
static void apply0(FormalsState & s, State &) {
|
||||
s.formals.ellipsis = true;
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::formals> : change_head<FormalsState> {
|
||||
static void success0(FormalsState & f, LambdaState & s, State &) {
|
||||
s.formals = std::make_unique<Formals>(std::move(f.formals));
|
||||
}
|
||||
};
|
||||
|
||||
struct AttrState : SubexprState {
|
||||
using SubexprState::SubexprState;
|
||||
|
||||
std::vector<AttrName> attrs;
|
||||
|
||||
template <typename T>
|
||||
void pushAttr(T && attr, PosIdx) { attrs.emplace_back(std::forward<T>(attr)); }
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::attr::simple> {
|
||||
static void apply(const auto & in, auto & s, State & ps) {
|
||||
s.pushAttr(ps.symbols.create(in.string_view()), ps.at(in));
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::attr::string> {
|
||||
static void apply(const auto & in, auto & s, State & ps) {
|
||||
auto e = s->popExprOnly();
|
||||
if (auto str = dynamic_cast<ExprString *>(e.get()))
|
||||
s.pushAttr(ps.symbols.create(str->s), ps.at(in));
|
||||
else
|
||||
s.pushAttr(std::move(e), ps.at(in));
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::attr::expr> : BuildAST<grammar::attr::string> {};
|
||||
|
||||
struct BindingsState : SubexprState {
|
||||
using SubexprState::SubexprState;
|
||||
|
||||
ExprAttrs attrs;
|
||||
AttrPath path;
|
||||
std::unique_ptr<Expr> value;
|
||||
};
|
||||
|
||||
struct InheritState : SubexprState {
|
||||
using SubexprState::SubexprState;
|
||||
|
||||
std::vector<std::pair<AttrName, PosIdx>> attrs;
|
||||
std::unique_ptr<Expr> from;
|
||||
PosIdx fromPos;
|
||||
|
||||
template <typename T>
|
||||
void pushAttr(T && attr, PosIdx pos) { attrs.emplace_back(std::forward<T>(attr), pos); }
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::inherit::from> {
|
||||
static void apply(const auto & in, InheritState & s, State & ps) {
|
||||
s.from = s->popExprOnly();
|
||||
s.fromPos = ps.at(in);
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::inherit> : change_head<InheritState> {
|
||||
static void success0(InheritState & s, BindingsState & b, State & ps) {
|
||||
auto & attrs = b.attrs.attrs;
|
||||
// TODO this should not reuse generic attrpath rules.
|
||||
for (auto & [i, iPos] : s.attrs) {
|
||||
if (i.symbol)
|
||||
continue;
|
||||
if (auto str = dynamic_cast<ExprString *>(i.expr.get()))
|
||||
i = AttrName(ps.symbols.create(str->s));
|
||||
else {
|
||||
throw ParseError({
|
||||
.msg = HintFmt("dynamic attributes not allowed in inherit"),
|
||||
.pos = ps.positions[iPos]
|
||||
});
|
||||
}
|
||||
}
|
||||
if (s.from != nullptr) {
|
||||
if (!b.attrs.inheritFromExprs)
|
||||
b.attrs.inheritFromExprs = std::make_unique<std::vector<ref<Expr>>>();
|
||||
auto fromExpr = ref<Expr>(std::move(s.from));
|
||||
b.attrs.inheritFromExprs->push_back(fromExpr);
|
||||
for (auto & [i, iPos] : s.attrs) {
|
||||
if (attrs.find(i.symbol) != attrs.end())
|
||||
ps.dupAttr(i.symbol, iPos, attrs[i.symbol].pos);
|
||||
auto inheritFrom = std::make_unique<ExprInheritFrom>(
|
||||
s.fromPos,
|
||||
b.attrs.inheritFromExprs->size() - 1,
|
||||
fromExpr
|
||||
);
|
||||
attrs.emplace(
|
||||
i.symbol,
|
||||
ExprAttrs::AttrDef(
|
||||
std::make_unique<ExprSelect>(iPos, std::move(inheritFrom), i.symbol),
|
||||
iPos,
|
||||
ExprAttrs::AttrDef::Kind::InheritedFrom));
|
||||
}
|
||||
} else {
|
||||
for (auto & [i, iPos] : s.attrs) {
|
||||
if (attrs.find(i.symbol) != attrs.end())
|
||||
ps.dupAttr(i.symbol, iPos, attrs[i.symbol].pos);
|
||||
attrs.emplace(
|
||||
i.symbol,
|
||||
ExprAttrs::AttrDef(
|
||||
std::make_unique<ExprVar>(iPos, i.symbol),
|
||||
iPos,
|
||||
ExprAttrs::AttrDef::Kind::Inherited));
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::binding::path> : change_head<AttrState> {
|
||||
static void success0(AttrState & a, BindingsState & s, State & ps) {
|
||||
s.path = std::move(a.attrs);
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::binding::value> {
|
||||
static void apply0(BindingsState & s, State & ps) {
|
||||
s.value = s->popExprOnly();
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::binding> {
|
||||
static void apply(const auto & in, BindingsState & s, State & ps) {
|
||||
ps.addAttr(&s.attrs, std::move(s.path), std::move(s.value), ps.at(in));
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::expr::id> {
|
||||
static void apply(const auto & in, ExprState & s, State & ps) {
|
||||
if (in.string_view() == "__curPos")
|
||||
s.pushExpr<ExprPos>(ps.at(in), ps.at(in));
|
||||
else
|
||||
s.pushExpr<ExprVar>(ps.at(in), ps.at(in), ps.symbols.create(in.string_view()));
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::expr::int_> {
|
||||
static void apply(const auto & in, ExprState & s, State & ps) {
|
||||
int64_t v;
|
||||
if (std::from_chars(in.begin(), in.end(), v).ec != std::errc{}) {
|
||||
throw ParseError({
|
||||
.msg = HintFmt("invalid integer '%1%'", in.string_view()),
|
||||
.pos = ps.positions[ps.at(in)],
|
||||
});
|
||||
}
|
||||
s.pushExpr<ExprInt>(noPos, v);
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::expr::float_> {
|
||||
static void apply(const auto & in, ExprState & s, State & ps) {
|
||||
// copy the input into a temporary string so we can call stod.
|
||||
// can't use std::from_chars for floats because libc++ (and thus Darwin) does not have it,
|
||||
// and floats are not performance-sensitive anyway. if they were you'd
|
||||
// be in much bigger trouble than this.
|
||||
//
|
||||
// we also get to do a locale save/restore dance because stod is locale-aware and
|
||||
// something (a plugin?) may have called setlocale or uselocale.
|
||||
static struct locale_hack {
|
||||
locale_t posix;
|
||||
locale_hack(): posix(newlocale(LC_ALL_MASK, "POSIX", 0))
|
||||
{
|
||||
if (posix == 0)
|
||||
throw SysError("could not get POSIX locale");
|
||||
}
|
||||
} locale;
|
||||
|
||||
auto tmp = in.string();
|
||||
double v = [&] {
|
||||
auto oldLocale = uselocale(locale.posix);
|
||||
Finally resetLocale([=] { uselocale(oldLocale); });
|
||||
try {
|
||||
return std::stod(tmp);
|
||||
} catch (...) {
|
||||
throw ParseError({
|
||||
.msg = HintFmt("invalid float '%1%'", in.string_view()),
|
||||
.pos = ps.positions[ps.at(in)],
|
||||
});
|
||||
}
|
||||
}();
|
||||
s.pushExpr<ExprFloat>(noPos, v);
|
||||
}
|
||||
};
|
||||
|
||||
struct StringState : SubexprState {
|
||||
using SubexprState::SubexprState;
|
||||
|
||||
std::string currentLiteral;
|
||||
PosIdx currentPos;
|
||||
std::vector<std::pair<nix::PosIdx, std::unique_ptr<Expr>>> parts;
|
||||
|
||||
void append(PosIdx pos, std::string_view s)
|
||||
{
|
||||
if (currentLiteral.empty())
|
||||
currentPos = pos;
|
||||
currentLiteral += s;
|
||||
}
|
||||
|
||||
// FIXME this truncates strings on NUL for compat with the old parser. ideally
|
||||
// we should use the decomposition the grammar gives us instead of iterating over
|
||||
// the entire string again.
|
||||
static void unescapeStr(std::string & str)
|
||||
{
|
||||
char * s = str.data();
|
||||
char * t = s;
|
||||
char c;
|
||||
while ((c = *s++)) {
|
||||
if (c == '\\') {
|
||||
c = *s++;
|
||||
if (c == 'n') *t = '\n';
|
||||
else if (c == 'r') *t = '\r';
|
||||
else if (c == 't') *t = '\t';
|
||||
else *t = c;
|
||||
}
|
||||
else if (c == '\r') {
|
||||
/* Normalise CR and CR/LF into LF. */
|
||||
*t = '\n';
|
||||
if (*s == '\n') s++; /* cr/lf */
|
||||
}
|
||||
else *t = c;
|
||||
t++;
|
||||
}
|
||||
str.resize(t - str.data());
|
||||
}
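// Illustrative behaviour of unescapeStr: a literal backslash followed by 'n',
// 'r' or 't' collapses to the corresponding control character, any other
// escaped character is kept as-is, lone CR and CR/LF are normalised to LF,
// and because the scan stops at the first NUL byte, anything after an
// embedded NUL is dropped (the old-parser compatibility quirk noted above).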
|
||||
|
||||
void endLiteral()
|
||||
{
|
||||
if (!currentLiteral.empty()) {
|
||||
unescapeStr(currentLiteral);
|
||||
parts.emplace_back(currentPos, std::make_unique<ExprString>(std::move(currentLiteral)));
|
||||
}
|
||||
}
|
||||
|
||||
std::unique_ptr<Expr> finish()
|
||||
{
|
||||
if (parts.empty()) {
|
||||
unescapeStr(currentLiteral);
|
||||
return std::make_unique<ExprString>(std::move(currentLiteral));
|
||||
} else {
|
||||
endLiteral();
|
||||
auto pos = parts[0].first;
|
||||
return std::make_unique<ExprConcatStrings>(pos, true, std::move(parts));
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
template<typename... Content> struct BuildAST<grammar::string::literal<Content...>> {
|
||||
static void apply(const auto & in, StringState & s, State & ps) {
|
||||
s.append(ps.at(in), in.string_view());
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::string::cr_lf> {
|
||||
static void apply(const auto & in, StringState & s, State & ps) {
|
||||
s.append(ps.at(in), in.string_view()); // FIXME compat with old parser
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::string::interpolation> {
|
||||
static void apply(const auto & in, StringState & s, State & ps) {
|
||||
s.endLiteral();
|
||||
s.parts.emplace_back(ps.at(in), s->popExprOnly());
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::string::escape> {
|
||||
static void apply(const auto & in, StringState & s, State & ps) {
|
||||
s.append(ps.at(in), "\\"); // FIXME compat with old parser
|
||||
s.append(ps.at(in), in.string_view());
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::string> : change_head<StringState> {
|
||||
static void success0(StringState & s, ExprState & e, State &) {
|
||||
e.exprs.emplace_back(noPos, s.finish());
|
||||
}
|
||||
};
|
||||
|
||||
struct IndStringState : SubexprState {
|
||||
using SubexprState::SubexprState;
|
||||
|
||||
std::vector<std::pair<PosIdx, std::variant<std::unique_ptr<Expr>, StringToken>>> parts;
|
||||
};
|
||||
|
||||
template<bool Indented, typename... Content>
|
||||
struct BuildAST<grammar::ind_string::literal<Indented, Content...>> {
|
||||
static void apply(const auto & in, IndStringState & s, State & ps) {
|
||||
s.parts.emplace_back(ps.at(in), StringToken{in.string_view(), Indented});
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::ind_string::interpolation> {
|
||||
static void apply(const auto & in, IndStringState & s, State & ps) {
|
||||
s.parts.emplace_back(ps.at(in), s->popExprOnly());
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::ind_string::escape> {
|
||||
static void apply(const auto & in, IndStringState & s, State & ps) {
|
||||
switch (*in.begin()) {
|
||||
case 'n': s.parts.emplace_back(ps.at(in), StringToken{"\n"}); break;
|
||||
case 'r': s.parts.emplace_back(ps.at(in), StringToken{"\r"}); break;
|
||||
case 't': s.parts.emplace_back(ps.at(in), StringToken{"\t"}); break;
|
||||
default: s.parts.emplace_back(ps.at(in), StringToken{in.string_view()}); break;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::ind_string> : change_head<IndStringState> {
|
||||
static void success(const auto & in, IndStringState & s, ExprState & e, State & ps) {
|
||||
e.exprs.emplace_back(noPos, ps.stripIndentation(ps.at(in), std::move(s.parts)));
|
||||
}
|
||||
};
|
||||
|
||||
template<typename... Content> struct BuildAST<grammar::path::literal<Content...>> {
|
||||
static void apply(const auto & in, StringState & s, State & ps) {
|
||||
s.append(ps.at(in), in.string_view());
|
||||
s.endLiteral();
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::path::interpolation> : BuildAST<grammar::string::interpolation> {};
|
||||
|
||||
template<> struct BuildAST<grammar::path::anchor> {
|
||||
static void apply(const auto & in, StringState & s, State & ps) {
|
||||
Path path(absPath(in.string(), ps.basePath.path.abs()));
|
||||
/* add back in the trailing '/' to the first segment */
|
||||
if (in.string_view().ends_with('/') && in.size() > 1)
|
||||
path += "/";
|
||||
s.parts.emplace_back(ps.at(in), new ExprPath(std::move(path)));
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::path::home_anchor> {
|
||||
static void apply(const auto & in, StringState & s, State & ps) {
|
||||
if (evalSettings.pureEval)
|
||||
throw Error("the path '%s' can not be resolved in pure mode", in.string_view());
|
||||
Path path(getHome() + in.string_view().substr(1));
|
||||
s.parts.emplace_back(ps.at(in), new ExprPath(std::move(path)));
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::path::searched_path> {
|
||||
static void apply(const auto & in, StringState & s, State & ps) {
|
||||
std::vector<std::unique_ptr<Expr>> args{2};
|
||||
args[0] = std::make_unique<ExprVar>(ps.s.nixPath);
|
||||
args[1] = std::make_unique<ExprString>(in.string());
|
||||
s.parts.emplace_back(
|
||||
ps.at(in),
|
||||
std::make_unique<ExprCall>(
|
||||
ps.at(in),
|
||||
std::make_unique<ExprVar>(ps.s.findFile),
|
||||
std::move(args)));
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::path> : change_head<StringState> {
|
||||
template<typename E>
|
||||
static void check_slash(PosIdx end, StringState & s, State & ps) {
|
||||
auto e = dynamic_cast<E *>(s.parts.back().second.get());
|
||||
if (!e || !e->s.ends_with('/'))
|
||||
return;
|
||||
if (s.parts.size() > 1 || e->s != "/")
|
||||
throw ParseError({
|
||||
.msg = HintFmt("path has a trailing slash"),
|
||||
.pos = ps.positions[end],
|
||||
});
|
||||
}
|
||||
|
||||
static void success(const auto & in, StringState & s, ExprState & e, State & ps) {
|
||||
s.endLiteral();
|
||||
check_slash<ExprPath>(ps.atEnd(in), s, ps);
|
||||
check_slash<ExprString>(ps.atEnd(in), s, ps);
|
||||
if (s.parts.size() == 1) {
|
||||
e.exprs.emplace_back(noPos, std::move(s.parts.back().second));
|
||||
} else {
|
||||
e.pushExpr<ExprConcatStrings>(ps.at(in), ps.at(in), false, std::move(s.parts));
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// strings and paths are handled fully by the grammar-level rule for now
|
||||
template<> struct BuildAST<grammar::expr::string> : p::maybe_nothing {};
|
||||
template<> struct BuildAST<grammar::expr::ind_string> : p::maybe_nothing {};
|
||||
template<> struct BuildAST<grammar::expr::path> : p::maybe_nothing {};
|
||||
|
||||
template<> struct BuildAST<grammar::expr::uri> {
|
||||
static void apply(const auto & in, ExprState & s, State & ps) {
|
||||
bool URLLiterals = ps.featureSettings.isEnabled(Dep::UrlLiterals);
|
||||
if (!URLLiterals)
|
||||
throw ParseError({
|
||||
.msg = HintFmt("URL literals are deprecated, allow using them with --extra-deprecated-features=url-literals"),
|
||||
.pos = ps.positions[ps.at(in)]
|
||||
});
|
||||
s.pushExpr<ExprString>(ps.at(in), in.string());
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::expr::ancient_let> : change_head<BindingsState> {
|
||||
static void success(const auto & in, BindingsState & b, ExprState & s, State & ps) {
|
||||
// Added 2024-09-18. Turn into an error at some point in the future.
|
||||
// See the documentation on deprecated features for more details.
|
||||
if (!ps.featureSettings.isEnabled(Dep::AncientLet))
|
||||
warn(
|
||||
"%s found at %s. This feature is deprecated and will be removed in the future. Use %s to silence this warning.",
|
||||
"let {",
|
||||
ps.positions[ps.at(in)],
|
||||
"--extra-deprecated-features ancient-let"
|
||||
);
|
||||
|
||||
b.attrs.pos = ps.at(in);
|
||||
b.attrs.recursive = true;
|
||||
s.pushExpr<ExprSelect>(b.attrs.pos, b.attrs.pos, std::make_unique<ExprAttrs>(std::move(b.attrs)), ps.s.body);
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::expr::rec_set> : change_head<BindingsState> {
|
||||
static void success(const auto & in, BindingsState & b, ExprState & s, State & ps) {
|
||||
// Before inserting new attrs, check for __override and throw an error
|
||||
// (the error will initially be a warning to ease migration)
|
||||
if (!featureSettings.isEnabled(Dep::RecSetOverrides) && b.attrs.attrs.contains(ps.s.overrides)) {
|
||||
ps.overridesFound(ps.at(in));
|
||||
}
|
||||
|
||||
b.attrs.pos = ps.at(in);
|
||||
b.attrs.recursive = true;
|
||||
s.pushExpr<ExprAttrs>(b.attrs.pos, std::move(b.attrs));
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::expr::set> : change_head<BindingsState> {
|
||||
static void success(const auto & in, BindingsState & b, ExprState & s, State & ps) {
|
||||
b.attrs.pos = ps.at(in);
|
||||
s.pushExpr<ExprAttrs>(b.attrs.pos, std::move(b.attrs));
|
||||
}
|
||||
};
|
||||
|
||||
using ListState = std::vector<std::unique_ptr<Expr>>;
|
||||
|
||||
template<> struct BuildAST<grammar::expr::list> : change_head<ListState> {
|
||||
static void success(const auto & in, ListState & ls, ExprState & s, State & ps) {
|
||||
auto e = std::make_unique<ExprList>();
|
||||
e->elems = std::move(ls);
|
||||
s.exprs.emplace_back(ps.at(in), std::move(e));
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::expr::list::entry> : change_head<ExprState> {
|
||||
static void success0(ExprState & e, ListState & s, State & ps) {
|
||||
s.emplace_back(e.finish(ps).second);
|
||||
}
|
||||
};
|
||||
|
||||
struct SelectState : SubexprState {
|
||||
using SubexprState::SubexprState;
|
||||
|
||||
PosIdx pos;
|
||||
ExprSelect * e = nullptr;
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::expr::select::head> {
|
||||
static void apply(const auto & in, SelectState & s, State & ps) {
|
||||
s.pos = ps.at(in);
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::expr::select::attr> : change_head<AttrState> {
|
||||
static void success0(AttrState & a, SelectState & s, State &) {
|
||||
s.e = &s->pushExpr<ExprSelect>(s.pos, s.pos, s->popExprOnly(), std::move(a.attrs), nullptr);
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::expr::select::attr_or> {
|
||||
static void apply0(SelectState & s, State &) {
|
||||
s.e->def = s->popExprOnly();
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::expr::select::as_app_or> {
|
||||
static void apply(const auto & in, SelectState & s, State & ps) {
|
||||
std::vector<std::unique_ptr<Expr>> args(1);
|
||||
args[0] = std::make_unique<ExprVar>(ps.at(in), ps.s.or_);
|
||||
s->pushExpr<ExprCall>(s.pos, s.pos, s->popExprOnly(), std::move(args));
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::expr::select> : change_head<SelectState> {
|
||||
static void success0(const auto &...) {}
|
||||
};
|
||||
|
||||
struct AppState : SubexprState {
|
||||
using SubexprState::SubexprState;
|
||||
|
||||
PosIdx pos;
|
||||
ExprCall * e = nullptr;
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::expr::app::select_or_fn> {
|
||||
static void apply(const auto & in, AppState & s, State & ps) {
|
||||
s.pos = ps.at(in);
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::expr::app::first_arg> {
|
||||
static void apply(auto & in, AppState & s, State & ps) {
|
||||
auto arg = s->popExprOnly(), fn = s->popExprOnly();
|
||||
if ((s.e = dynamic_cast<ExprCall *>(fn.get()))) {
|
||||
// TODO remove.
|
||||
// AST compat with old parser, semantics are the same.
|
||||
// this can happen on occasions such as `<p> <p>` or `a or b or`,
|
||||
// neither of which are super worth optimizing.
|
||||
s.e->args.push_back(std::move(arg));
|
||||
s->exprs.emplace_back(noPos, std::move(fn));
|
||||
} else {
|
||||
std::vector<std::unique_ptr<Expr>> args{1};
|
||||
args[0] = std::move(arg);
|
||||
s.e = &s->pushExpr<ExprCall>(s.pos, s.pos, std::move(fn), std::move(args));
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::expr::app::another_arg> {
|
||||
static void apply0(AppState & s, State & ps) {
|
||||
s.e->args.push_back(s->popExprOnly());
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::expr::app> : change_head<AppState> {
|
||||
static void success0(const auto &...) {}
|
||||
};
|
||||
|
||||
template<typename Op> struct BuildAST<grammar::expr::operator_<Op>> {
|
||||
static void apply(const auto & in, ExprState & s, State & ps) {
|
||||
s.pushOp(ps.at(in), Op{}, ps);
|
||||
}
|
||||
};
|
||||
template<> struct BuildAST<grammar::expr::operator_<grammar::op::has_attr>> : change_head<AttrState> {
|
||||
static void success(const auto & in, AttrState & a, ExprState & s, State & ps) {
|
||||
s.pushOp(ps.at(in), ExprState::has_attr{{}, std::move(a.attrs)}, ps);
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::expr::lambda::arg> {
|
||||
static void apply(const auto & in, LambdaState & s, State & ps) {
|
||||
s.arg = ps.symbols.create(in.string_view());
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::expr::lambda> : change_head<LambdaState> {
|
||||
static void success(const auto & in, LambdaState & l, ExprState & s, State & ps) {
|
||||
if (l.formals)
|
||||
l.formals = ps.validateFormals(std::move(l.formals), ps.at(in), l.arg);
|
||||
s.pushExpr<ExprLambda>(ps.at(in), ps.at(in), l.arg, std::move(l.formals), l->popExprOnly());
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::expr::assert_> {
|
||||
static void apply(const auto & in, ExprState & s, State & ps) {
|
||||
auto body = s.popExprOnly(), cond = s.popExprOnly();
|
||||
s.pushExpr<ExprAssert>(ps.at(in), ps.at(in), std::move(cond), std::move(body));
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::expr::with> {
|
||||
static void apply(const auto & in, ExprState & s, State & ps) {
|
||||
auto body = s.popExprOnly(), scope = s.popExprOnly();
|
||||
s.pushExpr<ExprWith>(ps.at(in), ps.at(in), std::move(scope), std::move(body));
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::expr::let> : change_head<BindingsState> {
|
||||
static void success(const auto & in, BindingsState & b, ExprState & s, State & ps) {
|
||||
if (!b.attrs.dynamicAttrs.empty())
|
||||
throw ParseError({
|
||||
.msg = HintFmt("dynamic attributes not allowed in let"),
|
||||
.pos = ps.positions[ps.at(in)]
|
||||
});
|
||||
|
||||
s.pushExpr<ExprLet>(ps.at(in), std::make_unique<ExprAttrs>(std::move(b.attrs)), b->popExprOnly());
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::expr::if_> {
|
||||
static void apply(const auto & in, ExprState & s, State & ps) {
|
||||
auto else_ = s.popExprOnly(), then = s.popExprOnly(), cond = s.popExprOnly();
|
||||
s.pushExpr<ExprIf>(ps.at(in), ps.at(in), std::move(cond), std::move(then), std::move(else_));
|
||||
}
|
||||
};
|
||||
|
||||
template<> struct BuildAST<grammar::expr> : change_head<ExprState> {
|
||||
static void success0(ExprState & inner, ExprState & outer, State & ps) {
|
||||
outer.exprs.push_back(inner.finish(ps));
|
||||
}
|
||||
};
|
||||
|
||||
}
|
||||
}
|
||||
// Linter complains that this is a "suspicious include of file with '.cc' extension".
|
||||
// While that is correct and generally not great, it is one of the less bad options to pick
|
||||
// in terms of diff noise.
|
||||
// NOLINTNEXTLINE(bugprone-suspicious-include)
|
||||
#include "parser-impl1.inc.cc"
|
||||
|
||||
namespace nix {
|
||||
|
||||
|
@ -884,7 +38,6 @@ Expr * EvalState::parse(
|
|||
exprSymbols,
|
||||
featureSettings,
|
||||
};
|
||||
parser::ExprState x;
|
||||
|
||||
assert(length >= 2);
|
||||
assert(text[length - 1] == 0);
|
||||
|
@ -893,7 +46,12 @@ Expr * EvalState::parse(
|
|||
|
||||
p::string_input<p::tracking_mode::lazy> inp{std::string_view{text, length}, "input"};
|
||||
try {
|
||||
p::parse<parser::grammar::root, parser::BuildAST, parser::Control>(inp, x, s);
|
||||
parser::v1::ExprState x;
|
||||
p::parse<parser::grammar::v1::root, parser::v1::BuildAST, parser::v1::Control>(inp, x, s);
|
||||
|
||||
auto [_pos, result] = x.finish(s);
|
||||
result->bindVars(*this, staticEnv);
|
||||
return result.release();
|
||||
} catch (p::parse_error & e) {
|
||||
auto pos = e.positions().back();
|
||||
throw ParseError({
|
||||
|
@ -901,10 +59,6 @@ Expr * EvalState::parse(
|
|||
.pos = positions[s.positions.add(s.origin, pos.byte)]
|
||||
});
|
||||
}
|
||||
|
||||
auto [_pos, result] = x.finish(s);
|
||||
result->bindVars(*this, staticEnv);
|
||||
return result.release();
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -6,11 +6,21 @@
|
|||
|
||||
namespace nix::parser {
|
||||
|
||||
struct StringToken
|
||||
{
|
||||
std::string_view s;
|
||||
bool hasIndentation;
|
||||
operator std::string_view() const { return s; }
|
||||
struct IndStringLine {
|
||||
// String containing only the leading whitespace of the line. May be empty.
|
||||
std::string_view indentation;
|
||||
// Position of the line start (before the indentation)
|
||||
PosIdx pos;
|
||||
|
||||
// Whether the line contains anything besides indentation and line break
|
||||
bool hasContent = false;
|
||||
|
||||
std::vector<
|
||||
std::pair<
|
||||
PosIdx,
|
||||
std::variant<std::unique_ptr<Expr>, std::string_view>
|
||||
>
|
||||
> parts = {};
|
||||
};
|
||||
|
||||
struct State
|
||||
|
@ -27,8 +37,7 @@ struct State
|
|||
void overridesFound(const PosIdx pos);
|
||||
void addAttr(ExprAttrs * attrs, AttrPath && attrPath, std::unique_ptr<Expr> e, const PosIdx pos);
|
||||
std::unique_ptr<Formals> validateFormals(std::unique_ptr<Formals> formals, PosIdx pos = noPos, Symbol arg = {});
|
||||
std::unique_ptr<Expr> stripIndentation(const PosIdx pos,
|
||||
std::vector<std::pair<PosIdx, std::variant<std::unique_ptr<Expr>, StringToken>>> && es);
|
||||
std::unique_ptr<Expr> stripIndentation(const PosIdx pos, std::vector<IndStringLine> && lines);
|
||||
|
||||
// lazy positioning means we don't get byte offsets directly, in.position() would work
|
||||
// but also requires line and column (which is expensive)
|
||||
|
@ -182,98 +191,87 @@ inline std::unique_ptr<Formals> State::validateFormals(std::unique_ptr<Formals>
|
|||
return formals;
|
||||
}
|
||||
|
||||
inline std::unique_ptr<Expr> State::stripIndentation(const PosIdx pos,
|
||||
std::vector<std::pair<PosIdx, std::variant<std::unique_ptr<Expr>, StringToken>>> && es)
|
||||
inline std::unique_ptr<Expr> State::stripIndentation(
|
||||
const PosIdx pos,
|
||||
std::vector<IndStringLine> && lines)
|
||||
{
|
||||
if (es.empty()) return std::make_unique<ExprString>("");
|
||||
/* If the only line is whitespace-only, directly return empty string.
|
||||
* The rest of the code relies on the final string not being empty.
|
||||
*/
|
||||
if (lines.size() == 1 && lines.front().parts.empty()) {
|
||||
return std::make_unique<ExprString>("");
|
||||
}
|
||||
|
||||
/* Figure out the minimum indentation. Note that by design
|
||||
whitespace-only final lines are not taken into account. (So
|
||||
the " " in "\n ''" is ignored, but the " " in "\n foo''" is.) */
|
||||
bool atStartOfLine = true; /* = seen only whitespace in the current line */
|
||||
/* If the last line only contains whitespace, trim it to not cause excessive whitespace.
|
||||
* (Other whitespace-only lines get stripped only of the common indentation, and excess
|
||||
* whitespace becomes part of the string.)
|
||||
*/
|
||||
if (lines.back().parts.empty()) {
|
||||
lines.back().indentation = {};
|
||||
}
|
||||
|
||||
/* Figure out the minimum indentation. Note that by design
|
||||
whitespace-only lines are not taken into account. */
|
||||
size_t minIndent = 1000000;
|
||||
size_t curIndent = 0;
|
||||
for (auto & [i_pos, i] : es) {
|
||||
auto * str = std::get_if<StringToken>(&i);
|
||||
if (!str || !str->hasIndentation) {
|
||||
/* Anti-quotations and escaped characters end the current start-of-line whitespace. */
|
||||
if (atStartOfLine) {
|
||||
atStartOfLine = false;
|
||||
if (curIndent < minIndent) minIndent = curIndent;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
for (size_t j = 0; j < str->s.size(); ++j) {
|
||||
if (atStartOfLine) {
|
||||
if (str->s[j] == ' ')
|
||||
curIndent++;
|
||||
else if (str->s[j] == '\n') {
|
||||
/* Empty line, doesn't influence minimum
|
||||
indentation. */
|
||||
curIndent = 0;
|
||||
} else {
|
||||
atStartOfLine = false;
|
||||
if (curIndent < minIndent) minIndent = curIndent;
|
||||
}
|
||||
} else if (str->s[j] == '\n') {
|
||||
atStartOfLine = true;
|
||||
curIndent = 0;
|
||||
}
|
||||
for (auto & line : lines) {
|
||||
if (line.hasContent) {
|
||||
minIndent = std::min(minIndent, line.indentation.size());
|
||||
}
|
||||
}
|
||||
|
||||
/* Strip spaces from each line. */
|
||||
std::vector<std::pair<PosIdx, std::unique_ptr<Expr>>> es2;
|
||||
atStartOfLine = true;
|
||||
size_t curDropped = 0;
|
||||
size_t n = es.size();
|
||||
auto i = es.begin();
|
||||
const auto trimExpr = [&] (std::unique_ptr<Expr> e) {
|
||||
atStartOfLine = false;
|
||||
curDropped = 0;
|
||||
es2.emplace_back(i->first, std::move(e));
|
||||
};
|
||||
const auto trimString = [&] (const StringToken t) {
|
||||
std::string s2;
|
||||
for (size_t j = 0; j < t.s.size(); ++j) {
|
||||
if (atStartOfLine) {
|
||||
if (t.s[j] == ' ') {
|
||||
if (curDropped++ >= minIndent)
|
||||
s2 += t.s[j];
|
||||
}
|
||||
else if (t.s[j] == '\n') {
|
||||
curDropped = 0;
|
||||
s2 += t.s[j];
|
||||
} else {
|
||||
atStartOfLine = false;
|
||||
curDropped = 0;
|
||||
s2 += t.s[j];
|
||||
}
|
||||
} else {
|
||||
s2 += t.s[j];
|
||||
if (t.s[j] == '\n') atStartOfLine = true;
|
||||
}
|
||||
}
|
||||
|
||||
/* Remove the last line if it is empty and consists only of
|
||||
spaces. */
|
||||
if (n == 1) {
|
||||
std::string::size_type p = s2.find_last_of('\n');
|
||||
if (p != std::string::npos && s2.find_first_not_of(' ', p + 1) == std::string::npos)
|
||||
s2 = std::string(s2, 0, p + 1);
|
||||
}
|
||||
|
||||
es2.emplace_back(i->first, std::make_unique<ExprString>(std::move(s2)));
|
||||
};
|
||||
for (; i != es.end(); ++i, --n) {
|
||||
std::visit(overloaded { trimExpr, trimString }, std::move(i->second));
|
||||
for (auto & line : lines) {
|
||||
line.indentation.remove_prefix(std::min(minIndent, line.indentation.size()));
|
||||
}
|
||||
|
||||
/* If this is a single string, then don't do a concatenation. */
|
||||
if (es2.size() == 1 && dynamic_cast<ExprString *>(es2[0].second.get())) {
|
||||
return std::move(es2[0].second);
|
||||
/* Concat the parts together again */
|
||||
|
||||
std::vector<std::pair<PosIdx, std::unique_ptr<Expr>>> parts;
|
||||
/* Accumulator for merging intermediates */
|
||||
PosIdx merged_pos;
|
||||
std::string merged = "";
|
||||
|
||||
auto push_merged = [&] (PosIdx i_pos, std::string_view str) {
|
||||
if (merged.empty()) {
|
||||
merged_pos = i_pos;
|
||||
}
|
||||
merged += str;
|
||||
};
|
||||
|
||||
auto flush_merged = [&] () {
|
||||
if (!merged.empty()) {
|
||||
parts.emplace_back(merged_pos, std::make_unique<ExprString>(std::string(merged)));
|
||||
merged.clear();
|
||||
}
|
||||
};
|
||||
|
||||
for (auto && [li, line] : enumerate(lines)) {
|
||||
push_merged(line.pos, line.indentation);
|
||||
|
||||
for (auto & val : line.parts) {
|
||||
auto &[i_pos, item] = val;
|
||||
|
||||
std::visit(overloaded{
|
||||
[&](std::string_view str) {
|
||||
push_merged(i_pos, str);
|
||||
},
|
||||
[&](std::unique_ptr<Expr> expr) {
|
||||
flush_merged();
|
||||
parts.emplace_back(i_pos, std::move(expr));
|
||||
},
|
||||
}, std::move(item));
|
||||
}
|
||||
}
|
||||
return std::make_unique<ExprConcatStrings>(pos, true, std::move(es2));
|
||||
|
||||
flush_merged();
|
||||
|
||||
/* If this is a single string, then don't do a concatenation.
|
||||
* (If it's a single expression, still do the ConcatStrings to properly force it to be a string.)
|
||||
*/
|
||||
if (parts.size() == 1 && dynamic_cast<ExprString *>(parts[0].second.get())) {
|
||||
return std::move(parts[0].second);
|
||||
}
|
||||
return std::make_unique<ExprConcatStrings>(pos, true, std::move(parts));
|
||||
}
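// Worked example (illustrative): for the indented string
//   ''
//     foo
//       ${bar}
//   ''
// every content line starts with at least the indentation of the "foo" line,
// so that amount becomes minIndent and is stripped from each line; the
// whitespace-only final line is cleared entirely; the remaining literal
// pieces on either side of the ${bar} interpolation are merged by
// push_merged/flush_merged, and the result is an ExprConcatStrings unless
// everything collapses into a single ExprString.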
|
||||
|
||||
}
|
||||
|
|
|
@ -394,7 +394,8 @@ static RegisterPrimOp primop_fetchGit({
|
|||
[Git reference]: https://git-scm.com/book/en/v2/Git-Internals-Git-References
|
||||
|
||||
By default, the `ref` value is prefixed with `refs/heads/`.
|
||||
As of 2.3.0, Nix will not prefix `refs/heads/` if `ref` starts with `refs/`.
|
||||
As of 2.3.0, Nix will not prefix `refs/heads/` if `ref` starts with `refs/` or
|
||||
if `ref` looks like a commit hash for backwards compatibility with CppNix 2.3.
|
||||
|
||||
- `submodules` (default: `false`)
|
||||
|
||||
|
|
|
@ -7,6 +7,32 @@
|
|||
|
||||
namespace nix {
|
||||
|
||||
void to_json(nlohmann::json & j, const AcceptFlakeConfig & e)
|
||||
{
|
||||
if (e == AcceptFlakeConfig::False) {
|
||||
j = false;
|
||||
} else if (e == AcceptFlakeConfig::Ask) {
|
||||
j = "ask";
|
||||
} else if (e == AcceptFlakeConfig::True) {
|
||||
j = true;
|
||||
} else {
|
||||
abort();
|
||||
}
|
||||
}
|
||||
|
||||
void from_json(const nlohmann::json & j, AcceptFlakeConfig & e)
|
||||
{
|
||||
if (j == false) {
|
||||
e = AcceptFlakeConfig::False;
|
||||
} else if (j == "ask") {
|
||||
e = AcceptFlakeConfig::Ask;
|
||||
} else if (j == true) {
|
||||
e = AcceptFlakeConfig::True;
|
||||
} else {
|
||||
throw Error("Invalid accept-flake-config value '%s'", std::string(j));
|
||||
}
|
||||
}
|
||||
|
||||
template<> AcceptFlakeConfig BaseSetting<AcceptFlakeConfig>::parse(const std::string & str, const ApplyConfigOptions & options) const
|
||||
{
|
||||
if (str == "true") return AcceptFlakeConfig::True;
|
||||
|
|
|
@ -13,6 +13,9 @@ namespace nix {
|
|||
|
||||
enum class AcceptFlakeConfig { False, Ask, True };
|
||||
|
||||
void to_json(nlohmann::json & j, const AcceptFlakeConfig & e);
|
||||
void from_json(const nlohmann::json & j, AcceptFlakeConfig & e);
|
||||
|
||||
struct FetchSettings : public Config
|
||||
{
|
||||
FetchSettings();
|
||||
|
|
|
@ -209,7 +209,7 @@ DownloadFileResult downloadFile(
|
|||
const std::string & url,
|
||||
const std::string & name,
|
||||
bool locked,
|
||||
const Headers & headers = {});
|
||||
Headers headers = {});
|
||||
|
||||
struct DownloadTarballResult
|
||||
{
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
#include "error.hh"
|
||||
#include "fetchers.hh"
|
||||
#include "cache.hh"
|
||||
#include "globals.hh"
|
||||
|
@ -257,6 +258,28 @@ std::pair<StorePath, Input> fetchFromWorkdir(ref<Store> store, Input & input, co
|
|||
}
|
||||
} // end namespace
|
||||
|
||||
static std::optional<Path> resolveRefToCachePath(
|
||||
Input & input,
|
||||
const Path & cacheDir,
|
||||
std::vector<Path> & gitRefFileCandidates,
|
||||
std::function<bool(const Path&)> condition)
|
||||
{
|
||||
if (input.getRef()->starts_with("refs/")) {
|
||||
Path fullpath = cacheDir + "/" + *input.getRef();
|
||||
if (condition(fullpath)) {
|
||||
return fullpath;
|
||||
}
|
||||
}
|
||||
|
||||
for (auto & candidate : gitRefFileCandidates) {
|
||||
if (condition(candidate)) {
|
||||
return candidate;
|
||||
}
|
||||
}
|
||||
|
||||
return std::nullopt;
|
||||
}
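// Illustrative lookup order: for a ref like "v1.2" the candidates built by the
// caller are cacheDir/refs/v1.2, cacheDir/refs/tags/v1.2 and
// cacheDir/refs/heads/v1.2, and the first one satisfying `condition` wins; a
// ref that already starts with "refs/" is additionally tried verbatim first.
// Below this is used twice: with a TTL-freshness check to decide whether a
// fetch is needed, and with pathExists afterwards to locate whichever ref file
// git actually created.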
|
||||
|
||||
struct GitInputScheme : InputScheme
|
||||
{
|
||||
std::optional<Input> inputFromURL(const ParsedURL & url, bool requireTree) const override
|
||||
|
@ -539,10 +562,13 @@ struct GitInputScheme : InputScheme
|
|||
runProgram("git", true, { "-c", "init.defaultBranch=" + gitInitialBranch, "init", "--bare", repoDir });
|
||||
}
|
||||
|
||||
Path localRefFile =
|
||||
input.getRef()->compare(0, 5, "refs/") == 0
|
||||
? cacheDir + "/" + *input.getRef()
|
||||
: cacheDir + "/refs/heads/" + *input.getRef();
|
||||
std::vector<Path> gitRefFileCandidates;
|
||||
for (auto & infix : {"", "tags/", "heads/"}) {
|
||||
Path p = cacheDir + "/refs/" + infix + *input.getRef();
|
||||
gitRefFileCandidates.push_back(p);
|
||||
}
|
||||
|
||||
Path localRefFile;
|
||||
|
||||
bool doFetch;
|
||||
time_t now = time(0);
|
||||
|
@ -564,29 +590,70 @@ struct GitInputScheme : InputScheme
|
|||
if (allRefs) {
|
||||
doFetch = true;
|
||||
} else {
|
||||
/* If the local ref is older than ‘tarball-ttl’ seconds, do a
|
||||
git fetch to update the local ref to the remote ref. */
|
||||
struct stat st;
|
||||
doFetch = stat(localRefFile.c_str(), &st) != 0 ||
|
||||
!isCacheFileWithinTtl(now, st);
|
||||
std::function<bool(const Path&)> condition;
|
||||
condition = [&now](const Path & path) {
|
||||
/* If the local ref is older than ‘tarball-ttl’ seconds, do a
|
||||
git fetch to update the local ref to the remote ref. */
|
||||
struct stat st;
|
||||
return stat(path.c_str(), &st) == 0 &&
|
||||
isCacheFileWithinTtl(now, st);
|
||||
};
|
||||
if (auto result = resolveRefToCachePath(
|
||||
input,
|
||||
cacheDir,
|
||||
gitRefFileCandidates,
|
||||
condition
|
||||
)) {
|
||||
localRefFile = *result;
|
||||
doFetch = false;
|
||||
} else {
|
||||
doFetch = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// When we have to fetch, we don't yet know `localRefFile`,
|
||||
// because git first needs to figure out what we're fetching
|
||||
// (i.e. is it a rev? a branch? a tag?)
|
||||
if (doFetch) {
|
||||
Activity act(*logger, lvlTalkative, actUnknown, fmt("fetching Git repository '%s'", actualUrl));
|
||||
|
||||
// FIXME: git stderr messes up our progress indicator, so
|
||||
// we're using --quiet for now. Should process its stderr.
|
||||
auto ref = input.getRef();
|
||||
std::string fetchRef;
|
||||
if (allRefs) {
|
||||
fetchRef = "refs/*";
|
||||
} else if (
|
||||
ref->starts_with("refs/")
|
||||
|| *ref == "HEAD"
|
||||
|| std::regex_match(*ref, revRegex))
|
||||
{
|
||||
fetchRef = *ref;
|
||||
} else {
|
||||
fetchRef = "refs/*/" + *ref;
|
||||
}
|
||||
|
||||
try {
|
||||
auto ref = input.getRef();
|
||||
auto fetchRef = allRefs
|
||||
? "refs/*"
|
||||
: ref->compare(0, 5, "refs/") == 0
|
||||
? *ref
|
||||
: ref == "HEAD"
|
||||
? *ref
|
||||
: "refs/heads/" + *ref;
|
||||
runProgram("git", true, { "-C", repoDir, "--git-dir", gitDir, "fetch", "--quiet", "--force", "--", actualUrl, fmt("%s:%s", fetchRef, fetchRef) }, true);
|
||||
Finally finally([&]() {
|
||||
if (auto p = resolveRefToCachePath(
|
||||
input,
|
||||
cacheDir,
|
||||
gitRefFileCandidates,
|
||||
pathExists
|
||||
)) {
|
||||
localRefFile = *p;
|
||||
}
|
||||
});
|
||||
|
||||
// FIXME: git stderr messes up our progress indicator, so
|
||||
// we're using --quiet for now. Should process its stderr.
|
||||
runProgram("git", true, {
|
||||
"-C", repoDir,
|
||||
"--git-dir", gitDir,
|
||||
"fetch",
|
||||
"--quiet",
|
||||
"--force",
|
||||
"--", actualUrl, fmt("%s:%s", fetchRef, fetchRef)
|
||||
}, true);
|
||||
} catch (Error & e) {
|
||||
if (!pathExists(localRefFile)) throw;
|
||||
warn("could not update local clone of Git repository '%s'; continuing with the most recent version", actualUrl);
|
||||
|
|
|
@ -15,7 +15,7 @@ DownloadFileResult downloadFile(
|
|||
const std::string & url,
|
||||
const std::string & name,
|
||||
bool locked,
|
||||
const Headers & headers)
|
||||
Headers headers)
|
||||
{
|
||||
// FIXME: check store
|
||||
|
||||
|
@ -40,13 +40,14 @@ DownloadFileResult downloadFile(
|
|||
if (cached && !cached->expired)
|
||||
return useCached();
|
||||
|
||||
FileTransferRequest request(url);
|
||||
request.headers = headers;
|
||||
if (cached)
|
||||
request.expectedETag = getStrAttr(cached->infoAttrs, "etag");
|
||||
headers.emplace_back("If-None-Match", getStrAttr(cached->infoAttrs, "etag"));
|
||||
FileTransferResult res;
|
||||
std::string data;
|
||||
try {
|
||||
res = getFileTransfer()->transfer(request);
|
||||
auto [meta, content] = getFileTransfer()->download(url, headers);
|
||||
res = std::move(meta);
|
||||
data = content->drain();
|
||||
} catch (FileTransferError & e) {
|
||||
if (cached) {
|
||||
warn("%s; using cached version", e.msg());
|
||||
|
@ -71,8 +72,8 @@ DownloadFileResult downloadFile(
|
|||
storePath = std::move(cached->storePath);
|
||||
} else {
|
||||
StringSink sink;
|
||||
sink << dumpString(res.data);
|
||||
auto hash = hashString(HashType::SHA256, res.data);
|
||||
sink << dumpString(data);
|
||||
auto hash = hashString(HashType::SHA256, data);
|
||||
ValidPathInfo info {
|
||||
*store,
|
||||
name,
|
||||
|
|
41
src/libmain/crash-handler.cc
Normal file
|
@ -0,0 +1,41 @@
|
|||
#include "crash-handler.hh"
|
||||
#include "fmt.hh"
|
||||
|
||||
#include <boost/core/demangle.hpp>
|
||||
#include <exception>
|
||||
|
||||
namespace nix {
|
||||
|
||||
namespace {
|
||||
void onTerminate()
|
||||
{
|
||||
std::cerr << "Lix crashed. This is a bug. We would appreciate if you report it along with what caused it at https://git.lix.systems/lix-project/lix/issues with the following information included:\n\n";
|
||||
try {
|
||||
std::exception_ptr eptr = std::current_exception();
|
||||
if (eptr) {
|
||||
std::rethrow_exception(eptr);
|
||||
} else {
|
||||
std::cerr << "std::terminate() called without exception\n";
|
||||
}
|
||||
} catch (const std::exception & ex) {
|
||||
std::cerr << "Exception: " << boost::core::demangle(typeid(ex).name()) << ": " << ex.what() << "\n";
|
||||
} catch (...) {
|
||||
std::cerr << "Unknown exception! Spooky.\n";
|
||||
}
|
||||
|
||||
std::cerr << "Stack trace:\n";
|
||||
nix::printStackTrace();
|
||||
|
||||
std::abort();
|
||||
}
|
||||
}
|
||||
|
||||
void registerCrashHandler()
|
||||
{
|
||||
// DO NOT use this for signals. Boost stacktrace is very much not
|
||||
// async-signal-safe, and in a world with ASLR, addr2line is pointless.
|
||||
//
|
||||
// If you want signals, set up a minidump system and do it out-of-process.
|
||||
std::set_terminate(onTerminate);
|
||||
}
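// Illustrative effect: once initNix() has installed this handler, an exception
// that escapes to std::terminate prints the "Lix crashed" banner, the
// demangled exception type and its what() text, and a stack trace before
// aborting, instead of the default terse terminate message.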
|
||||
}
|
21
src/libmain/crash-handler.hh
Normal file
|
@ -0,0 +1,21 @@
|
|||
#pragma once
|
||||
/// @file Crash handler for Lix that prints back traces (hopefully in instances where it is not just going to crash the process itself).
|
||||
/*
|
||||
* Author's note: This will probably be partially/fully supplanted by a
|
||||
* minidump writer like the following once we get our act together on crashes a
|
||||
* little bit more:
|
||||
* https://github.com/rust-minidump/minidump-writer
|
||||
* https://github.com/EmbarkStudios/crash-handling
|
||||
* (out of process implementation *should* be able to be done on-demand)
|
||||
*
|
||||
* Such an out-of-process implementation could then both make minidumps and
|
||||
* print stack traces for arbitrarily messed-up process states such that we can
|
||||
* safely give out backtraces for SIGSEGV and other deadly signals.
|
||||
*/
|
||||
|
||||
namespace nix {
|
||||
|
||||
/** Registers the Lix crash handler for std::terminate (currently; will support more crashes later). See also detectStackOverflow(). */
|
||||
void registerCrashHandler();
|
||||
|
||||
}
|
|
@ -7,7 +7,7 @@ namespace nix {
|
|||
LogFormat defaultLogFormat = LogFormat::raw;
|
||||
|
||||
LogFormat parseLogFormat(const std::string & logFormatStr) {
|
||||
if (logFormatStr == "raw" || getEnv("NIX_GET_COMPLETIONS"))
|
||||
if (logFormatStr == "raw")
|
||||
return LogFormat::raw;
|
||||
else if (logFormatStr == "raw-with-logs")
|
||||
return LogFormat::rawWithLogs;
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
libmain_sources = files(
|
||||
'common-args.cc',
|
||||
'crash-handler.cc',
|
||||
'loggers.cc',
|
||||
'progress-bar.cc',
|
||||
'shared.cc',
|
||||
|
@ -8,6 +9,7 @@ libmain_sources = files(
|
|||
|
||||
libmain_headers = files(
|
||||
'common-args.hh',
|
||||
'crash-handler.hh',
|
||||
'loggers.hh',
|
||||
'progress-bar.hh',
|
||||
'shared.hh',
|
||||
|
|
|
@ -92,7 +92,7 @@ void ProgressBar::resume()
|
|||
nextWakeup = draw(*state, {});
|
||||
state.wait_for(quitCV, std::chrono::milliseconds(50));
|
||||
}
|
||||
writeLogsToStderr("\r\e[K");
|
||||
eraseProgressDisplay(*state);
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -558,7 +558,8 @@ std::optional<char> ProgressBar::ask(std::string_view msg)
|
|||
{
|
||||
auto state(state_.lock());
|
||||
if (state->paused > 0 || !isatty(STDIN_FILENO)) return {};
|
||||
std::cerr << fmt("\r\e[K%s ", msg);
|
||||
eraseProgressDisplay(*state);
|
||||
std::cerr << msg;
|
||||
auto s = trim(readLine(STDIN_FILENO));
|
||||
if (s.size() != 1) return {};
|
||||
draw(*state, {});
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
#include "crash-handler.hh"
|
||||
#include "globals.hh"
|
||||
#include "shared.hh"
|
||||
#include "store-api.hh"
|
||||
|
@ -118,6 +119,8 @@ static void sigHandler(int signo) { }
|
|||
|
||||
void initNix()
|
||||
{
|
||||
registerCrashHandler();
|
||||
|
||||
/* Turn on buffering for cerr. */
|
||||
static char buf[1024];
|
||||
std::cerr.rdbuf()->pubsetbuf(buf, sizeof(buf));
|
||||
|
@ -193,20 +196,20 @@ LegacyArgs::LegacyArgs(const std::string & programName,
|
|||
.longName = "keep-failed",
|
||||
.shortName ='K',
|
||||
.description = "Keep temporary directories of failed builds.",
|
||||
.handler = {&(bool&) settings.keepFailed, true},
|
||||
.handler = {[&]() { settings.keepFailed.override(true); }},
|
||||
});
|
||||
|
||||
addFlag({
|
||||
.longName = "keep-going",
|
||||
.shortName ='k',
|
||||
.description = "Keep going after a build fails.",
|
||||
.handler = {&(bool&) settings.keepGoing, true},
|
||||
.handler = {[&]() { settings.keepGoing.override(true); }},
|
||||
});
|
||||
|
||||
addFlag({
|
||||
.longName = "fallback",
|
||||
.description = "Build from source if substitution fails.",
|
||||
.handler = {&(bool&) settings.tryFallback, true},
|
||||
.handler = {[&]() { settings.tryFallback.override(true); }},
|
||||
});
|
||||
|
||||
auto intSettingAlias = [&](char shortName, const std::string & longName,
|
||||
|
@ -244,7 +247,7 @@ LegacyArgs::LegacyArgs(const std::string & programName,
|
|||
.longName = "store",
|
||||
.description = "The URL of the Nix store to use.",
|
||||
.labels = {"store-uri"},
|
||||
.handler = {&(std::string&) settings.storeUri},
|
||||
.handler = {[&](std::string storeUri) { settings.storeUri.override(storeUri); }},
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -335,12 +338,15 @@ int handleExceptions(const std::string & programName, std::function<void()> fun)
|
|||
} catch (BaseError & e) {
|
||||
logError(e.info());
|
||||
return e.info().status;
|
||||
} catch (std::bad_alloc & e) {
|
||||
} catch (const std::bad_alloc & e) {
|
||||
printError(error + "out of memory");
|
||||
return 1;
|
||||
} catch (std::exception & e) {
|
||||
printError(error + e.what());
|
||||
return 1;
|
||||
} catch (const std::exception & e) {
|
||||
// Random exceptions bubbling into main are cause for bug reports; crash.
|
||||
std::terminate();
|
||||
} catch (...) {
|
||||
// Explicitly do not tolerate non-std exceptions escaping.
|
||||
std::terminate();
|
||||
}
|
||||
|
||||
return 0;
|
||||
|
@ -389,7 +395,7 @@ RunPager::~RunPager()
|
|||
pid.wait();
|
||||
}
|
||||
} catch (...) {
|
||||
ignoreException();
|
||||
ignoreExceptionInDestructor();
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -111,7 +111,7 @@ struct PrintFreed
|
|||
|
||||
|
||||
/**
|
||||
* Install a SIGSEGV handler to detect stack overflows.
|
||||
* Install a SIGSEGV handler to detect stack overflows. See also registerCrashHandler().
|
||||
*/
|
||||
void detectStackOverflow();
|
||||
|
||||
|
|
|
@ -47,7 +47,7 @@ struct BuildResult
|
|||
* @todo This should be an entire ErrorInfo object, not just a
|
||||
* string, for richer information.
|
||||
*/
|
||||
std::string errorMsg;
|
||||
std::string errorMsg = {};
|
||||
|
||||
std::string toString() const {
|
||||
auto strStatus = [&]() {
|
||||
|
@ -90,7 +90,7 @@ struct BuildResult
|
|||
* For derivations, a mapping from the names of the wanted outputs
|
||||
* to actual paths.
|
||||
*/
|
||||
SingleDrvOutputs builtOutputs;
|
||||
SingleDrvOutputs builtOutputs = {};
|
||||
|
||||
/**
|
||||
* The start/stop times of the build (or one of the rounds, if it
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
|
||||
namespace nix {
|
||||
|
||||
void commonChildInit()
|
||||
void commonExecveingChildInit()
|
||||
{
|
||||
logger = makeSimpleLogger();
|
||||
|
||||
|
|
|
@ -4,8 +4,12 @@
|
|||
namespace nix {
|
||||
|
||||
/**
|
||||
* Common initialisation performed in child processes.
|
||||
* Common initialisation performed in child processes that are just going to
|
||||
* execve.
|
||||
*
|
||||
* These processes may not use ReceiveInterrupts as they do not have an
|
||||
* interrupt receiving thread.
|
||||
*/
|
||||
void commonChildInit();
|
||||
void commonExecveingChildInit();
|
||||
|
||||
}
|
||||
|
|
|
@ -11,7 +11,13 @@
|
|||
#include "drv-output-substitution-goal.hh"
|
||||
#include "strings.hh"
|
||||
|
||||
#include <boost/outcome/try.hpp>
|
||||
#include <fstream>
|
||||
#include <kj/array.h>
|
||||
#include <kj/async-unix.h>
|
||||
#include <kj/async.h>
|
||||
#include <kj/debug.h>
|
||||
#include <kj/vector.h>
|
||||
#include <sys/types.h>
|
||||
#include <sys/socket.h>
|
||||
#include <sys/un.h>
|
||||
|
@ -65,7 +71,6 @@ DerivationGoal::DerivationGoal(const StorePath & drvPath,
|
|||
, wantedOutputs(wantedOutputs)
|
||||
, buildMode(buildMode)
|
||||
{
|
||||
state = &DerivationGoal::getDerivation;
|
||||
name = fmt(
|
||||
"building of '%s' from .drv file",
|
||||
DerivedPath::Built { makeConstantStorePathRef(drvPath), wantedOutputs }.to_string(worker.store));
|
||||
|
@ -85,7 +90,6 @@ DerivationGoal::DerivationGoal(const StorePath & drvPath, const BasicDerivation
|
|||
{
|
||||
this->drv = std::make_unique<Derivation>(drv);
|
||||
|
||||
state = &DerivationGoal::haveDerivation;
|
||||
name = fmt(
|
||||
"building of '%s' from in-memory derivation",
|
||||
DerivedPath::Built { makeConstantStorePathRef(drvPath), drv.outputNames() }.to_string(worker.store));
|
||||
|
@ -103,17 +107,7 @@ DerivationGoal::~DerivationGoal() noexcept(false)
|
|||
{
|
||||
/* Careful: we should never ever throw an exception from a
|
||||
destructor. */
|
||||
try { closeLogFile(); } catch (...) { ignoreException(); }
|
||||
}
|
||||
|
||||
|
||||
std::string DerivationGoal::key()
|
||||
{
|
||||
/* Ensure that derivations get built in order of their name,
|
||||
i.e. a derivation named "aardvark" always comes before
|
||||
"baboon". And substitution goals always happen before
|
||||
derivation goals (due to "b$"). */
|
||||
return "b$" + std::string(drvPath.name()) + "$" + worker.store.printStorePath(drvPath);
|
||||
try { closeLogFile(); } catch (...) { ignoreExceptionInDestructor(); }
|
||||
}
|
||||
|
||||
|
||||
|
@ -124,20 +118,32 @@ void DerivationGoal::killChild()
|
|||
}
|
||||
|
||||
|
||||
Goal::Finished DerivationGoal::timedOut(Error && ex)
|
||||
Goal::WorkResult DerivationGoal::timedOut(Error && ex)
|
||||
{
|
||||
killChild();
|
||||
return done(BuildResult::TimedOut, {}, std::move(ex));
|
||||
}
|
||||
|
||||
|
||||
Goal::WorkResult DerivationGoal::work(bool inBuildSlot)
|
||||
kj::Promise<Result<Goal::WorkResult>> DerivationGoal::workImpl() noexcept
|
||||
{
|
||||
return (this->*state)(inBuildSlot);
|
||||
KJ_DEFER({
|
||||
act.reset();
|
||||
actLock.reset();
|
||||
builderActivities.clear();
|
||||
});
|
||||
|
||||
BOOST_OUTCOME_CO_TRY(auto result, co_await (useDerivation ? getDerivation() : haveDerivation()));
|
||||
result.storePath = drvPath;
|
||||
co_return result;
|
||||
}
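// Note on the control flow above, as visible in this change: the old
// state-machine dispatch through `state` and work(bool inBuildSlot) is
// replaced by a KJ coroutine. workImpl() co_awaits getDerivation() or
// haveDerivation(), BOOST_OUTCOME_CO_TRY short-circuits on a failed Result,
// and the KJ_DEFER block resets the activities once the coroutine finishes,
// however it exits.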
|
||||
|
||||
void DerivationGoal::addWantedOutputs(const OutputsSpec & outputs)
|
||||
bool DerivationGoal::addWantedOutputs(const OutputsSpec & outputs)
|
||||
{
|
||||
if (isDone) {
|
||||
return false;
|
||||
}
|
||||
|
||||
auto newWanted = wantedOutputs.union_(outputs);
|
||||
switch (needRestart) {
|
||||
case NeedRestartForMoreOutputs::OutputsUnmodifedDontNeed:
|
||||
|
@ -154,32 +160,38 @@ void DerivationGoal::addWantedOutputs(const OutputsSpec & outputs)
|
|||
break;
|
||||
};
|
||||
wantedOutputs = newWanted;
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
Goal::WorkResult DerivationGoal::getDerivation(bool inBuildSlot)
|
||||
{
|
||||
kj::Promise<Result<Goal::WorkResult>> DerivationGoal::getDerivation() noexcept
|
||||
try {
|
||||
trace("init");
|
||||
|
||||
/* The first thing to do is to make sure that the derivation
|
||||
exists. If it doesn't, it may be created through a
|
||||
substitute. */
|
||||
if (buildMode == bmNormal && worker.evalStore.isValidPath(drvPath)) {
|
||||
return loadDerivation(inBuildSlot);
|
||||
co_return co_await loadDerivation();
|
||||
}
|
||||
|
||||
|
||||
state = &DerivationGoal::loadDerivation;
|
||||
return WaitForGoals{{worker.goalFactory().makePathSubstitutionGoal(drvPath)}};
|
||||
(co_await waitForGoals(worker.goalFactory().makePathSubstitutionGoal(drvPath))).value();
|
||||
co_return co_await loadDerivation();
|
||||
} catch (...) {
|
||||
co_return result::failure(std::current_exception());
|
||||
}
|
||||
|
||||
|
||||
Goal::WorkResult DerivationGoal::loadDerivation(bool inBuildSlot)
|
||||
{
|
||||
kj::Promise<Result<Goal::WorkResult>> DerivationGoal::loadDerivation() noexcept
|
||||
try {
|
||||
trace("loading derivation");
|
||||
|
||||
if (nrFailed != 0) {
|
||||
return done(BuildResult::MiscFailure, {}, Error("cannot build missing derivation '%s'", worker.store.printStorePath(drvPath)));
|
||||
return {done(
|
||||
BuildResult::MiscFailure,
|
||||
{},
|
||||
Error("cannot build missing derivation '%s'", worker.store.printStorePath(drvPath))
|
||||
)};
|
||||
}
|
||||
|
||||
/* `drvPath' should already be a root, but let's be on the safe
|
||||
|
@ -201,12 +213,14 @@ Goal::WorkResult DerivationGoal::loadDerivation(bool inBuildSlot)
|
|||
}
|
||||
assert(drv);
|
||||
|
||||
return haveDerivation(inBuildSlot);
|
||||
return haveDerivation();
|
||||
} catch (...) {
|
||||
return {std::current_exception()};
|
||||
}
|
||||
|
||||
|
||||
Goal::WorkResult DerivationGoal::haveDerivation(bool inBuildSlot)
|
||||
{
|
||||
kj::Promise<Result<Goal::WorkResult>> DerivationGoal::haveDerivation() noexcept
|
||||
try {
|
||||
trace("have derivation");
|
||||
|
||||
parsedDrv = std::make_unique<ParsedDerivation>(drvPath, *drv);
|
||||
|
@ -233,7 +247,7 @@ Goal::WorkResult DerivationGoal::haveDerivation(bool inBuildSlot)
|
|||
});
|
||||
}
|
||||
|
||||
return gaveUpOnSubstitution(inBuildSlot);
|
||||
co_return co_await gaveUpOnSubstitution();
|
||||
}
|
||||
|
||||
for (auto & i : drv->outputsAndOptPaths(worker.store))
|
||||
|
@ -255,19 +269,19 @@ Goal::WorkResult DerivationGoal::haveDerivation(bool inBuildSlot)
|
|||
|
||||
/* If they are all valid, then we're done. */
|
||||
if (allValid && buildMode == bmNormal) {
|
||||
return done(BuildResult::AlreadyValid, std::move(validOutputs));
|
||||
co_return done(BuildResult::AlreadyValid, std::move(validOutputs));
|
||||
}
|
||||
|
||||
/* We are first going to try to create the invalid output paths
|
||||
through substitutes. If that doesn't work, we'll build
|
||||
them. */
|
||||
WaitForGoals result;
|
||||
kj::Vector<std::pair<GoalPtr, kj::Promise<Result<WorkResult>>>> dependencies;
|
||||
if (settings.useSubstitutes) {
|
||||
if (parsedDrv->substitutesAllowed()) {
|
||||
for (auto & [outputName, status] : initialOutputs) {
|
||||
if (!status.wanted) continue;
|
||||
if (!status.known)
|
||||
result.goals.insert(
|
||||
dependencies.add(
|
||||
worker.goalFactory().makeDrvOutputSubstitutionGoal(
|
||||
DrvOutput{status.outputHash, outputName},
|
||||
buildMode == bmRepair ? Repair : NoRepair
|
||||
|
@ -275,7 +289,7 @@ Goal::WorkResult DerivationGoal::haveDerivation(bool inBuildSlot)
|
|||
);
|
||||
else {
|
||||
auto * cap = getDerivationCA(*drv);
|
||||
result.goals.insert(worker.goalFactory().makePathSubstitutionGoal(
|
||||
dependencies.add(worker.goalFactory().makePathSubstitutionGoal(
|
||||
status.known->path,
|
||||
buildMode == bmRepair ? Repair : NoRepair,
|
||||
cap ? std::optional { *cap } : std::nullopt));
|
||||
|
@ -286,24 +300,31 @@ Goal::WorkResult DerivationGoal::haveDerivation(bool inBuildSlot)
|
|||
}
|
||||
}
|
||||
|
||||
if (result.goals.empty()) { /* to prevent hang (no wake-up event) */
|
||||
return outputsSubstitutionTried(inBuildSlot);
|
||||
} else {
|
||||
state = &DerivationGoal::outputsSubstitutionTried;
|
||||
return result;
|
||||
if (!dependencies.empty()) { /* to prevent hang (no wake-up event) */
|
||||
(co_await waitForGoals(dependencies.releaseAsArray())).value();
|
||||
}
|
||||
co_return co_await outputsSubstitutionTried();
|
||||
} catch (...) {
|
||||
co_return result::failure(std::current_exception());
|
||||
}
|
||||
|
||||
Goal::WorkResult DerivationGoal::outputsSubstitutionTried(bool inBuildSlot)
|
||||
{
|
||||
kj::Promise<Result<Goal::WorkResult>> DerivationGoal::outputsSubstitutionTried() noexcept
|
||||
try {
|
||||
trace("all outputs substituted (maybe)");
|
||||
|
||||
assert(drv->type().isPure());
|
||||
|
||||
if (nrFailed > 0 && nrFailed > nrNoSubstituters + nrIncompleteClosure && !settings.tryFallback) {
|
||||
return done(BuildResult::TransientFailure, {},
|
||||
Error("some substitutes for the outputs of derivation '%s' failed (usually happens due to networking issues); try '--fallback' to build derivation from source ",
|
||||
worker.store.printStorePath(drvPath)));
|
||||
if (nrFailed > 0 && nrFailed > nrNoSubstituters + nrIncompleteClosure && !settings.tryFallback)
|
||||
{
|
||||
return {done(
|
||||
BuildResult::TransientFailure,
|
||||
{},
|
||||
Error(
|
||||
"some substitutes for the outputs of derivation '%s' failed (usually happens due "
|
||||
"to networking issues); try '--fallback' to build derivation from source ",
|
||||
worker.store.printStorePath(drvPath)
|
||||
)
|
||||
)};
|
||||
}
|
||||
|
||||
/* If the substitutes form an incomplete closure, then we should
|
||||
|
@ -337,13 +358,13 @@ Goal::WorkResult DerivationGoal::outputsSubstitutionTried(bool inBuildSlot)
|
|||
|
||||
if (needRestart == NeedRestartForMoreOutputs::OutputsAddedDoNeed) {
|
||||
needRestart = NeedRestartForMoreOutputs::OutputsUnmodifedDontNeed;
|
||||
return haveDerivation(inBuildSlot);
|
||||
return haveDerivation();
|
||||
}
|
||||
|
||||
auto [allValid, validOutputs] = checkPathValidity();
|
||||
|
||||
if (buildMode == bmNormal && allValid) {
|
||||
return done(BuildResult::Substituted, std::move(validOutputs));
|
||||
return {done(BuildResult::Substituted, std::move(validOutputs))};
|
||||
}
|
||||
if (buildMode == bmRepair && allValid) {
|
||||
return repairClosure();
|
||||
|
@ -353,15 +374,17 @@ Goal::WorkResult DerivationGoal::outputsSubstitutionTried(bool inBuildSlot)
|
|||
worker.store.printStorePath(drvPath));
|
||||
|
||||
/* Nothing to wait for; tail call */
|
||||
return gaveUpOnSubstitution(inBuildSlot);
|
||||
return gaveUpOnSubstitution();
|
||||
} catch (...) {
|
||||
return {std::current_exception()};
|
||||
}
|
||||
|
||||
|
||||
/* At least one of the output paths could not be
|
||||
produced using a substitute. So we have to build instead. */
|
||||
Goal::WorkResult DerivationGoal::gaveUpOnSubstitution(bool inBuildSlot)
|
||||
{
|
||||
WaitForGoals result;
|
||||
kj::Promise<Result<Goal::WorkResult>> DerivationGoal::gaveUpOnSubstitution() noexcept
|
||||
try {
|
||||
kj::Vector<std::pair<GoalPtr, kj::Promise<Result<WorkResult>>>> dependencies;
|
||||
|
||||
/* At this point we are building all outputs, so if more are wanted there
|
||||
is no need to restart. */
|
||||
|
@ -374,7 +397,7 @@ Goal::WorkResult DerivationGoal::gaveUpOnSubstitution(bool inBuildSlot)
|
|||
|
||||
addWaiteeDerivedPath = [&](ref<SingleDerivedPath> inputDrv, const DerivedPathMap<StringSet>::ChildNode & inputNode) {
|
||||
if (!inputNode.value.empty())
|
||||
result.goals.insert(worker.goalFactory().makeGoal(
|
||||
dependencies.add(worker.goalFactory().makeGoal(
|
||||
DerivedPath::Built {
|
||||
.drvPath = inputDrv,
|
||||
.outputs = inputNode.value,
|
||||
|
@ -419,20 +442,20 @@ Goal::WorkResult DerivationGoal::gaveUpOnSubstitution(bool inBuildSlot)
|
|||
if (!settings.useSubstitutes)
|
||||
throw Error("dependency '%s' of '%s' does not exist, and substitution is disabled",
|
||||
worker.store.printStorePath(i), worker.store.printStorePath(drvPath));
|
||||
result.goals.insert(worker.goalFactory().makePathSubstitutionGoal(i));
|
||||
dependencies.add(worker.goalFactory().makePathSubstitutionGoal(i));
|
||||
}
|
||||
|
||||
if (result.goals.empty()) {/* to prevent hang (no wake-up event) */
|
||||
return inputsRealised(inBuildSlot);
|
||||
} else {
|
||||
state = &DerivationGoal::inputsRealised;
|
||||
return result;
|
||||
if (!dependencies.empty()) {/* to prevent hang (no wake-up event) */
|
||||
(co_await waitForGoals(dependencies.releaseAsArray())).value();
|
||||
}
|
||||
co_return co_await inputsRealised();
|
||||
} catch (...) {
|
||||
co_return result::failure(std::current_exception());
|
||||
}
|
||||
|
||||
|
||||
Goal::WorkResult DerivationGoal::repairClosure()
|
||||
{
|
||||
kj::Promise<Result<Goal::WorkResult>> DerivationGoal::repairClosure() noexcept
|
||||
try {
|
||||
assert(drv->type().isPure());
|
||||
|
||||
/* If we're repairing, we now know that our own outputs are valid.
|
||||
|
@ -467,7 +490,7 @@ Goal::WorkResult DerivationGoal::repairClosure()
|
|||
}
|
||||
|
||||
/* Check each path (slow!). */
|
||||
WaitForGoals result;
|
||||
kj::Vector<std::pair<GoalPtr, kj::Promise<Result<WorkResult>>>> dependencies;
|
||||
for (auto & i : outputClosure) {
|
||||
if (worker.pathContentsGood(i)) continue;
|
||||
printError(
|
||||
|
@ -475,9 +498,9 @@ Goal::WorkResult DerivationGoal::repairClosure()
|
|||
worker.store.printStorePath(i), worker.store.printStorePath(drvPath));
|
||||
auto drvPath2 = outputsToDrv.find(i);
|
||||
if (drvPath2 == outputsToDrv.end())
|
||||
result.goals.insert(worker.goalFactory().makePathSubstitutionGoal(i, Repair));
|
||||
dependencies.add(worker.goalFactory().makePathSubstitutionGoal(i, Repair));
|
||||
else
|
||||
result.goals.insert(worker.goalFactory().makeGoal(
|
||||
dependencies.add(worker.goalFactory().makeGoal(
|
||||
DerivedPath::Built {
|
||||
.drvPath = makeConstantStorePathRef(drvPath2->second),
|
||||
.outputs = OutputsSpec::All { },
|
||||
|
@ -485,40 +508,50 @@ Goal::WorkResult DerivationGoal::repairClosure()
|
|||
bmRepair));
|
||||
}
|
||||
|
||||
if (result.goals.empty()) {
|
||||
return done(BuildResult::AlreadyValid, assertPathValidity());
|
||||
if (dependencies.empty()) {
|
||||
co_return done(BuildResult::AlreadyValid, assertPathValidity());
|
||||
}
|
||||
|
||||
state = &DerivationGoal::closureRepaired;
|
||||
return result;
|
||||
(co_await waitForGoals(dependencies.releaseAsArray())).value();
|
||||
co_return co_await closureRepaired();
|
||||
} catch (...) {
|
||||
co_return result::failure(std::current_exception());
|
||||
}
|
||||
|
||||
|
||||
Goal::WorkResult DerivationGoal::closureRepaired(bool inBuildSlot)
|
||||
{
|
||||
kj::Promise<Result<Goal::WorkResult>> DerivationGoal::closureRepaired() noexcept
|
||||
try {
|
||||
trace("closure repaired");
|
||||
if (nrFailed > 0)
|
||||
throw Error("some paths in the output closure of derivation '%s' could not be repaired",
|
||||
worker.store.printStorePath(drvPath));
|
||||
return done(BuildResult::AlreadyValid, assertPathValidity());
|
||||
return {done(BuildResult::AlreadyValid, assertPathValidity())};
|
||||
} catch (...) {
|
||||
return {std::current_exception()};
|
||||
}
|
||||
|
||||
|
||||
Goal::WorkResult DerivationGoal::inputsRealised(bool inBuildSlot)
|
||||
{
|
||||
kj::Promise<Result<Goal::WorkResult>> DerivationGoal::inputsRealised() noexcept
|
||||
try {
|
||||
trace("all inputs realised");
|
||||
|
||||
if (nrFailed != 0) {
|
||||
if (!useDerivation)
|
||||
throw Error("some dependencies of '%s' are missing", worker.store.printStorePath(drvPath));
|
||||
return done(BuildResult::DependencyFailed, {}, Error(
|
||||
co_return done(
|
||||
BuildResult::DependencyFailed,
|
||||
{},
|
||||
Error(
|
||||
"%s dependencies of derivation '%s' failed to build",
|
||||
nrFailed, worker.store.printStorePath(drvPath)));
|
||||
nrFailed,
|
||||
worker.store.printStorePath(drvPath)
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
if (retrySubstitution == RetrySubstitution::YesNeed) {
|
||||
retrySubstitution = RetrySubstitution::AlreadyRetried;
|
||||
return haveDerivation(inBuildSlot);
|
||||
co_return co_await haveDerivation();
|
||||
}
|
||||
|
||||
/* Gather information necessary for computing the closure and/or
|
||||
|
@ -580,11 +613,12 @@ Goal::WorkResult DerivationGoal::inputsRealised(bool inBuildSlot)
|
|||
worker.store.printStorePath(pathResolved),
|
||||
});
|
||||
|
||||
resolvedDrvGoal = worker.goalFactory().makeDerivationGoal(
|
||||
auto dependency = worker.goalFactory().makeDerivationGoal(
|
||||
pathResolved, wantedOutputs, buildMode);
|
||||
resolvedDrvGoal = dependency.first;
|
||||
|
||||
state = &DerivationGoal::resolvedFinished;
|
||||
return WaitForGoals{{resolvedDrvGoal}};
|
||||
(co_await waitForGoals(std::move(dependency))).value();
|
||||
co_return co_await resolvedFinished();
|
||||
}
|
||||
|
||||
std::function<void(const StorePath &, const DerivedPathMap<StringSet>::ChildNode &)> accumInputPaths;
|
||||
|
@ -648,8 +682,9 @@ Goal::WorkResult DerivationGoal::inputsRealised(bool inBuildSlot)
|
|||
/* Okay, try to build. Note that here we don't wait for a build
|
||||
slot to become available, since we don't need one if there is a
|
||||
build hook. */
|
||||
state = &DerivationGoal::tryToBuild;
|
||||
return tryToBuild(inBuildSlot);
|
||||
co_return co_await tryToBuild();
|
||||
} catch (...) {
|
||||
co_return result::failure(std::current_exception());
|
||||
}
|
||||
|
||||
void DerivationGoal::started()
|
||||
|
@ -665,8 +700,9 @@ void DerivationGoal::started()
|
|||
mcRunningBuilds = worker.runningBuilds.addTemporarily(1);
|
||||
}
|
||||
|
||||
Goal::WorkResult DerivationGoal::tryToBuild(bool inBuildSlot)
|
||||
{
|
||||
kj::Promise<Result<Goal::WorkResult>> DerivationGoal::tryToBuild() noexcept
|
||||
try {
|
||||
retry:
|
||||
trace("trying to build");
|
||||
|
||||
/* Obtain locks on all output paths, if the paths are known a priori.
|
||||
|
@ -700,7 +736,9 @@ Goal::WorkResult DerivationGoal::tryToBuild(bool inBuildSlot)
|
|||
if (!actLock)
|
||||
actLock = std::make_unique<Activity>(*logger, lvlWarn, actBuildWaiting,
|
||||
fmt("waiting for lock on %s", Magenta(showPaths(lockFiles))));
|
||||
return WaitForAWhile{};
|
||||
co_await waitForAWhile();
|
||||
// we can loop very often, and `co_return co_await` always allocates a new frame
|
||||
goto retry;
|
||||
}
|
||||
|
||||
actLock.reset();
|
||||
|
@ -717,7 +755,7 @@ Goal::WorkResult DerivationGoal::tryToBuild(bool inBuildSlot)
|
|||
if (buildMode != bmCheck && allValid) {
|
||||
debug("skipping build of derivation '%s', someone beat us to it", worker.store.printStorePath(drvPath));
|
||||
outputLocks.setDeletion(true);
|
||||
return done(BuildResult::AlreadyValid, std::move(validOutputs));
|
||||
co_return done(BuildResult::AlreadyValid, std::move(validOutputs));
|
||||
}
|
||||
|
||||
/* If any of the outputs already exist but are not valid, delete
|
||||
|
@ -737,49 +775,63 @@ Goal::WorkResult DerivationGoal::tryToBuild(bool inBuildSlot)
|
|||
&& settings.maxBuildJobs.get() != 0;
|
||||
|
||||
if (!buildLocally) {
|
||||
auto hookReply = tryBuildHook(inBuildSlot);
|
||||
auto result = std::visit(
|
||||
overloaded{
|
||||
[&](HookReply::Accept & a) -> std::optional<WorkResult> {
|
||||
/* Yes, it has started doing so. Wait until we get
|
||||
EOF from the hook. */
|
||||
actLock.reset();
|
||||
buildResult.startTime = time(0); // inexact
|
||||
state = &DerivationGoal::buildDone;
|
||||
started();
|
||||
return WaitForWorld{std::move(a.fds), false};
|
||||
},
|
||||
[&](HookReply::Postpone) -> std::optional<WorkResult> {
|
||||
/* Not now; wait until at least one child finishes or
|
||||
the wake-up timeout expires. */
|
||||
if (!actLock)
|
||||
actLock = std::make_unique<Activity>(*logger, lvlTalkative, actBuildWaiting,
|
||||
fmt("waiting for a machine to build '%s'", Magenta(worker.store.printStorePath(drvPath))));
|
||||
outputLocks.unlock();
|
||||
return WaitForAWhile{};
|
||||
},
|
||||
[&](HookReply::Decline) -> std::optional<WorkResult> {
|
||||
/* We should do it ourselves. */
|
||||
return std::nullopt;
|
||||
},
|
||||
},
|
||||
hookReply);
|
||||
if (result) {
|
||||
return std::move(*result);
|
||||
auto hookReply = tryBuildHook();
|
||||
switch (hookReply.index()) {
|
||||
case 0: {
|
||||
HookReply::Accept & a = std::get<0>(hookReply);
|
||||
/* Yes, it has started doing so. Wait until we get
|
||||
EOF from the hook. */
|
||||
actLock.reset();
|
||||
buildResult.startTime = time(0); // inexact
|
||||
started();
|
||||
auto r = co_await a.promise;
|
||||
if (r.has_value()) {
|
||||
co_return co_await buildDone();
|
||||
} else if (r.has_error()) {
|
||||
co_return r.assume_error();
|
||||
} else {
|
||||
co_return r.assume_exception();
|
||||
}
|
||||
}
|
||||
|
||||
case 1: {
|
||||
HookReply::Decline _ [[gnu::unused]] = std::get<1>(hookReply);
|
||||
break;
|
||||
}
|
||||
|
||||
case 2: {
|
||||
HookReply::Postpone _ [[gnu::unused]] = std::get<2>(hookReply);
|
||||
/* Not now; wait until at least one child finishes or
|
||||
the wake-up timeout expires. */
|
||||
if (!actLock)
|
||||
actLock = std::make_unique<Activity>(*logger, lvlTalkative, actBuildWaiting,
|
||||
fmt("waiting for a machine to build '%s'", Magenta(worker.store.printStorePath(drvPath))));
|
||||
outputLocks.unlock();
|
||||
co_await waitForAWhile();
|
||||
goto retry;
|
||||
}
|
||||
|
||||
default:
|
||||
// can't static_assert this because HookReply *subclasses* variant and std::variant_size breaks
|
||||
assert(false && "unexpected hook reply");
|
||||
}
|
||||
}
|
||||
|
||||
actLock.reset();
|
||||
|
||||
state = &DerivationGoal::tryLocalBuild;
|
||||
return tryLocalBuild(inBuildSlot);
|
||||
co_return co_await tryLocalBuild();
|
||||
} catch (...) {
|
||||
co_return result::failure(std::current_exception());
|
||||
}
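Two details of the rewritten `tryToBuild()` above are worth calling out: the lock-wait and hook-postpone paths now do `co_await waitForAWhile(); goto retry;` rather than bouncing through the scheduler (the inline comment explains this avoids allocating a new coroutine frame per poll), and `HookReply` is dispatched via `index()` and `std::get` because it derives from `std::variant`, so `std::variant_size` cannot back a `static_assert` on the default branch. A standalone toy showing that dispatch style; `Accept`/`Decline`/`Postpone` here are illustrative stand-ins, not the Lix types:

```cpp
// Standalone sketch: a type that *derives* from std::variant is switched on via
// index() + std::get. std::variant_size is not specialized for the derived type,
// hence the runtime assert(false) default instead of a static_assert.
#include <cassert>
#include <iostream>
#include <string>
#include <variant>

struct Accept { std::string machine; };
struct Decline {};
struct Postpone {};

struct Reply : std::variant<Accept, Decline, Postpone>
{
    using variant::variant;
};

int main()
{
    Reply r = Postpone{};
    switch (r.index()) {
    case 0: std::cout << "accepted on " << std::get<0>(r).machine << "\n"; break;
    case 1: std::cout << "declined\n"; break;
    case 2: std::cout << "postponed, will retry\n"; break;
    default: assert(false && "unexpected reply");
    }
}
```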
|
||||
|
||||
Goal::WorkResult DerivationGoal::tryLocalBuild(bool inBuildSlot) {
|
||||
kj::Promise<Result<Goal::WorkResult>> DerivationGoal::tryLocalBuild() noexcept
|
||||
try {
|
||||
throw Error(
|
||||
"unable to build with a primary store that isn't a local store; "
|
||||
"either pass a different '--store' or enable remote builds."
|
||||
"\nhttps://docs.lix.systems/manual/lix/stable/advanced-topics/distributed-builds.html");
|
||||
} catch (...) {
|
||||
return {std::current_exception()};
|
||||
}
|
||||
|
||||
|
||||
|
@ -819,7 +871,7 @@ void replaceValidPath(const Path & storePath, const Path & tmpPath)
|
|||
// attempt to recover
|
||||
movePath(oldPath, storePath);
|
||||
} catch (...) {
|
||||
ignoreException();
|
||||
ignoreExceptionExceptInterrupt();
|
||||
}
|
||||
throw;
|
||||
}
|
||||
|
@ -935,10 +987,11 @@ void runPostBuildHook(
|
|||
proc.getStdout()->drainInto(sink);
|
||||
}
|
||||
|
||||
Goal::WorkResult DerivationGoal::buildDone(bool inBuildSlot)
|
||||
{
|
||||
kj::Promise<Result<Goal::WorkResult>> DerivationGoal::buildDone() noexcept
|
||||
try {
|
||||
trace("build done");
|
||||
|
||||
slotToken = {};
|
||||
Finally releaseBuildUser([&](){ this->cleanupHookFinally(); });
|
||||
|
||||
cleanupPreChildKill();
|
||||
|
@ -954,9 +1007,6 @@ Goal::WorkResult DerivationGoal::buildDone(bool inBuildSlot)
|
|||
buildResult.timesBuilt++;
|
||||
buildResult.stopTime = time(0);
|
||||
|
||||
/* So the child is gone now. */
|
||||
worker.childTerminated(this);
|
||||
|
||||
/* Close the read side of the logger pipe. */
|
||||
closeReadPipes();
|
||||
|
||||
|
@ -1030,7 +1080,7 @@ Goal::WorkResult DerivationGoal::buildDone(bool inBuildSlot)
|
|||
outputLocks.setDeletion(true);
|
||||
outputLocks.unlock();
|
||||
|
||||
return done(BuildResult::Built, std::move(builtOutputs));
|
||||
return {done(BuildResult::Built, std::move(builtOutputs))};
|
||||
} catch (BuildError & e) {
|
||||
outputLocks.unlock();
|
||||
|
||||
|
@ -1051,12 +1101,14 @@ Goal::WorkResult DerivationGoal::buildDone(bool inBuildSlot)
|
|||
BuildResult::PermanentFailure;
|
||||
}
|
||||
|
||||
return done(st, {}, std::move(e));
|
||||
return {done(st, {}, std::move(e))};
|
||||
}
|
||||
} catch (...) {
|
||||
return {std::current_exception()};
|
||||
}
|
||||
|
||||
Goal::WorkResult DerivationGoal::resolvedFinished(bool inBuildSlot)
|
||||
{
|
||||
kj::Promise<Result<Goal::WorkResult>> DerivationGoal::resolvedFinished() noexcept
|
||||
try {
|
||||
trace("resolved derivation finished");
|
||||
|
||||
assert(resolvedDrvGoal);
|
||||
|
@ -1123,10 +1175,12 @@ Goal::WorkResult DerivationGoal::resolvedFinished(bool inBuildSlot)
|
|||
if (status == BuildResult::AlreadyValid)
|
||||
status = BuildResult::ResolvesToAlreadyValid;
|
||||
|
||||
return done(status, std::move(builtOutputs));
|
||||
return {done(status, std::move(builtOutputs))};
|
||||
} catch (...) {
|
||||
return {std::current_exception()};
|
||||
}
|
||||
|
||||
HookReply DerivationGoal::tryBuildHook(bool inBuildSlot)
|
||||
HookReply DerivationGoal::tryBuildHook()
|
||||
{
|
||||
if (!worker.hook.available || !useDerivation) return HookReply::Decline{};
|
||||
|
||||
|
@ -1138,7 +1192,7 @@ HookReply DerivationGoal::tryBuildHook(bool inBuildSlot)
|
|||
/* Send the request to the hook. */
|
||||
worker.hook.instance->sink
|
||||
<< "try"
|
||||
<< (inBuildSlot ? 1 : 0)
|
||||
<< (slotToken.valid() ? 1 : 0)
|
||||
<< drv->platform
|
||||
<< worker.store.printStorePath(drvPath)
|
||||
<< parsedDrv->getRequiredSystemFeatures();
|
||||
|
@ -1165,6 +1219,7 @@ HookReply DerivationGoal::tryBuildHook(bool inBuildSlot)
|
|||
else {
|
||||
s += "\n";
|
||||
writeLogsToStderr(s);
|
||||
logger->log(lvlInfo, s);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1224,12 +1279,8 @@ HookReply DerivationGoal::tryBuildHook(bool inBuildSlot)
|
|||
/* Create the log file and pipe. */
|
||||
Path logFile = openLogFile();
|
||||
|
||||
std::set<int> fds;
|
||||
fds.insert(hook->fromHook.get());
|
||||
fds.insert(hook->builderOut.get());
|
||||
builderOutFD = &hook->builderOut;
|
||||
|
||||
return HookReply::Accept{std::move(fds)};
|
||||
return HookReply::Accept{handleChildOutput()};
|
||||
}
|
||||
|
||||
|
||||
|
@ -1289,23 +1340,69 @@ void DerivationGoal::closeLogFile()
|
|||
}
|
||||
|
||||
|
||||
Goal::WorkResult DerivationGoal::handleChildOutput(int fd, std::string_view data)
|
||||
Goal::WorkResult DerivationGoal::tooMuchLogs()
|
||||
{
|
||||
assert(builderOutFD);
|
||||
killChild();
|
||||
return done(
|
||||
BuildResult::LogLimitExceeded, {},
|
||||
Error("%s killed after writing more than %d bytes of log output",
|
||||
getName(), settings.maxLogSize));
|
||||
}
|
||||
|
||||
auto tooMuchLogs = [&] {
|
||||
killChild();
|
||||
return done(
|
||||
BuildResult::LogLimitExceeded, {},
|
||||
Error("%s killed after writing more than %d bytes of log output",
|
||||
getName(), settings.maxLogSize));
|
||||
};
|
||||
struct DerivationGoal::InputStream final : private kj::AsyncObject
{
    int fd;
    kj::UnixEventPort::FdObserver observer;

    InputStream(kj::UnixEventPort & ep, int fd)
        : fd(fd)
        , observer(ep, fd, kj::UnixEventPort::FdObserver::OBSERVE_READ)
    {
        int flags = fcntl(fd, F_GETFL);
        if (flags < 0) {
            throw SysError("fcntl(F_GETFL) failed on fd %i", fd);
        }
        if (fcntl(fd, F_SETFL, flags | O_NONBLOCK) < 0) {
            throw SysError("fcntl(F_SETFL) failed on fd %i", fd);
        }
    }

    kj::Promise<std::string_view> read(kj::ArrayPtr<char> buffer)
    {
        const auto res = ::read(fd, buffer.begin(), buffer.size());
        // closing a pty endpoint causes EIO on the other endpoint. stock kj streams
        // do not handle this and throw exceptions we can't ask for errno instead :(
        // (we can't use `errno` either because kj may well have mangled it by now.)
        if (res == 0 || (res == -1 && errno == EIO)) {
            return std::string_view{};
        }

        KJ_NONBLOCKING_SYSCALL(res) {}

        if (res > 0) {
            return std::string_view{buffer.begin(), static_cast<size_t>(res)};
        }

        return observer.whenBecomesReadable().then([this, buffer] {
            return read(buffer);
        });
    }
};
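`InputStream` above adapts a raw fd to the coroutine world: it forces `O_NONBLOCK`, maps `EIO` from a closed pty peer to end-of-stream, and otherwise retries after `FdObserver::whenBecomesReadable()`. A standalone sketch of the same observe-then-retry loop against an ordinary pipe; error handling is trimmed and the helper names are mine, not from this diff:

```cpp
// Standalone sketch of the FdObserver pattern InputStream is built on: make the
// fd non-blocking, try read(2), and if it would block, wait for readability and
// retry. Error handling is trimmed for brevity.
#include <fcntl.h>
#include <unistd.h>
#include <iostream>
#include <kj/async-io.h>
#include <kj/async-unix.h>
#include <kj/string.h>

kj::Promise<kj::String> readOnce(kj::UnixEventPort::FdObserver & obs, int fd)
{
    char buf[128];
    ssize_t n = ::read(fd, buf, sizeof buf);
    if (n >= 0) {
        return kj::heapString(buf, n);
    }
    // EAGAIN/EWOULDBLOCK: nothing there yet, wait for the event port to wake us.
    return obs.whenBecomesReadable().then([&obs, fd] { return readOnce(obs, fd); });
}

int main()
{
    auto aio = kj::setupAsyncIo();

    int fds[2];
    if (pipe(fds) != 0) return 1;
    fcntl(fds[0], F_SETFL, fcntl(fds[0], F_GETFL) | O_NONBLOCK);

    kj::UnixEventPort::FdObserver obs(
        aio.unixEventPort, fds[0], kj::UnixEventPort::FdObserver::OBSERVE_READ);

    auto reader = readOnce(obs, fds[0]);
    (void) ::write(fds[1], "hello", 5);
    std::cout << reader.wait(aio.waitScope).cStr() << "\n";
}
```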
|
||||
|
||||
kj::Promise<Outcome<void, Goal::WorkResult>> DerivationGoal::handleBuilderOutput(InputStream & in) noexcept
|
||||
try {
|
||||
auto buf = kj::heapArray<char>(4096);
|
||||
while (true) {
|
||||
auto data = co_await in.read(buf);
|
||||
lastChildActivity = worker.aio.provider->getTimer().now();
|
||||
|
||||
if (data.empty()) {
|
||||
co_return result::success();
|
||||
}
|
||||
|
||||
// local & `ssh://`-builds are dealt with here.
|
||||
if (fd == builderOutFD->get()) {
|
||||
logSize += data.size();
|
||||
if (settings.maxLogSize && logSize > settings.maxLogSize) {
|
||||
return tooMuchLogs();
|
||||
co_return tooMuchLogs();
|
||||
}
|
||||
|
||||
for (auto c : data)
|
||||
|
@ -1320,10 +1417,22 @@ Goal::WorkResult DerivationGoal::handleChildOutput(int fd, std::string_view data
|
|||
}
|
||||
|
||||
if (logSink) (*logSink)(data);
|
||||
return StillAlive{};
|
||||
}
|
||||
} catch (...) {
|
||||
co_return std::current_exception();
|
||||
}
|
||||
|
||||
kj::Promise<Outcome<void, Goal::WorkResult>> DerivationGoal::handleHookOutput(InputStream & in) noexcept
|
||||
try {
|
||||
auto buf = kj::heapArray<char>(4096);
|
||||
while (true) {
|
||||
auto data = co_await in.read(buf);
|
||||
lastChildActivity = worker.aio.provider->getTimer().now();
|
||||
|
||||
if (data.empty()) {
|
||||
co_return result::success();
|
||||
}
|
||||
|
||||
if (hook && fd == hook->fromHook.get()) {
|
||||
for (auto c : data)
|
||||
if (c == '\n') {
|
||||
auto json = parseJSONMessage(currentHookLine);
|
||||
|
@ -1339,7 +1448,7 @@ Goal::WorkResult DerivationGoal::handleChildOutput(int fd, std::string_view data
|
|||
(fields.size() > 0 ? fields[0].get<std::string>() : "") + "\n";
|
||||
logSize += logLine.size();
|
||||
if (settings.maxLogSize && logSize > settings.maxLogSize) {
|
||||
return tooMuchLogs();
|
||||
co_return tooMuchLogs();
|
||||
}
|
||||
(*logSink)(logLine);
|
||||
} else if (type == resSetPhase && ! fields.is_null()) {
|
||||
|
@ -1363,16 +1472,83 @@ Goal::WorkResult DerivationGoal::handleChildOutput(int fd, std::string_view data
|
|||
} else
|
||||
currentHookLine += c;
|
||||
}
|
||||
|
||||
return StillAlive{};
|
||||
} catch (...) {
|
||||
co_return std::current_exception();
|
||||
}
|
||||
|
||||
kj::Promise<Outcome<void, Goal::WorkResult>> DerivationGoal::handleChildOutput() noexcept
|
||||
try {
|
||||
assert(builderOutFD);
|
||||
|
||||
void DerivationGoal::handleEOF(int fd)
|
||||
auto builderIn = kj::heap<InputStream>(worker.aio.unixEventPort, builderOutFD->get());
|
||||
kj::Own<InputStream> hookIn;
|
||||
if (hook) {
|
||||
hookIn = kj::heap<InputStream>(worker.aio.unixEventPort, hook->fromHook.get());
|
||||
}
|
||||
|
||||
auto handlers = handleChildStreams(*builderIn, hookIn.get()).attach(std::move(builderIn), std::move(hookIn));
|
||||
|
||||
if (respectsTimeouts() && settings.buildTimeout != 0) {
|
||||
handlers = handlers.exclusiveJoin(
|
||||
worker.aio.provider->getTimer()
|
||||
.afterDelay(settings.buildTimeout.get() * kj::SECONDS)
|
||||
.then([this]() -> Outcome<void, WorkResult> {
|
||||
return timedOut(
|
||||
Error("%1% timed out after %2% seconds", name, settings.buildTimeout)
|
||||
);
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
return handlers.then([this](auto r) -> Outcome<void, WorkResult> {
|
||||
if (!currentLogLine.empty()) flushLine();
|
||||
return r;
|
||||
});
|
||||
} catch (...) {
|
||||
return {std::current_exception()};
|
||||
}
|
||||
|
||||
kj::Promise<Outcome<void, Goal::WorkResult>> DerivationGoal::monitorForSilence() noexcept
|
||||
{
|
||||
if (!currentLogLine.empty()) flushLine();
|
||||
while (true) {
|
||||
const auto stash = lastChildActivity;
|
||||
auto waitUntil = lastChildActivity + settings.maxSilentTime.get() * kj::SECONDS;
|
||||
co_await worker.aio.provider->getTimer().atTime(waitUntil);
|
||||
if (lastChildActivity == stash) {
|
||||
co_return timedOut(
|
||||
Error("%1% timed out after %2% seconds of silence", name, settings.maxSilentTime)
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
kj::Promise<Outcome<void, Goal::WorkResult>>
DerivationGoal::handleChildStreams(InputStream & builderIn, InputStream * hookIn) noexcept
{
    lastChildActivity = worker.aio.provider->getTimer().now();

    auto handlers = kj::joinPromisesFailFast([&] {
        kj::Vector<kj::Promise<Outcome<void, WorkResult>>> parts{2};

        parts.add(handleBuilderOutput(builderIn));
        if (hookIn) {
            parts.add(handleHookOutput(*hookIn));
        }

        return parts.releaseAsArray();
    }());

    if (respectsTimeouts() && settings.maxSilentTime != 0) {
        handlers = handlers.exclusiveJoin(monitorForSilence().then([](auto r) {
            return kj::arr(std::move(r));
        }));
    }

    for (auto r : co_await handlers) {
        BOOST_OUTCOME_CO_TRYV(r);
    }
    co_return result::success();
}
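`handleChildStreams()` above joins the per-stream coroutines with `kj::joinPromisesFailFast` and then races the join against `monitorForSilence()` (and, one level up in `handleChildOutput()`, against the build timeout) using `exclusiveJoin`. A self-contained illustration of that join/race pattern, with timers standing in for the log and hook streams:

```cpp
// Standalone sketch (not from the diff) of the same kj pattern: join several
// stream handlers, then race the join against a timeout.
#include <kj/async-io.h>
#include <kj/debug.h>
#include <kj/time.h>
#include <kj/vector.h>

int main()
{
    auto aio = kj::setupAsyncIo();
    auto & timer = aio.provider->getTimer();

    kj::Vector<kj::Promise<void>> parts(2);
    parts.add(timer.afterDelay(1 * kj::SECONDS));   // stand-in for handleBuilderOutput()
    parts.add(timer.afterDelay(2 * kj::SECONDS));   // stand-in for handleHookOutput()

    // All handlers must finish; the first failure cancels the rest.
    auto handlers = kj::joinPromisesFailFast(parts.releaseAsArray());

    // Racing against a deadline mirrors the maxSilentTime/buildTimeout joins above.
    auto withTimeout = handlers.exclusiveJoin(
        timer.afterDelay(5 * kj::SECONDS).then([] {
            KJ_FAIL_REQUIRE("timed out");
        }));

    withTimeout.wait(aio.waitScope);
}
```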
|
||||
|
||||
void DerivationGoal::flushLine()
|
||||
{
|
||||
|
@ -1513,11 +1689,13 @@ SingleDrvOutputs DerivationGoal::assertPathValidity()
|
|||
}
|
||||
|
||||
|
||||
Goal::Finished DerivationGoal::done(
|
||||
Goal::WorkResult DerivationGoal::done(
|
||||
BuildResult::Status status,
|
||||
SingleDrvOutputs builtOutputs,
|
||||
std::optional<Error> ex)
|
||||
{
|
||||
isDone = true;
|
||||
|
||||
outputLocks.unlock();
|
||||
buildResult.status = status;
|
||||
if (ex)
|
||||
|
@ -1548,7 +1726,7 @@ Goal::Finished DerivationGoal::done(
|
|||
logError(ex->info());
|
||||
}
|
||||
|
||||
return Finished{
|
||||
return WorkResult{
|
||||
.exitCode = buildResult.success() ? ecSuccess : ecFailed,
|
||||
.result = buildResult,
|
||||
.ex = ex ? std::make_shared<Error>(std::move(*ex)) : nullptr,
|
||||
|
@ -1587,5 +1765,4 @@ void DerivationGoal::waiteeDone(GoalPtr waitee)
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
@ -8,6 +8,7 @@
|
|||
#include "store-api.hh"
|
||||
#include "pathlocks.hh"
|
||||
#include "goal.hh"
|
||||
#include <kj/time.h>
|
||||
|
||||
namespace nix {
|
||||
|
||||
|
@ -17,7 +18,7 @@ struct HookInstance;
|
|||
|
||||
struct HookReplyBase {
|
||||
struct [[nodiscard]] Accept {
|
||||
std::set<int> fds;
|
||||
kj::Promise<Outcome<void, Goal::WorkResult>> promise;
|
||||
};
|
||||
struct [[nodiscard]] Decline {};
|
||||
struct [[nodiscard]] Postpone {};
|
||||
|
@ -62,7 +63,7 @@ struct InitialOutputStatus {
|
|||
struct InitialOutput {
|
||||
bool wanted;
|
||||
Hash outputHash;
|
||||
std::optional<InitialOutputStatus> known;
|
||||
std::optional<InitialOutputStatus> known = {};
|
||||
};
|
||||
|
||||
/**
|
||||
|
@ -70,6 +71,14 @@ struct InitialOutput {
|
|||
*/
|
||||
struct DerivationGoal : public Goal
|
||||
{
|
||||
struct InputStream;
|
||||
|
||||
/**
|
||||
* Whether this goal has completed. Completed goals can not be
|
||||
* asked for more outputs, a new goal must be created instead.
|
||||
*/
|
||||
bool isDone = false;
|
||||
|
||||
/**
|
||||
* Whether to use an on-disk .drv file.
|
||||
*/
|
||||
|
@ -175,6 +184,11 @@ struct DerivationGoal : public Goal
|
|||
|
||||
std::map<std::string, InitialOutput> initialOutputs;
|
||||
|
||||
/**
|
||||
* Build result.
|
||||
*/
|
||||
BuildResult buildResult;
|
||||
|
||||
/**
|
||||
* File descriptor for the log file.
|
||||
*/
|
||||
|
@ -213,9 +227,6 @@ struct DerivationGoal : public Goal
|
|||
*/
|
||||
std::optional<DerivationType> derivationType;
|
||||
|
||||
typedef WorkResult (DerivationGoal::*GoalState)(bool inBuildSlot);
|
||||
GoalState state;
|
||||
|
||||
BuildMode buildMode;
|
||||
|
||||
NotifyingCounter<uint64_t>::Bump mcExpectedBuilds, mcRunningBuilds;
|
||||
|
@ -242,37 +253,35 @@ struct DerivationGoal : public Goal
|
|||
BuildMode buildMode = bmNormal);
|
||||
virtual ~DerivationGoal() noexcept(false);
|
||||
|
||||
Finished timedOut(Error && ex) override;
|
||||
WorkResult timedOut(Error && ex);
|
||||
|
||||
std::string key() override;
|
||||
|
||||
WorkResult work(bool inBuildSlot) override;
|
||||
kj::Promise<Result<WorkResult>> workImpl() noexcept override;
|
||||
|
||||
/**
|
||||
* Add wanted outputs to an already existing derivation goal.
|
||||
*/
|
||||
void addWantedOutputs(const OutputsSpec & outputs);
|
||||
bool addWantedOutputs(const OutputsSpec & outputs);
|
||||
|
||||
/**
|
||||
* The states.
|
||||
*/
|
||||
WorkResult getDerivation(bool inBuildSlot);
|
||||
WorkResult loadDerivation(bool inBuildSlot);
|
||||
WorkResult haveDerivation(bool inBuildSlot);
|
||||
WorkResult outputsSubstitutionTried(bool inBuildSlot);
|
||||
WorkResult gaveUpOnSubstitution(bool inBuildSlot);
|
||||
WorkResult closureRepaired(bool inBuildSlot);
|
||||
WorkResult inputsRealised(bool inBuildSlot);
|
||||
WorkResult tryToBuild(bool inBuildSlot);
|
||||
virtual WorkResult tryLocalBuild(bool inBuildSlot);
|
||||
WorkResult buildDone(bool inBuildSlot);
|
||||
kj::Promise<Result<WorkResult>> getDerivation() noexcept;
|
||||
kj::Promise<Result<WorkResult>> loadDerivation() noexcept;
|
||||
kj::Promise<Result<WorkResult>> haveDerivation() noexcept;
|
||||
kj::Promise<Result<WorkResult>> outputsSubstitutionTried() noexcept;
|
||||
kj::Promise<Result<WorkResult>> gaveUpOnSubstitution() noexcept;
|
||||
kj::Promise<Result<WorkResult>> closureRepaired() noexcept;
|
||||
kj::Promise<Result<WorkResult>> inputsRealised() noexcept;
|
||||
kj::Promise<Result<WorkResult>> tryToBuild() noexcept;
|
||||
virtual kj::Promise<Result<WorkResult>> tryLocalBuild() noexcept;
|
||||
kj::Promise<Result<WorkResult>> buildDone() noexcept;
|
||||
|
||||
WorkResult resolvedFinished(bool inBuildSlot);
|
||||
kj::Promise<Result<WorkResult>> resolvedFinished() noexcept;
|
||||
|
||||
/**
|
||||
* Is the build hook willing to perform the build?
|
||||
*/
|
||||
HookReply tryBuildHook(bool inBuildSlot);
|
||||
HookReply tryBuildHook();
|
||||
|
||||
virtual int getChildStatus();
|
||||
|
||||
|
@ -312,13 +321,19 @@ struct DerivationGoal : public Goal
|
|||
virtual void cleanupPostOutputsRegisteredModeCheck();
|
||||
virtual void cleanupPostOutputsRegisteredModeNonCheck();
|
||||
|
||||
/**
|
||||
* Callback used by the worker to write to the log.
|
||||
*/
|
||||
WorkResult handleChildOutput(int fd, std::string_view data) override;
|
||||
void handleEOF(int fd) override;
|
||||
protected:
|
||||
kj::TimePoint lastChildActivity = kj::minValue;
|
||||
|
||||
kj::Promise<Outcome<void, WorkResult>> handleChildOutput() noexcept;
|
||||
kj::Promise<Outcome<void, WorkResult>>
|
||||
handleChildStreams(InputStream & builderIn, InputStream * hookIn) noexcept;
|
||||
kj::Promise<Outcome<void, WorkResult>> handleBuilderOutput(InputStream & in) noexcept;
|
||||
kj::Promise<Outcome<void, WorkResult>> handleHookOutput(InputStream & in) noexcept;
|
||||
kj::Promise<Outcome<void, WorkResult>> monitorForSilence() noexcept;
|
||||
WorkResult tooMuchLogs();
|
||||
void flushLine();
|
||||
|
||||
public:
|
||||
/**
|
||||
* Wrappers around the corresponding Store methods that first consult the
|
||||
* derivation. This is currently needed because when there is no drv file
|
||||
|
@ -346,17 +361,22 @@ struct DerivationGoal : public Goal
|
|||
*/
|
||||
virtual void killChild();
|
||||
|
||||
WorkResult repairClosure();
|
||||
kj::Promise<Result<WorkResult>> repairClosure() noexcept;
|
||||
|
||||
void started();
|
||||
|
||||
Finished done(
|
||||
WorkResult done(
|
||||
BuildResult::Status status,
|
||||
SingleDrvOutputs builtOutputs = {},
|
||||
std::optional<Error> ex = {});
|
||||
|
||||
void waiteeDone(GoalPtr waitee) override;
|
||||
|
||||
virtual bool respectsTimeouts()
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
StorePathSet exportReferences(const StorePathSet & storePaths);
|
||||
|
||||
JobCategory jobCategory() const override {
|
||||
|
|
|
@ -4,6 +4,9 @@
|
|||
#include "worker.hh"
|
||||
#include "substitution-goal.hh"
|
||||
#include "signals.hh"
|
||||
#include <kj/array.h>
|
||||
#include <kj/async.h>
|
||||
#include <kj/vector.h>
|
||||
|
||||
namespace nix {
|
||||
|
||||
|
@ -16,31 +19,32 @@ DrvOutputSubstitutionGoal::DrvOutputSubstitutionGoal(
|
|||
: Goal(worker, isDependency)
|
||||
, id(id)
|
||||
{
|
||||
state = &DrvOutputSubstitutionGoal::init;
|
||||
name = fmt("substitution of '%s'", id.to_string());
|
||||
trace("created");
|
||||
}
|
||||
|
||||
|
||||
Goal::WorkResult DrvOutputSubstitutionGoal::init(bool inBuildSlot)
|
||||
{
|
||||
kj::Promise<Result<Goal::WorkResult>> DrvOutputSubstitutionGoal::workImpl() noexcept
|
||||
try {
|
||||
trace("init");
|
||||
|
||||
/* If the derivation already exists, we’re done */
|
||||
if (worker.store.queryRealisation(id)) {
|
||||
return Finished{ecSuccess, std::move(buildResult)};
|
||||
co_return WorkResult{ecSuccess};
|
||||
}
|
||||
|
||||
subs = settings.useSubstitutes ? getDefaultSubstituters() : std::list<ref<Store>>();
|
||||
return tryNext(inBuildSlot);
|
||||
co_return co_await tryNext();
|
||||
} catch (...) {
|
||||
co_return result::failure(std::current_exception());
|
||||
}
|
||||
|
||||
Goal::WorkResult DrvOutputSubstitutionGoal::tryNext(bool inBuildSlot)
|
||||
{
|
||||
kj::Promise<Result<Goal::WorkResult>> DrvOutputSubstitutionGoal::tryNext() noexcept
|
||||
try {
|
||||
trace("trying next substituter");
|
||||
|
||||
if (!inBuildSlot) {
|
||||
return WaitForSlot{};
|
||||
if (!slotToken.valid()) {
|
||||
slotToken = co_await worker.substitutions.acquire();
|
||||
}
|
||||
|
||||
maintainRunningSubstitutions = worker.runningSubstitutions.addTemporarily(1);
|
||||
|
@ -57,7 +61,7 @@ Goal::WorkResult DrvOutputSubstitutionGoal::tryNext(bool inBuildSlot)
|
|||
/* Hack: don't indicate failure if there were no substituters.
|
||||
In that case the calling derivation should just do a
|
||||
build. */
|
||||
return Finished{substituterFailed ? ecFailed : ecNoSubstituters, std::move(buildResult)};
|
||||
co_return WorkResult{substituterFailed ? ecFailed : ecNoSubstituters};
|
||||
}
|
||||
|
||||
sub = subs.front();
|
||||
|
@ -67,23 +71,26 @@ Goal::WorkResult DrvOutputSubstitutionGoal::tryNext(bool inBuildSlot)
|
|||
some other error occurs), so it must not touch `this`. So put
|
||||
the shared state in a separate refcounted object. */
|
||||
downloadState = std::make_shared<DownloadState>();
|
||||
downloadState->outPipe.create();
|
||||
auto pipe = kj::newPromiseAndCrossThreadFulfiller<void>();
|
||||
downloadState->outPipe = kj::mv(pipe.fulfiller);
|
||||
|
||||
downloadState->result =
|
||||
std::async(std::launch::async, [downloadState{downloadState}, id{id}, sub{sub}] {
|
||||
Finally updateStats([&]() { downloadState->outPipe->fulfill(); });
|
||||
ReceiveInterrupts receiveInterrupts;
|
||||
Finally updateStats([&]() { downloadState->outPipe.writeSide.close(); });
|
||||
return sub->queryRealisation(id);
|
||||
});
|
||||
|
||||
state = &DrvOutputSubstitutionGoal::realisationFetched;
|
||||
return WaitForWorld{{downloadState->outPipe.readSide.get()}, true};
|
||||
co_await pipe.promise;
|
||||
co_return co_await realisationFetched();
|
||||
} catch (...) {
|
||||
co_return result::failure(std::current_exception());
|
||||
}
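In `tryNext()` above the blocking `queryRealisation` still runs on a `std::async` thread, but the old self-pipe plus `handleChildOutput` plumbing is replaced by `kj::newPromiseAndCrossThreadFulfiller`, which the goal simply `co_await`s. A minimal standalone sketch of that handoff (the realisation value is faked):

```cpp
// Standalone sketch of the cross-thread handoff used above: a blocking query runs
// in a std::async thread, and a cross-thread fulfiller wakes the kj event loop
// when the result future is ready.
#include <future>
#include <iostream>
#include <string>
#include <kj/async.h>
#include <kj/async-io.h>

int main()
{
    auto aio = kj::setupAsyncIo();

    auto pipe = kj::newPromiseAndCrossThreadFulfiller<void>();

    // The worker thread must not touch the event loop; it only fulfills the promise.
    auto result = std::async(std::launch::async, [fulfiller = kj::mv(pipe.fulfiller)]() mutable {
        std::string value = "realisation";        // stand-in for sub->queryRealisation(id)
        fulfiller->fulfill();                     // wake the waiting coroutine/loop
        return value;
    });

    pipe.promise.wait(aio.waitScope);             // suspend until the thread signals
    std::cout << result.get() << "\n";            // the future is ready (or nearly so)
}
```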
|
||||
|
||||
Goal::WorkResult DrvOutputSubstitutionGoal::realisationFetched(bool inBuildSlot)
|
||||
{
|
||||
worker.childTerminated(this);
|
||||
kj::Promise<Result<Goal::WorkResult>> DrvOutputSubstitutionGoal::realisationFetched() noexcept
|
||||
try {
|
||||
maintainRunningSubstitutions.reset();
|
||||
slotToken = {};
|
||||
|
||||
try {
|
||||
outputInfo = downloadState->result.get();
|
||||
|
@ -93,10 +100,10 @@ Goal::WorkResult DrvOutputSubstitutionGoal::realisationFetched(bool inBuildSlot)
|
|||
}
|
||||
|
||||
if (!outputInfo) {
|
||||
return tryNext(inBuildSlot);
|
||||
co_return co_await tryNext();
|
||||
}
|
||||
|
||||
WaitForGoals result;
|
||||
kj::Vector<std::pair<GoalPtr, kj::Promise<Result<WorkResult>>>> dependencies;
|
||||
for (const auto & [depId, depPath] : outputInfo->dependentRealisations) {
|
||||
if (depId != id) {
|
||||
if (auto localOutputInfo = worker.store.queryRealisation(depId);
|
||||
|
@ -110,56 +117,46 @@ Goal::WorkResult DrvOutputSubstitutionGoal::realisationFetched(bool inBuildSlot)
|
|||
worker.store.printStorePath(localOutputInfo->outPath),
|
||||
worker.store.printStorePath(depPath)
|
||||
);
|
||||
return tryNext(inBuildSlot);
|
||||
co_return co_await tryNext();
|
||||
}
|
||||
result.goals.insert(worker.goalFactory().makeDrvOutputSubstitutionGoal(depId));
|
||||
dependencies.add(worker.goalFactory().makeDrvOutputSubstitutionGoal(depId));
|
||||
}
|
||||
}
|
||||
|
||||
result.goals.insert(worker.goalFactory().makePathSubstitutionGoal(outputInfo->outPath));
|
||||
dependencies.add(worker.goalFactory().makePathSubstitutionGoal(outputInfo->outPath));
|
||||
|
||||
if (result.goals.empty()) {
|
||||
return outPathValid(inBuildSlot);
|
||||
} else {
|
||||
state = &DrvOutputSubstitutionGoal::outPathValid;
|
||||
return result;
|
||||
if (!dependencies.empty()) {
|
||||
(co_await waitForGoals(dependencies.releaseAsArray())).value();
|
||||
}
|
||||
co_return co_await outPathValid();
|
||||
} catch (...) {
|
||||
co_return result::failure(std::current_exception());
|
||||
}
|
||||
|
||||
Goal::WorkResult DrvOutputSubstitutionGoal::outPathValid(bool inBuildSlot)
|
||||
{
|
||||
kj::Promise<Result<Goal::WorkResult>> DrvOutputSubstitutionGoal::outPathValid() noexcept
|
||||
try {
|
||||
assert(outputInfo);
|
||||
trace("output path substituted");
|
||||
|
||||
if (nrFailed > 0) {
|
||||
debug("The output path of the derivation output '%s' could not be substituted", id.to_string());
|
||||
return Finished{
|
||||
return {WorkResult{
|
||||
nrNoSubstituters > 0 || nrIncompleteClosure > 0 ? ecIncompleteClosure : ecFailed,
|
||||
std::move(buildResult),
|
||||
};
|
||||
}};
|
||||
}
|
||||
|
||||
worker.store.registerDrvOutput(*outputInfo);
|
||||
return finished();
|
||||
} catch (...) {
|
||||
return {std::current_exception()};
|
||||
}
|
||||
|
||||
Goal::WorkResult DrvOutputSubstitutionGoal::finished()
|
||||
{
|
||||
kj::Promise<Result<Goal::WorkResult>> DrvOutputSubstitutionGoal::finished() noexcept
|
||||
try {
|
||||
trace("finished");
|
||||
return Finished{ecSuccess, std::move(buildResult)};
|
||||
return {WorkResult{ecSuccess}};
|
||||
} catch (...) {
|
||||
return {std::current_exception()};
|
||||
}
|
||||
|
||||
std::string DrvOutputSubstitutionGoal::key()
|
||||
{
|
||||
/* "a$" ensures substitution goals happen before derivation
|
||||
goals. */
|
||||
return "a$" + std::string(id.to_string());
|
||||
}
|
||||
|
||||
Goal::WorkResult DrvOutputSubstitutionGoal::work(bool inBuildSlot)
|
||||
{
|
||||
return (this->*state)(inBuildSlot);
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
|
|
@ -45,7 +45,7 @@ class DrvOutputSubstitutionGoal : public Goal {
|
|||
|
||||
struct DownloadState
|
||||
{
|
||||
Pipe outPipe;
|
||||
kj::Own<kj::CrossThreadPromiseFulfiller<void>> outPipe;
|
||||
std::future<std::shared_ptr<const Realisation>> result;
|
||||
};
|
||||
|
||||
|
@ -65,20 +65,12 @@ public:
|
|||
std::optional<ContentAddress> ca = std::nullopt
|
||||
);
|
||||
|
||||
typedef WorkResult (DrvOutputSubstitutionGoal::*GoalState)(bool inBuildSlot);
|
||||
GoalState state;
|
||||
kj::Promise<Result<WorkResult>> tryNext() noexcept;
|
||||
kj::Promise<Result<WorkResult>> realisationFetched() noexcept;
|
||||
kj::Promise<Result<WorkResult>> outPathValid() noexcept;
|
||||
kj::Promise<Result<WorkResult>> finished() noexcept;
|
||||
|
||||
WorkResult init(bool inBuildSlot);
|
||||
WorkResult tryNext(bool inBuildSlot);
|
||||
WorkResult realisationFetched(bool inBuildSlot);
|
||||
WorkResult outPathValid(bool inBuildSlot);
|
||||
WorkResult finished();
|
||||
|
||||
Finished timedOut(Error && ex) override { abort(); };
|
||||
|
||||
std::string key() override;
|
||||
|
||||
WorkResult work(bool inBuildSlot) override;
|
||||
kj::Promise<Result<WorkResult>> workImpl() noexcept override;
|
||||
|
||||
JobCategory jobCategory() const override {
|
||||
return JobCategory::Substitution;
|
||||
|
|
|
@ -8,38 +8,36 @@ namespace nix {
|
|||
|
||||
void Store::buildPaths(const std::vector<DerivedPath> & reqs, BuildMode buildMode, std::shared_ptr<Store> evalStore)
|
||||
{
|
||||
Worker worker(*this, evalStore ? *evalStore : *this);
|
||||
auto aio = kj::setupAsyncIo();
|
||||
|
||||
auto goals = worker.run([&](GoalFactory & gf) {
|
||||
Goals goals;
|
||||
auto results = processGoals(*this, evalStore ? *evalStore : *this, aio, [&](GoalFactory & gf) {
|
||||
Worker::Targets goals;
|
||||
for (auto & br : reqs)
|
||||
goals.insert(gf.makeGoal(br, buildMode));
|
||||
goals.emplace_back(gf.makeGoal(br, buildMode));
|
||||
return goals;
|
||||
});
|
||||
}).wait(aio.waitScope).value();
|
||||
|
||||
StringSet failed;
|
||||
std::shared_ptr<Error> ex;
|
||||
for (auto & i : goals) {
|
||||
if (i->ex) {
|
||||
for (auto & [i, result] : results.goals) {
|
||||
if (result.ex) {
|
||||
if (ex)
|
||||
logError(i->ex->info());
|
||||
logError(result.ex->info());
|
||||
else
|
||||
ex = i->ex;
|
||||
ex = result.ex;
|
||||
}
|
||||
if (i->exitCode != Goal::ecSuccess) {
|
||||
if (auto i2 = dynamic_cast<DerivationGoal *>(i.get()))
|
||||
failed.insert(printStorePath(i2->drvPath));
|
||||
else if (auto i2 = dynamic_cast<PathSubstitutionGoal *>(i.get()))
|
||||
failed.insert(printStorePath(i2->storePath));
|
||||
if (result.exitCode != Goal::ecSuccess) {
|
||||
if (result.storePath)
|
||||
failed.insert(printStorePath(*result.storePath));
|
||||
}
|
||||
}
|
||||
|
||||
if (failed.size() == 1 && ex) {
|
||||
ex->withExitStatus(worker.failingExitStatus());
|
||||
ex->withExitStatus(results.failingExitStatus);
|
||||
throw std::move(*ex);
|
||||
} else if (!failed.empty()) {
|
||||
if (ex) logError(ex->info());
|
||||
throw Error(worker.failingExitStatus(), "build of %s failed", concatStringsSep(", ", quoteStrings(failed)));
|
||||
throw Error(results.failingExitStatus, "build of %s failed", concatStringsSep(", ", quoteStrings(failed)));
|
||||
}
|
||||
}
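All of the synchronous `Store` entry points in this file now share one driver shape: set up a kj async context, hand a goal-building callback to `processGoals`, and block on the wait scope until the keyed `WorkResult`s come back. Schematically, reusing the names from this hunk (`store`, `evalStore`, and `request` are placeholders; this is not a compilable unit):

```cpp
// Schematic only: the driver loop shared by buildPaths, buildDerivation,
// ensurePath, and repairPath after this change.
auto aio = kj::setupAsyncIo();
auto results = processGoals(store, evalStore, aio, [&](GoalFactory & gf) {
    Worker::Targets goals;
    goals.emplace_back(gf.makeGoal(request, buildMode));
    return goals;
}).wait(aio.waitScope).value();

for (auto & [goal, result] : results.goals) {
    if (result.exitCode != Goal::ecSuccess && result.ex) {
        logError(result.ex->info());
    }
}
```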
|
||||
|
||||
|
@ -48,23 +46,20 @@ std::vector<KeyedBuildResult> Store::buildPathsWithResults(
|
|||
BuildMode buildMode,
|
||||
std::shared_ptr<Store> evalStore)
|
||||
{
|
||||
Worker worker(*this, evalStore ? *evalStore : *this);
|
||||
std::vector<std::pair<const DerivedPath &, GoalPtr>> state;
|
||||
auto aio = kj::setupAsyncIo();
|
||||
|
||||
auto goals = worker.run([&](GoalFactory & gf) {
|
||||
Goals goals;
|
||||
auto goals = processGoals(*this, evalStore ? *evalStore : *this, aio, [&](GoalFactory & gf) {
|
||||
Worker::Targets goals;
|
||||
for (const auto & req : reqs) {
|
||||
auto goal = gf.makeGoal(req, buildMode);
|
||||
goals.insert(goal);
|
||||
state.push_back({req, goal});
|
||||
goals.emplace_back(gf.makeGoal(req, buildMode));
|
||||
}
|
||||
return goals;
|
||||
});
|
||||
}).wait(aio.waitScope).value().goals;
|
||||
|
||||
std::vector<KeyedBuildResult> results;
|
||||
|
||||
for (auto & [req, goalPtr] : state)
|
||||
results.emplace_back(goalPtr->buildResult.restrictTo(req));
|
||||
for (auto && [goalIdx, req] : enumerate(reqs))
|
||||
results.emplace_back(goals[goalIdx].result.restrictTo(req));
|
||||
|
||||
return results;
|
||||
}
|
||||
|
@ -72,14 +67,16 @@ std::vector<KeyedBuildResult> Store::buildPathsWithResults(
|
|||
BuildResult Store::buildDerivation(const StorePath & drvPath, const BasicDerivation & drv,
|
||||
BuildMode buildMode)
|
||||
{
|
||||
Worker worker(*this, *this);
|
||||
auto aio = kj::setupAsyncIo();
|
||||
|
||||
try {
|
||||
auto goals = worker.run([&](GoalFactory & gf) -> Goals {
|
||||
return Goals{gf.makeBasicDerivationGoal(drvPath, drv, OutputsSpec::All{}, buildMode)};
|
||||
});
|
||||
auto goal = *goals.begin();
|
||||
return goal->buildResult.restrictTo(DerivedPath::Built {
|
||||
auto results = processGoals(*this, *this, aio, [&](GoalFactory & gf) {
|
||||
Worker::Targets goals;
|
||||
goals.emplace_back(gf.makeBasicDerivationGoal(drvPath, drv, OutputsSpec::All{}, buildMode));
|
||||
return goals;
|
||||
}).wait(aio.waitScope).value();
|
||||
auto & result = results.goals.begin()->second;
|
||||
return result.result.restrictTo(DerivedPath::Built {
|
||||
.drvPath = makeConstantStorePathRef(drvPath),
|
||||
.outputs = OutputsSpec::All {},
|
||||
});
|
||||
|
@ -97,48 +94,55 @@ void Store::ensurePath(const StorePath & path)
|
|||
/* If the path is already valid, we're done. */
|
||||
if (isValidPath(path)) return;
|
||||
|
||||
Worker worker(*this, *this);
|
||||
auto aio = kj::setupAsyncIo();
|
||||
|
||||
auto goals =
|
||||
worker.run([&](GoalFactory & gf) { return Goals{gf.makePathSubstitutionGoal(path)}; });
|
||||
auto goal = *goals.begin();
|
||||
auto results = processGoals(*this, *this, aio, [&](GoalFactory & gf) {
|
||||
Worker::Targets goals;
|
||||
goals.emplace_back(gf.makePathSubstitutionGoal(path));
|
||||
return goals;
|
||||
}).wait(aio.waitScope).value();
|
||||
auto & result = results.goals.begin()->second;
|
||||
|
||||
if (goal->exitCode != Goal::ecSuccess) {
|
||||
if (goal->ex) {
|
||||
goal->ex->withExitStatus(worker.failingExitStatus());
|
||||
throw std::move(*goal->ex);
|
||||
if (result.exitCode != Goal::ecSuccess) {
|
||||
if (result.ex) {
|
||||
result.ex->withExitStatus(results.failingExitStatus);
|
||||
throw std::move(*result.ex);
|
||||
} else
|
||||
throw Error(worker.failingExitStatus(), "path '%s' does not exist and cannot be created", printStorePath(path));
|
||||
throw Error(results.failingExitStatus, "path '%s' does not exist and cannot be created", printStorePath(path));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
void Store::repairPath(const StorePath & path)
|
||||
{
|
||||
Worker worker(*this, *this);
|
||||
auto aio = kj::setupAsyncIo();
|
||||
|
||||
auto goals = worker.run([&](GoalFactory & gf) {
|
||||
return Goals{gf.makePathSubstitutionGoal(path, Repair)};
|
||||
});
|
||||
auto goal = *goals.begin();
|
||||
auto results = processGoals(*this, *this, aio, [&](GoalFactory & gf) {
|
||||
Worker::Targets goals;
|
||||
goals.emplace_back(gf.makePathSubstitutionGoal(path, Repair));
|
||||
return goals;
|
||||
}).wait(aio.waitScope).value();
|
||||
auto & result = results.goals.begin()->second;
|
||||
|
||||
if (goal->exitCode != Goal::ecSuccess) {
|
||||
if (result.exitCode != Goal::ecSuccess) {
|
||||
/* Since substituting the path didn't work, if we have a valid
|
||||
deriver, then rebuild the deriver. */
|
||||
auto info = queryPathInfo(path);
|
||||
if (info->deriver && isValidPath(*info->deriver)) {
|
||||
worker.run([&](GoalFactory & gf) {
|
||||
return Goals{gf.makeGoal(
|
||||
processGoals(*this, *this, aio, [&](GoalFactory & gf) {
|
||||
Worker::Targets goals;
|
||||
goals.emplace_back(gf.makeGoal(
|
||||
DerivedPath::Built{
|
||||
.drvPath = makeConstantStorePathRef(*info->deriver),
|
||||
// FIXME: Should just build the specific output we need.
|
||||
.outputs = OutputsSpec::All{},
|
||||
},
|
||||
bmRepair
|
||||
)};
|
||||
});
|
||||
));
|
||||
return goals;
|
||||
}).wait(aio.waitScope).value();
|
||||
} else
|
||||
throw Error(worker.failingExitStatus(), "cannot repair path '%s'", printStorePath(path));
|
||||
throw Error(results.failingExitStatus, "cannot repair path '%s'", printStorePath(path));
|
||||
}
|
||||
}
|
||||
|
||||
@ -1,18 +1,79 @@
#include "goal.hh"
#include "async-collect.hh"
#include "worker.hh"
#include <boost/outcome/try.hpp>
#include <kj/time.h>

namespace nix {


bool CompareGoalPtrs::operator() (const GoalPtr & a, const GoalPtr & b) const {
    std::string s1 = a->key();
    std::string s2 = b->key();
    return s1 < s2;
}


void Goal::trace(std::string_view s)
{
    debug("%1%: %2%", name, s);
}

kj::Promise<void> Goal::waitForAWhile()
{
    trace("wait for a while");
    /* If we are polling goals that are waiting for a lock, then wake
       up after a few seconds at most. */
    return worker.aio.provider->getTimer().afterDelay(settings.pollInterval.get() * kj::SECONDS);
}

kj::Promise<Result<Goal::WorkResult>> Goal::work() noexcept
try {
    // always clear the slot token, no matter what happens. not doing this
    // can cause builds to get stuck on exceptions (or other early exits).
    // ideally we'd use scoped slot tokens instead of keeping them in some
    // goal member variable, but we cannot do this yet for legacy reasons.
    KJ_DEFER({ slotToken = {}; });

    BOOST_OUTCOME_CO_TRY(auto result, co_await workImpl());

    trace("done");

    cleanup();

    co_return std::move(result);
} catch (...) {
    co_return result::failure(std::current_exception());
}

kj::Promise<Result<void>>
Goal::waitForGoals(kj::Array<std::pair<GoalPtr, kj::Promise<Result<WorkResult>>>> dependencies) noexcept
try {
    auto left = dependencies.size();
    for (auto & [dep, p] : dependencies) {
        p = p.then([this, dep, &left](auto _result) -> Result<WorkResult> {
            BOOST_OUTCOME_TRY(auto result, _result);

            left--;
            trace(fmt("waitee '%s' done; %d left", dep->name, left));

            if (result.exitCode != Goal::ecSuccess) ++nrFailed;
            if (result.exitCode == Goal::ecNoSubstituters) ++nrNoSubstituters;
            if (result.exitCode == Goal::ecIncompleteClosure) ++nrIncompleteClosure;

            return std::move(result);
        }).eagerlyEvaluate(nullptr);
    }

    auto collectDeps = asyncCollect(std::move(dependencies));

    while (auto item = co_await collectDeps.next()) {
        auto & [dep, _result] = *item;
        BOOST_OUTCOME_CO_TRY(auto result, _result);

        waiteeDone(dep);

        if (result.exitCode == ecFailed && !settings.keepGoing) {
            co_return result::success();
        }
    }

    co_return result::success();
} catch (...) {
    co_return result::failure(std::current_exception());
}

}
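`Goal::waitForGoals()` above attaches a counter-updating continuation to each dependency promise and then drains completions through `asyncCollect`, so `nrFailed`, `nrNoSubstituters`, and `nrIncompleteClosure` keep the semantics of the old waitee bookkeeping. The standalone sketch below reproduces just the count-then-join part with stock kj primitives; the exit codes and the `asyncCollect` streaming are simplified away:

```cpp
// Standalone sketch of the bookkeeping style above: each dependency promise gets
// a .then() hook that updates a failure counter as it settles, and the parent
// then joins all of them.
#include <iostream>
#include <kj/async-io.h>
#include <kj/time.h>
#include <kj/vector.h>

int main()
{
    auto aio = kj::setupAsyncIo();
    auto & timer = aio.provider->getTimer();

    int nrFailed = 0;

    kj::Vector<kj::Promise<void>> deps(2);
    for (int i = 0; i < 2; i++) {
        // Stand-in for a child goal: finishes after a delay with a fake exit code.
        auto dep = timer.afterDelay((i + 1) * 10 * kj::MILLISECONDS)
            .then([i] { return i == 0 ? 0 : 1; })          // 0 = success, 1 = failure
            .then([&nrFailed](int exitCode) {
                if (exitCode != 0) ++nrFailed;              // waitForGoals counts like this
            })
            .eagerlyEvaluate(nullptr);                      // run even before being joined
        deps.add(kj::mv(dep));
    }

    kj::joinPromisesFailFast(deps.releaseAsArray()).wait(aio.waitScope);
    std::cout << "failed dependencies: " << nrFailed << "\n";
}
```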
@ -1,9 +1,13 @@
|
|||
#pragma once
|
||||
///@file
|
||||
|
||||
#include "async-semaphore.hh"
|
||||
#include "result.hh"
|
||||
#include "types.hh"
|
||||
#include "store-api.hh"
|
||||
#include "build-result.hh"
|
||||
#include <concepts> // IWYU pragma: keep
|
||||
#include <kj/async.h>
|
||||
|
||||
namespace nix {
|
||||
|
||||
|
@ -17,22 +21,11 @@ class Worker;
|
|||
* A pointer to a goal.
|
||||
*/
|
||||
typedef std::shared_ptr<Goal> GoalPtr;
|
||||
typedef std::weak_ptr<Goal> WeakGoalPtr;
|
||||
|
||||
struct CompareGoalPtrs {
|
||||
bool operator() (const GoalPtr & a, const GoalPtr & b) const;
|
||||
};
|
||||
|
||||
/**
|
||||
* Set of goals.
|
||||
*/
|
||||
typedef std::set<GoalPtr, CompareGoalPtrs> Goals;
|
||||
typedef std::set<WeakGoalPtr, std::owner_less<WeakGoalPtr>> WeakGoals;
|
||||
|
||||
/**
|
||||
* A map of paths to goals (and the other way around).
|
||||
*/
|
||||
typedef std::map<StorePath, WeakGoalPtr> WeakGoalMap;
|
||||
typedef std::set<GoalPtr> Goals;
|
||||
|
||||
/**
|
||||
* Used as a hint to the worker on how to schedule a particular goal. For example,
|
||||
|
@ -67,17 +60,6 @@ struct Goal
|
|||
*/
|
||||
const bool isDependency;
|
||||
|
||||
/**
|
||||
* Goals that this goal is waiting for.
|
||||
*/
|
||||
Goals waitees;
|
||||
|
||||
/**
|
||||
* Goals waiting for this one to finish. Must use weak pointers
|
||||
* here to prevent cycles.
|
||||
*/
|
||||
WeakGoals waiters;
|
||||
|
||||
/**
|
||||
* Number of goals we are/were waiting for that have failed.
|
||||
*/
|
||||
|
@ -100,57 +82,40 @@ struct Goal
|
|||
*/
|
||||
std::string name;
|
||||
|
||||
/**
|
||||
* Whether the goal is finished.
|
||||
*/
|
||||
std::optional<ExitCode> exitCode;
|
||||
|
||||
/**
|
||||
* Build result.
|
||||
*/
|
||||
BuildResult buildResult;
|
||||
protected:
|
||||
AsyncSemaphore::Token slotToken;
|
||||
|
||||
public:
|
||||
|
||||
struct [[nodiscard]] StillAlive {};
|
||||
struct [[nodiscard]] WaitForSlot {};
|
||||
struct [[nodiscard]] WaitForAWhile {};
|
||||
struct [[nodiscard]] ContinueImmediately {};
|
||||
struct [[nodiscard]] WaitForGoals {
|
||||
Goals goals;
|
||||
};
|
||||
struct [[nodiscard]] WaitForWorld {
|
||||
std::set<int> fds;
|
||||
bool inBuildSlot;
|
||||
};
|
||||
struct [[nodiscard]] Finished {
|
||||
struct [[nodiscard]] WorkResult {
|
||||
ExitCode exitCode;
|
||||
BuildResult result;
|
||||
std::shared_ptr<Error> ex;
|
||||
BuildResult result = {};
|
||||
std::shared_ptr<Error> ex = {};
|
||||
bool permanentFailure = false;
|
||||
bool timedOut = false;
|
||||
bool hashMismatch = false;
|
||||
bool checkMismatch = false;
|
||||
/// Store path this goal relates to. Will be set to drvPath for
|
||||
/// derivations, or the substituted store path for substitutions.
|
||||
std::optional<StorePath> storePath = {};
|
||||
};
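With `Finished` folded into this plain `WorkResult` aggregate, a goal's outcome is ordinary data that the scheduler and the entry points copy around, and the defaulted members keep terse call sites like `WorkResult{ecSuccess}` elsewhere in this diff valid. Purely as a schematic of a `done()`-style construction site, where `buildResult`, `err`, and `drvPath` stand for the goal's own members (not a standalone unit):

```cpp
// Schematic only: reporting completion by value with the fields declared above.
Goal::WorkResult failure = {
    .exitCode = Goal::ecFailed,
    .result = buildResult,                       // the goal's BuildResult snapshot
    .ex = std::make_shared<Error>(std::move(err)),
    .permanentFailure = true,
    .storePath = drvPath,                        // lets callers report which path failed
};
```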
|
||||
|
||||
struct [[nodiscard]] WorkResult : std::variant<
|
||||
StillAlive,
|
||||
WaitForSlot,
|
||||
WaitForAWhile,
|
||||
ContinueImmediately,
|
||||
WaitForGoals,
|
||||
WaitForWorld,
|
||||
Finished>
|
||||
protected:
|
||||
kj::Promise<void> waitForAWhile();
|
||||
kj::Promise<Result<void>>
|
||||
waitForGoals(kj::Array<std::pair<GoalPtr, kj::Promise<Result<WorkResult>>>> dependencies) noexcept;
|
||||
|
||||
template<std::derived_from<Goal>... G>
|
||||
kj::Promise<Result<void>>
|
||||
waitForGoals(std::pair<std::shared_ptr<G>, kj::Promise<Result<WorkResult>>>... goals) noexcept
|
||||
{
|
||||
WorkResult() = delete;
|
||||
using variant::variant;
|
||||
};
|
||||
return waitForGoals(
|
||||
kj::arrOf<std::pair<GoalPtr, kj::Promise<Result<WorkResult>>>>(std::move(goals)...)
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Exception containing an error message, if any.
|
||||
*/
|
||||
std::shared_ptr<Error> ex;
|
||||
virtual kj::Promise<Result<WorkResult>> workImpl() noexcept = 0;
|
||||
|
||||
public:
|
||||
explicit Goal(Worker & worker, bool isDependency)
|
||||
: worker(worker)
|
||||
, isDependency(isDependency)
|
||||
|
@ -161,24 +126,10 @@ public:
|
|||
trace("goal destroyed");
|
||||
}
|
||||
|
||||
virtual WorkResult work(bool inBuildSlot) = 0;
|
||||
kj::Promise<Result<WorkResult>> work() noexcept;
|
||||
|
||||
virtual void waiteeDone(GoalPtr waitee) { }
|
||||
|
||||
virtual WorkResult handleChildOutput(int fd, std::string_view data)
|
||||
{
|
||||
abort();
|
||||
}
|
||||
|
||||
virtual void handleEOF(int fd)
|
||||
{
|
||||
}
|
||||
|
||||
virtual bool respectsTimeouts()
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
void trace(std::string_view s);
|
||||
|
||||
std::string getName() const
|
||||
|
@ -186,15 +137,6 @@ public:
|
|||
return name;
|
||||
}
|
||||
|
||||
/**
|
||||
* Callback in case of a timeout. It should wake up its waiters,
|
||||
* get rid of any running child processes that are being monitored
|
||||
* by the worker (important!), etc.
|
||||
*/
|
||||
virtual Finished timedOut(Error && ex) = 0;
|
||||
|
||||
virtual std::string key() = 0;
|
||||
|
||||
virtual void cleanup() { }
|
||||
|
||||
/**
|
||||
|
|
|
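The hunks above appear to be from goal.hh: the old scheduling handshake, in which work() returned a std::variant-based WorkResult (StillAlive, WaitForSlot, WaitForAWhile, and so on) that the worker inspected, gives way to kj promises plus a plain result struct. As a rough standalone illustration of the pattern being removed, and not of Lix's actual Worker, such a variant is typically consumed with std::visit; everything here other than the alternative names is invented:

// Sketch only: dispatching a variant-based work result with std::visit.
// The alternative types mirror the ones removed in this diff; the messages
// stand in for whatever bookkeeping the real worker loop would do.
#include <iostream>
#include <type_traits>
#include <variant>

struct StillAlive {};
struct WaitForSlot {};
struct WaitForAWhile {};
struct ContinueImmediately {};
struct Finished { int exitCode = 0; };

using WorkResult =
    std::variant<StillAlive, WaitForSlot, WaitForAWhile, ContinueImmediately, Finished>;

void dispatch(const WorkResult & r)
{
    std::visit(
        [](auto && alt) {
            using T = std::decay_t<decltype(alt)>;
            if constexpr (std::is_same_v<T, WaitForSlot>)
                std::cout << "queue goal until a build slot frees up\n";
            else if constexpr (std::is_same_v<T, WaitForAWhile>)
                std::cout << "poll the goal again after a delay\n";
            else if constexpr (std::is_same_v<T, Finished>)
                std::cout << "goal finished with exit code " << alt.exitCode << "\n";
            else
                std::cout << "keep the goal scheduled\n";
        },
        r);
}

int main()
{
    dispatch(WaitForSlot{});
    dispatch(Finished{1});
}

Under the coroutine model those alternatives disappear because a goal simply co_awaits whatever it needs (a slot token, a delay, other goals) inside workImpl().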
@@ -1,4 +1,5 @@
#include "child.hh"
#include "error.hh"
#include "file-system.hh"
#include "globals.hh"
#include "hook-instance.hh"

@@ -44,7 +45,7 @@ HookInstance::HookInstance()
        if (dup2(fromHook_.writeSide.get(), STDERR_FILENO) == -1)
            throw SysError("cannot pipe standard error into log file");

        commonChildInit();
        commonExecveingChildInit();

        if (chdir("/") == -1) throw SysError("changing into /");

@@ -73,7 +74,7 @@ HookInstance::HookInstance()

    sink = FdSink(toHook.get());
    std::map<std::string, Config::SettingInfo> settings;
    globalConfig.getSettings(settings);
    globalConfig.getSettings(settings, true);
    for (auto & setting : settings)
        sink << 1 << setting.first << setting.second.value;
    sink << 0;

@@ -86,7 +87,7 @@ HookInstance::~HookInstance()
        toHook.reset();
        if (pid) pid.kill();
    } catch (...) {
        ignoreException();
        ignoreExceptionInDestructor();
    }
}
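In the hook-instance changes above, the constructor still streams settings to the build hook as a sequence of (1, name, value) records terminated by a 0; the getSettings() call merely gains an extra boolean argument (which looks like a restrict-to-overridden flag, though that is an inference). The sketch below models the framing idea only, using a text stream; the real FdSink speaks Lix's binary wire protocol, and the helper here is invented:

// Sketch of the record framing: "1 name value" per setting, then a final "0".
#include <iostream>
#include <map>
#include <string>

void sendSettings(std::ostream & sink, const std::map<std::string, std::string> & settings)
{
    for (auto & [name, value] : settings)
        sink << 1 << '\n' << name << '\n' << value << '\n';
    sink << 0 << '\n'; // end-of-list marker
}

int main()
{
    sendSettings(std::cout, {{"cores", "8"}, {"sandbox", "true"}});
}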
@@ -1,4 +1,5 @@
#include "local-derivation-goal.hh"
#include "error.hh"
#include "indirect-root-store.hh"
#include "machines.hh"
#include "store-api.hh"

@@ -98,9 +99,9 @@ LocalDerivationGoal::~LocalDerivationGoal() noexcept(false)
{
    /* Careful: we should never ever throw an exception from a
       destructor. */
    try { deleteTmpDir(false); } catch (...) { ignoreException(); }
    try { killChild(); } catch (...) { ignoreException(); }
    try { stopDaemon(); } catch (...) { ignoreException(); }
    try { deleteTmpDir(false); } catch (...) { ignoreExceptionInDestructor(); }
    try { killChild(); } catch (...) { ignoreExceptionInDestructor(); }
    try { stopDaemon(); } catch (...) { ignoreExceptionInDestructor(); }
}


@@ -121,8 +122,6 @@ LocalStore & LocalDerivationGoal::getLocalStore()
void LocalDerivationGoal::killChild()
{
    if (pid) {
        worker.childTerminated(this);

        /* If we're using a build user, then there is a tricky race
           condition: if we kill the build user before the child has
           done its setuid() to the build user uid, then it won't be

@@ -149,17 +148,18 @@ void LocalDerivationGoal::killSandbox(bool getStats)
}


Goal::WorkResult LocalDerivationGoal::tryLocalBuild(bool inBuildSlot)
{
kj::Promise<Result<Goal::WorkResult>> LocalDerivationGoal::tryLocalBuild() noexcept
try {
retry:
#if __APPLE__
    additionalSandboxProfile = parsedDrv->getStringAttr("__sandboxProfile").value_or("");
#endif

    if (!inBuildSlot) {
        state = &DerivationGoal::tryToBuild;
    if (!slotToken.valid()) {
        outputLocks.unlock();
        if (0U != settings.maxBuildJobs) {
            return WaitForSlot{};
        if (worker.localBuilds.capacity() > 0) {
            slotToken = co_await worker.localBuilds.acquire();
            co_return co_await tryToBuild();
        }
        if (getMachines().empty()) {
            throw Error(

@@ -214,7 +214,9 @@ Goal::WorkResult LocalDerivationGoal::tryLocalBuild(bool inBuildSlot)
        if (!actLock)
            actLock = std::make_unique<Activity>(*logger, lvlWarn, actBuildWaiting,
                fmt("waiting for a free build user ID for '%s'", Magenta(worker.store.printStorePath(drvPath))));
        return WaitForAWhile{};
        co_await waitForAWhile();
        // we can loop very often, and `co_return co_await` always allocates a new frame
        goto retry;
    }
}

@@ -243,22 +245,29 @@ Goal::WorkResult LocalDerivationGoal::tryLocalBuild(bool inBuildSlot)
    try {

        /* Okay, we have to build. */
        auto fds = startBuilder();

        /* This state will be reached when we get EOF on the child's
           log pipe. */
        state = &DerivationGoal::buildDone;
        auto promise = startBuilder();

        started();
        return WaitForWorld{std::move(fds), true};
        auto r = co_await promise;
        if (r.has_value()) {
            // all good so far
        } else if (r.has_error()) {
            co_return r.assume_error();
        } else {
            co_return r.assume_exception();
        }

    } catch (BuildError & e) {
        outputLocks.unlock();
        buildUser.reset();
        auto report = done(BuildResult::InputRejected, {}, std::move(e));
        report.permanentFailure = true;
        return report;
        co_return report;
    }

    co_return co_await buildDone();
} catch (...) {
    co_return result::failure(std::current_exception());
}
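tryLocalBuild() above now acquires a build slot by co_awaiting worker.localBuilds.acquire() and holding the resulting token, instead of returning WaitForSlot and being re-polled; the waiting-for-a-free-build-user path likewise co_awaits waitForAWhile() and loops via goto retry so that re-checking does not allocate a fresh coroutine frame each time. Below is a thread-based analogy of the slot-token idea using only the standard library; the capacity of 2 and the build body are made up, and this is not the kj-based implementation shown in the diff:

// Analogy: a counting semaphore limits concurrent local builds, the way
// AsyncSemaphore tokens bound them in the goal code.
#include <chrono>
#include <iostream>
#include <semaphore>
#include <thread>
#include <vector>

std::counting_semaphore<64> buildSlots(2); // cf. settings.maxBuildJobs

void buildOne(int id)
{
    buildSlots.acquire();            // cf. slotToken = co_await localBuilds.acquire()
    std::cout << "building " << id << "\n";
    std::this_thread::sleep_for(std::chrono::milliseconds(50));
    buildSlots.release();            // cf. dropping the token when the goal finishes
}

int main()
{
    std::vector<std::jthread> builds;
    for (int i = 0; i < 5; i++)
        builds.emplace_back(buildOne, i);
}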
@@ -388,7 +397,9 @@ void LocalDerivationGoal::cleanupPostOutputsRegisteredModeNonCheck()
    cleanupPostOutputsRegisteredModeCheck();
}

std::set<int> LocalDerivationGoal::startBuilder()
// NOTE this one isn't noexcept because it's called from places that expect
// exceptions to signal failure to launch. we should change this some time.
kj::Promise<Outcome<void, Goal::WorkResult>> LocalDerivationGoal::startBuilder()
{
    if ((buildUser && buildUser->getUIDCount() != 1)
#if __linux__

@@ -777,7 +788,7 @@ std::set<int> LocalDerivationGoal::startBuilder()
        msgs.push_back(std::move(msg));
    }

    return {builderOutPTY.get()};
    return handleChildOutput();
}


@@ -1239,7 +1250,7 @@ void LocalDerivationGoal::startDaemon()
                NotTrusted, daemon::Recursive);
            debug("terminated daemon connection");
        } catch (SysError &) {
            ignoreException();
            ignoreExceptionExceptInterrupt();
        }
    });

@@ -1353,19 +1364,26 @@ void LocalDerivationGoal::runChild()

    try { /* child */

        commonChildInit();
        commonExecveingChildInit();

        setupSyscallFilter();

        bool setUser = true;

        /* Make the contents of netrc available to builtin:fetchurl
           (which may run under a different uid and/or in a sandbox). */
        /* Make the contents of netrc and the CA certificate bundle
           available to builtin:fetchurl (which may run under a
           different uid and/or in a sandbox). */
        std::string netrcData;
        try {
            if (drv->isBuiltin() && drv->builder == "builtin:fetchurl" && !derivationType->isSandboxed())
        std::string caFileData;
        if (drv->isBuiltin() && drv->builder == "builtin:fetchurl" && !derivationType->isSandboxed()) {
            try {
                netrcData = readFile(settings.netrcFile);
        } catch (SysError &) { }
            } catch (SysError &) { }

            try {
                caFileData = readFile(settings.caFile);
            } catch (SysError &) { }
        }

#if __linux__
        if (useChroot) {

@@ -1800,7 +1818,7 @@ void LocalDerivationGoal::runChild()
            e.second = rewriteStrings(e.second, inputRewrites);

        if (drv->builder == "builtin:fetchurl")
            builtinFetchurl(drv2, netrcData);
            builtinFetchurl(drv2, netrcData, caFileData);
        else if (drv->builder == "builtin:buildenv")
            builtinBuildenv(drv2);
        else if (drv->builder == "builtin:unpack-channel")

@@ -2003,6 +2021,8 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()

    OutputPathMap finalOutputs;

    std::vector<std::pair<Path, std::optional<Path>>> nondeterministic;

    for (auto & outputName : sortedOutputNames) {
        auto output = get(drv->outputs, outputName);
        auto scratchPath = get(scratchOutputs, outputName);

@@ -2289,20 +2309,20 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
                    buildUser ? buildUser->getGID() : getgid(),
                    finalDestPath, dst, worker.store.printStorePath(drvPath), tmpDir);

                throw NotDeterministic("derivation '%s' may not be deterministic: output '%s' differs from '%s'",
                    worker.store.printStorePath(drvPath), worker.store.toRealPath(finalDestPath), dst);
                nondeterministic.push_back(std::make_pair(worker.store.toRealPath(finalDestPath), dst));
            } else
                throw NotDeterministic("derivation '%s' may not be deterministic: output '%s' differs",
                    worker.store.printStorePath(drvPath), worker.store.toRealPath(finalDestPath));
                nondeterministic.push_back(std::make_pair(worker.store.toRealPath(finalDestPath), std::nullopt));
        }

        /* Since we verified the build, it's now ultimately trusted. */
        if (!oldInfo.ultimate) {
        else if (!oldInfo.ultimate) {
            oldInfo.ultimate = true;
            localStore.signPathInfo(oldInfo);
            localStore.registerValidPaths({{oldInfo.path, oldInfo}});
        }

        /* Don't register anything, since we already have the
           previous versions which we're comparing. */
        continue;
    }

@@ -2333,6 +2353,18 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
    }

    if (buildMode == bmCheck) {
        if (!nondeterministic.empty()) {
            std::ostringstream msg;
            msg << HintFmt("derivation '%s' may not be deterministic: outputs differ", drvPath.to_string());
            for (auto [oldPath, newPath]: nondeterministic) {
                if (newPath) {
                    msg << HintFmt("\n output differs: output '%s' differs from '%s'", oldPath.c_str(), *newPath);
                } else {
                    msg << HintFmt("\n output '%s' differs", oldPath.c_str());
                }
            }
            throw NotDeterministic(msg.str());
        }
        /* In case of fixed-output derivations, if there are
           mismatches on `--check` an error must be thrown as this is
           also a source for non-determinism. */
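The registerOutputs() hunks above change `--check` handling: instead of throwing NotDeterministic at the first differing output, mismatches are accumulated in the new nondeterministic vector and reported together once all outputs have been compared. A self-contained sketch of that aggregation step, with invented store paths:

// Sketch: collect every nondeterministic output, then emit one combined
// report at the end (the goal throws NotDeterministic with this message).
#include <iostream>
#include <optional>
#include <sstream>
#include <string>
#include <utility>
#include <vector>

int main()
{
    std::vector<std::pair<std::string, std::optional<std::string>>> nondeterministic = {
        {"/nix/store/aaa-foo", std::string("/nix/store/aaa-foo.check")},
        {"/nix/store/bbb-bar", std::nullopt}, // the differing copy was not kept
    };

    if (!nondeterministic.empty()) {
        std::ostringstream msg;
        msg << "derivation may not be deterministic: outputs differ";
        for (auto & [oldPath, newPath] : nondeterministic) {
            if (newPath)
                msg << "\n output '" << oldPath << "' differs from '" << *newPath << "'";
            else
                msg << "\n output '" << oldPath << "' differs";
        }
        std::cerr << msg.str() << "\n";
    }
}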
@@ -182,7 +182,7 @@ struct LocalDerivationGoal : public DerivationGoal
     * Create a LocalDerivationGoal without an on-disk .drv file,
     * possibly a platform-specific subclass
     */
    static std::shared_ptr<LocalDerivationGoal> makeLocalDerivationGoal(
    static std::unique_ptr<LocalDerivationGoal> makeLocalDerivationGoal(
        const StorePath & drvPath,
        const OutputsSpec & wantedOutputs,
        Worker & worker,

@@ -194,7 +194,7 @@ struct LocalDerivationGoal : public DerivationGoal
     * Create a LocalDerivationGoal for an on-disk .drv file,
     * possibly a platform-specific subclass
     */
    static std::shared_ptr<LocalDerivationGoal> makeLocalDerivationGoal(
    static std::unique_ptr<LocalDerivationGoal> makeLocalDerivationGoal(
        const StorePath & drvPath,
        const BasicDerivation & drv,
        const OutputsSpec & wantedOutputs,

@@ -213,12 +213,12 @@ struct LocalDerivationGoal : public DerivationGoal
    /**
     * The additional states.
     */
    WorkResult tryLocalBuild(bool inBuildSlot) override;
    kj::Promise<Result<WorkResult>> tryLocalBuild() noexcept override;

    /**
     * Start building a derivation.
     */
    std::set<int> startBuilder();
    kj::Promise<Outcome<void, WorkResult>> startBuilder();

    /**
     * Fill in the environment for the builder.
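This header (apparently local-derivation-goal.hh) also narrows the factory return type from std::shared_ptr to std::unique_ptr, so ownership of a freshly created goal is unambiguous and shared ownership has to be opted into explicitly by the caller. A minimal illustration with a placeholder type rather than the real goal classes:

// Sketch: a unique_ptr factory still lets callers promote to shared_ptr.
#include <memory>

struct GoalStub {
    virtual ~GoalStub() = default;
};

static std::unique_ptr<GoalStub> makeGoal()
{
    return std::make_unique<GoalStub>();
}

int main()
{
    std::unique_ptr<GoalStub> owned = makeGoal();   // single owner by default
    std::shared_ptr<GoalStub> shared = makeGoal();  // explicit promotion when sharing is needed
    (void) owned;
    (void) shared;
}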
@@ -3,6 +3,9 @@
#include "nar-info.hh"
#include "signals.hh"
#include "finally.hh"
#include <boost/outcome/try.hpp>
#include <kj/array.h>
#include <kj/vector.h>

namespace nix {

@@ -18,7 +21,6 @@ PathSubstitutionGoal::PathSubstitutionGoal(
    , repair(repair)
    , ca(ca)
{
    state = &PathSubstitutionGoal::init;
    name = fmt("substitution of '%s'", worker.store.printStorePath(this->storePath));
    trace("created");
    maintainExpectedSubstitutions = worker.expectedSubstitutions.addTemporarily(1);

@@ -31,35 +33,29 @@ PathSubstitutionGoal::~PathSubstitutionGoal()
}


Goal::Finished PathSubstitutionGoal::done(
Goal::WorkResult PathSubstitutionGoal::done(
    ExitCode result,
    BuildResult::Status status,
    std::optional<std::string> errorMsg)
{
    buildResult.status = status;
    BuildResult buildResult{.status = status};
    if (errorMsg) {
        debug(*errorMsg);
        buildResult.errorMsg = *errorMsg;
    }
    return Finished{result, std::move(buildResult)};
    return WorkResult{result, std::move(buildResult)};
}


Goal::WorkResult PathSubstitutionGoal::work(bool inBuildSlot)
{
    return (this->*state)(inBuildSlot);
}


Goal::WorkResult PathSubstitutionGoal::init(bool inBuildSlot)
{
kj::Promise<Result<Goal::WorkResult>> PathSubstitutionGoal::workImpl() noexcept
try {
    trace("init");

    worker.store.addTempRoot(storePath);

    /* If the path already exists we're done. */
    if (!repair && worker.store.isValidPath(storePath)) {
        return done(ecSuccess, BuildResult::AlreadyValid);
        co_return done(ecSuccess, BuildResult::AlreadyValid);
    }

    if (settings.readOnlyMode)

@@ -67,12 +63,16 @@ Goal::WorkResult PathSubstitutionGoal::init(bool inBuildSlot)

    subs = settings.useSubstitutes ? getDefaultSubstituters() : std::list<ref<Store>>();

    return tryNext(inBuildSlot);
    BOOST_OUTCOME_CO_TRY(auto result, co_await tryNext());
    result.storePath = storePath;
    co_return result;
} catch (...) {
    co_return result::failure(std::current_exception());
}


Goal::WorkResult PathSubstitutionGoal::tryNext(bool inBuildSlot)
{
kj::Promise<Result<Goal::WorkResult>> PathSubstitutionGoal::tryNext() noexcept
try {
    trace("trying next substituter");

    cleanup();

@@ -87,7 +87,7 @@ Goal::WorkResult PathSubstitutionGoal::tryNext(bool inBuildSlot)
        /* Hack: don't indicate failure if there were no substituters.
           In that case the calling derivation should just do a
           build. */
        return done(
        co_return done(
            substituterFailed ? ecFailed : ecNoSubstituters,
            BuildResult::NoSubstituters,
            fmt("path '%s' is required, but there is no substituter that can build it", worker.store.printStorePath(storePath)));

@@ -103,26 +103,28 @@ Goal::WorkResult PathSubstitutionGoal::tryNext(bool inBuildSlot)
        if (sub->storeDir == worker.store.storeDir)
            assert(subPath == storePath);
    } else if (sub->storeDir != worker.store.storeDir) {
        return tryNext(inBuildSlot);
        co_return co_await tryNext();
    }

    try {
        // FIXME: make async
        info = sub->queryPathInfo(subPath ? *subPath : storePath);
    } catch (InvalidPath &) {
        return tryNext(inBuildSlot);
    } catch (SubstituterDisabled &) {
        if (settings.tryFallback) {
            return tryNext(inBuildSlot);
    do {
        try {
            // FIXME: make async
            info = sub->queryPathInfo(subPath ? *subPath : storePath);
            break;
        } catch (InvalidPath &) {
        } catch (SubstituterDisabled &) {
            if (!settings.tryFallback) {
                throw;
            }
        } catch (Error & e) {
            if (settings.tryFallback) {
                logError(e.info());
            } else {
                throw;
            }
        }
        throw;
    } catch (Error & e) {
        if (settings.tryFallback) {
            logError(e.info());
            return tryNext(inBuildSlot);
        }
        throw;
    }
        co_return co_await tryNext();
    } while (false);

    if (info->path != storePath) {
        if (info->isContentAddressed(*sub) && info->references.empty()) {

@@ -132,7 +134,7 @@ Goal::WorkResult PathSubstitutionGoal::tryNext(bool inBuildSlot)
        } else {
            printError("asked '%s' for '%s' but got '%s'",
                sub->getUri(), worker.store.printStorePath(storePath), sub->printStorePath(info->path));
            return tryNext(inBuildSlot);
            co_return co_await tryNext();
        }
    }
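In the substitution-goal changes above, tryNext() stops expressing "try the next substituter" as a tail call from inside every catch clause; the query is wrapped in a do { ... } while (false) with break on success, so all failure branches fall through to a single co_return co_await tryNext(). A standalone sketch of that control-flow shape, in which query() and the exception type are invented:

// Sketch: break on success, shared fallthrough for every failure branch.
#include <iostream>
#include <stdexcept>

struct TransientError : std::runtime_error {
    using std::runtime_error::runtime_error;
};

void query(int attempt)
{
    if (attempt < 2) throw TransientError("substituter not usable yet");
}

void tryCandidate(int attempt)
{
    do {
        try {
            query(attempt);
            std::cout << "attempt " << attempt << " succeeded\n";
            break;  // success: skip the shared fallthrough below
        } catch (TransientError & e) {
            std::cout << "attempt " << attempt << ": " << e.what() << "\n";
        }
        // single shared exit for all failures; in the goal this is
        // `co_return co_await tryNext()`
        tryCandidate(attempt + 1);
        return;
    } while (false);

    std::cout << "continuing with attempt " << attempt << "\n";
}

int main()
{
    tryCandidate(0);
}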
@@ -153,65 +155,67 @@ Goal::WorkResult PathSubstitutionGoal::tryNext(bool inBuildSlot)
    {
        warn("ignoring substitute for '%s' from '%s', as it's not signed by any of the keys in 'trusted-public-keys'",
            worker.store.printStorePath(storePath), sub->getUri());
        return tryNext(inBuildSlot);
        co_return co_await tryNext();
    }

    /* To maintain the closure invariant, we first have to realise the
       paths referenced by this one. */
    WaitForGoals result;
    kj::Vector<std::pair<GoalPtr, kj::Promise<Result<WorkResult>>>> dependencies;
    for (auto & i : info->references)
        if (i != storePath) /* ignore self-references */
            result.goals.insert(worker.goalFactory().makePathSubstitutionGoal(i));
            dependencies.add(worker.goalFactory().makePathSubstitutionGoal(i));

    if (result.goals.empty()) {/* to prevent hang (no wake-up event) */
        return referencesValid(inBuildSlot);
    } else {
        state = &PathSubstitutionGoal::referencesValid;
        return result;
    if (!dependencies.empty()) {/* to prevent hang (no wake-up event) */
        (co_await waitForGoals(dependencies.releaseAsArray())).value();
    }
    co_return co_await referencesValid();
} catch (...) {
    co_return result::failure(std::current_exception());
}


Goal::WorkResult PathSubstitutionGoal::referencesValid(bool inBuildSlot)
{
kj::Promise<Result<Goal::WorkResult>> PathSubstitutionGoal::referencesValid() noexcept
try {
    trace("all references realised");

    if (nrFailed > 0) {
        return done(
        return {done(
            nrNoSubstituters > 0 || nrIncompleteClosure > 0 ? ecIncompleteClosure : ecFailed,
            BuildResult::DependencyFailed,
            fmt("some references of path '%s' could not be realised", worker.store.printStorePath(storePath)));
            fmt("some references of path '%s' could not be realised", worker.store.printStorePath(storePath)))};
    }

    for (auto & i : info->references)
        if (i != storePath) /* ignore self-references */
            assert(worker.store.isValidPath(i));

    state = &PathSubstitutionGoal::tryToRun;
    return tryToRun(inBuildSlot);
    return tryToRun();
} catch (...) {
    return {std::current_exception()};
}


Goal::WorkResult PathSubstitutionGoal::tryToRun(bool inBuildSlot)
{
kj::Promise<Result<Goal::WorkResult>> PathSubstitutionGoal::tryToRun() noexcept
try {
    trace("trying to run");

    if (!inBuildSlot) {
        return WaitForSlot{};
    if (!slotToken.valid()) {
        slotToken = co_await worker.substitutions.acquire();
    }

    maintainRunningSubstitutions = worker.runningSubstitutions.addTemporarily(1);

    outPipe.create();
    auto pipe = kj::newPromiseAndCrossThreadFulfiller<void>();
    outPipe = kj::mv(pipe.fulfiller);

    thr = std::async(std::launch::async, [this]() {
        /* Wake up the worker loop when we're done. */
        Finally updateStats([this]() { outPipe->fulfill(); });

        auto & fetchPath = subPath ? *subPath : storePath;
        try {
            ReceiveInterrupts receiveInterrupts;

            /* Wake up the worker loop when we're done. */
            Finally updateStats([this]() { outPipe.writeSide.close(); });

            Activity act(*logger, actSubstitute, Logger::Fields{worker.store.printStorePath(storePath), sub->getUri()});
            PushActivity pact(act.id);

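tryToRun() above swaps the self-pipe wake-up (the worker used to poll outPipe's read side via WaitForWorld) for a kj cross-thread promise that the substituter thread fulfills when the copy finishes. A rough standard-library analogy of that handshake, not the kj implementation:

// Analogy: a promise/future pair standing in for the cross-thread fulfiller.
#include <future>
#include <iostream>
#include <thread>

int main()
{
    std::promise<void> done;                 // cf. kj::CrossThreadPromiseFulfiller<void>
    std::future<void> doneFuture = done.get_future();

    std::thread substituter([&] {
        // ... copy the store path from the substituter ...
        done.set_value();                    // cf. outPipe->fulfill()
    });

    doneFuture.wait();                       // cf. co_await pipe.promise
    std::cout << "substitution finished\n";
    substituter.join();
}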
@@ -227,37 +231,39 @@ Goal::WorkResult PathSubstitutionGoal::tryToRun(bool inBuildSlot)
        }
    });

    state = &PathSubstitutionGoal::finished;
    return WaitForWorld{{outPipe.readSide.get()}, true};
    co_await pipe.promise;
    co_return co_await finished();
} catch (...) {
    co_return result::failure(std::current_exception());
}


Goal::WorkResult PathSubstitutionGoal::finished(bool inBuildSlot)
{
kj::Promise<Result<Goal::WorkResult>> PathSubstitutionGoal::finished() noexcept
try {
    trace("substitute finished");

    worker.childTerminated(this);

    try {
        thr.get();
    } catch (std::exception & e) {
        printError(e.what());

        /* Cause the parent build to fail unless --fallback is given,
           or the substitute has disappeared. The latter case behaves
           the same as the substitute never having existed in the
           first place. */
    do {
        try {
            throw;
        } catch (SubstituteGone &) {
        } catch (...) {
            substituterFailed = true;
        }
            slotToken = {};
            thr.get();
            break;
        } catch (std::exception & e) {
            printError(e.what());

            /* Cause the parent build to fail unless --fallback is given,
               or the substitute has disappeared. The latter case behaves
               the same as the substitute never having existed in the
               first place. */
            try {
                throw;
            } catch (SubstituteGone &) {
            } catch (...) {
                substituterFailed = true;
            }
        }
        /* Try the next substitute. */
        state = &PathSubstitutionGoal::tryNext;
        return tryNext(inBuildSlot);
    }
        co_return co_await tryNext();
    } while (false);

    worker.markContentsGood(storePath);

@@ -274,13 +280,9 @@ Goal::WorkResult PathSubstitutionGoal::finished(bool inBuildSlot)
    worker.doneNarSize += maintainExpectedNar.delta();
    maintainExpectedNar.reset();

    return done(ecSuccess, BuildResult::Substituted);
}


Goal::WorkResult PathSubstitutionGoal::handleChildOutput(int fd, std::string_view data)
{
    return StillAlive{};
    co_return done(ecSuccess, BuildResult::Substituted);
} catch (...) {
    co_return result::failure(std::current_exception());
}


@@ -290,12 +292,9 @@ void PathSubstitutionGoal::cleanup()
    if (thr.valid()) {
        // FIXME: signal worker thread to quit.
        thr.get();
        worker.childTerminated(this);
    }

    outPipe.close();
} catch (...) {
    ignoreException();
    ignoreExceptionInDestructor();
}
}
@@ -46,7 +46,7 @@ struct PathSubstitutionGoal : public Goal
    /**
     * Pipe for the substituter's standard output.
     */
    Pipe outPipe;
    kj::Own<kj::CrossThreadPromiseFulfiller<void>> outPipe;

    /**
     * The substituter thread.

@@ -67,15 +67,12 @@ struct PathSubstitutionGoal : public Goal
    NotifyingCounter<uint64_t>::Bump maintainExpectedSubstitutions,
        maintainRunningSubstitutions, maintainExpectedNar, maintainExpectedDownload;

    typedef WorkResult (PathSubstitutionGoal::*GoalState)(bool inBuildSlot);
    GoalState state;

    /**
     * Content address for recomputing store path
     */
    std::optional<ContentAddress> ca;

    Finished done(
    WorkResult done(
        ExitCode result,
        BuildResult::Status status,
        std::optional<std::string> errorMsg = {});

@@ -90,32 +87,15 @@ public:
    );
    ~PathSubstitutionGoal();

    Finished timedOut(Error && ex) override { abort(); };

    /**
     * We prepend "a$" to the key name to ensure substitution goals
     * happen before derivation goals.
     */
    std::string key() override
    {
        return "a$" + std::string(storePath.name()) + "$" + worker.store.printStorePath(storePath);
    }

    WorkResult work(bool inBuildSlot) override;
    kj::Promise<Result<WorkResult>> workImpl() noexcept override;

    /**
     * The states.
     */
    WorkResult init(bool inBuildSlot);
    WorkResult tryNext(bool inBuildSlot);
    WorkResult referencesValid(bool inBuildSlot);
    WorkResult tryToRun(bool inBuildSlot);
    WorkResult finished(bool inBuildSlot);

    /**
     * Callback used by the worker to write to the log.
     */
    WorkResult handleChildOutput(int fd, std::string_view data) override;
    kj::Promise<Result<WorkResult>> tryNext() noexcept;
    kj::Promise<Result<WorkResult>> referencesValid() noexcept;
    kj::Promise<Result<WorkResult>> tryToRun() noexcept;
    kj::Promise<Result<WorkResult>> finished() noexcept;

    /* Called by destructor, can't be overridden */
    void cleanup() override final;
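This last header (apparently substitution-goal.hh) drops the hand-written state machine: the pointer-to-member GoalState field and the per-state methods taking inBuildSlot are replaced by kj::Promise-returning methods that workImpl() sequences itself. For reference, the pattern being deleted looks roughly like this toy version:

// Toy stand-in for the removed state machine: a pointer-to-member field
// selects which step runs on the next wake-up.
#include <iostream>

struct ToyGoal {
    typedef void (ToyGoal::*GoalState)();
    GoalState state = &ToyGoal::init;

    void work() { (this->*state)(); }    // cf. return (this->*state)(inBuildSlot)

    void init()
    {
        std::cout << "init\n";
        state = &ToyGoal::finished;      // the next wake-up runs finished()
    }

    void finished() { std::cout << "finished\n"; }
};

int main()
{
    ToyGoal g;
    g.work();
    g.work();
}

With coroutines the "advance to the next state on the next wake-up" bookkeeping lives in the body of workImpl(), which is why the state member and its typedef disappear from the header.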