Compare commits


2 commits

Author SHA1 Message Date
Yureka ed51a172c6 libutil: fix conditional for close_range availability
This check is wrong and would cause the close_range() function to be called even when it's not available

Change-Id: Ide65b36830e705fe772196c37349873353622761
(cherry picked from commit df49d37b71)
2024-08-20 09:09:57 +02:00
Artemis Tosini ca2b514e20 meson: Don't use target_machine
The target_machine variable is meant for the target
of cross compilers. We are not a cross compiler, so
we instead reuse our host_machine-based checks.

Fixes Linux→FreeBSD cross, since Meson can't figure
out `target_machine.kernel()` in that case.

Fixes: lix-project/lix#469

Change-Id: Ia46a64c8d507c3b08987a1de1eda171ff5e50df4
2024-08-16 23:56:57 -07:00
266 changed files with 1727 additions and 3913 deletions

View file

@ -2,7 +2,7 @@
name: Missing or incorrect documentation
about: Help us improve the reference manual
title: ''
labels: docs
labels: documentation
assignees: ''
---
@ -19,10 +19,10 @@ assignees: ''
<!-- make sure this issue is not redundant or obsolete -->
- [ ] checked [latest Lix manual] or its [source code]
- [ ] checked [latest Lix manual] \([source]\)
- [ ] checked [documentation issues] and [recent documentation changes] for possible duplicates
[latest Lix manual]: https://docs.lix.systems/manual/lix/nightly
[source code]: https://git.lix.systems/lix-project/lix/src/main/doc/manual/src
[latest Nix manual]: https://docs.lix.systems/manual/lix/nightly
[source]: https://git.lix.systems/lix-project/lix/src/main/doc/manual/src
[documentation issues]: https://git.lix.systems/lix-project/lix/issues?labels=151&state=all
[recent documentation changes]: https://gerrit.lix.systems/q/p:lix+path:%22%5Edoc/manual/.*%22

.gitignore (vendored, 7 changes)
View file

@ -9,10 +9,6 @@ GTAGS
# ccls
/.ccls-cache
# auto-generated compilation database
compile_commands.json
rust-project.json
result
result-*
@ -33,6 +29,3 @@ buildtime.bin
/.pre-commit-config.yaml
/.nocontribmsg
/release
# Rust build files when using Cargo (not actually supported for building but it spews the files anyway)
/target/

View file

@ -1,6 +0,0 @@
[workspace]
resolver = "2"
members = ["src/lix-doc"]
[workspace.package]
edition = "2021"

View file

@ -1,18 +0,0 @@
# Usually "experimental" or "deprecated"
_kind:
# "xp" or "dp"
kindShort:
with builtins;
with import ./utils.nix;
let
showFeature =
name: doc:
squash ''
## [`${name}`]{#${kindShort}-feature-${name}}
${doc}
'';
in
xps: (concatStringsSep "\n" (attrValues (mapAttrs showFeature xps)))

View file

@ -1,14 +1,9 @@
# Usually "experimental" or "deprecated"
kind:
# "xp" or "dp"
kindShort:
with builtins;
with import ./utils.nix;
let
showExperimentalFeature = name: doc: ''
- [`${name}`](@docroot@/contributing/${kind}-features.md#${kindShort}-feature-${name})
- [`${name}`](@docroot@/contributing/experimental-features.md#xp-feature-${name})
'';
in
xps: indent " " (concatStrings (attrValues (mapAttrs showExperimentalFeature xps)))

View file

@ -0,0 +1,13 @@
with builtins;
with import ./utils.nix;
let
showExperimentalFeature =
name: doc:
squash ''
## [`${name}`]{#xp-feature-${name}}
${doc}
'';
in
xps: (concatStringsSep "\n" (attrValues (mapAttrs showExperimentalFeature xps)))

View file

@ -20,8 +20,6 @@ conf_file_json = custom_target(
capture : true,
output : 'conf-file.json',
env : nix_env_for_docs,
# FIXME: put the actual lib targets in here? meson have introspection challenge 2024 though.
build_always_stale : true,
)
nix_conf_file_md_body = custom_target(
@ -52,8 +50,6 @@ nix_exp_features_json = custom_target(
command : [ nix, '__dump-xp-features' ],
capture : true,
output : 'xp-features.json',
# FIXME: put the actual lib targets in here? meson have introspection challenge 2024 though.
build_always_stale : true,
)
language_json = custom_target(
@ -61,8 +57,6 @@ language_json = custom_target(
output : 'language.json',
capture : true,
env : nix_env_for_docs,
# FIXME: put the actual lib targets in here? meson have introspection challenge 2024 though.
build_always_stale : true,
)
nix3_cli_json = custom_target(
@ -70,8 +64,6 @@ nix3_cli_json = custom_target(
capture : true,
output : 'nix.json',
env : nix_env_for_docs,
# FIXME: put the actual lib targets in here? meson have introspection challenge 2024 though.
build_always_stale : true,
)
generate_manual_deps = files(
@ -80,9 +72,9 @@ generate_manual_deps = files(
# Generates builtins.md and builtin-constants.md.
subdir('src/language')
# Generates new-cli pages, {experimental,deprecated}-features-shortlist.md, and conf-file.md.
# Generates new-cli pages, experimental-features-shortlist.md, and conf-file.md.
subdir('src/command-ref')
# Generates {experimental,deprecated}-feature-descriptions.md.
# Generates experimental-feature-descriptions.md.
subdir('src/contributing')
# Generates rl-next-generated.md.
subdir('src/release-notes')
@ -114,8 +106,6 @@ manual = custom_target(
nix3_cli_files,
experimental_features_shortlist_md,
experimental_feature_descriptions_md,
deprecated_features_shortlist_md,
deprecated_feature_descriptions_md,
conf_file_md,
builtins_md,
builtin_constants_md,

View file

@ -1,21 +0,0 @@
---
synopsis: "Build failures caused by `allowSubstitutes = false` while being the wrong system now produce a decent error"
issues: [fj#484]
cls: [1841]
category: Fixes
credits: jade
---
Nix allows derivations to set `allowSubstitutes = false` in order to force them to be built locally without querying substituters for them.
This is useful for derivations that are very fast to build (especially if they produce large output).
However, this can shoot you in the foot if the derivation *has* to be substituted, such as when the derivation is for another architecture, which is what `--always-allow-substitutes` is for.
Perhaps such derivations that are known to be impossible to build locally should ignore `allowSubstitutes` (irrespective of remote builders) in the future, but this at least reports the failure and solution directly.
```
$ nix build -f fail.nix
error: a 'unicornsandrainbows-linux' with features {} is required to build '/nix/store/...-meow.drv', but I am a 'x86_64-linux' with features {...}
Hint: the failing derivation has allowSubstitutes set to false, forcing it to be built rather than substituted.
Passing --always-allow-substitutes to force substitution may resolve this failure if the path is available in a substituter.
```
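
For context, here is a minimal sketch of a derivation that would produce the error above when evaluated on an `x86_64-linux` machine. The name, system string, and builder are placeholders echoing the sample output, not code from this changeset.

```nix
derivation {
  name = "meow";
  # A system this machine cannot build for, matching the error message above.
  system = "unicornsandrainbows-linux";
  builder = "/bin/sh";
  args = [ "-c" "echo meow > $out" ];
  # Forces a local build instead of substitution, which triggers the new hint.
  allowSubstitutes = false;
}
```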

View file

@ -1,10 +0,0 @@
---
synopsis: "`Alt+Left` and `Alt+Right` go back/forwards by words in `nix repl`"
issues: [fj#501]
cls: [1883]
category: Fixes
credits: 9999years
---
`nix repl` now recognizes `Alt+Left` and `Alt+Right` for navigating by words
when entering input, on more terminals/platforms.

View file

@ -1,17 +0,0 @@
---
synopsis: Deprecated language features
issues: [fj#437]
cls: [1785, 1736, 1735, 1744]
category: Breaking Changes
credits: [piegames, horrors]
---
A system for deprecation (and then the planned removal) of undesired language features has been put into place.
It is controlled via feature flags much like experimental features, except that the deprecations are enabled by default,
and can be disabled via the flags for backwards compatibility (opt-out with `--extra-deprecated-features` or the Nix configuration file).
- `url-literals`: **URL literals** have long been obsolete and their use discouraged, and they are now officially deprecated.
This means that all URLs must now be written within quotes, like all other strings.
- `rec-set-overrides`: **__overrides** is an old arcane syntax which has not been in use for more than a decade.
It is soft-deprecated with a warning only, with the plan to turn that into an error in a future release.
- `ancient-let`: **The old `let` syntax** (`let { body = …; … }`) is soft-deprecated with a warning as well. Use the regular `let … in` instead.
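
As a rough illustration (not part of this changeset), the three constructs and their preferred replacements could be written as follows; evaluating the deprecated forms now needs the matching `--extra-deprecated-features` flags.

```nix
{
  # url-literals: unquoted URLs must now be written as ordinary strings.
  beforeUrl = http://example.org/foo.tar.bz2;
  afterUrl = "http://example.org/foo.tar.bz2";

  # rec-set-overrides: __overrides in a recursive set is soft-deprecated;
  # here it makes `overridden` evaluate to 2 instead of 1.
  overridden = (rec { a = 1; __overrides = { a = 2; }; }).a;

  # ancient-let: the old `let { body = ...; }` form versus regular `let ... in`.
  oldLet = let { x = 1; body = x + 1; };
  newLet = let x = 1; in x + 1;
}
```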

View file

@ -1,10 +0,0 @@
---
synopsis: HTTP proxy environment variables are now respected for S3 binary cache stores
issues: [fj#433]
cls: [1788]
category: Fixes
credits: jade
---
Due to "legacy reasons" (according to the AWS C++ SDK docs), the AWS SDK ignores system proxy configuration by default.
We turned it back on.

View file

@ -1,17 +0,0 @@
---
synopsis: readline support removed
cls: [1885]
category: Packaging
credits: [9999years]
---
Support for building Lix with [`readline`][readline] instead of
[`editline`][editline] has been removed. `readline` support hasn't worked for a
long time (attempting to use it would lead to build errors) and would make Lix
subject to the GPL if it did work. In the future, we're hoping to replace
`editline` with [`rustyline`][rustyline] for improved ergonomics in the `nix
repl`.
[readline]: https://en.wikipedia.org/wiki/GNU_Readline
[editline]: https://github.com/troglobit/editline
[rustyline]: https://github.com/kkawakam/rustyline

View file

@ -1,30 +0,0 @@
---
synopsis: Relative and tilde paths in configuration
issues: [fj#482]
cls: [1851, 1863, 1864]
category: Features
credits: [9999years]
---
[Configuration settings](@docroot@/command-ref/conf-file.md) can now refer to
files with paths relative to the file they're written in or relative to your
home directory (with `~/`).
This makes settings like
[`repl-overlays`](@docroot@/command-ref/conf-file.md#conf-repl-overlays) and
[`secret-key-files`](@docroot@/command-ref/conf-file.md#conf-repl-overlays)
much easier to set, especially if you'd like to refer to files in an existing
dotfiles repo cloned into your home directory.
If you put `repl-overlays = repl.nix` in your `~/.config/nix/nix.conf`, it'll
load `~/.config/nix/repl.nix`. Similarly, you can set `repl-overlays =
~/.dotfiles/repl.nix` to load a file relative to your home directory.
Configuration files can also
[`include`](@docroot@/command-ref/conf-file.md#file-format) paths relative to
your home directory.
Only user configuration files (like `$XDG_CONFIG_HOME/nix/nix.conf` or the
files listed in `$NIX_USER_CONF_FILES`) can use tilde paths relative to your
home directory. Configuration listed in the `$NIX_CONFIG` environment variable
may not use relative paths.
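
For illustration, a minimal sketch of a `repl.nix` that a `repl-overlays = repl.nix` entry could point at; the three-argument signature follows the `repl-overlays` documentation, while the binding it adds is invented for this example.

```nix
info: final: attrs: {
  # Add a convenience binding to every repl session.
  greeting = "hello from ${info.currentSystem}";
}
```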

View file

@ -192,7 +192,6 @@
- [Hacking](contributing/hacking.md)
- [Testing](contributing/testing.md)
- [Experimental Features](contributing/experimental-features.md)
- [Deprecated Features](contributing/deprecated-features.md)
- [CLI guideline](contributing/cli-guideline.md)
- [C++ style guide](contributing/cxx.md)
- [Release Notes](release-notes/release-notes.md)

View file

@ -1,37 +1,23 @@
xp_features_json = custom_target(
command : [nix, '__dump-xp-features'],
capture : true,
output : 'xp-features.json',
)
experimental_features_shortlist_md = custom_target(
command : nix_eval_for_docs + [
'--expr',
'import @INPUT0@ "experimental" "xp" (builtins.fromJSON (builtins.readFile @INPUT1@))',
'import @INPUT0@ (builtins.fromJSON (builtins.readFile @INPUT1@))',
],
input : [
'../../generate-features-shortlist.nix',
nix_exp_features_json,
'../../generate-xp-features-shortlist.nix',
xp_features_json,
],
capture : true,
output : 'experimental-features-shortlist.md',
env : nix_env_for_docs,
)
dp_features_json = custom_target(
command : [nix, '__dump-dp-features'],
capture : true,
output : 'dp-features.json',
)
deprecated_features_shortlist_md = custom_target(
command : nix_eval_for_docs + [
'--expr',
'import @INPUT0@ "deprecated" "dp" (builtins.fromJSON (builtins.readFile @INPUT1@))',
],
input : [
'../../generate-features-shortlist.nix',
dp_features_json,
],
capture : true,
output : 'deprecated-features-shortlist.md',
env : nix_env_for_docs,
)
# Intermediate step for manpage generation.
# This splorks the output of generate-manpage.nix as JSON,
# which gets written as a directory tree below.
@ -74,7 +60,6 @@ conf_file_md = custom_target(
'../../utils.nix',
conf_file_json,
experimental_features_shortlist_md,
deprecated_features_shortlist_md,
],
output : 'conf-file.md',
env : nix_env_for_docs,

View file

@ -1,37 +0,0 @@
This section describes the notion of *deprecated features*, and how it fits into the big picture of the development of Lix.
# What are deprecated features?
Deprecated features are disabled by default, with the intent to eventually remove them.
Users must explicitly enable them to keep using them, by toggling the associated [deprecated feature flags](@docroot@/command-ref/conf-file.md#conf-deprecated-features).
This allows backwards compatibility and a graceful transition away from undesired features.
# Which features can be deprecated?
Undesired features should be soft-deprecated by yielding a warning when used for a significant amount of time before they can be deprecated.
Legacy obsolete features with little to no usage may go through this process faster.
Deprecated features should have a migration path to a preferred alternative.
# Lifecycle of a deprecated feature
This description is not normative, but a feature removal may roughly happen like this:
1. Add a warning when the feature is being used.
2. Disable the feature by default, putting it behind a deprecated feature flag.
- If disabling the feature started out as an opt-in experimental feature, turn that experimental flag into a no-op or remove it entirely.
For example, `--extra-experimental-features=no-url-literals` becomes `--extra-deprecated-features=url-literals`.
3. Decide on a time frame for how long that feature will still be supported for backwards compatibility, and clearly communicate that in the error messages.
- Sometimes, automatic migration to alternatives is possible, and it should be provided where possible.
- At least one NixOS release cycle should be the minimum.
4. Finally remove the feature entirely, only keeping the error message for those still using it.
# Relation to language versioning
Obviously, removing anything breaks backwards compatibility.
In an ideal world, we'd have SemVer controls over the language and its features, cleanly allowing us to make breaking changes.
See https://wiki.lix.systems/books/lix-contributors/page/language-versioning and [RFC 137](https://github.com/nixos/rfcs/pull/137) for efforts on that.
However, we do not live in such an ideal world, and currently this goal is so far away that "just disable it with some back-compat for a couple of years" is the most realistic solution, especially for comparatively minor changes.
# Currently available deprecated features
{{#include @generated@/contributing/deprecated-feature-descriptions.md}}

View file

@ -4,25 +4,12 @@
experimental_feature_descriptions_md = custom_target(
command : nix_eval_for_docs + [
'--expr',
'import @INPUT0@ "experimental" "xp" (builtins.fromJSON (builtins.readFile @INPUT1@))',
'import @INPUT0@ (builtins.fromJSON (builtins.readFile @INPUT1@))',
],
input : [
'../../generate-features.nix',
nix_exp_features_json,
'../../generate-xp-features.nix',
xp_features_json,
],
capture : true,
output : 'experimental-feature-descriptions.md',
)
deprecated_feature_descriptions_md = custom_target(
command : nix_eval_for_docs + [
'--expr',
'import @INPUT0@ "deprecated" "dp" (builtins.fromJSON (builtins.readFile @INPUT1@))',
],
input : [
'../../generate-features.nix',
dp_features_json,
],
capture : true,
output : 'deprecated-feature-descriptions.md',
)

View file

@ -36,10 +36,7 @@ All users of the Lix daemon may do the following to bring things into the Nix st
- Input-addressed, so they are run in the sandbox with no network access, with the following exceptions:
- The (poorly named, since it is not *just* about chroot) property `__noChroot` is set on the derivation and `sandbox` is set to `relaxed`.
- On macOS, the derivation property `__darwinAllowLocalNetworking` allows network access to localhost from input-addressed derivations regardless of the `sandbox` setting value.
This property exists with such semantics because macOS has no network namespace equivalent to isolate individual processes' localhost networking.
- On macOS, the derivation property `__sandboxProfile` accepts extra sandbox profile S-expressions, allowing derivations to bypass arbitrary parts of the sandbox without altogether disabling it.
This is only permitted when `sandbox` is set to `relaxed`.
- On macOS, the derivation property `__darwinAllowLocalNetworking` allows network access to localhost from input-addressed derivations regardless of the `sandbox` setting value. This property exists with such semantics because macOS has no network namespace equivalent to isolate individual processes' localhost networking.
- Output-addressed, so they are run with network access but their result must match an expected hash.
Trusted users may set any setting, including `sandbox = false`, so the sandbox state can be different at runtime from what is described in `nix.conf` for builds invoked with such settings.
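
As a sketch only (not taken from this changeset), a derivation opting into these relaxations could look like the following; the attribute values are placeholders, and `__noChroot` is honoured only when the daemon is configured with `sandbox = relaxed`.

```nix
derivation {
  name = "needs-local-network";
  system = builtins.currentSystem;
  builder = "/bin/sh";
  args = [ "-c" "echo done > $out" ];
  # Skip the chroot/namespace sandbox; only respected with `sandbox = relaxed`.
  __noChroot = true;
  # macOS only: allow localhost networking regardless of the `sandbox` setting.
  __darwinAllowLocalNetworking = true;
}
```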

View file

@ -77,6 +77,12 @@
}
```
Finally, as a convenience, *URIs* as defined in appendix B of
[RFC 2396](http://www.ietf.org/rfc/rfc2396.txt) can be written *as
is*, without quotes. For instance, the string
`"http://example.org/foo.tar.bz2"` can also be written as
`http://example.org/foo.tar.bz2`.
- <a id="type-number" href="#type-number">Number</a>
Numbers, which can be *integers* (like `123`) or *floating point*
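
As a small aside to the URI paragraph above, both spellings denote the same string value (illustrative values only; the `url-literals` deprecation covered in the release notes above applies to the unquoted form).

```nix
{
  quoted = "http://example.org/foo.tar.bz2";
  unquoted = http://example.org/foo.tar.bz2;
  # Evaluates to true: the two spellings are the same string.
  sameValue = "http://example.org/foo.tar.bz2" == http://example.org/foo.tar.bz2;
}
```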

View file

@ -99,10 +99,9 @@
];
stdenvs = [
# see assertion in package.nix why these two are disabled
# "stdenv"
# "gccStdenv"
"gccStdenv"
"clangStdenv"
"stdenv"
"libcxxStdenv"
"ccacheStdenv"
];
@ -122,11 +121,7 @@
name = "${stdenvName}Packages";
value = f stdenvName;
}) stdenvs
)
// {
# TODO delete this and reënable gcc stdenvs once gcc compiles kj coros correctly
stdenvPackages = f "clangStdenv";
};
);
# Memoize nixpkgs for different platforms for efficiency.
nixpkgsFor = forAllSystems (
@ -217,7 +212,7 @@
# A Nixpkgs overlay that overrides the 'nix' and
# 'nix.perl-bindings' packages.
overlays.default = overlayFor (p: p.clangStdenv);
overlays.default = overlayFor (p: p.stdenv);
hydraJobs = {
# Binary package for various platforms.

View file

@ -2,6 +2,12 @@
# It is not intended for manual editing.
version = 3
[[package]]
name = "autocfg"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
[[package]]
name = "countme"
version = "3.0.1"
@ -10,15 +16,15 @@ checksum = "7704b5fdd17b18ae31c4c1da5a2e0305a2bf17b5249300a9ee9ed7b72114c636"
[[package]]
name = "dissimilar"
version = "1.0.9"
version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "59f8e79d1fbf76bdfbde321e902714bf6c49df88a7dda6fc682fc2979226962d"
checksum = "86e3bdc80eee6e16b2b6b0f87fbc98c04bee3455e35174c0de1a125d0688c632"
[[package]]
name = "expect-test"
version = "1.5.0"
version = "1.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9e0be0a561335815e06dab7c62e50353134c796e7a6155402a64bcff66b6a5e0"
checksum = "30d9eafeadd538e68fb28016364c9732d78e420b9ff8853fa5e4058861e9f8d3"
dependencies = [
"dissimilar",
"once_cell",
@ -39,6 +45,15 @@ dependencies = [
"rowan",
]
[[package]]
name = "memoffset"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "488016bfae457b036d996092f6cb448677611ce4449e970ceaf42695203f218a"
dependencies = [
"autocfg",
]
[[package]]
name = "once_cell"
version = "1.19.0"
@ -56,12 +71,13 @@ dependencies = [
[[package]]
name = "rowan"
version = "0.15.16"
version = "0.15.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0a542b0253fa46e632d27a1dc5cf7b930de4df8659dc6e720b647fc72147ae3d"
checksum = "32a58fa8a7ccff2aec4f39cc45bf5f985cec7125ab271cf681c279fd00192b49"
dependencies = [
"countme",
"hashbrown",
"memoffset",
"rustc-hash",
"text-size",
]

View file

@ -8,10 +8,13 @@ license = "BSD-2-Clause OR MIT"
homepage = "https://github.com/lf-/nix-doc"
repository = "https://github.com/lf-/nix-doc"
[lib]
crate_type = ["staticlib"]
[dependencies]
rnix = "0.11.0"
# Necessary because rnix fails to export a critical trait (Rowan's AstNode).
rowan = "0.15.16"
rowan = "0.15.0"
[dev-dependencies]
expect-test = "1.1.0"

View file

@ -30,14 +30,6 @@
# FIXME: This hack should be removed when https://git.lix.systems/lix-project/lix/issues/359
# is fixed.
#
# lix-doc is built with Meson in lix-doc/meson.build, and linked into libcmd in
# src/libcmd/meson.build. When building outside the Nix sandbox, Meson will use the .wrap
# files in subprojects/ to download and extract the dependency crates into subprojects/.
# When building inside the Nix sandbox, Lix's derivation in package.nix uses a
# fixed-output derivation to fetch those crates in advance instead, and then symlinks
# them into subprojects/ with the same names that Meson uses when downloading them
# itself -- perfect for --wrap-mode=nodownload, which mesonConfigurePhase uses.
#
# Unit tests are setup in tests/unit/meson.build, under the test suite "check".
#
# Functional tests are a bit more complicated. Generally they're defined in
@ -46,11 +38,10 @@
# be placed in specific directories' meson.build files to create the right directory tree
# in the build directory.
project('lix', 'cpp', 'rust',
project('lix', 'cpp',
version : run_command('bash', '-c', 'echo -n $(jq -r .version < ./version.json)$VERSION_SUFFIX', check : true).stdout().strip(),
default_options : [
'cpp_std=c++2a',
'rust_std=2021',
# TODO(Qyriad): increase the warning level
'warning_level=1',
'debug=true',
@ -147,17 +138,6 @@ if should_pch
# Unlike basically everything else that takes a file, Meson requires the arguments to
# cpp_pch : to be strings and doesn't accept files(). So absolute path it is.
cpp_pch = [meson.project_source_root() / 'src/pch/precompiled-headers.hh']
# Saves about 400s (30% at time of writing) from compile time on-cpu, mostly
# by removing instantiations of nlohmann from every single damned compilation
# unit.
# There is no equivalent in gcc.
if cxx.get_id() == 'clang'
add_project_arguments(
'-fpch-instantiate-templates',
language : 'cpp',
)
endif
else
cpp_pch = []
endif
@ -167,18 +147,10 @@ endif
# frees one would expect when the objects are unique_ptrs. these problems
# often show up as memory corruption when nesting generators (since we do
# treat generators like owned memory) and will cause inexplicable crashes.
#
# gcc 13 does not compile capnp coroutine code correctly. a newer version
# may fix this. (cf. https://gcc.gnu.org/bugzilla/show_bug.cgi?id=102051)
# we allow gcc 13 here anyway because CI uses it for clang-tidy, and when
# the compiler crashes outright it won't produce any bad binaries either.
assert(
cxx.get_id() != 'gcc' or cxx.version().version_compare('>=13'),
'GCC is known to miscompile coroutines, use clang.'
'GCC 12 and earlier are known to miscompile lix coroutines, use GCC 13 or clang.'
)
if cxx.get_id() == 'gcc'
warning('GCC is known to crash while building coroutines, use clang.')
endif
# Translate some historical and Mesony CPU names to Lixy CPU names.
@ -237,7 +209,6 @@ configdata += {
}
boost = dependency('boost', required : true, modules : ['container'], include_type : 'system')
kj = dependency('kj-async', required : true, include_type : 'system')
# cpuid only makes sense on x86_64
cpuid_required = is_x64 ? get_option('cpuid') : false
@ -351,6 +322,13 @@ pegtl = dependency(
nlohmann_json = dependency('nlohmann_json', required : true, include_type : 'system')
# lix-doc is a Rust project provided via buildInputs and unfortunately doesn't have any way to be detected.
# Just declare it manually to resolve this.
#
# FIXME: build this with meson in the future after we drop Make (with which we
# *absolutely* are not going to make it work)
lix_doc = declare_dependency(link_args : [ '-llix_doc' ])
if is_freebsd
libprocstat = declare_dependency(link_args : [ '-lprocstat' ])
endif
@ -565,33 +543,6 @@ if cxx.get_id() in ['clang', 'gcc']
)
endif
# Until Meson 1.5¹, we can't just give Meson a Cargo.lock file and be done with it.
# Meson will *detect* what dependencies are needed from Cargo files; it just won't
# fetch them. The Meson 1.5 feature essentially internally translates Cargo.lock entries
# to .wrap files, and that translation is incredibly straightforward, so let's just
# use a simple Python script to generate the .wrap files ourselves while we wait for
# Meson 1.5. Weirdly, it seems Meson will only detect dependencies from other
# dependency() calls, so we have to specify lix-doc's two top-level dependencies,
# rnix and rowan, manually, and then their dependencies will be recursively translated
# into more dependency() calls.
#
# When Meson translates a Cargo dependency, the string passed to `dependency()` follows
# a fixed format, which is important as the .wrap files' basenames must match the string
# passed to `dependency()` exactly.
# In Meson 1.4, this format is `$packageName-rs`. Meson 1.5 changes this to
# `$packageName-$shortenedVersionString-rs`, because of course it does, but we'll cross
# that bridge when we get there...
#
# [1]: https://github.com/mesonbuild/meson/commit/9b8378985dbdc0112d11893dd42b33b7bc8d1e62
# FIXME: remove (along with its generated wrap files) when we get rid of meson 1.4
run_command(
python,
meson.project_source_root() / 'meson/cargo-lock-to-wraps.py',
meson.project_source_root() / 'Cargo.lock',
meson.project_source_root() / 'subprojects',
check : true,
)
if is_darwin
configure_file(
input : 'misc/launchd/org.nixos.nix-daemon.plist.in',

View file

@ -1,43 +0,0 @@
#!/usr/bin/env python3
import argparse
import tomllib
import sys
DOWNLOAD_URI_FORMAT = 'https://crates.io/api/v1/crates/{crate}/{version}/download'
WRAP_TEMPLATE = """
[wrap-file]
method = cargo
directory = {crate}-{version}
source_url = {url}
source_filename = {crate}-{version}.tar.gz
source_hash = {hash}
""".lstrip()
parser = argparse.ArgumentParser()
parser.add_argument('lockfile', help='path to the Cargo lockfile to generate wraps from')
parser.add_argument('outdir', help="the 'subprojects' directory to write .wrap files to")
args = parser.parse_args()
with open(args.lockfile, 'rb') as f:
lock_toml = tomllib.load(f)
for dependency in lock_toml['package']:
try:
hash = dependency['checksum']
except KeyError:
# The base package, e.g. lix-doc, won't have a checksum, and conveniently
# the base package is also not something we want a wrap file for.
# Doesn't that work out nicely?
continue
crate = dependency['name']
version = dependency['version']
url = DOWNLOAD_URI_FORMAT.format(crate=crate, version=version)
wrap_text = WRAP_TEMPLATE.format(crate=crate, version=version, url=url, hash=hash)
with open(f'{args.outdir}/{crate}-rs.wrap', 'w') as f:
f.write(wrap_text)

View file

@ -1,89 +0,0 @@
#!/usr/bin/env python3
"""
Runs run-clang-tidy. A bit meta. Maybe it will replace run-clang-tidy one day
because the run-clang-tidy UX is so questionable.
"""
# I hereby dedicate this script to fuck you meson.
# I cannot simply write my code to invoke a subprocess in a meson file because
# Meson corrupts backslashes in command line args to subprocesses.
# This is allegedly for "Windows support", but last time I checked Windows
# neither needs nor wants you to corrupt its command lines.
# https://github.com/mesonbuild/meson/issues/1564
import multiprocessing
import subprocess
import os
import sys
from pathlib import Path
def default_concurrency():
return min(multiprocessing.cpu_count(),
int(os.environ.get("NIX_BUILD_CORES", "16")))
def go(exe: str, plugin_path: Path, compile_commands_json_dir: Path, jobs: int,
paths: list[Path], werror: bool, fix: bool):
args = [
# XXX: This explicitly invokes it with python because of a nixpkgs bug
# where clang-unwrapped does not patch interpreters in run-clang-tidy.
# However, making clang-unwrapped depend on python is also silly, so idk.
sys.executable,
exe,
'-quiet',
'-load',
plugin_path,
'-p',
compile_commands_json_dir,
'-j',
str(jobs),
'-header-filter',
r'src/[^/]+/.*\.hh'
]
if werror:
args += ['-warnings-as-errors', '*']
if fix:
args += ['-fix']
args += ['--']
args += paths
os.execvp(sys.executable, args)
def main():
import argparse
ap = argparse.ArgumentParser(description='Runs run-clang-tidy for you')
ap.add_argument('--jobs',
'-j',
type=int,
default=default_concurrency(),
help='Parallel linting jobs to run')
ap.add_argument('--plugin-path',
type=Path,
help='Path to the Lix clang-tidy plugin')
# FIXME: maybe we should integrate this so it just fixes the compdb for you and throws it in a tempdir?
ap.add_argument(
'--compdb-path',
type=Path,
help=
'Path to the directory containing the fixed-up compilation database from clean_compdb'
)
ap.add_argument('--werror',
action='store_true',
help='Warnings get turned into errors')
ap.add_argument('--fix',
action='store_true',
help='Apply fixes for warnings')
ap.add_argument('--run-clang-tidy-path',
default='run-clang-tidy',
help='Path to run-clang-tidy')
ap.add_argument('paths', nargs='*', help='Source paths to check')
args = ap.parse_args()
go(args.run_clang_tidy_path, args.plugin_path, args.compdb_path, args.jobs,
args.paths, args.werror, args.fix)
if __name__ == '__main__':
main()

View file

@ -13,8 +13,8 @@ def process_compdb(compdb: list[dict]) -> list[dict]:
out = []
eat_next = False
for i, arg in enumerate(args):
if arg in ['-fpch-preprocess', '-fpch-instantiate-templates']:
# -fpch-preprocess as used with gcc, -fpch-instantiate-templates as used by clang
if arg == '-fpch-preprocess':
# as used with gcc
continue
elif arg == '-include-pch' or (arg == '-include' and args[i + 1] == 'precompiled-headers.hh'):
# -include-pch some-pch (clang), or -include some-pch (gcc)
@ -30,14 +30,7 @@ def process_compdb(compdb: list[dict]) -> list[dict]:
item['command'] = shlex.join(munch_command(shlex.split(item['command'])))
return item
def cmdfilter(item: dict) -> bool:
file = item['file']
return (
not file.endswith('precompiled-headers.hh')
and not file.endswith('.rs')
)
return [chomp(x) for x in compdb if cmdfilter(x)]
return [chomp(x) for x in compdb if not x['file'].endswith('precompiled-headers.hh')]
def main():

View file

@ -58,17 +58,26 @@ build_all_generated_headers = custom_target(
if lix_clang_tidy_so_found
run_clang_tidy_args = [
meson.current_source_dir() / 'clang-tidy-runner.py',
'--run-clang-tidy-path', run_clang_tidy,
'--compdb-path', meson.current_build_dir(),
'--plugin-path', lix_clang_tidy_so,
'-load',
lix_clang_tidy_so,
'-p',
# We have to workaround a run-clang-tidy bug too, so we must give the
# directory name rather than the actual compdb file.
# https://github.com/llvm/llvm-project/issues/101440
meson.current_build_dir(),
'-quiet',
]
run_target(
'clang-tidy',
command : [
# XXX: This explicitly invokes it with python because of a nixpkgs bug
# where clang-unwrapped does not patch interpreters in run-clang-tidy.
# However, making clang-unwrapped depend on python is also silly, so idk.
python,
run_clang_tidy,
run_clang_tidy_args,
'--werror',
'-warnings-as-errors',
'*',
],
depends : [
build_all_generated_headers,
@ -78,8 +87,9 @@ if lix_clang_tidy_so_found
'clang-tidy-fix',
command : [
python,
run_clang_tidy,
run_clang_tidy_args,
'--fix',
'-fix',
],
depends : [
build_all_generated_headers,

View file

@ -14,7 +14,7 @@ function _nix_complete
# But the variable also misses the current token so it cancels out.
set -l nix_arg_to_complete (count $nix_args)
env NIX_GET_COMPLETIONS=$nix_arg_to_complete $nix_args $current_token 2>/dev/null
env NIX_GET_COMPLETIONS=$nix_arg_to_complete $nix_args $current_token
end
function _nix_accepts_files

View file

@ -1,106 +0,0 @@
From d0f2a5bc2300b96b2434c7838184c1dfd6a639f5 Mon Sep 17 00:00:00 2001
From: Rebecca Turner <rbt@sent.as>
Date: Sun, 8 Sep 2024 15:42:42 -0700
Subject: [PATCH 1/2] Recognize Meta+Left and Meta+Right
Recognize `Alt-Left` and `Alt-Right` for navigating by words in more
terminals/shells/platforms.
I'm not sure exactly where to find canonical documentation for these
codes, but this seems to match what my terminal produces (macOS + iTerm2
+ Fish + Tmux).
It might also be nice to have some more support for editing the bindings
for these characters; sequences of more than one character are not
supported by `el_bind_key` and similar.
Originally from: https://github.com/troglobit/editline/pull/70
This patch is applied upstream: https://gerrit.lix.systems/c/lix/+/1883
---
src/editline.c | 29 +++++++++++++++++++++++++++--
1 file changed, 27 insertions(+), 2 deletions(-)
diff --git a/src/editline.c b/src/editline.c
index 5ec9afb..d1cfbbc 100644
--- a/src/editline.c
+++ b/src/editline.c
@@ -1034,6 +1034,30 @@ static el_status_t meta(void)
return CSeof;
#ifdef CONFIG_ANSI_ARROWS
+ /* See: https://en.wikipedia.org/wiki/ANSI_escape_code */
+ /* Recognize ANSI escapes for `Meta+Left` and `Meta+Right`. */
+ if (c == '\e') {
+ switch (tty_get()) {
+ case '[':
+ {
+ switch (tty_get()) {
+ /* \e\e[C = Meta+Right */
+ case 'C': return fd_word();
+ /* \e\e[D = Meta+Left */
+ case 'D': return bk_word();
+ default:
+ break;
+ }
+
+ return el_ring_bell();
+ }
+ default:
+ break;
+ }
+
+ return el_ring_bell();
+ }
+
/* Also include VT-100 arrows. */
if (c == '[' || c == 'O') {
switch (tty_get()) {
@@ -1043,6 +1067,7 @@ static el_status_t meta(void)
char seq[4] = { 0 };
seq[0] = tty_get();
+ /* \e[1~ */
if (seq[0] == '~')
return beg_line(); /* Home */
@@ -1050,9 +1075,9 @@ static el_status_t meta(void)
seq[c] = tty_get();
if (!strncmp(seq, ";5C", 3))
- return fd_word(); /* Ctrl+Right */
+ return fd_word(); /* \e[1;5C = Ctrl+Right */
if (!strncmp(seq, ";5D", 3))
- return bk_word(); /* Ctrl+Left */
+ return bk_word(); /* \e[1;5D = Ctrl+Left */
break;
}
From 4c4455353a0a88bee09d5f27c28f81f747682fed Mon Sep 17 00:00:00 2001
From: Rebecca Turner <rbt@sent.as>
Date: Mon, 9 Sep 2024 09:44:44 -0700
Subject: [PATCH 2/2] Add support for \e[1;3C and \e[1;3D
---
src/editline.c | 6 ++++--
1 file changed, 4 insertions(+), 2 deletions(-)
diff --git a/src/editline.c b/src/editline.c
index d1cfbbc..350b5cb 100644
--- a/src/editline.c
+++ b/src/editline.c
@@ -1074,9 +1074,11 @@ static el_status_t meta(void)
for (c = 1; c < 3; c++)
seq[c] = tty_get();
- if (!strncmp(seq, ";5C", 3))
+ if (!strncmp(seq, ";5C", 3)
+ || !strncmp(seq, ";3C", 3))
return fd_word(); /* \e[1;5C = Ctrl+Right */
- if (!strncmp(seq, ";5D", 3))
+ if (!strncmp(seq, ";5D", 3)
+ || !strncmp(seq, ";3D", 3))
return bk_word(); /* \e[1;5D = Ctrl+Left */
break;

View file

@ -15,8 +15,6 @@
brotli,
bzip2,
callPackage,
capnproto-lix ? __forDefaults.capnproto-lix,
capnproto,
cmake,
curl,
doxygen,
@ -38,14 +36,11 @@
mercurial,
meson,
ninja,
ncurses,
openssl,
pegtl,
pkg-config,
python3,
rapidcheck,
rustPlatform,
rustc,
sqlite,
toml11,
util-linuxMinimal ? utillinuxMinimal,
@ -54,6 +49,9 @@
busybox-sandbox-shell,
# internal fork of nix-doc providing :doc in the repl
lix-doc ? __forDefaults.lix-doc,
pname ? "lix",
versionSuffix ? "",
officialRelease ? __forDefaults.versionJson.official_release,
@ -82,36 +80,13 @@
boehmgc-nix = boehmgc.override { enableLargeConfig = true; };
editline-lix = editline.overrideAttrs (prev: {
patches = (prev.patches or [ ]) ++ [
# Recognize `Alt-Left` and `Alt-Right` for navigating by words in more
# terminals/shells/platforms.
#
# See: https://github.com/troglobit/editline/pull/70
./nix-support/editline.patch
];
configureFlags = (prev.configureFlags or [ ]) ++ [
# Enable SIGSTOP (Ctrl-Z) behavior.
(lib.enableFeature true "sigstop")
# Enable ANSI arrow keys.
(lib.enableFeature true "arrow-keys")
# Use termcap library to query terminal size.
(lib.enableFeature (ncurses != null) "termcap")
];
buildInputs = (prev.buildInputs or [ ]) ++ [ ncurses ];
configureFlags = prev.configureFlags or [ ] ++ [ (lib.enableFeature true "sigstop") ];
});
lix-doc = callPackage ./lix-doc/package.nix { };
build-release-notes = callPackage ./maintainers/build-release-notes.nix { };
# needs explicit c++20 to enable coroutine support
capnproto-lix = capnproto.overrideAttrs { CXXFLAGS = "-std=c++20"; };
},
}:
# gcc miscompiles coroutines at least until 13.2, possibly longer
assert stdenv.cc.isClang || lintInsteadOfBuild;
let
inherit (__forDefaults) canRunInstalled;
inherit (lib) fileset;
@ -164,8 +139,6 @@ let
./meson
./scripts/meson.build
./subprojects
# Required for meson to generate Cargo wraps
./Cargo.lock
]);
functionalTestFiles = fileset.unions [
@ -246,8 +219,6 @@ stdenv.mkDerivation (finalAttrs: {
meson
ninja
cmake
rustc
capnproto-lix
]
++ [
(lib.getBin lowdown)
@ -287,8 +258,8 @@ stdenv.mkDerivation (finalAttrs: {
lowdown
libsodium
toml11
lix-doc
pegtl
capnproto-lix
]
++ lib.optionals hostPlatform.isLinux [
libseccomp
@ -317,15 +288,8 @@ stdenv.mkDerivation (finalAttrs: {
env = {
BOOST_INCLUDEDIR = "${lib.getDev boost}/include";
BOOST_LIBRARYDIR = "${lib.getLib boost}/lib";
# Meson allows referencing a /usr/share/cargo/registry shaped thing for subproject sources.
# Turns out the Nix-generated Cargo dependencies are named the same as they
# would be in a Cargo registry cache.
MESON_PACKAGE_CACHE_DIR = finalAttrs.cargoDeps;
};
cargoDeps = rustPlatform.importCargoLock { lockFile = ./Cargo.lock; };
preConfigure =
lib.optionalString (!finalAttrs.dontBuild && !hostPlatform.isStatic) ''
# Copy libboost_context so we don't get all of Boost in our closure.
@ -461,10 +425,6 @@ stdenv.mkDerivation (finalAttrs: {
pre-commit-checks,
contribNotice,
check-syscalls,
# debuggers
gdb,
rr,
}:
let
glibcFix = lib.optionalAttrs (buildPlatform.isLinux && glibcLocales != null) {
@ -544,8 +504,6 @@ stdenv.mkDerivation (finalAttrs: {
]
++ lib.optional (pre-commit-checks ? enabledPackages) pre-commit-checks.enabledPackages
++ lib.optional (lib.meta.availableOn buildPlatform clangbuildanalyzer) clangbuildanalyzer
++ lib.optional (!stdenv.isDarwin) gdb
++ lib.optional (lib.meta.availableOn buildPlatform rr) rr
++ finalAttrs.checkInputs;
shellHook = ''

View file

@ -1,7 +1,11 @@
#include <cstdlib>
#include <cstring>
#include <algorithm>
#include <set>
#include <memory>
#include <string_view>
#include <tuple>
#include <iomanip>
#if __APPLE__
#include <sys/time.h>
#endif
@ -14,7 +18,6 @@
#include "build-result.hh"
#include "store-api.hh"
#include "derivations.hh"
#include "strings.hh"
#include "local-store.hh"
#include "legacy.hh"
#include "experimental-features.hh"

View file

@ -20,15 +20,13 @@ struct SingleBuiltPathBuilt {
DECLARE_CMP(SingleBuiltPathBuilt);
};
namespace built_path::detail {
using SingleBuiltPathRaw = std::variant<
using _SingleBuiltPathRaw = std::variant<
DerivedPathOpaque,
SingleBuiltPathBuilt
>;
}
struct SingleBuiltPath : built_path::detail::SingleBuiltPathRaw {
using Raw = built_path::detail::SingleBuiltPathRaw;
struct SingleBuiltPath : _SingleBuiltPathRaw {
using Raw = _SingleBuiltPathRaw;
using Raw::Raw;
using Opaque = DerivedPathOpaque;
@ -67,19 +65,17 @@ struct BuiltPathBuilt {
DECLARE_CMP(BuiltPathBuilt);
};
namespace built_path::detail {
using BuiltPathRaw = std::variant<
using _BuiltPathRaw = std::variant<
DerivedPath::Opaque,
BuiltPathBuilt
>;
}
/**
* A built path. Similar to a DerivedPath, but enriched with the corresponding
* output path(s).
*/
struct BuiltPath : built_path::detail::BuiltPathRaw {
using Raw = built_path::detail::BuiltPathRaw;
struct BuiltPath : _BuiltPathRaw {
using Raw = _BuiltPathRaw;
using Raw::Raw;
using Opaque = DerivedPathOpaque;

View file

@ -9,24 +9,8 @@
#include "store-api.hh"
#include "command.hh"
#include <regex>
namespace nix {
static std::regex const identifierRegex("^[A-Za-z_][A-Za-z0-9_'-]*$");
static void warnInvalidNixIdentifier(const std::string & name)
{
std::smatch match;
if (!std::regex_match(name, match, identifierRegex)) {
warn("This Nix invocation specifies a value for argument '%s' which isn't a valid \
Nix identifier. The project is considering to drop support for this \
or to require quotes around args that aren't valid Nix identifiers. \
If you depend on this behavior, please reach out in \
https://git.lix.systems/lix-project/lix/issues/496 so we can discuss \
your use-case.", name);
}
}
MixEvalArgs::MixEvalArgs()
{
addFlag({
@ -34,10 +18,7 @@ MixEvalArgs::MixEvalArgs()
.description = "Pass the value *expr* as the argument *name* to Nix functions.",
.category = category,
.labels = {"name", "expr"},
.handler = {[&](std::string name, std::string expr) {
warnInvalidNixIdentifier(name);
autoArgs[name] = 'E' + expr;
}}
.handler = {[&](std::string name, std::string expr) { autoArgs[name] = 'E' + expr; }}
});
addFlag({
@ -45,10 +26,7 @@ MixEvalArgs::MixEvalArgs()
.description = "Pass the string *string* as the argument *name* to Nix functions.",
.category = category,
.labels = {"name", "string"},
.handler = {[&](std::string name, std::string s) {
warnInvalidNixIdentifier(name);
autoArgs[name] = 'S' + s;
}},
.handler = {[&](std::string name, std::string s) { autoArgs[name] = 'S' + s; }},
});
addFlag({

View file

@ -1,7 +1,6 @@
#include "editor-for.hh"
#include "environment-variables.hh"
#include "source-path.hh"
#include "strings.hh"
namespace nix {

View file

@ -50,7 +50,7 @@ libcmd = library(
editline,
lowdown,
nlohmann_json,
liblix_doc,
lix_doc
],
cpp_pch : cpp_pch,
install : true,

View file

@ -8,6 +8,10 @@
#include <string_view>
#include <cerrno>
#ifdef READLINE
#include <readline/history.h>
#include <readline/readline.h>
#else
// editline < 1.15.2 don't wrap their API for C++ usage
// (added in https://github.com/troglobit/editline/commit/91398ceb3427b730995357e9d120539fb9bb7461).
// This results in linker errors due to to name-mangling of editline C symbols.
@ -16,6 +20,7 @@
extern "C" {
#include <editline.h>
}
#endif
#include "finally.hh"
#include "repl-interacter.hh"
@ -110,13 +115,17 @@ ReadlineLikeInteracter::Guard ReadlineLikeInteracter::init(detail::ReplCompleter
} catch (SysError & e) {
logWarning(e.info());
}
#ifndef READLINE
el_hist_size = 1000;
#endif
read_history(historyFile.c_str());
auto oldRepl = curRepl;
curRepl = repl;
Guard restoreRepl([oldRepl] { curRepl = oldRepl; });
#ifndef READLINE
rl_set_complete_func(completionCallback);
rl_set_list_possib_func(listPossibleCallback);
#endif
return restoreRepl;
}

View file

@ -926,7 +926,7 @@ void NixRepl::loadFiles()
void NixRepl::loadReplOverlays()
{
if (evalSettings.replOverlays.get().empty()) {
if (!evalSettings.replOverlays) {
return;
}

View file

@ -1,8 +1,9 @@
#pragma once
///@file
#include <algorithm>
#include "error.hh"
#include "types.hh"
#include "pos-idx.hh"
namespace nix {

View file

@ -31,7 +31,7 @@ Value * EvalState::allocValue()
#endif
nrValues++;
return static_cast<Value *>(p);
return (Value *) p;
}
@ -54,10 +54,10 @@ Env & EvalState::allocEnv(size_t size)
void * p = *env1AllocCache;
*env1AllocCache = GC_NEXT(p);
GC_NEXT(p) = nullptr;
env = static_cast<Env *>(p);
env = (Env *) p;
} else
#endif
env = static_cast<Env *>(gcAllocBytes(sizeof(Env) + size * sizeof(Value *)));
env = (Env *) gcAllocBytes(sizeof(Env) + size * sizeof(Value *));
/* We assume that env->values has been cleared by the allocator; maybeThunk() and lookupVar fromWith expect this. */

View file

@ -151,7 +151,7 @@ struct EvalSettings : Config
This is useful for debugging warnings in third-party Nix code.
)"};
PathsSetting<Paths> replOverlays{this, Paths(), "repl-overlays",
PathsSetting replOverlays{this, Paths(), "repl-overlays",
R"(
A list of files containing Nix expressions that can be used to add
default bindings to [`nix
@ -185,54 +185,6 @@ struct EvalSettings : Config
else
{ }
```
Here's a more elaborate `repl-overlay`, which provides the following
variables:
- The original, unmodified variables are aliased to `original`.
- `legacyPackages.${system}` (if it exists) or `packages.${system}`
(otherwise) is aliased to `pkgs`.
- All attribute set variables with a `${system}` attribute are
abbreviated in the same manner; e.g. `devShells.${system}` is
shortened to `devShells`.
For example, the following attribute set:
```nix
info: final: attrs: let
# Equivalent to nixpkgs `lib.optionalAttrs`.
optionalAttrs = predicate: attrs:
if predicate
then attrs
else {};
# If `attrs.${oldName}.${info.currentSystem}` exists, alias `${newName}` to
# it.
collapseRenamed = oldName: newName:
optionalAttrs (builtins.hasAttr oldName attrs
&& builtins.hasAttr info.currentSystem attrs.${oldName})
{
${newName} = attrs.${oldName}.${info.currentSystem};
};
# Alias `attrs.${oldName}.${info.currentSystem} to `${newName}`.
collapse = name: collapseRenamed name name;
# Alias all `attrs` keys with an `${info.currentSystem}` attribute.
collapseAll =
builtins.foldl'
(prev: name: prev // collapse name)
{}
(builtins.attrNames attrs);
in
# Preserve the original bindings as `original`.
(optionalAttrs (! attrs ? original)
{
original = attrs;
})
// (collapseRenamed "packages" "pkgs")
// (collapseRenamed "legacyPackages" "pkgs")
// collapseAll
```
)"};
};

View file

@ -19,7 +19,6 @@
#include "gc-small-vector.hh"
#include "fetch-to-store.hh"
#include "flake/flakeref.hh"
#include "exit.hh"
#include <algorithm>
#include <iostream>
@ -250,7 +249,6 @@ EvalState::EvalState(
.findFile = symbols.create("__findFile"),
.nixPath = symbols.create("__nixPath"),
.body = symbols.create("body"),
.overrides = symbols.create("__overrides"),
}
, repair(NoRepair)
, emptyBindings(0)
@ -2712,29 +2710,20 @@ Expr & EvalState::parseExprFromFile(const SourcePath & path, std::shared_ptr<Sta
}
Expr & EvalState::parseExprFromString(
std::string s_,
const SourcePath & basePath,
std::shared_ptr<StaticEnv> & staticEnv,
const FeatureSettings & featureSettings
)
Expr & EvalState::parseExprFromString(std::string s_, const SourcePath & basePath, std::shared_ptr<StaticEnv> & staticEnv, const ExperimentalFeatureSettings & xpSettings)
{
// NOTE this method (and parseStdin) must take care to *fully copy* their input
// into their respective Pos::Origin until the parser stops overwriting its input
// data.
auto s = make_ref<std::string>(s_);
s_.append("\0\0", 2);
return *parse(s_.data(), s_.size(), Pos::String{.source = s}, basePath, staticEnv, featureSettings);
return *parse(s_.data(), s_.size(), Pos::String{.source = s}, basePath, staticEnv, xpSettings);
}
Expr & EvalState::parseExprFromString(
std::string s,
const SourcePath & basePath,
const FeatureSettings & featureSettings
)
Expr & EvalState::parseExprFromString(std::string s, const SourcePath & basePath, const ExperimentalFeatureSettings & xpSettings)
{
return parseExprFromString(std::move(s), basePath, staticBaseEnv, featureSettings);
return parseExprFromString(std::move(s), basePath, staticBaseEnv, xpSettings);
}

View file

@ -12,7 +12,6 @@
#include "experimental-features.hh"
#include "search-path.hh"
#include "repl-exit-status.hh"
#include "backed-string-view.hh"
#include <map>
#include <optional>
@ -345,17 +344,8 @@ public:
/**
* Parse a Nix expression from the specified string.
*/
Expr & parseExprFromString(
std::string s,
const SourcePath & basePath,
std::shared_ptr<StaticEnv> & staticEnv,
const FeatureSettings & xpSettings = featureSettings
);
Expr & parseExprFromString(
std::string s,
const SourcePath & basePath,
const FeatureSettings & xpSettings = featureSettings
);
Expr & parseExprFromString(std::string s, const SourcePath & basePath, std::shared_ptr<StaticEnv> & staticEnv, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
Expr & parseExprFromString(std::string s, const SourcePath & basePath, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
Expr & parseStdin();
@ -579,7 +569,7 @@ private:
Pos::Origin origin,
const SourcePath & basePath,
std::shared_ptr<StaticEnv> & staticEnv,
const FeatureSettings & xpSettings = featureSettings);
const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
/**
* Current Nix call stack depth, used with `max-call-depth` setting to throw stack overflow hopefully before we run out of system stack.
@ -792,4 +782,4 @@ static constexpr std::string_view corepkgsPrefix{"/__corepkgs__/"};
}
#include "eval-inline.hh" // IWYU pragma: keep
#include "eval-inline.hh"

View file

@ -342,21 +342,8 @@ static void updateOverrides(std::map<InputPath, FlakeInput> & overrideMap, const
for (auto & [id, input] : overrides) {
auto inputPath(inputPathPrefix);
inputPath.push_back(id);
/* Given
*
* { inputs.hydra.inputs.nix-eval-jobs.inputs.lix.follows = "lix"; }
*
* then `nix-eval-jobs` doesn't have an override.
* It's neither replaced using follows nor by a different
* URL. Thus no need to add it to overrides and thus re-fetch
* it.
*/
if (input.ref || input.follows) {
// Do not override existing assignment from outer flake
overrideMap.insert({inputPath, input});
}
// Do not override existing assignment from outer flake
overrideMap.insert({inputPath, input});
updateOverrides(overrideMap, input.overrides, inputPath);
}
}

View file

@ -120,7 +120,6 @@ inline T * gcAllocType(size_t howMany = 1)
// However, people can and do request zero sized allocations, so we need
// to check that neither of our multiplicands were zero before complaining
// about it.
// NOLINTNEXTLINE(bugprone-sizeof-expression): yeah we only seem to alloc pointers with this. the calculation *is* correct though!
auto checkedSz = checked::Checked<size_t>(howMany) * sizeof(T);
size_t sz = checkedSz.valueWrapping();
if (checkedSz.overflowed()) {

View file

@ -11,7 +11,6 @@
namespace nix {
ExprBlackHole eBlackHole;
Expr *eBlackHoleAddr = &eBlackHole;
// FIXME: remove, because *symbols* are abstract and do not have a single
// textual representation; see printIdentifier()

View file

@ -11,7 +11,6 @@
#include "eval-error.hh"
#include "pos-idx.hh"
#include "pos-table.hh"
#include "strings.hh"
namespace nix {
@ -49,7 +48,7 @@ protected:
public:
struct AstSymbols {
Symbol sub, lessThan, mul, div, or_, findFile, nixPath, body, overrides;
Symbol sub, lessThan, mul, div, or_, findFile, nixPath, body;
};

View file

@ -115,7 +115,7 @@ struct ExprState
std::unique_ptr<Expr> pipe(PosIdx pos, State & state, bool flip = false)
{
if (!state.featureSettings.isEnabled(Xp::PipeOperator))
if (!state.xpSettings.isEnabled(Xp::PipeOperator))
throw ParseError({
.msg = HintFmt("Pipe operator is disabled"),
.pos = state.positions[pos]
@ -656,10 +656,10 @@ template<> struct BuildAST<grammar::expr::path> : p::maybe_nothing {};
template<> struct BuildAST<grammar::expr::uri> {
static void apply(const auto & in, ExprState & s, State & ps) {
bool URLLiterals = ps.featureSettings.isEnabled(Dep::UrlLiterals);
if (!URLLiterals)
bool noURLLiterals = ps.xpSettings.isEnabled(Xp::NoUrlLiterals);
if (noURLLiterals)
throw ParseError({
.msg = HintFmt("URL literals are deprecated, allow using them with --extra-deprecated-features=url-literals"),
.msg = HintFmt("URL literals are disabled"),
.pos = ps.positions[ps.at(in)]
});
s.pushExpr<ExprString>(ps.at(in), in.string());
@ -668,16 +668,6 @@ template<> struct BuildAST<grammar::expr::uri> {
template<> struct BuildAST<grammar::expr::ancient_let> : change_head<BindingsState> {
static void success(const auto & in, BindingsState & b, ExprState & s, State & ps) {
// Added 2024-09-18. Turn into an error at some point in the future.
// See the documentation on deprecated features for more details.
if (!ps.featureSettings.isEnabled(Dep::AncientLet))
warn(
"%s found at %s. This feature is deprecated and will be removed in the future. Use %s to silence this warning.",
"let {",
ps.positions[ps.at(in)],
"--extra-deprecated-features ancient-let"
);
b.attrs.pos = ps.at(in);
b.attrs.recursive = true;
s.pushExpr<ExprSelect>(b.attrs.pos, b.attrs.pos, std::make_unique<ExprAttrs>(std::move(b.attrs)), ps.s.body);
@ -686,12 +676,6 @@ template<> struct BuildAST<grammar::expr::ancient_let> : change_head<BindingsSta
template<> struct BuildAST<grammar::expr::rec_set> : change_head<BindingsState> {
static void success(const auto & in, BindingsState & b, ExprState & s, State & ps) {
// Before inserting new attrs, check for __override and throw an error
// (the error will initially be a warning to ease migration)
if (!featureSettings.isEnabled(Dep::RecSetOverrides) && b.attrs.attrs.contains(ps.s.overrides)) {
ps.overridesFound(ps.at(in));
}
b.attrs.pos = ps.at(in);
b.attrs.recursive = true;
s.pushExpr<ExprAttrs>(b.attrs.pos, std::move(b.attrs));
@ -874,7 +858,7 @@ Expr * EvalState::parse(
Pos::Origin origin,
const SourcePath & basePath,
std::shared_ptr<StaticEnv> & staticEnv,
const FeatureSettings & featureSettings)
const ExperimentalFeatureSettings & xpSettings)
{
parser::State s = {
symbols,
@ -882,7 +866,7 @@ Expr * EvalState::parse(
basePath,
positions.addOrigin(origin, length),
exprSymbols,
featureSettings,
xpSettings
};
parser::ExprState x;

View file

@ -2,7 +2,6 @@
///@file
#include "eval.hh"
#include "logging.hh"
namespace nix::parser {
@ -20,11 +19,10 @@ struct State
SourcePath basePath;
PosTable::Origin origin;
const Expr::AstSymbols & s;
const FeatureSettings & featureSettings;
const ExperimentalFeatureSettings & xpSettings;
void dupAttr(const AttrPath & attrPath, const PosIdx pos, const PosIdx prevPos);
void dupAttr(Symbol attr, const PosIdx pos, const PosIdx prevPos);
void overridesFound(const PosIdx pos);
void addAttr(ExprAttrs * attrs, AttrPath && attrPath, std::unique_ptr<Expr> e, const PosIdx pos);
std::unique_ptr<Formals> validateFormals(std::unique_ptr<Formals> formals, PosIdx pos = noPos, Symbol arg = {});
std::unique_ptr<Expr> stripIndentation(const PosIdx pos,
@ -60,17 +58,6 @@ inline void State::dupAttr(Symbol attr, const PosIdx pos, const PosIdx prevPos)
});
}
inline void State::overridesFound(const PosIdx pos) {
// Added 2024-09-18. Turn into an error at some point in the future.
// See the documentation on deprecated features for more details.
warn(
"%s found at %s. This feature is deprecated and will be removed in the future. Use %s to silence this warning.",
"__overrides",
positions[pos],
"--extra-deprecated-features rec-set-overrides"
);
}
inline void State::addAttr(ExprAttrs * attrs, AttrPath && attrPath, std::unique_ptr<Expr> e, const PosIdx pos)
{
AttrPath::iterator i;
@ -136,12 +123,6 @@ inline void State::addAttr(ExprAttrs * attrs, AttrPath && attrPath, std::unique_
dupAttr(attrPath, pos, j->second.pos);
}
} else {
// Before inserting new attrs, check for __override and throw an error
// (the error will initially be a warning to ease migration)
if (attrs->recursive && !featureSettings.isEnabled(Dep::RecSetOverrides) && i->symbol == s.overrides) {
overridesFound(pos);
}
// This attr path is not defined. Let's create it.
e->setName(i->symbol);
attrs->attrs.emplace(std::piecewise_construct,

View file

@ -136,9 +136,7 @@ class ExternalValueBase
std::ostream & operator << (std::ostream & str, const ExternalValueBase & v);
/** This is just the address of eBlackHole. It exists because eBlackHole has an
* incomplete type at usage sites so is not possible to cast. */
extern Expr *eBlackHoleAddr;
extern ExprBlackHole eBlackHole;
struct NewValueAs
{
@ -198,7 +196,6 @@ private:
public:
// Discount `using NewValueAs::*;`
// NOLINTNEXTLINE(bugprone-macro-parentheses)
#define USING_VALUETYPE(name) using name = NewValueAs::name
USING_VALUETYPE(integer_t);
USING_VALUETYPE(floating_t);
@ -476,7 +473,7 @@ public:
/// Constructs an evil thunk, whose evaluation represents infinite recursion.
explicit Value(blackhole_t)
: internalType(tThunk)
, thunk({ .env = nullptr, .expr = eBlackHoleAddr })
, thunk({ .env = nullptr, .expr = reinterpret_cast<Expr *>(&eBlackHole) })
{ }
Value(Value const & rhs) = default;
@ -516,10 +513,7 @@ public:
// type() == nThunk
inline bool isThunk() const { return internalType == tThunk; };
inline bool isApp() const { return internalType == tApp; };
inline bool isBlackhole() const
{
return internalType == tThunk && thunk.expr == eBlackHoleAddr;
}
inline bool isBlackhole() const;
// type() == nFunction
inline bool isLambda() const { return internalType == tLambda; };
@ -675,6 +669,11 @@ public:
void mkStringMove(const char * s, const NixStringContext & context);
inline void mkString(const Symbol & s)
{
mkString(((const std::string &) s).c_str());
}
void mkPath(const SourcePath & path);
inline void mkPath(const char * path)
@ -733,11 +732,7 @@ public:
lambda.fun = f;
}
inline void mkBlackhole()
{
internalType = tThunk;
thunk.expr = eBlackHoleAddr;
}
inline void mkBlackhole();
void mkPrimOp(PrimOp * p);
@ -837,6 +832,18 @@ public:
}
};
bool Value::isBlackhole() const
{
return internalType == tThunk && thunk.expr == (Expr*) &eBlackHole;
}
void Value::mkBlackhole()
{
internalType = tThunk;
thunk.expr = (Expr*) &eBlackHole;
}
using ValueVector = GcVector<Value *>;
using ValueMap = GcMap<Symbol, Value *>;
using ValueVectorMap = std::map<Symbol, ValueVector>;

View file

@ -7,33 +7,7 @@
namespace nix {
void to_json(nlohmann::json & j, const AcceptFlakeConfig & e)
{
if (e == AcceptFlakeConfig::False) {
j = false;
} else if (e == AcceptFlakeConfig::Ask) {
j = "ask";
} else if (e == AcceptFlakeConfig::True) {
j = true;
} else {
abort();
}
}
void from_json(const nlohmann::json & j, AcceptFlakeConfig & e)
{
if (j == false) {
e = AcceptFlakeConfig::False;
} else if (j == "ask") {
e = AcceptFlakeConfig::Ask;
} else if (j == true) {
e = AcceptFlakeConfig::True;
} else {
throw Error("Invalid accept-flake-config value '%s'", std::string(j));
}
}
template<> AcceptFlakeConfig BaseSetting<AcceptFlakeConfig>::parse(const std::string & str, const ApplyConfigOptions & options) const
template<> AcceptFlakeConfig BaseSetting<AcceptFlakeConfig>::parse(const std::string & str) const
{
if (str == "true") return AcceptFlakeConfig::True;
else if (str == "ask") return AcceptFlakeConfig::Ask;

View file

@ -13,9 +13,6 @@ namespace nix {
enum class AcceptFlakeConfig { False, Ask, True };
void to_json(nlohmann::json & j, const AcceptFlakeConfig & e);
void from_json(const nlohmann::json & j, AcceptFlakeConfig & e);
struct FetchSettings : public Config
{
FetchSettings();

View file

@ -7,8 +7,6 @@
#include "path.hh"
#include "attrs.hh"
#include "url.hh"
#include "ref.hh"
#include "strings.hh"
#include <memory>

View file

@ -7,7 +7,7 @@ namespace nix {
LogFormat defaultLogFormat = LogFormat::raw;
LogFormat parseLogFormat(const std::string & logFormatStr) {
if (logFormatStr == "raw")
if (logFormatStr == "raw" || getEnv("NIX_GET_COMPLETIONS"))
return LogFormat::raw;
else if (logFormatStr == "raw-with-logs")
return LogFormat::rawWithLogs;

View file

@ -1,9 +1,8 @@
#include "progress-bar.hh"
#include "file-system.hh"
#include "sync.hh"
#include "store-api.hh"
#include "names.hh"
#include "terminal.hh"
#include "strings.hh"
#include <map>
#include <thread>
@ -92,7 +91,7 @@ void ProgressBar::resume()
nextWakeup = draw(*state, {});
state.wait_for(quitCV, std::chrono::milliseconds(50));
}
eraseProgressDisplay(*state);
writeLogsToStderr("\r\e[K");
});
}
@ -558,8 +557,7 @@ std::optional<char> ProgressBar::ask(std::string_view msg)
{
auto state(state_.lock());
if (state->paused > 0 || !isatty(STDIN_FILENO)) return {};
eraseProgressDisplay(*state);
std::cerr << msg;
std::cerr << fmt("\r\e[K%s ", msg);
auto s = trim(readLine(STDIN_FILENO));
if (s.size() != 1) return {};
draw(*state, {});

View file

@ -6,8 +6,6 @@
#include "loggers.hh"
#include "current-process.hh"
#include "terminal.hh"
#include "strings.hh"
#include "exit.hh"
#include <algorithm>
#include <exception>

View file

@ -7,10 +7,12 @@
#include "path.hh"
#include "derived-path.hh"
#include "processes.hh"
#include "strings.hh"
#include "exit.hh"
#include <signal.h>
#include <locale>
namespace nix {

View file

@ -10,7 +10,6 @@
#include "nar-accessor.hh"
#include "thread-pool.hh"
#include "signals.hh"
#include "strings.hh"
#include <chrono>
#include <regex>

View file

@ -1,15 +1,22 @@
#include "derivation-goal.hh"
#include "hook-instance.hh"
#include "worker.hh"
#include "builtins.hh"
#include "builtins/buildenv.hh"
#include "references.hh"
#include "finally.hh"
#include "archive.hh"
#include "compression.hh"
#include "common-protocol.hh"
#include "common-protocol-impl.hh" // IWYU pragma: keep
#include "common-protocol-impl.hh"
#include "topo-sort.hh"
#include "local-store.hh" // TODO remove, along with remaining downcasts
#include "logging-json.hh"
#include "substitution-goal.hh"
#include "drv-output-substitution-goal.hh"
#include "strings.hh"
#include <regex>
#include <queue>
#include <fstream>
#include <sys/types.h>
@ -58,8 +65,8 @@
namespace nix {
DerivationGoal::DerivationGoal(const StorePath & drvPath,
const OutputsSpec & wantedOutputs, Worker & worker, bool isDependency, BuildMode buildMode)
: Goal(worker, isDependency)
const OutputsSpec & wantedOutputs, Worker & worker, BuildMode buildMode)
: Goal(worker, DerivedPath::Built { .drvPath = makeConstantStorePathRef(drvPath), .outputs = wantedOutputs })
, useDerivation(true)
, drvPath(drvPath)
, wantedOutputs(wantedOutputs)
@ -71,13 +78,13 @@ DerivationGoal::DerivationGoal(const StorePath & drvPath,
DerivedPath::Built { makeConstantStorePathRef(drvPath), wantedOutputs }.to_string(worker.store));
trace("created");
mcExpectedBuilds = worker.expectedBuilds.addTemporarily(1);
mcExpectedBuilds = std::make_unique<MaintainCount<uint64_t>>(worker.expectedBuilds);
}
DerivationGoal::DerivationGoal(const StorePath & drvPath, const BasicDerivation & drv,
const OutputsSpec & wantedOutputs, Worker & worker, bool isDependency, BuildMode buildMode)
: Goal(worker, isDependency)
const OutputsSpec & wantedOutputs, Worker & worker, BuildMode buildMode)
: Goal(worker, DerivedPath::Built { .drvPath = makeConstantStorePathRef(drvPath), .outputs = wantedOutputs })
, useDerivation(false)
, drvPath(drvPath)
, wantedOutputs(wantedOutputs)
@ -91,7 +98,7 @@ DerivationGoal::DerivationGoal(const StorePath & drvPath, const BasicDerivation
DerivedPath::Built { makeConstantStorePathRef(drvPath), drv.outputNames() }.to_string(worker.store));
trace("created");
mcExpectedBuilds = worker.expectedBuilds.addTemporarily(1);
mcExpectedBuilds = std::make_unique<MaintainCount<uint64_t>>(worker.expectedBuilds);
/* Prevent the .chroot directory from being
garbage-collected. (See isActiveTempFile() in gc.cc.) */
@ -120,7 +127,6 @@ std::string DerivationGoal::key()
void DerivationGoal::killChild()
{
hook.reset();
builderOutFD = nullptr;
}
@ -131,9 +137,9 @@ Goal::Finished DerivationGoal::timedOut(Error && ex)
}
kj::Promise<Result<Goal::WorkResult>> DerivationGoal::work(bool inBuildSlot) noexcept
Goal::WorkResult DerivationGoal::work()
{
return (this->*state)(inBuildSlot);
return (this->*state)();
}
void DerivationGoal::addWantedOutputs(const OutputsSpec & outputs)
@ -157,35 +163,29 @@ void DerivationGoal::addWantedOutputs(const OutputsSpec & outputs)
}
kj::Promise<Result<Goal::WorkResult>> DerivationGoal::getDerivation(bool inBuildSlot) noexcept
try {
Goal::WorkResult DerivationGoal::getDerivation()
{
trace("init");
/* The first thing to do is to make sure that the derivation
exists. If it doesn't, it may be created through a
substitute. */
if (buildMode == bmNormal && worker.evalStore.isValidPath(drvPath)) {
return loadDerivation(inBuildSlot);
return loadDerivation();
}
state = &DerivationGoal::loadDerivation;
return {WaitForGoals{{worker.goalFactory().makePathSubstitutionGoal(drvPath)}}};
} catch (...) {
return {std::current_exception()};
return WaitForGoals{{worker.makePathSubstitutionGoal(drvPath)}};
}
kj::Promise<Result<Goal::WorkResult>> DerivationGoal::loadDerivation(bool inBuildSlot) noexcept
try {
Goal::WorkResult DerivationGoal::loadDerivation()
{
trace("loading derivation");
if (nrFailed != 0) {
return {done(
BuildResult::MiscFailure,
{},
Error("cannot build missing derivation '%s'", worker.store.printStorePath(drvPath))
)};
return done(BuildResult::MiscFailure, {}, Error("cannot build missing derivation '%s'", worker.store.printStorePath(drvPath)));
}
/* `drvPath' should already be a root, but let's be on the safe
@ -207,14 +207,12 @@ try {
}
assert(drv);
return haveDerivation(inBuildSlot);
} catch (...) {
return {std::current_exception()};
return haveDerivation();
}
kj::Promise<Result<Goal::WorkResult>> DerivationGoal::haveDerivation(bool inBuildSlot) noexcept
try {
Goal::WorkResult DerivationGoal::haveDerivation()
{
trace("have derivation");
parsedDrv = std::make_unique<ParsedDerivation>(drvPath, *drv);
@ -241,7 +239,7 @@ try {
});
}
return gaveUpOnSubstitution(inBuildSlot);
return gaveUpOnSubstitution();
}
for (auto & i : drv->outputsAndOptPaths(worker.store))
@ -263,64 +261,50 @@ try {
/* If they are all valid, then we're done. */
if (allValid && buildMode == bmNormal) {
return {done(BuildResult::AlreadyValid, std::move(validOutputs))};
return done(BuildResult::AlreadyValid, std::move(validOutputs));
}
/* We are first going to try to create the invalid output paths
through substitutes. If that doesn't work, we'll build
them. */
WaitForGoals result;
if (settings.useSubstitutes) {
if (parsedDrv->substitutesAllowed()) {
for (auto & [outputName, status] : initialOutputs) {
if (!status.wanted) continue;
if (!status.known)
result.goals.insert(
worker.goalFactory().makeDrvOutputSubstitutionGoal(
DrvOutput{status.outputHash, outputName},
buildMode == bmRepair ? Repair : NoRepair
)
);
else {
auto * cap = getDerivationCA(*drv);
result.goals.insert(worker.goalFactory().makePathSubstitutionGoal(
status.known->path,
buildMode == bmRepair ? Repair : NoRepair,
cap ? std::optional { *cap } : std::nullopt));
}
if (settings.useSubstitutes && parsedDrv->substitutesAllowed())
for (auto & [outputName, status] : initialOutputs) {
if (!status.wanted) continue;
if (!status.known)
result.goals.insert(
worker.makeDrvOutputSubstitutionGoal(
DrvOutput{status.outputHash, outputName},
buildMode == bmRepair ? Repair : NoRepair
)
);
else {
auto * cap = getDerivationCA(*drv);
result.goals.insert(worker.makePathSubstitutionGoal(
status.known->path,
buildMode == bmRepair ? Repair : NoRepair,
cap ? std::optional { *cap } : std::nullopt));
}
} else {
trace("skipping substitute because allowSubstitutes is false");
}
}
if (result.goals.empty()) { /* to prevent hang (no wake-up event) */
return outputsSubstitutionTried(inBuildSlot);
return outputsSubstitutionTried();
} else {
state = &DerivationGoal::outputsSubstitutionTried;
return {std::move(result)};
return result;
}
} catch (...) {
return {std::current_exception()};
}
kj::Promise<Result<Goal::WorkResult>> DerivationGoal::outputsSubstitutionTried(bool inBuildSlot) noexcept
try {
Goal::WorkResult DerivationGoal::outputsSubstitutionTried()
{
trace("all outputs substituted (maybe)");
assert(drv->type().isPure());
if (nrFailed > 0 && nrFailed > nrNoSubstituters + nrIncompleteClosure && !settings.tryFallback)
{
return {done(
BuildResult::TransientFailure,
{},
Error(
"some substitutes for the outputs of derivation '%s' failed (usually happens due "
"to networking issues); try '--fallback' to build derivation from source ",
worker.store.printStorePath(drvPath)
)
)};
if (nrFailed > 0 && nrFailed > nrNoSubstituters + nrIncompleteClosure && !settings.tryFallback) {
return done(BuildResult::TransientFailure, {},
Error("some substitutes for the outputs of derivation '%s' failed (usually happens due to networking issues); try '--fallback' to build derivation from source ",
worker.store.printStorePath(drvPath)));
}
/* If the substitutes form an incomplete closure, then we should
@ -354,13 +338,13 @@ try {
if (needRestart == NeedRestartForMoreOutputs::OutputsAddedDoNeed) {
needRestart = NeedRestartForMoreOutputs::OutputsUnmodifedDontNeed;
return haveDerivation(inBuildSlot);
return haveDerivation();
}
auto [allValid, validOutputs] = checkPathValidity();
if (buildMode == bmNormal && allValid) {
return {done(BuildResult::Substituted, std::move(validOutputs))};
return done(BuildResult::Substituted, std::move(validOutputs));
}
if (buildMode == bmRepair && allValid) {
return repairClosure();
@ -370,16 +354,14 @@ try {
worker.store.printStorePath(drvPath));
/* Nothing to wait for; tail call */
return gaveUpOnSubstitution(inBuildSlot);
} catch (...) {
return {std::current_exception()};
return gaveUpOnSubstitution();
}
/* At least one of the output paths could not be
produced using a substitute. So we have to build instead. */
kj::Promise<Result<Goal::WorkResult>> DerivationGoal::gaveUpOnSubstitution(bool inBuildSlot) noexcept
try {
Goal::WorkResult DerivationGoal::gaveUpOnSubstitution()
{
WaitForGoals result;
/* At this point we are building all outputs, so if more are wanted there
@ -393,7 +375,7 @@ try {
addWaiteeDerivedPath = [&](ref<SingleDerivedPath> inputDrv, const DerivedPathMap<StringSet>::ChildNode & inputNode) {
if (!inputNode.value.empty())
result.goals.insert(worker.goalFactory().makeGoal(
result.goals.insert(worker.makeGoal(
DerivedPath::Built {
.drvPath = inputDrv,
.outputs = inputNode.value,
@ -438,22 +420,20 @@ try {
if (!settings.useSubstitutes)
throw Error("dependency '%s' of '%s' does not exist, and substitution is disabled",
worker.store.printStorePath(i), worker.store.printStorePath(drvPath));
result.goals.insert(worker.goalFactory().makePathSubstitutionGoal(i));
result.goals.insert(worker.makePathSubstitutionGoal(i));
}
if (result.goals.empty()) {/* to prevent hang (no wake-up event) */
return inputsRealised(inBuildSlot);
return inputsRealised();
} else {
state = &DerivationGoal::inputsRealised;
return {result};
return result;
}
} catch (...) {
return {std::current_exception()};
}
kj::Promise<Result<Goal::WorkResult>> DerivationGoal::repairClosure() noexcept
try {
Goal::WorkResult DerivationGoal::repairClosure()
{
assert(drv->type().isPure());
/* If we're repairing, we now know that our own outputs are valid.
@ -496,9 +476,9 @@ try {
worker.store.printStorePath(i), worker.store.printStorePath(drvPath));
auto drvPath2 = outputsToDrv.find(i);
if (drvPath2 == outputsToDrv.end())
result.goals.insert(worker.goalFactory().makePathSubstitutionGoal(i, Repair));
result.goals.insert(worker.makePathSubstitutionGoal(i, Repair));
else
result.goals.insert(worker.goalFactory().makeGoal(
result.goals.insert(worker.makeGoal(
DerivedPath::Built {
.drvPath = makeConstantStorePathRef(drvPath2->second),
.outputs = OutputsSpec::All { },
@ -507,49 +487,39 @@ try {
}
if (result.goals.empty()) {
return {done(BuildResult::AlreadyValid, assertPathValidity())};
return done(BuildResult::AlreadyValid, assertPathValidity());
}
state = &DerivationGoal::closureRepaired;
return {result};
} catch (...) {
return {std::current_exception()};
return result;
}
kj::Promise<Result<Goal::WorkResult>> DerivationGoal::closureRepaired(bool inBuildSlot) noexcept
try {
Goal::WorkResult DerivationGoal::closureRepaired()
{
trace("closure repaired");
if (nrFailed > 0)
throw Error("some paths in the output closure of derivation '%s' could not be repaired",
worker.store.printStorePath(drvPath));
return {done(BuildResult::AlreadyValid, assertPathValidity())};
} catch (...) {
return {std::current_exception()};
return done(BuildResult::AlreadyValid, assertPathValidity());
}
kj::Promise<Result<Goal::WorkResult>> DerivationGoal::inputsRealised(bool inBuildSlot) noexcept
try {
Goal::WorkResult DerivationGoal::inputsRealised()
{
trace("all inputs realised");
if (nrFailed != 0) {
if (!useDerivation)
throw Error("some dependencies of '%s' are missing", worker.store.printStorePath(drvPath));
return {done(
BuildResult::DependencyFailed,
{},
Error(
return done(BuildResult::DependencyFailed, {}, Error(
"%s dependencies of derivation '%s' failed to build",
nrFailed,
worker.store.printStorePath(drvPath)
)
)};
nrFailed, worker.store.printStorePath(drvPath)));
}
if (retrySubstitution == RetrySubstitution::YesNeed) {
retrySubstitution = RetrySubstitution::AlreadyRetried;
return haveDerivation(inBuildSlot);
return haveDerivation();
}
/* Gather information necessary for computing the closure and/or
@ -611,11 +581,11 @@ try {
worker.store.printStorePath(pathResolved),
});
resolvedDrvGoal = worker.goalFactory().makeDerivationGoal(
resolvedDrvGoal = worker.makeDerivationGoal(
pathResolved, wantedOutputs, buildMode);
state = &DerivationGoal::resolvedFinished;
return {WaitForGoals{{resolvedDrvGoal}}};
return WaitForGoals{{resolvedDrvGoal}};
}
std::function<void(const StorePath &, const DerivedPathMap<StringSet>::ChildNode &)> accumInputPaths;
@ -680,12 +650,10 @@ try {
slot to become available, since we don't need one if there is a
build hook. */
state = &DerivationGoal::tryToBuild;
return tryToBuild(inBuildSlot);
} catch (...) {
return {std::current_exception()};
return ContinueImmediately{};
}
void DerivationGoal::started()
Goal::WorkResult DerivationGoal::started()
{
auto msg = fmt(
buildMode == bmRepair ? "repairing outputs of '%s'" :
@ -695,11 +663,12 @@ void DerivationGoal::started()
if (hook) msg += fmt(" on '%s'", machineName);
act = std::make_unique<Activity>(*logger, lvlInfo, actBuild, msg,
Logger::Fields{worker.store.printStorePath(drvPath), hook ? machineName : "", 1, 1});
mcRunningBuilds = worker.runningBuilds.addTemporarily(1);
mcRunningBuilds = std::make_unique<MaintainCount<uint64_t>>(worker.runningBuilds);
return StillAlive{};
}
kj::Promise<Result<Goal::WorkResult>> DerivationGoal::tryToBuild(bool inBuildSlot) noexcept
try {
Goal::WorkResult DerivationGoal::tryToBuild()
{
trace("trying to build");
/* Obtain locks on all output paths, if the paths are known a priori.
@ -733,7 +702,7 @@ try {
if (!actLock)
actLock = std::make_unique<Activity>(*logger, lvlWarn, actBuildWaiting,
fmt("waiting for lock on %s", Magenta(showPaths(lockFiles))));
return {WaitForAWhile{}};
return WaitForAWhile{};
}
actLock.reset();
@ -750,7 +719,7 @@ try {
if (buildMode != bmCheck && allValid) {
debug("skipping build of derivation '%s', someone beat us to it", worker.store.printStorePath(drvPath));
outputLocks.setDeletion(true);
return {done(BuildResult::AlreadyValid, std::move(validOutputs))};
return done(BuildResult::AlreadyValid, std::move(validOutputs));
}
/* If any of the outputs already exist but are not valid, delete
@ -770,54 +739,39 @@ try {
&& settings.maxBuildJobs.get() != 0;
if (!buildLocally) {
auto hookReply = tryBuildHook(inBuildSlot);
auto result = std::visit(
overloaded{
[&](HookReply::Accept & a) -> std::optional<WorkResult> {
/* Yes, it has started doing so. Wait until we get
EOF from the hook. */
actLock.reset();
buildResult.startTime = time(0); // inexact
state = &DerivationGoal::buildDone;
started();
return WaitForWorld{std::move(a.fds), false};
},
[&](HookReply::Postpone) -> std::optional<WorkResult> {
/* Not now; wait until at least one child finishes or
the wake-up timeout expires. */
if (!actLock)
actLock = std::make_unique<Activity>(*logger, lvlTalkative, actBuildWaiting,
fmt("waiting for a machine to build '%s'", Magenta(worker.store.printStorePath(drvPath))));
outputLocks.unlock();
return WaitForAWhile{};
},
[&](HookReply::Decline) -> std::optional<WorkResult> {
/* We should do it ourselves. */
return std::nullopt;
},
},
hookReply);
if (result) {
return {std::move(*result)};
switch (tryBuildHook()) {
case rpAccept:
/* Yes, it has started doing so. Wait until we get
EOF from the hook. */
actLock.reset();
buildResult.startTime = time(0); // inexact
state = &DerivationGoal::buildDone;
return started();
case rpPostpone:
/* Not now; wait until at least one child finishes or
the wake-up timeout expires. */
if (!actLock)
actLock = std::make_unique<Activity>(*logger, lvlTalkative, actBuildWaiting,
fmt("waiting for a machine to build '%s'", Magenta(worker.store.printStorePath(drvPath))));
outputLocks.unlock();
return WaitForAWhile{};
case rpDecline:
/* We should do it ourselves. */
break;
}
}
actLock.reset();
state = &DerivationGoal::tryLocalBuild;
return tryLocalBuild(inBuildSlot);
} catch (...) {
return {std::current_exception()};
return ContinueImmediately{};
}
kj::Promise<Result<Goal::WorkResult>> DerivationGoal::tryLocalBuild(bool inBuildSlot) noexcept
try {
Goal::WorkResult DerivationGoal::tryLocalBuild() {
throw Error(
"unable to build with a primary store that isn't a local store; "
"either pass a different '--store' or enable remote builds."
"\nhttps://docs.lix.systems/manual/lix/stable/advanced-topics/distributed-builds.html");
} catch (...) {
return {std::current_exception()};
}
@ -868,16 +822,14 @@ void replaceValidPath(const Path & storePath, const Path & tmpPath)
int DerivationGoal::getChildStatus()
{
builderOutFD = nullptr;
return hook->pid.kill();
}
void DerivationGoal::closeReadPipes()
{
hook->builderOut.reset();
hook->fromHook.reset();
builderOutFD = nullptr;
hook->builderOut.readSide.reset();
hook->fromHook.readSide.reset();
}
@ -973,8 +925,8 @@ void runPostBuildHook(
proc.getStdout()->drainInto(sink);
}
kj::Promise<Result<Goal::WorkResult>> DerivationGoal::buildDone(bool inBuildSlot) noexcept
try {
Goal::WorkResult DerivationGoal::buildDone()
{
trace("build done");
Finally releaseBuildUser([&](){ this->cleanupHookFinally(); });
@ -1068,7 +1020,7 @@ try {
outputLocks.setDeletion(true);
outputLocks.unlock();
return {done(BuildResult::Built, std::move(builtOutputs))};
return done(BuildResult::Built, std::move(builtOutputs));
} catch (BuildError & e) {
outputLocks.unlock();
@ -1089,14 +1041,12 @@ try {
BuildResult::PermanentFailure;
}
return {done(st, {}, std::move(e))};
return done(st, {}, std::move(e));
}
} catch (...) {
return {std::current_exception()};
}
kj::Promise<Result<Goal::WorkResult>> DerivationGoal::resolvedFinished(bool inBuildSlot) noexcept
try {
Goal::WorkResult DerivationGoal::resolvedFinished()
{
trace("resolved derivation finished");
assert(resolvedDrvGoal);
@ -1163,14 +1113,12 @@ try {
if (status == BuildResult::AlreadyValid)
status = BuildResult::ResolvesToAlreadyValid;
return {done(status, std::move(builtOutputs))};
} catch (...) {
return {std::current_exception()};
return done(status, std::move(builtOutputs));
}
HookReply DerivationGoal::tryBuildHook(bool inBuildSlot)
HookReply DerivationGoal::tryBuildHook()
{
if (!worker.hook.available || !useDerivation) return HookReply::Decline{};
if (!worker.hook.available || !useDerivation) return rpDecline;
if (!worker.hook.instance)
worker.hook.instance = std::make_unique<HookInstance>();
@ -1180,7 +1128,7 @@ HookReply DerivationGoal::tryBuildHook(bool inBuildSlot)
/* Send the request to the hook. */
worker.hook.instance->sink
<< "try"
<< (inBuildSlot ? 1 : 0)
<< (worker.getNrLocalBuilds() < settings.maxBuildJobs ? 1 : 0)
<< drv->platform
<< worker.store.printStorePath(drvPath)
<< parsedDrv->getRequiredSystemFeatures();
@ -1192,7 +1140,7 @@ HookReply DerivationGoal::tryBuildHook(bool inBuildSlot)
while (true) {
auto s = [&]() {
try {
return readLine(worker.hook.instance->fromHook.get());
return readLine(worker.hook.instance->fromHook.readSide.get());
} catch (Error & e) {
e.addTrace({}, "while reading the response from the build hook");
throw;
@ -1213,14 +1161,14 @@ HookReply DerivationGoal::tryBuildHook(bool inBuildSlot)
debug("hook reply is '%1%'", reply);
if (reply == "decline")
return HookReply::Decline{};
return rpDecline;
else if (reply == "decline-permanently") {
worker.hook.available = false;
worker.hook.instance.reset();
return HookReply::Decline{};
return rpDecline;
}
else if (reply == "postpone")
return HookReply::Postpone{};
return rpPostpone;
else if (reply != "accept")
throw Error("bad hook reply '%s'", reply);
@ -1228,9 +1176,9 @@ HookReply DerivationGoal::tryBuildHook(bool inBuildSlot)
if (e.errNo == EPIPE) {
printError(
"build hook died unexpectedly: %s",
chomp(drainFD(worker.hook.instance->fromHook.get())));
chomp(drainFD(worker.hook.instance->fromHook.readSide.get())));
worker.hook.instance.reset();
return HookReply::Decline{};
return rpDecline;
} else
throw;
}
@ -1238,7 +1186,7 @@ HookReply DerivationGoal::tryBuildHook(bool inBuildSlot)
hook = std::move(worker.hook.instance);
try {
machineName = readLine(hook->fromHook.get());
machineName = readLine(hook->fromHook.readSide.get());
} catch (Error & e) {
e.addTrace({}, "while reading the machine name from the build hook");
throw;
@ -1261,17 +1209,17 @@ HookReply DerivationGoal::tryBuildHook(bool inBuildSlot)
}
hook->sink = FdSink();
hook->toHook.reset();
hook->toHook.writeSide.reset();
/* Create the log file and pipe. */
Path logFile = openLogFile();
std::set<int> fds;
fds.insert(hook->fromHook.get());
fds.insert(hook->builderOut.get());
builderOutFD = &hook->builderOut;
fds.insert(hook->fromHook.readSide.get());
fds.insert(hook->builderOut.readSide.get());
worker.childStarted(shared_from_this(), fds, false, false);
return HookReply::Accept{std::move(fds)};
return rpAccept;
}
@ -1331,23 +1279,24 @@ void DerivationGoal::closeLogFile()
}
bool DerivationGoal::isReadDesc(int fd)
{
return fd == hook->builderOut.readSide.get();
}
Goal::WorkResult DerivationGoal::handleChildOutput(int fd, std::string_view data)
{
assert(builderOutFD);
auto tooMuchLogs = [&] {
killChild();
return done(
BuildResult::LogLimitExceeded, {},
Error("%s killed after writing more than %d bytes of log output",
getName(), settings.maxLogSize));
};
// local & `ssh://`-builds are dealt with here.
if (fd == builderOutFD->get()) {
auto isWrittenToLog = isReadDesc(fd);
if (isWrittenToLog)
{
logSize += data.size();
if (settings.maxLogSize && logSize > settings.maxLogSize) {
return tooMuchLogs();
killChild();
return done(
BuildResult::LogLimitExceeded, {},
Error("%s killed after writing more than %d bytes of log output",
getName(), settings.maxLogSize));
}
for (auto c : data)
@ -1362,10 +1311,9 @@ Goal::WorkResult DerivationGoal::handleChildOutput(int fd, std::string_view data
}
if (logSink) (*logSink)(data);
return StillAlive{};
}
if (hook && fd == hook->fromHook.get()) {
if (hook && fd == hook->fromHook.readSide.get()) {
for (auto c : data)
if (c == '\n') {
auto json = parseJSONMessage(currentHookLine);
@ -1373,17 +1321,11 @@ Goal::WorkResult DerivationGoal::handleChildOutput(int fd, std::string_view data
auto s = handleJSONLogMessage(*json, worker.act, hook->activities, true);
// ensure that logs from a builder using `ssh-ng://` as protocol
// are also available to `nix log`.
if (s && logSink) {
if (s && !isWrittenToLog && logSink) {
const auto type = (*json)["type"];
const auto fields = (*json)["fields"];
if (type == resBuildLogLine) {
const std::string logLine =
(fields.size() > 0 ? fields[0].get<std::string>() : "") + "\n";
logSize += logLine.size();
if (settings.maxLogSize && logSize > settings.maxLogSize) {
return tooMuchLogs();
}
(*logSink)(logLine);
(*logSink)((fields.size() > 0 ? fields[0].get<std::string>() : "") + "\n");
} else if (type == resSetPhase && ! fields.is_null()) {
const auto phase = fields[0];
if (! phase.is_null()) {
@ -1586,14 +1528,9 @@ Goal::Finished DerivationGoal::done(
fs << worker.store.printStorePath(drvPath) << "\t" << buildResult.toString() << std::endl;
}
if (ex && isDependency) {
logError(ex->info());
}
return Finished{
.exitCode = buildResult.success() ? ecSuccess : ecFailed,
.result = buildResult,
.ex = ex ? std::make_shared<Error>(std::move(*ex)) : nullptr,
.result = buildResult.success() ? ecSuccess : ecFailed,
.ex = ex ? std::make_unique<Error>(std::move(*ex)) : nullptr,
.permanentFailure = buildResult.status == BuildResult::PermanentFailure,
.timedOut = buildResult.status == BuildResult::TimedOut,
.hashMismatch = anyHashMismatchSeen,

View file

@ -1,7 +1,6 @@
#pragma once
///@file
#include "notifying-counter.hh"
#include "parsed-derivations.hh"
#include "lock.hh"
#include "outputs-spec.hh"
@ -15,21 +14,7 @@ using std::map;
struct HookInstance;
struct HookReplyBase {
struct [[nodiscard]] Accept {
std::set<int> fds;
};
struct [[nodiscard]] Decline {};
struct [[nodiscard]] Postpone {};
};
struct [[nodiscard]] HookReply
: HookReplyBase,
std::variant<HookReplyBase::Accept, HookReplyBase::Decline, HookReplyBase::Postpone>
{
HookReply() = delete;
using variant::variant;
};
typedef enum {rpAccept, rpDecline, rpPostpone} HookReply;
/**
 * Unless we are repairing, we don't bother to test validity and just assume it,
@ -201,24 +186,17 @@ struct DerivationGoal : public Goal
*/
std::unique_ptr<HookInstance> hook;
/**
* Builder output is pulled from this file descriptor when not null.
* Owned by the derivation goal or subclass, must not be reset until
* the build has finished and no more output must be processed by us
*/
AutoCloseFD * builderOutFD = nullptr;
/**
* The sort of derivation we are building.
*/
std::optional<DerivationType> derivationType;
typedef kj::Promise<Result<WorkResult>> (DerivationGoal::*GoalState)(bool inBuildSlot) noexcept;
typedef WorkResult (DerivationGoal::*GoalState)();
GoalState state;
BuildMode buildMode;
NotifyingCounter<uint64_t>::Bump mcExpectedBuilds, mcRunningBuilds;
std::unique_ptr<MaintainCount<uint64_t>> mcExpectedBuilds, mcRunningBuilds;
std::unique_ptr<Activity> act;
@ -235,10 +213,10 @@ struct DerivationGoal : public Goal
std::string machineName;
DerivationGoal(const StorePath & drvPath,
const OutputsSpec & wantedOutputs, Worker & worker, bool isDependency,
const OutputsSpec & wantedOutputs, Worker & worker,
BuildMode buildMode = bmNormal);
DerivationGoal(const StorePath & drvPath, const BasicDerivation & drv,
const OutputsSpec & wantedOutputs, Worker & worker, bool isDependency,
const OutputsSpec & wantedOutputs, Worker & worker,
BuildMode buildMode = bmNormal);
virtual ~DerivationGoal() noexcept(false);
@ -246,7 +224,7 @@ struct DerivationGoal : public Goal
std::string key() override;
kj::Promise<Result<WorkResult>> work(bool inBuildSlot) noexcept override;
WorkResult work() override;
/**
* Add wanted outputs to an already existing derivation goal.
@ -256,23 +234,23 @@ struct DerivationGoal : public Goal
/**
* The states.
*/
kj::Promise<Result<WorkResult>> getDerivation(bool inBuildSlot) noexcept;
kj::Promise<Result<WorkResult>> loadDerivation(bool inBuildSlot) noexcept;
kj::Promise<Result<WorkResult>> haveDerivation(bool inBuildSlot) noexcept;
kj::Promise<Result<WorkResult>> outputsSubstitutionTried(bool inBuildSlot) noexcept;
kj::Promise<Result<WorkResult>> gaveUpOnSubstitution(bool inBuildSlot) noexcept;
kj::Promise<Result<WorkResult>> closureRepaired(bool inBuildSlot) noexcept;
kj::Promise<Result<WorkResult>> inputsRealised(bool inBuildSlot) noexcept;
kj::Promise<Result<WorkResult>> tryToBuild(bool inBuildSlot) noexcept;
virtual kj::Promise<Result<WorkResult>> tryLocalBuild(bool inBuildSlot) noexcept;
kj::Promise<Result<WorkResult>> buildDone(bool inBuildSlot) noexcept;
WorkResult getDerivation();
WorkResult loadDerivation();
WorkResult haveDerivation();
WorkResult outputsSubstitutionTried();
WorkResult gaveUpOnSubstitution();
WorkResult closureRepaired();
WorkResult inputsRealised();
WorkResult tryToBuild();
virtual WorkResult tryLocalBuild();
WorkResult buildDone();
kj::Promise<Result<WorkResult>> resolvedFinished(bool inBuildSlot) noexcept;
WorkResult resolvedFinished();
/**
* Is the build hook willing to perform the build?
*/
HookReply tryBuildHook(bool inBuildSlot);
HookReply tryBuildHook();
virtual int getChildStatus();
@ -312,6 +290,8 @@ struct DerivationGoal : public Goal
virtual void cleanupPostOutputsRegisteredModeCheck();
virtual void cleanupPostOutputsRegisteredModeNonCheck();
virtual bool isReadDesc(int fd);
/**
* Callback used by the worker to write to the log.
*/
@ -346,9 +326,9 @@ struct DerivationGoal : public Goal
*/
virtual void killChild();
kj::Promise<Result<WorkResult>> repairClosure() noexcept;
WorkResult repairClosure();
void started();
WorkResult started();
Finished done(
BuildResult::Status status,

View file

@ -1,5 +1,4 @@
#include "drv-output-substitution-goal.hh"
#include "build-result.hh"
#include "finally.hh"
#include "worker.hh"
#include "substitution-goal.hh"
@ -10,10 +9,9 @@ namespace nix {
DrvOutputSubstitutionGoal::DrvOutputSubstitutionGoal(
const DrvOutput & id,
Worker & worker,
bool isDependency,
RepairFlag repair,
std::optional<ContentAddress> ca)
: Goal(worker, isDependency)
: Goal(worker, DerivedPath::Opaque { StorePath::dummy })
, id(id)
{
state = &DrvOutputSubstitutionGoal::init;
@ -22,30 +20,32 @@ DrvOutputSubstitutionGoal::DrvOutputSubstitutionGoal(
}
kj::Promise<Result<Goal::WorkResult>> DrvOutputSubstitutionGoal::init(bool inBuildSlot) noexcept
try {
Goal::WorkResult DrvOutputSubstitutionGoal::init()
{
trace("init");
/* If the derivation already exists, we're done */
if (worker.store.queryRealisation(id)) {
return {Finished{ecSuccess, std::move(buildResult)}};
return Finished{ecSuccess};
}
subs = settings.useSubstitutes ? getDefaultSubstituters() : std::list<ref<Store>>();
return tryNext(inBuildSlot);
} catch (...) {
return {std::current_exception()};
return tryNext();
}
kj::Promise<Result<Goal::WorkResult>> DrvOutputSubstitutionGoal::tryNext(bool inBuildSlot) noexcept
try {
Goal::WorkResult DrvOutputSubstitutionGoal::tryNext()
{
trace("trying next substituter");
if (!inBuildSlot) {
return {WaitForSlot{}};
/* Make sure that we are allowed to start a substitution. Note that even
if maxSubstitutionJobs == 0, we still allow a substituter to run. This
prevents infinite waiting. */
if (worker.runningSubstitutions >= std::max(1U, settings.maxSubstitutionJobs.get())) {
return WaitForSlot{};
}
maintainRunningSubstitutions = worker.runningSubstitutions.addTemporarily(1);
maintainRunningSubstitutions =
std::make_unique<MaintainCount<uint64_t>>(worker.runningSubstitutions);
if (subs.size() == 0) {
/* None left. Terminate this goal and let someone else deal
@ -59,7 +59,7 @@ try {
/* Hack: don't indicate failure if there were no substituters.
In that case the calling derivation should just do a
build. */
return {Finished{substituterFailed ? ecFailed : ecNoSubstituters, std::move(buildResult)}};
return Finished{substituterFailed ? ecFailed : ecNoSubstituters};
}
sub = subs.front();
@ -78,14 +78,14 @@ try {
return sub->queryRealisation(id);
});
worker.childStarted(shared_from_this(), {downloadState->outPipe.readSide.get()}, true, false);
state = &DrvOutputSubstitutionGoal::realisationFetched;
return {WaitForWorld{{downloadState->outPipe.readSide.get()}, true}};
} catch (...) {
return {std::current_exception()};
return StillAlive{};
}
kj::Promise<Result<Goal::WorkResult>> DrvOutputSubstitutionGoal::realisationFetched(bool inBuildSlot) noexcept
try {
Goal::WorkResult DrvOutputSubstitutionGoal::realisationFetched()
{
worker.childTerminated(this);
maintainRunningSubstitutions.reset();
@ -97,7 +97,7 @@ try {
}
if (!outputInfo) {
return tryNext(inBuildSlot);
return tryNext();
}
WaitForGoals result;
@ -114,49 +114,42 @@ try {
worker.store.printStorePath(localOutputInfo->outPath),
worker.store.printStorePath(depPath)
);
return tryNext(inBuildSlot);
return tryNext();
}
result.goals.insert(worker.goalFactory().makeDrvOutputSubstitutionGoal(depId));
result.goals.insert(worker.makeDrvOutputSubstitutionGoal(depId));
}
}
result.goals.insert(worker.goalFactory().makePathSubstitutionGoal(outputInfo->outPath));
result.goals.insert(worker.makePathSubstitutionGoal(outputInfo->outPath));
if (result.goals.empty()) {
return outPathValid(inBuildSlot);
return outPathValid();
} else {
state = &DrvOutputSubstitutionGoal::outPathValid;
return {std::move(result)};
return result;
}
} catch (...) {
return {std::current_exception()};
}
kj::Promise<Result<Goal::WorkResult>> DrvOutputSubstitutionGoal::outPathValid(bool inBuildSlot) noexcept
try {
Goal::WorkResult DrvOutputSubstitutionGoal::outPathValid()
{
assert(outputInfo);
trace("output path substituted");
if (nrFailed > 0) {
debug("The output path of the derivation output '%s' could not be substituted", id.to_string());
return {Finished{
nrNoSubstituters > 0 || nrIncompleteClosure > 0 ? ecIncompleteClosure : ecFailed,
std::move(buildResult),
}};
return Finished{
nrNoSubstituters > 0 || nrIncompleteClosure > 0 ? ecIncompleteClosure : ecFailed
};
}
worker.store.registerDrvOutput(*outputInfo);
return finished();
} catch (...) {
return {std::current_exception()};
}
kj::Promise<Result<Goal::WorkResult>> DrvOutputSubstitutionGoal::finished() noexcept
try {
Goal::WorkResult DrvOutputSubstitutionGoal::finished()
{
trace("finished");
return {Finished{ecSuccess, std::move(buildResult)}};
} catch (...) {
return {std::current_exception()};
return Finished{ecSuccess};
}
std::string DrvOutputSubstitutionGoal::key()
@ -166,9 +159,9 @@ std::string DrvOutputSubstitutionGoal::key()
return "a$" + std::string(id.to_string());
}
kj::Promise<Result<Goal::WorkResult>> DrvOutputSubstitutionGoal::work(bool inBuildSlot) noexcept
Goal::WorkResult DrvOutputSubstitutionGoal::work()
{
return (this->*state)(inBuildSlot);
return (this->*state)();
}

View file

@ -1,7 +1,6 @@
#pragma once
///@file
#include "notifying-counter.hh"
#include "store-api.hh"
#include "goal.hh"
#include "realisation.hh"
@ -41,7 +40,7 @@ class DrvOutputSubstitutionGoal : public Goal {
*/
std::shared_ptr<Store> sub;
NotifyingCounter<uint64_t>::Bump maintainRunningSubstitutions;
std::unique_ptr<MaintainCount<uint64_t>> maintainRunningSubstitutions;
struct DownloadState
{
@ -57,28 +56,22 @@ class DrvOutputSubstitutionGoal : public Goal {
bool substituterFailed = false;
public:
DrvOutputSubstitutionGoal(
const DrvOutput & id,
Worker & worker,
bool isDependency,
RepairFlag repair = NoRepair,
std::optional<ContentAddress> ca = std::nullopt
);
DrvOutputSubstitutionGoal(const DrvOutput& id, Worker & worker, RepairFlag repair = NoRepair, std::optional<ContentAddress> ca = std::nullopt);
typedef kj::Promise<Result<WorkResult>> (DrvOutputSubstitutionGoal::*GoalState)(bool inBuildSlot) noexcept;
typedef WorkResult (DrvOutputSubstitutionGoal::*GoalState)();
GoalState state;
kj::Promise<Result<WorkResult>> init(bool inBuildSlot) noexcept;
kj::Promise<Result<WorkResult>> tryNext(bool inBuildSlot) noexcept;
kj::Promise<Result<WorkResult>> realisationFetched(bool inBuildSlot) noexcept;
kj::Promise<Result<WorkResult>> outPathValid(bool inBuildSlot) noexcept;
kj::Promise<Result<WorkResult>> finished() noexcept;
WorkResult init();
WorkResult tryNext();
WorkResult realisationFetched();
WorkResult outPathValid();
WorkResult finished();
Finished timedOut(Error && ex) override { abort(); };
std::string key() override;
kj::Promise<Result<WorkResult>> work(bool inBuildSlot) noexcept override;
WorkResult work() override;
JobCategory jobCategory() const override {
return JobCategory::Substitution;

View file

@ -2,35 +2,27 @@
#include "substitution-goal.hh"
#include "derivation-goal.hh"
#include "local-store.hh"
#include "strings.hh"
namespace nix {
static auto runWorker(Worker & worker, auto mkGoals)
{
return worker.run(mkGoals);
}
void Store::buildPaths(const std::vector<DerivedPath> & reqs, BuildMode buildMode, std::shared_ptr<Store> evalStore)
{
auto aio = kj::setupAsyncIo();
Worker worker(*this, evalStore ? *evalStore : *this, aio);
Worker worker(*this, evalStore ? *evalStore : *this);
auto goals = runWorker(worker, [&](GoalFactory & gf) {
Goals goals;
for (auto & br : reqs)
goals.insert(gf.makeGoal(br, buildMode));
return goals;
});
Goals goals;
for (auto & br : reqs)
goals.insert(worker.makeGoal(br, buildMode));
worker.run(goals);
StringSet failed;
std::shared_ptr<Error> ex;
std::optional<Error> ex;
for (auto & i : goals) {
if (i->ex) {
if (ex)
logError(i->ex->info());
else
ex = i->ex;
ex = std::move(*i->ex);
}
if (i->exitCode != Goal::ecSuccess) {
if (auto i2 = dynamic_cast<DerivationGoal *>(i.get()))
@ -54,20 +46,18 @@ std::vector<KeyedBuildResult> Store::buildPathsWithResults(
BuildMode buildMode,
std::shared_ptr<Store> evalStore)
{
auto aio = kj::setupAsyncIo();
Worker worker(*this, evalStore ? *evalStore : *this, aio);
Worker worker(*this, evalStore ? *evalStore : *this);
Goals goals;
std::vector<std::pair<const DerivedPath &, GoalPtr>> state;
auto goals = runWorker(worker, [&](GoalFactory & gf) {
Goals goals;
for (const auto & req : reqs) {
auto goal = gf.makeGoal(req, buildMode);
goals.insert(goal);
state.push_back({req, goal});
}
return goals;
});
for (const auto & req : reqs) {
auto goal = worker.makeGoal(req, buildMode);
goals.insert(goal);
state.push_back({req, goal});
}
worker.run(goals);
std::vector<KeyedBuildResult> results;
@ -80,14 +70,11 @@ std::vector<KeyedBuildResult> Store::buildPathsWithResults(
BuildResult Store::buildDerivation(const StorePath & drvPath, const BasicDerivation & drv,
BuildMode buildMode)
{
auto aio = kj::setupAsyncIo();
Worker worker(*this, *this, aio);
Worker worker(*this, *this);
auto goal = worker.makeBasicDerivationGoal(drvPath, drv, OutputsSpec::All {}, buildMode);
try {
auto goals = runWorker(worker, [&](GoalFactory & gf) -> Goals {
return Goals{gf.makeBasicDerivationGoal(drvPath, drv, OutputsSpec::All{}, buildMode)};
});
auto goal = *goals.begin();
worker.run(Goals{goal});
return goal->buildResult.restrictTo(DerivedPath::Built {
.drvPath = makeConstantStorePathRef(drvPath),
.outputs = OutputsSpec::All {},
@ -106,13 +93,11 @@ void Store::ensurePath(const StorePath & path)
/* If the path is already valid, we're done. */
if (isValidPath(path)) return;
auto aio = kj::setupAsyncIo();
Worker worker(*this, *this, aio);
Worker worker(*this, *this);
GoalPtr goal = worker.makePathSubstitutionGoal(path);
Goals goals = {goal};
auto goals = runWorker(worker, [&](GoalFactory & gf) {
return Goals{gf.makePathSubstitutionGoal(path)};
});
auto goal = *goals.begin();
worker.run(goals);
if (goal->exitCode != Goal::ecSuccess) {
if (goal->ex) {
@ -126,29 +111,24 @@ void Store::ensurePath(const StorePath & path)
void Store::repairPath(const StorePath & path)
{
auto aio = kj::setupAsyncIo();
Worker worker(*this, *this, aio);
Worker worker(*this, *this);
GoalPtr goal = worker.makePathSubstitutionGoal(path, Repair);
Goals goals = {goal};
auto goals = runWorker(worker, [&](GoalFactory & gf) {
return Goals{gf.makePathSubstitutionGoal(path, Repair)};
});
auto goal = *goals.begin();
worker.run(goals);
if (goal->exitCode != Goal::ecSuccess) {
/* Since substituting the path didn't work, if we have a valid
deriver, then rebuild the deriver. */
auto info = queryPathInfo(path);
if (info->deriver && isValidPath(*info->deriver)) {
worker.run([&](GoalFactory & gf) {
return Goals{gf.makeGoal(
DerivedPath::Built{
.drvPath = makeConstantStorePathRef(*info->deriver),
// FIXME: Should just build the specific output we need.
.outputs = OutputsSpec::All{},
},
bmRepair
)};
});
goals.clear();
goals.insert(worker.makeGoal(DerivedPath::Built {
.drvPath = makeConstantStorePathRef(*info->deriver),
// FIXME: Should just build the specific output we need.
.outputs = OutputsSpec::All { },
}, bmRepair));
worker.run(goals);
} else
throw Error(worker.failingExitStatus(), "cannot repair path '%s'", printStorePath(path));
}

View file

@ -1,4 +1,5 @@
#include "goal.hh"
#include "worker.hh"
namespace nix {

View file

@ -1,11 +1,9 @@
#pragma once
///@file
#include "result.hh"
#include "types.hh"
#include "store-api.hh"
#include "build-result.hh"
#include <kj/async.h>
namespace nix {
@ -53,7 +51,7 @@ enum struct JobCategory {
Substitution,
};
struct Goal
struct Goal : public std::enable_shared_from_this<Goal>
{
typedef enum {ecSuccess, ecFailed, ecNoSubstituters, ecIncompleteClosure} ExitCode;
@ -62,13 +60,6 @@ struct Goal
*/
Worker & worker;
/**
* Whether this goal is only a dependency of other goals. Toplevel
* goals that are also dependencies of other toplevel goals do not
* set this, only goals that are exclusively dependencies do this.
*/
const bool isDependency;
/**
* Goals that this goal is waiting for.
*/
@ -121,14 +112,9 @@ public:
struct [[nodiscard]] WaitForGoals {
Goals goals;
};
struct [[nodiscard]] WaitForWorld {
std::set<int> fds;
bool inBuildSlot;
};
struct [[nodiscard]] Finished {
ExitCode exitCode;
BuildResult result;
std::shared_ptr<Error> ex;
ExitCode result;
std::unique_ptr<Error> ex;
bool permanentFailure = false;
bool timedOut = false;
bool hashMismatch = false;
@ -141,7 +127,6 @@ public:
WaitForAWhile,
ContinueImmediately,
WaitForGoals,
WaitForWorld,
Finished>
{
WorkResult() = delete;
@ -151,11 +136,10 @@ public:
/**
* Exception containing an error message, if any.
*/
std::shared_ptr<Error> ex;
std::unique_ptr<Error> ex;
explicit Goal(Worker & worker, bool isDependency)
Goal(Worker & worker, DerivedPath path)
: worker(worker)
, isDependency(isDependency)
{ }
virtual ~Goal() noexcept(false)
@ -163,7 +147,7 @@ public:
trace("goal destroyed");
}
virtual kj::Promise<Result<WorkResult>> work(bool inBuildSlot) noexcept = 0;
virtual WorkResult work() = 0;
virtual void waiteeDone(GoalPtr waitee) { }
@ -176,11 +160,6 @@ public:
{
}
virtual bool respectsTimeouts()
{
return false;
}
void trace(std::string_view s);
std::string getName() const

View file

@ -2,7 +2,6 @@
#include "file-system.hh"
#include "globals.hh"
#include "hook-instance.hh"
#include "strings.hh"
namespace nix {
@ -27,21 +26,18 @@ HookInstance::HookInstance()
args.push_back(std::to_string(verbosity));
/* Create a pipe to get the output of the child. */
Pipe fromHook_;
fromHook_.create();
fromHook.create();
/* Create the communication pipes. */
Pipe toHook_;
toHook_.create();
toHook.create();
/* Create a pipe to get the output of the builder. */
Pipe builderOut_;
builderOut_.create();
builderOut.create();
/* Fork the hook. */
pid = startProcess([&]() {
if (dup2(fromHook_.writeSide.get(), STDERR_FILENO) == -1)
if (dup2(fromHook.writeSide.get(), STDERR_FILENO) == -1)
throw SysError("cannot pipe standard error into log file");
commonChildInit();
@ -49,16 +45,16 @@ HookInstance::HookInstance()
if (chdir("/") == -1) throw SysError("changing into /");
/* Dup the communication pipes. */
if (dup2(toHook_.readSide.get(), STDIN_FILENO) == -1)
if (dup2(toHook.readSide.get(), STDIN_FILENO) == -1)
throw SysError("dupping to-hook read side");
/* Use fd 4 for the builder's stdout/stderr. */
if (dup2(builderOut_.writeSide.get(), 4) == -1)
if (dup2(builderOut.writeSide.get(), 4) == -1)
throw SysError("dupping builder's stdout/stderr");
/* Hack: pass the read side of that fd to allow build-remote
to read SSH error messages. */
if (dup2(builderOut_.readSide.get(), 5) == -1)
if (dup2(builderOut.readSide.get(), 5) == -1)
throw SysError("dupping builder's stdout/stderr");
execv(buildHook.c_str(), stringsToCharPtrs(args).data());
@ -67,11 +63,10 @@ HookInstance::HookInstance()
});
pid.setSeparatePG(true);
fromHook = std::move(fromHook_.readSide);
toHook = std::move(toHook_.writeSide);
builderOut = std::move(builderOut_.readSide);
fromHook.writeSide.reset();
toHook.readSide.reset();
sink = FdSink(toHook.get());
sink = FdSink(toHook.writeSide.get());
std::map<std::string, Config::SettingInfo> settings;
globalConfig.getSettings(settings);
for (auto & setting : settings)
@ -83,7 +78,7 @@ HookInstance::HookInstance()
HookInstance::~HookInstance()
{
try {
toHook.reset();
toHook.writeSide.reset();
if (pid) pid.kill();
} catch (...) {
ignoreException();

View file

@ -10,19 +10,19 @@ namespace nix {
struct HookInstance
{
/**
* Pipe for talking to the build hook.
* Pipes for talking to the build hook.
*/
AutoCloseFD toHook;
Pipe toHook;
/**
* Pipe for the hook's standard output/error.
*/
AutoCloseFD fromHook;
Pipe fromHook;
/**
* Pipe for the builder's standard output/error.
*/
AutoCloseFD builderOut;
Pipe builderOut;
/**
* The process ID of the hook.

View file

@ -1,12 +1,14 @@
#include "local-derivation-goal.hh"
#include "indirect-root-store.hh"
#include "machines.hh"
#include "hook-instance.hh"
#include "store-api.hh"
#include "worker.hh"
#include "builtins.hh"
#include "builtins/buildenv.hh"
#include "path-references.hh"
#include "finally.hh"
#include "archive.hh"
#include "compression.hh"
#include "daemon.hh"
#include "topo-sort.hh"
#include "json-utils.hh"
@ -15,8 +17,6 @@
#include "namespaces.hh"
#include "child.hh"
#include "unix-domain-socket.hh"
#include "mount.hh"
#include "strings.hh"
#include <regex>
#include <queue>
@ -149,30 +149,17 @@ void LocalDerivationGoal::killSandbox(bool getStats)
}
kj::Promise<Result<Goal::WorkResult>> LocalDerivationGoal::tryLocalBuild(bool inBuildSlot) noexcept
try {
Goal::WorkResult LocalDerivationGoal::tryLocalBuild()
{
#if __APPLE__
additionalSandboxProfile = parsedDrv->getStringAttr("__sandboxProfile").value_or("");
#endif
if (!inBuildSlot) {
unsigned int curBuilds = worker.getNrLocalBuilds();
if (curBuilds >= settings.maxBuildJobs) {
state = &DerivationGoal::tryToBuild;
outputLocks.unlock();
if (0U != settings.maxBuildJobs) {
return {WaitForSlot{}};
}
if (getMachines().empty()) {
throw Error(
"unable to start any build; either set '--max-jobs' to a non-zero value or enable "
"remote builds.\n"
"https://docs.lix.systems/manual/lix/stable/advanced-topics/distributed-builds.html"
);
} else {
throw Error(
"unable to start any build; remote machines may not have all required system features.\n"
"https://docs.lix.systems/manual/lix/stable/advanced-topics/distributed-builds.html"
);
}
return WaitForSlot{};
}
assert(derivationType);
@ -214,7 +201,7 @@ try {
if (!actLock)
actLock = std::make_unique<Activity>(*logger, lvlWarn, actBuildWaiting,
fmt("waiting for a free build user ID for '%s'", Magenta(worker.store.printStorePath(drvPath))));
return {WaitForAWhile{}};
return WaitForAWhile{};
}
}
@ -243,24 +230,21 @@ try {
try {
/* Okay, we have to build. */
auto fds = startBuilder();
/* This state will be reached when we get EOF on the child's
log pipe. */
state = &DerivationGoal::buildDone;
started();
return {WaitForWorld{std::move(fds), true}};
startBuilder();
} catch (BuildError & e) {
outputLocks.unlock();
buildUser.reset();
auto report = done(BuildResult::InputRejected, {}, std::move(e));
report.permanentFailure = true;
return {std::move(report)};
return report;
}
} catch (...) {
return {std::current_exception()};
/* This state will be reached when we get EOF on the child's
log pipe. */
state = &DerivationGoal::buildDone;
return started();
}
@ -295,10 +279,8 @@ void LocalDerivationGoal::closeReadPipes()
{
if (hook) {
DerivationGoal::closeReadPipes();
} else {
builderOutPTY.close();
builderOutFD = nullptr;
}
} else
builderOut.close();
}
@ -390,7 +372,40 @@ void LocalDerivationGoal::cleanupPostOutputsRegisteredModeNonCheck()
cleanupPostOutputsRegisteredModeCheck();
}
std::set<int> LocalDerivationGoal::startBuilder()
#if __linux__
static void doBind(const Path & source, const Path & target, bool optional = false) {
debug("bind mounting '%1%' to '%2%'", source, target);
auto bindMount = [&]() {
if (mount(source.c_str(), target.c_str(), "", MS_BIND | MS_REC, 0) == -1)
throw SysError("bind mount from '%1%' to '%2%' failed", source, target);
};
auto maybeSt = maybeLstat(source);
if (!maybeSt) {
if (optional)
return;
else
throw SysError("getting attributes of path '%1%'", source);
}
auto st = *maybeSt;
if (S_ISDIR(st.st_mode)) {
createDirs(target);
bindMount();
} else if (S_ISLNK(st.st_mode)) {
// Symlinks can (apparently) not be bind-mounted, so just copy it
createDirs(dirOf(target));
copyFile(source, target, {});
} else {
createDirs(dirOf(target));
writeFile(target, "");
bindMount();
}
};
#endif
void LocalDerivationGoal::startBuilder()
{
if ((buildUser && buildUser->getUIDCount() != 1)
#if __linux__
@ -451,23 +466,13 @@ std::set<int> LocalDerivationGoal::startBuilder()
killSandbox(false);
/* Right platform? */
if (!parsedDrv->canBuildLocally(worker.store)) {
HintFmt addendum{""};
if (settings.useSubstitutes && !parsedDrv->substitutesAllowed()) {
addendum = HintFmt("\n\nHint: the failing derivation has %s set to %s, forcing it to be built rather than substituted.\n"
"Passing %s to force substitution may resolve this failure if the path is available in a substituter.",
"allowSubstitutes", "false", "--always-allow-substitutes");
}
throw Error({
.msg = HintFmt("a '%s' with features {%s} is required to build '%s', but I am a '%s' with features {%s}%s",
drv->platform,
concatStringsSep(", ", parsedDrv->getRequiredSystemFeatures()),
worker.store.printStorePath(drvPath),
settings.thisSystem,
concatStringsSep<StringSet>(", ", worker.store.systemFeatures),
Uncolored(addendum))
});
}
if (!parsedDrv->canBuildLocally(worker.store))
throw Error("a '%s' with features {%s} is required to build '%s', but I am a '%s' with features {%s}",
drv->platform,
concatStringsSep(", ", parsedDrv->getRequiredSystemFeatures()),
worker.store.printStorePath(drvPath),
settings.thisSystem,
concatStringsSep<StringSet>(", ", worker.store.systemFeatures));
/* Create a temporary directory where the build will take
place. */
@ -699,13 +704,12 @@ std::set<int> LocalDerivationGoal::startBuilder()
Path logFile = openLogFile();
/* Create a pseudoterminal to get the output of the builder. */
builderOutPTY = AutoCloseFD{posix_openpt(O_RDWR | O_NOCTTY)};
if (!builderOutPTY)
builderOut = AutoCloseFD{posix_openpt(O_RDWR | O_NOCTTY)};
if (!builderOut)
throw SysError("opening pseudoterminal master");
builderOutFD = &builderOutPTY;
// FIXME: not thread-safe, use ptsname_r
std::string slaveName = ptsname(builderOutPTY.get());
std::string slaveName = ptsname(builderOut.get());
if (buildUser) {
if (chmod(slaveName.c_str(), 0600))
@ -716,12 +720,12 @@ std::set<int> LocalDerivationGoal::startBuilder()
}
#if __APPLE__
else {
if (grantpt(builderOutPTY.get()))
if (grantpt(builderOut.get()))
throw SysError("granting access to pseudoterminal slave");
}
#endif
if (unlockpt(builderOutPTY.get()))
if (unlockpt(builderOut.get()))
throw SysError("unlocking pseudoterminal");
/* Open the slave side of the pseudoterminal and use it as stderr. */
@ -752,13 +756,14 @@ std::set<int> LocalDerivationGoal::startBuilder()
/* parent */
pid.setSeparatePG(true);
worker.childStarted(shared_from_this(), {builderOut.get()}, true, true);
/* Check if setting up the build environment failed. */
std::vector<std::string> msgs;
while (true) {
std::string msg = [&]() {
try {
return readLine(builderOutPTY.get());
return readLine(builderOut.get());
} catch (Error & e) {
auto status = pid.wait();
e.addTrace({}, "while waiting for the build environment for '%s' to initialize (%s, previous messages: %s)",
@ -770,7 +775,7 @@ std::set<int> LocalDerivationGoal::startBuilder()
}();
if (msg.substr(0, 1) == "\2") break;
if (msg.substr(0, 1) == "\1") {
FdSource source(builderOutPTY.get());
FdSource source(builderOut.get());
auto ex = readError(source);
ex.addTrace({}, "while setting up the build environment");
throw ex;
@ -778,8 +783,6 @@ std::set<int> LocalDerivationGoal::startBuilder()
debug("sandbox setup: " + msg);
msgs.push_back(std::move(msg));
}
return {builderOutPTY.get()};
}
@ -1304,7 +1307,7 @@ void LocalDerivationGoal::addDependency(const StorePath & path)
Path target = chrootRootDir + worker.store.printStorePath(path);
if (pathExists(target)) {
// There is a similar debug message in bindPath, so only run it in this block to not have double messages.
// There is a similar debug message in doBind, so only run it in this block to not have double messages.
debug("bind-mounting %s -> %s", target, source);
throw Error("store path '%s' already exists in the sandbox", worker.store.printStorePath(path));
}
@ -1321,7 +1324,7 @@ void LocalDerivationGoal::addDependency(const StorePath & path)
if (setns(sandboxMountNamespace.get(), 0) == -1)
throw SysError("entering sandbox mount namespace");
bindPath(source, target);
doBind(source, target);
_exit(0);
});
@ -1513,7 +1516,7 @@ void LocalDerivationGoal::runChild()
chmodPath(dst, 0555);
} else
#endif
bindPath(i.second.source, chrootRootDir + i.first, i.second.optional);
doBind(i.second.source, chrootRootDir + i.first, i.second.optional);
}
/* Bind a new instance of procfs on /proc. */
@ -1552,8 +1555,8 @@ void LocalDerivationGoal::runChild()
} else {
if (errno != EINVAL)
throw SysError("mounting /dev/pts");
bindPath("/dev/pts", chrootRootDir + "/dev/pts");
bindPath("/dev/ptmx", chrootRootDir + "/dev/ptmx");
doBind("/dev/pts", chrootRootDir + "/dev/pts");
doBind("/dev/ptmx", chrootRootDir + "/dev/ptmx");
}
}
@ -2593,6 +2596,13 @@ void LocalDerivationGoal::deleteTmpDir(bool force)
}
bool LocalDerivationGoal::isReadDesc(int fd)
{
return (hook && DerivationGoal::isReadDesc(fd)) ||
(!hook && fd == builderOut.get());
}
StorePath LocalDerivationGoal::makeFallbackPath(OutputNameView outputName)
{
return worker.store.makeStorePath(

View file

@ -40,7 +40,7 @@ struct LocalDerivationGoal : public DerivationGoal
* Master side of the pseudoterminal used for the builder's
* standard output/error.
*/
AutoCloseFD builderOutPTY;
AutoCloseFD builderOut;
/**
* Pipe for synchronising updates to the builder namespaces.
@ -186,7 +186,6 @@ struct LocalDerivationGoal : public DerivationGoal
const StorePath & drvPath,
const OutputsSpec & wantedOutputs,
Worker & worker,
bool isDependency,
BuildMode buildMode
);
@ -199,7 +198,6 @@ struct LocalDerivationGoal : public DerivationGoal
const BasicDerivation & drv,
const OutputsSpec & wantedOutputs,
Worker & worker,
bool isDependency,
BuildMode buildMode
);
@ -213,12 +211,12 @@ struct LocalDerivationGoal : public DerivationGoal
/**
* The additional states.
*/
kj::Promise<Result<WorkResult>> tryLocalBuild(bool inBuildSlot) noexcept override;
WorkResult tryLocalBuild() override;
/**
* Start building a derivation.
*/
std::set<int> startBuilder();
void startBuilder();
/**
* Fill in the environment for the builder.
@ -287,6 +285,8 @@ struct LocalDerivationGoal : public DerivationGoal
void cleanupPostOutputsRegisteredModeCheck() override;
void cleanupPostOutputsRegisteredModeNonCheck() override;
bool isReadDesc(int fd) override;
/**
* Delete the temporary directory, if we have one.
*/
@ -359,10 +359,6 @@ protected:
return false;
}
virtual bool respectsTimeouts() override
{
return true;
}
};
}

View file

@ -6,14 +6,8 @@
namespace nix {
PathSubstitutionGoal::PathSubstitutionGoal(
const StorePath & storePath,
Worker & worker,
bool isDependency,
RepairFlag repair,
std::optional<ContentAddress> ca
)
: Goal(worker, isDependency)
PathSubstitutionGoal::PathSubstitutionGoal(const StorePath & storePath, Worker & worker, RepairFlag repair, std::optional<ContentAddress> ca)
: Goal(worker, DerivedPath::Opaque { storePath })
, storePath(storePath)
, repair(repair)
, ca(ca)
@ -21,7 +15,7 @@ PathSubstitutionGoal::PathSubstitutionGoal(
state = &PathSubstitutionGoal::init;
name = fmt("substitution of '%s'", worker.store.printStorePath(this->storePath));
trace("created");
maintainExpectedSubstitutions = worker.expectedSubstitutions.addTemporarily(1);
maintainExpectedSubstitutions = std::make_unique<MaintainCount<uint64_t>>(worker.expectedSubstitutions);
}
@ -41,25 +35,25 @@ Goal::Finished PathSubstitutionGoal::done(
debug(*errorMsg);
buildResult.errorMsg = *errorMsg;
}
return Finished{result, std::move(buildResult)};
return Finished{result};
}
kj::Promise<Result<Goal::WorkResult>> PathSubstitutionGoal::work(bool inBuildSlot) noexcept
Goal::WorkResult PathSubstitutionGoal::work()
{
return (this->*state)(inBuildSlot);
return (this->*state)();
}
kj::Promise<Result<Goal::WorkResult>> PathSubstitutionGoal::init(bool inBuildSlot) noexcept
try {
Goal::WorkResult PathSubstitutionGoal::init()
{
trace("init");
worker.store.addTempRoot(storePath);
/* If the path already exists we're done. */
if (!repair && worker.store.isValidPath(storePath)) {
return {done(ecSuccess, BuildResult::AlreadyValid)};
return done(ecSuccess, BuildResult::AlreadyValid);
}
if (settings.readOnlyMode)
@ -67,14 +61,12 @@ try {
subs = settings.useSubstitutes ? getDefaultSubstituters() : std::list<ref<Store>>();
return tryNext(inBuildSlot);
} catch (...) {
return {std::current_exception()};
return tryNext();
}
kj::Promise<Result<Goal::WorkResult>> PathSubstitutionGoal::tryNext(bool inBuildSlot) noexcept
try {
Goal::WorkResult PathSubstitutionGoal::tryNext()
{
trace("trying next substituter");
cleanup();
@ -89,10 +81,10 @@ try {
/* Hack: don't indicate failure if there were no substituters.
In that case the calling derivation should just do a
build. */
return {done(
return done(
substituterFailed ? ecFailed : ecNoSubstituters,
BuildResult::NoSubstituters,
fmt("path '%s' is required, but there is no substituter that can build it", worker.store.printStorePath(storePath)))};
fmt("path '%s' is required, but there is no substituter that can build it", worker.store.printStorePath(storePath)));
}
sub = subs.front();
@ -105,23 +97,23 @@ try {
if (sub->storeDir == worker.store.storeDir)
assert(subPath == storePath);
} else if (sub->storeDir != worker.store.storeDir) {
return tryNext(inBuildSlot);
return tryNext();
}
try {
// FIXME: make async
info = sub->queryPathInfo(subPath ? *subPath : storePath);
} catch (InvalidPath &) {
return tryNext(inBuildSlot);
return tryNext();
} catch (SubstituterDisabled &) {
if (settings.tryFallback) {
return tryNext(inBuildSlot);
return tryNext();
}
throw;
} catch (Error & e) {
if (settings.tryFallback) {
logError(e.info());
return tryNext(inBuildSlot);
return tryNext();
}
throw;
}
@ -134,18 +126,18 @@ try {
} else {
printError("asked '%s' for '%s' but got '%s'",
sub->getUri(), worker.store.printStorePath(storePath), sub->printStorePath(info->path));
return tryNext(inBuildSlot);
return tryNext();
}
}
/* Update the total expected download size. */
auto narInfo = std::dynamic_pointer_cast<const NarInfo>(info);
maintainExpectedNar = worker.expectedNarSize.addTemporarily(info->narSize);
maintainExpectedNar = std::make_unique<MaintainCount<uint64_t>>(worker.expectedNarSize, info->narSize);
maintainExpectedDownload =
narInfo && narInfo->fileSize
? worker.expectedDownloadSize.addTemporarily(narInfo->fileSize)
? std::make_unique<MaintainCount<uint64_t>>(worker.expectedDownloadSize, narInfo->fileSize)
: nullptr;
/* Bail out early if this substituter lacks a valid
@ -155,7 +147,7 @@ try {
{
warn("ignoring substitute for '%s' from '%s', as it's not signed by any of the keys in 'trusted-public-keys'",
worker.store.printStorePath(storePath), sub->getUri());
return tryNext(inBuildSlot);
return tryNext();
}
/* To maintain the closure invariant, we first have to realise the
@ -163,28 +155,26 @@ try {
WaitForGoals result;
for (auto & i : info->references)
if (i != storePath) /* ignore self-references */
result.goals.insert(worker.goalFactory().makePathSubstitutionGoal(i));
result.goals.insert(worker.makePathSubstitutionGoal(i));
if (result.goals.empty()) {/* to prevent hang (no wake-up event) */
return referencesValid(inBuildSlot);
return referencesValid();
} else {
state = &PathSubstitutionGoal::referencesValid;
return {std::move(result)};
return result;
}
} catch (...) {
return {std::current_exception()};
}
kj::Promise<Result<Goal::WorkResult>> PathSubstitutionGoal::referencesValid(bool inBuildSlot) noexcept
try {
Goal::WorkResult PathSubstitutionGoal::referencesValid()
{
trace("all references realised");
if (nrFailed > 0) {
return {done(
return done(
nrNoSubstituters > 0 || nrIncompleteClosure > 0 ? ecIncompleteClosure : ecFailed,
BuildResult::DependencyFailed,
fmt("some references of path '%s' could not be realised", worker.store.printStorePath(storePath)))};
fmt("some references of path '%s' could not be realised", worker.store.printStorePath(storePath)));
}
for (auto & i : info->references)
@ -192,21 +182,22 @@ try {
assert(worker.store.isValidPath(i));
state = &PathSubstitutionGoal::tryToRun;
return tryToRun(inBuildSlot);
} catch (...) {
return {std::current_exception()};
return ContinueImmediately{};
}
kj::Promise<Result<Goal::WorkResult>> PathSubstitutionGoal::tryToRun(bool inBuildSlot) noexcept
try {
Goal::WorkResult PathSubstitutionGoal::tryToRun()
{
trace("trying to run");
if (!inBuildSlot) {
return {WaitForSlot{}};
/* Make sure that we are allowed to start a substitution. Note that even
if maxSubstitutionJobs == 0, we still allow a substituter to run. This
prevents infinite waiting. */
if (worker.getNrSubstitutions() >= std::max(1U, (unsigned int) settings.maxSubstitutionJobs)) {
return WaitForSlot{};
}
maintainRunningSubstitutions = worker.runningSubstitutions.addTemporarily(1);
maintainRunningSubstitutions = std::make_unique<MaintainCount<uint64_t>>(worker.runningSubstitutions);
outPipe.create();
@ -233,15 +224,15 @@ try {
}
});
worker.childStarted(shared_from_this(), {outPipe.readSide.get()}, true, false);
state = &PathSubstitutionGoal::finished;
return {WaitForWorld{{outPipe.readSide.get()}, true}};
} catch (...) {
return {std::current_exception()};
return StillAlive{};
}
kj::Promise<Result<Goal::WorkResult>> PathSubstitutionGoal::finished(bool inBuildSlot) noexcept
try {
Goal::WorkResult PathSubstitutionGoal::finished()
{
trace("substitute finished");
worker.childTerminated(this);
@ -264,7 +255,7 @@ try {
/* Try the next substitute. */
state = &PathSubstitutionGoal::tryNext;
return tryNext(inBuildSlot);
return ContinueImmediately{};
}
worker.markContentsGood(storePath);
@ -276,15 +267,16 @@ try {
maintainExpectedSubstitutions.reset();
worker.doneSubstitutions++;
worker.doneDownloadSize += maintainExpectedDownload.delta();
maintainExpectedDownload.reset();
if (maintainExpectedDownload) {
auto fileSize = maintainExpectedDownload->delta;
maintainExpectedDownload.reset();
worker.doneDownloadSize += fileSize;
}
worker.doneNarSize += maintainExpectedNar.delta();
worker.doneNarSize += maintainExpectedNar->delta;
maintainExpectedNar.reset();
return {done(ecSuccess, BuildResult::Substituted)};
} catch (...) {
return {std::current_exception()};
return done(ecSuccess, BuildResult::Substituted);
}
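
For context on the error-handling convention that recurs throughout the hunks above — try { ... } catch (...) { return {std::current_exception()}; } inside functions declared noexcept — here is a minimal, self-contained sketch under assumed type shapes. The Result template below is invented for illustration; the real Result and kj::Promise types from the codebase are not reproduced. The idea is that failures are captured as data and rethrown on the consumer's side instead of unwinding out of the noexcept producer.

#include <exception>
#include <iostream>
#include <stdexcept>
#include <variant>

// Assumed shape for illustration only: a Result holds either a value or a
// captured exception.
template<typename T>
class Result
{
    std::variant<T, std::exception_ptr> inner;
public:
    Result(T value) : inner(std::move(value)) {}
    Result(std::exception_ptr e) : inner(std::move(e)) {}

    // Rethrows on the consumer's side, outside the noexcept producer.
    T & value()
    {
        if (auto * e = std::get_if<std::exception_ptr>(&inner))
            std::rethrow_exception(*e);
        return std::get<T>(inner);
    }
};

struct WorkResult { int status = 0; };

// The producer is noexcept: anything thrown is converted into data and handed
// back through the return value instead of escaping into the caller.
Result<WorkResult> doSomeWork(bool fail) noexcept
try {
    if (fail) throw std::runtime_error("substituter went away");
    return WorkResult{0};
} catch (...) {
    return std::current_exception();
}

int main()
{
    try {
        std::cout << doSomeWork(false).value().status << "\n";  // prints 0
        doSomeWork(true).value();                                // rethrows here
    } catch (std::exception & e) {
        std::cout << "caught later: " << e.what() << "\n";
    }
}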

View file

@ -2,7 +2,6 @@
///@file
#include "lock.hh"
#include "notifying-counter.hh"
#include "store-api.hh"
#include "goal.hh"
@ -64,10 +63,10 @@ struct PathSubstitutionGoal : public Goal
*/
Path destPath;
NotifyingCounter<uint64_t>::Bump maintainExpectedSubstitutions,
std::unique_ptr<MaintainCount<uint64_t>> maintainExpectedSubstitutions,
maintainRunningSubstitutions, maintainExpectedNar, maintainExpectedDownload;
typedef kj::Promise<Result<WorkResult>> (PathSubstitutionGoal::*GoalState)(bool inBuildSlot) noexcept;
typedef WorkResult (PathSubstitutionGoal::*GoalState)();
GoalState state;
/**
@ -81,13 +80,7 @@ struct PathSubstitutionGoal : public Goal
std::optional<std::string> errorMsg = {});
public:
PathSubstitutionGoal(
const StorePath & storePath,
Worker & worker,
bool isDependency,
RepairFlag repair = NoRepair,
std::optional<ContentAddress> ca = std::nullopt
);
PathSubstitutionGoal(const StorePath & storePath, Worker & worker, RepairFlag repair = NoRepair, std::optional<ContentAddress> ca = std::nullopt);
~PathSubstitutionGoal();
Finished timedOut(Error && ex) override { abort(); };
@ -101,16 +94,17 @@ public:
return "a$" + std::string(storePath.name()) + "$" + worker.store.printStorePath(storePath);
}
kj::Promise<Result<WorkResult>> work(bool inBuildSlot) noexcept override;
WorkResult work() override;
/**
* The states.
*/
kj::Promise<Result<WorkResult>> init(bool inBuildSlot) noexcept;
kj::Promise<Result<WorkResult>> tryNext(bool inBuildSlot) noexcept;
kj::Promise<Result<WorkResult>> referencesValid(bool inBuildSlot) noexcept;
kj::Promise<Result<WorkResult>> tryToRun(bool inBuildSlot) noexcept;
kj::Promise<Result<WorkResult>> finished(bool inBuildSlot) noexcept;
WorkResult init();
WorkResult tryNext();
WorkResult gotInfo();
WorkResult referencesValid();
WorkResult tryToRun();
WorkResult finished();
/**
* Callback used by the worker to write to the log.

View file

@ -1,6 +1,6 @@
#include "charptr-cast.hh"
#include "machines.hh"
#include "worker.hh"
#include "finally.hh"
#include "substitution-goal.hh"
#include "drv-output-substitution-goal.hh"
#include "local-derivation-goal.hh"
@ -11,13 +11,12 @@
namespace nix {
Worker::Worker(Store & store, Store & evalStore, kj::AsyncIoContext & aio)
Worker::Worker(Store & store, Store & evalStore)
: act(*logger, actRealise)
, actDerivations(*logger, actBuilds)
, actSubstitutions(*logger, actCopyPaths)
, store(store)
, evalStore(evalStore)
, aio(aio)
{
/* Debugging: prevent recursive workers. */
nrLocalBuilds = 0;
@ -61,38 +60,22 @@ std::shared_ptr<DerivationGoal> Worker::makeDerivationGoalCommon(
std::shared_ptr<DerivationGoal> Worker::makeDerivationGoal(const StorePath & drvPath,
const OutputsSpec & wantedOutputs, BuildMode buildMode)
{
return makeDerivationGoalCommon(
drvPath,
wantedOutputs,
[&]() -> std::shared_ptr<DerivationGoal> {
return !dynamic_cast<LocalStore *>(&store)
? std::make_shared<DerivationGoal>(
drvPath, wantedOutputs, *this, running, buildMode
)
: LocalDerivationGoal::makeLocalDerivationGoal(
drvPath, wantedOutputs, *this, running, buildMode
);
}
);
return makeDerivationGoalCommon(drvPath, wantedOutputs, [&]() -> std::shared_ptr<DerivationGoal> {
return !dynamic_cast<LocalStore *>(&store)
? std::make_shared<DerivationGoal>(drvPath, wantedOutputs, *this, buildMode)
: LocalDerivationGoal::makeLocalDerivationGoal(drvPath, wantedOutputs, *this, buildMode);
});
}
std::shared_ptr<DerivationGoal> Worker::makeBasicDerivationGoal(const StorePath & drvPath,
const BasicDerivation & drv, const OutputsSpec & wantedOutputs, BuildMode buildMode)
{
return makeDerivationGoalCommon(
drvPath,
wantedOutputs,
[&]() -> std::shared_ptr<DerivationGoal> {
return !dynamic_cast<LocalStore *>(&store)
? std::make_shared<DerivationGoal>(
drvPath, drv, wantedOutputs, *this, running, buildMode
)
: LocalDerivationGoal::makeLocalDerivationGoal(
drvPath, drv, wantedOutputs, *this, running, buildMode
);
}
);
return makeDerivationGoalCommon(drvPath, wantedOutputs, [&]() -> std::shared_ptr<DerivationGoal> {
return !dynamic_cast<LocalStore *>(&store)
? std::make_shared<DerivationGoal>(drvPath, drv, wantedOutputs, *this, buildMode)
: LocalDerivationGoal::makeLocalDerivationGoal(drvPath, drv, wantedOutputs, *this, buildMode);
});
}
@ -101,7 +84,7 @@ std::shared_ptr<PathSubstitutionGoal> Worker::makePathSubstitutionGoal(const Sto
std::weak_ptr<PathSubstitutionGoal> & goal_weak = substitutionGoals[path];
auto goal = goal_weak.lock(); // FIXME
if (!goal) {
goal = std::make_shared<PathSubstitutionGoal>(path, *this, running, repair, ca);
goal = std::make_shared<PathSubstitutionGoal>(path, *this, repair, ca);
goal_weak = goal;
wakeUp(goal);
}
@ -114,7 +97,7 @@ std::shared_ptr<DrvOutputSubstitutionGoal> Worker::makeDrvOutputSubstitutionGoal
std::weak_ptr<DrvOutputSubstitutionGoal> & goal_weak = drvOutputSubstitutionGoals[id];
auto goal = goal_weak.lock(); // FIXME
if (!goal) {
goal = std::make_shared<DrvOutputSubstitutionGoal>(id, *this, running, repair, ca);
goal = std::make_shared<DrvOutputSubstitutionGoal>(id, *this, repair, ca);
goal_weak = goal;
wakeUp(goal);
}
@ -157,14 +140,20 @@ void Worker::goalFinished(GoalPtr goal, Goal::Finished & f)
{
goal->trace("done");
assert(!goal->exitCode.has_value());
goal->exitCode = f.exitCode;
goal->ex = f.ex;
goal->exitCode = f.result;
permanentFailure |= f.permanentFailure;
timedOut |= f.timedOut;
hashMismatch |= f.hashMismatch;
checkMismatch |= f.checkMismatch;
if (f.ex) {
if (!goal->waiters.empty())
logError(f.ex->info());
else
goal->ex = std::move(f.ex);
}
for (auto & i : goal->waiters) {
if (GoalPtr waiting = i.lock()) {
assert(waiting->waitees.count(goal));
@ -172,11 +161,11 @@ void Worker::goalFinished(GoalPtr goal, Goal::Finished & f)
waiting->trace(fmt("waitee '%s' done; %d left", goal->name, waiting->waitees.size()));
if (f.exitCode != Goal::ecSuccess) ++waiting->nrFailed;
if (f.exitCode == Goal::ecNoSubstituters) ++waiting->nrNoSubstituters;
if (f.exitCode == Goal::ecIncompleteClosure) ++waiting->nrIncompleteClosure;
if (f.result != Goal::ecSuccess) ++waiting->nrFailed;
if (f.result == Goal::ecNoSubstituters) ++waiting->nrNoSubstituters;
if (f.result == Goal::ecIncompleteClosure) ++waiting->nrIncompleteClosure;
if (waiting->waitees.empty() || (f.exitCode == Goal::ecFailed && !settings.keepGoing)) {
if (waiting->waitees.empty() || (f.result == Goal::ecFailed && !settings.keepGoing)) {
/* If we failed and keepGoing is not set, we remove all
remaining waitees. */
for (auto & i : waiting->waitees) {
@ -209,7 +198,6 @@ void Worker::handleWorkResult(GoalPtr goal, Goal::WorkResult how)
dep->waiters.insert(goal);
}
},
[&](Goal::WaitForWorld & w) { childStarted(goal, w.fds, w.inBuildSlot); },
[&](Goal::Finished & f) { goalFinished(goal, f); },
},
how
@ -244,8 +232,20 @@ void Worker::wakeUp(GoalPtr goal)
}
unsigned Worker::getNrLocalBuilds()
{
return nrLocalBuilds;
}
unsigned Worker::getNrSubstitutions()
{
return nrSubstitutions;
}
void Worker::childStarted(GoalPtr goal, const std::set<int> & fds,
bool inBuildSlot)
bool inBuildSlot, bool respectTimeouts)
{
Child child;
child.goal = goal;
@ -253,6 +253,7 @@ void Worker::childStarted(GoalPtr goal, const std::set<int> & fds,
child.fds = fds;
child.timeStarted = child.lastOutput = steady_time_point::clock::now();
child.inBuildSlot = inBuildSlot;
child.respectTimeouts = respectTimeouts;
children.emplace_back(child);
if (inBuildSlot) {
switch (goal->jobCategory()) {
@ -306,8 +307,8 @@ void Worker::waitForBuildSlot(GoalPtr goal)
{
goal->trace("wait for build slot");
bool isSubstitutionGoal = goal->jobCategory() == JobCategory::Substitution;
if ((!isSubstitutionGoal && nrLocalBuilds < settings.maxBuildJobs) ||
(isSubstitutionGoal && nrSubstitutions < settings.maxSubstitutionJobs))
if ((!isSubstitutionGoal && getNrLocalBuilds() < settings.maxBuildJobs) ||
(isSubstitutionGoal && getNrSubstitutions() < settings.maxSubstitutionJobs))
wakeUp(goal); /* we can do it right away */
else
wantingToBuild.insert(goal);
@ -321,38 +322,26 @@ void Worker::waitForAWhile(GoalPtr goal)
}
void Worker::updateStatistics()
void Worker::run(const Goals & _topGoals)
{
// only update progress info while running. this notably excludes updating
// progress info while destroying, which causes the progress bar to assert
if (running && statisticsOutdated) {
actDerivations.progress(
doneBuilds, expectedBuilds + doneBuilds, runningBuilds, failedBuilds
);
actSubstitutions.progress(
doneSubstitutions,
expectedSubstitutions + doneSubstitutions,
runningSubstitutions,
failedSubstitutions
);
act.setExpected(actFileTransfer, expectedDownloadSize + doneDownloadSize);
act.setExpected(actCopyPath, expectedNarSize + doneNarSize);
std::vector<nix::DerivedPath> topPaths;
statisticsOutdated = false;
for (auto & i : _topGoals) {
topGoals.insert(i);
if (auto goal = dynamic_cast<DerivationGoal *>(i.get())) {
topPaths.push_back(DerivedPath::Built {
.drvPath = makeConstantStorePathRef(goal->drvPath),
.outputs = goal->wantedOutputs,
});
} else if (auto goal = dynamic_cast<PathSubstitutionGoal *>(i.get())) {
topPaths.push_back(DerivedPath::Opaque{goal->storePath});
}
}
}
Goals Worker::run(std::function<Goals (GoalFactory &)> req)
{
auto _topGoals = req(goalFactory());
assert(!running);
running = true;
Finally const _stop([&] { running = false; });
updateStatistics();
topGoals = _topGoals;
/* Call queryMissing() to efficiently query substitutes. */
StorePathSet willBuild, willSubstitute, unknown;
uint64_t downloadSize, narSize;
store.queryMissing(topPaths, willBuild, willSubstitute, unknown, downloadSize, narSize);
debug("entered goal loop");
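
One small idiom worth calling out from this hunk: Finally const _stop([&] { running = false; }); is a scope-exit guard that clears the flag however the function leaves. A minimal sketch of the shape its use sites imply (assumed, not the real finally.hh):

#include <utility>

template<typename F>
class Finally
{
    F fun;
public:
    Finally(F fun) : fun(std::move(fun)) {}
    Finally(const Finally &) = delete;
    ~Finally() { fun(); }          // runs on scope exit, including via exceptions
};

void example()
{
    bool running = false;
    running = true;
    Finally const stop([&] { running = false; });
    // ... anything here may return early or throw; the flag is still cleared ...
}
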
@ -375,13 +364,19 @@ Goals Worker::run(std::function<Goals (GoalFactory &)> req)
awake.clear();
for (auto & goal : awake2) {
checkInterrupt();
/* Make sure that we are always allowed to run at least one substitution.
This prevents infinite waiting. */
const bool inSlot = goal->jobCategory() == JobCategory::Substitution
? nrSubstitutions < std::max(1U, (unsigned int) settings.maxSubstitutionJobs)
: nrLocalBuilds < settings.maxBuildJobs;
handleWorkResult(goal, goal->work(inSlot).wait(aio.waitScope).value());
updateStatistics();
handleWorkResult(goal, goal->work());
actDerivations.progress(
doneBuilds, expectedBuilds + doneBuilds, runningBuilds, failedBuilds
);
actSubstitutions.progress(
doneSubstitutions,
expectedSubstitutions + doneSubstitutions,
runningSubstitutions,
failedSubstitutions
);
act.setExpected(actFileTransfer, expectedDownloadSize + doneDownloadSize);
act.setExpected(actCopyPath, expectedNarSize + doneNarSize);
if (topGoals.empty()) break; // stuff may have been cancelled
}
@ -393,6 +388,18 @@ Goals Worker::run(std::function<Goals (GoalFactory &)> req)
if (!children.empty() || !waitingForAWhile.empty())
waitForInput();
else {
if (awake.empty() && 0U == settings.maxBuildJobs)
{
if (getMachines().empty())
throw Error("unable to start any build; either increase '--max-jobs' "
"or enable remote builds."
"\nhttps://docs.lix.systems/manual/lix/stable/advanced-topics/distributed-builds.html");
else
throw Error("unable to start any build; remote machines may not have "
"all required system features."
"\nhttps://docs.lix.systems/manual/lix/stable/advanced-topics/distributed-builds.html");
}
assert(!awake.empty());
}
}
@ -403,8 +410,6 @@ Goals Worker::run(std::function<Goals (GoalFactory &)> req)
assert(!settings.keepGoing || awake.empty());
assert(!settings.keepGoing || wantingToBuild.empty());
assert(!settings.keepGoing || children.empty());
return _topGoals;
}
void Worker::waitForInput()
@ -429,13 +434,11 @@ void Worker::waitForInput()
// Periodically wake up to see if we need to run the garbage collector.
nearest = before + std::chrono::seconds(10);
for (auto & i : children) {
if (auto goal = i.goal.lock()) {
if (!goal->respectsTimeouts()) continue;
if (0 != settings.maxSilentTime)
nearest = std::min(nearest, i.lastOutput + std::chrono::seconds(settings.maxSilentTime));
if (0 != settings.buildTimeout)
nearest = std::min(nearest, i.timeStarted + std::chrono::seconds(settings.buildTimeout));
}
if (!i.respectTimeouts) continue;
if (0 != settings.maxSilentTime)
nearest = std::min(nearest, i.lastOutput + std::chrono::seconds(settings.maxSilentTime));
if (0 != settings.buildTimeout)
nearest = std::min(nearest, i.timeStarted + std::chrono::seconds(settings.buildTimeout));
}
if (nearest != steady_time_point::max()) {
timeout = std::max(1L, (long) std::chrono::duration_cast<std::chrono::seconds>(nearest - before).count());
@ -488,7 +491,7 @@ void Worker::waitForInput()
if (!goal->exitCode.has_value() &&
0 != settings.maxSilentTime &&
goal->respectsTimeouts() &&
j->respectTimeouts &&
after - j->lastOutput >= std::chrono::seconds(settings.maxSilentTime))
{
handleWorkResult(
@ -504,7 +507,7 @@ void Worker::waitForInput()
else if (!goal->exitCode.has_value() &&
0 != settings.buildTimeout &&
goal->respectsTimeouts() &&
j->respectTimeouts &&
after - j->timeStarted >= std::chrono::seconds(settings.buildTimeout))
{
handleWorkResult(

View file

@ -1,7 +1,6 @@
#pragma once
///@file
#include "notifying-counter.hh"
#include "types.hh"
#include "lock.hh"
#include "store-api.hh"
@ -9,7 +8,6 @@
#include "realisation.hh"
#include <future>
#include <kj/async-io.h>
#include <thread>
namespace nix {
@ -31,6 +29,7 @@ struct Child
WeakGoalPtr goal;
Goal * goal2; // ugly hackery
std::set<int> fds;
bool respectTimeouts;
bool inBuildSlot;
/**
* Time we last got output on stdout/stderr
@ -42,62 +41,13 @@ struct Child
/* Forward definition. */
struct HookInstance;
class GoalFactory
{
public:
virtual std::shared_ptr<DerivationGoal> makeDerivationGoal(
const StorePath & drvPath, const OutputsSpec & wantedOutputs, BuildMode buildMode = bmNormal
) = 0;
virtual std::shared_ptr<DerivationGoal> makeBasicDerivationGoal(
const StorePath & drvPath,
const BasicDerivation & drv,
const OutputsSpec & wantedOutputs,
BuildMode buildMode = bmNormal
) = 0;
/**
* @ref SubstitutionGoal "substitution goal"
*/
virtual std::shared_ptr<PathSubstitutionGoal> makePathSubstitutionGoal(
const StorePath & storePath,
RepairFlag repair = NoRepair,
std::optional<ContentAddress> ca = std::nullopt
) = 0;
virtual std::shared_ptr<DrvOutputSubstitutionGoal> makeDrvOutputSubstitutionGoal(
const DrvOutput & id,
RepairFlag repair = NoRepair,
std::optional<ContentAddress> ca = std::nullopt
) = 0;
/**
* Make a goal corresponding to the `DerivedPath`.
*
* It will be a `DerivationGoal` for a `DerivedPath::Built` or
* a `SubstitutionGoal` for a `DerivedPath::Opaque`.
*/
virtual GoalPtr makeGoal(const DerivedPath & req, BuildMode buildMode = bmNormal) = 0;
};
// elaborate hoax to let goals access factory methods while hiding them from the public
class WorkerBase : protected GoalFactory
{
friend struct DerivationGoal;
friend struct PathSubstitutionGoal;
friend class DrvOutputSubstitutionGoal;
protected:
GoalFactory & goalFactory() { return *this; }
};
/**
* The worker class.
*/
class Worker : public WorkerBase
class Worker
{
private:
bool running = false;
/* Note: the worker should only have strong pointers to the
top-level goals. */
@ -203,33 +153,6 @@ private:
*/
void waitForInput();
/**
* Remove a dead goal.
*/
void removeGoal(GoalPtr goal);
/**
* Registers a running child process. `inBuildSlot` means that
* the process counts towards the jobs limit.
*/
void childStarted(GoalPtr goal, const std::set<int> & fds,
bool inBuildSlot);
/**
* Pass current stats counters to the logger for progress bar updates.
*/
void updateStatistics();
bool statisticsOutdated = true;
/**
* Mark statistics as outdated, such that `updateStatistics` will be called.
*/
void updateStatisticsLater()
{
statisticsOutdated = true;
}
public:
const Activity act;
@ -238,7 +161,6 @@ public:
Store & store;
Store & evalStore;
kj::AsyncIoContext & aio;
struct HookState {
std::unique_ptr<HookInstance> instance;
@ -252,21 +174,21 @@ public:
HookState hook;
NotifyingCounter<uint64_t> expectedBuilds{[this] { updateStatisticsLater(); }};
NotifyingCounter<uint64_t> doneBuilds{[this] { updateStatisticsLater(); }};
NotifyingCounter<uint64_t> failedBuilds{[this] { updateStatisticsLater(); }};
NotifyingCounter<uint64_t> runningBuilds{[this] { updateStatisticsLater(); }};
uint64_t expectedBuilds = 0;
uint64_t doneBuilds = 0;
uint64_t failedBuilds = 0;
uint64_t runningBuilds = 0;
NotifyingCounter<uint64_t> expectedSubstitutions{[this] { updateStatisticsLater(); }};
NotifyingCounter<uint64_t> doneSubstitutions{[this] { updateStatisticsLater(); }};
NotifyingCounter<uint64_t> failedSubstitutions{[this] { updateStatisticsLater(); }};
NotifyingCounter<uint64_t> runningSubstitutions{[this] { updateStatisticsLater(); }};
NotifyingCounter<uint64_t> expectedDownloadSize{[this] { updateStatisticsLater(); }};
NotifyingCounter<uint64_t> doneDownloadSize{[this] { updateStatisticsLater(); }};
NotifyingCounter<uint64_t> expectedNarSize{[this] { updateStatisticsLater(); }};
NotifyingCounter<uint64_t> doneNarSize{[this] { updateStatisticsLater(); }};
uint64_t expectedSubstitutions = 0;
uint64_t doneSubstitutions = 0;
uint64_t failedSubstitutions = 0;
uint64_t runningSubstitutions = 0;
uint64_t expectedDownloadSize = 0;
uint64_t doneDownloadSize = 0;
uint64_t expectedNarSize = 0;
uint64_t doneNarSize = 0;
Worker(Store & store, Store & evalStore, kj::AsyncIoContext & aio);
Worker(Store & store, Store & evalStore);
~Worker();
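
The counter hunks above (and the matching ones in substitution-goal.cc) switch between two RAII styles for the progress statistics: a std::unique_ptr<MaintainCount<uint64_t>> member, and a NotifyingCounter<uint64_t>::Bump obtained from addTemporarily(). A rough sketch of both shapes follows; it is simplified and inferred from the use sites in this diff rather than copied from the actual headers. Both rely on RAII, so a goal cannot leak a running/expected count when it exits early. The callback in the notifying variant is what allows statistics to be marked stale (updateStatisticsLater) rather than redrawing the progress display on every increment.

#include <cstdint>
#include <functional>
#include <utility>

// Style 1: bump a plain counter for the lifetime of this object.
template<typename T>
struct MaintainCount
{
    T & counter;
    T delta;
    MaintainCount(T & counter, T delta = 1) : counter(counter), delta(delta) { counter += delta; }
    ~MaintainCount() { counter -= delta; }
};

// Style 2: a counter that runs a callback on every change, handing out movable
// Bump tokens from addTemporarily().
template<typename T>
class NotifyingCounter
{
    T value = 0;
    std::function<void()> changed;

    void add(T d) { value += d; if (changed) changed(); }
    void sub(T d) { value -= d; if (changed) changed(); }

public:
    explicit NotifyingCounter(std::function<void()> changed) : changed(std::move(changed)) {}

    class Bump
    {
        NotifyingCounter * parent = nullptr;
        T amount = 0;
        friend class NotifyingCounter;
        Bump(NotifyingCounter * parent, T amount) : parent(parent), amount(amount) {}
    public:
        Bump() = default;
        Bump(Bump && other) noexcept { *this = std::move(other); }
        Bump & operator=(Bump && other) noexcept
        {
            reset();
            std::swap(parent, other.parent);
            std::swap(amount, other.amount);
            return *this;
        }
        T delta() const { return amount; }
        void reset() { if (parent) parent->sub(amount); parent = nullptr; amount = 0; }
        ~Bump() { reset(); }
    };

    Bump addTemporarily(T d) { add(d); return Bump(this, d); }
    operator T() const { return value; }
};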
/**
@ -280,18 +202,19 @@ private:
std::shared_ptr<DerivationGoal> makeDerivationGoalCommon(
const StorePath & drvPath, const OutputsSpec & wantedOutputs,
std::function<std::shared_ptr<DerivationGoal>()> mkDrvGoal);
public:
std::shared_ptr<DerivationGoal> makeDerivationGoal(
const StorePath & drvPath,
const OutputsSpec & wantedOutputs, BuildMode buildMode = bmNormal) override;
const OutputsSpec & wantedOutputs, BuildMode buildMode = bmNormal);
std::shared_ptr<DerivationGoal> makeBasicDerivationGoal(
const StorePath & drvPath, const BasicDerivation & drv,
const OutputsSpec & wantedOutputs, BuildMode buildMode = bmNormal) override;
const OutputsSpec & wantedOutputs, BuildMode buildMode = bmNormal);
/**
* @ref SubstitutionGoal "substitution goal"
*/
std::shared_ptr<PathSubstitutionGoal> makePathSubstitutionGoal(const StorePath & storePath, RepairFlag repair = NoRepair, std::optional<ContentAddress> ca = std::nullopt) override;
std::shared_ptr<DrvOutputSubstitutionGoal> makeDrvOutputSubstitutionGoal(const DrvOutput & id, RepairFlag repair = NoRepair, std::optional<ContentAddress> ca = std::nullopt) override;
std::shared_ptr<PathSubstitutionGoal> makePathSubstitutionGoal(const StorePath & storePath, RepairFlag repair = NoRepair, std::optional<ContentAddress> ca = std::nullopt);
std::shared_ptr<DrvOutputSubstitutionGoal> makeDrvOutputSubstitutionGoal(const DrvOutput & id, RepairFlag repair = NoRepair, std::optional<ContentAddress> ca = std::nullopt);
/**
* Make a goal corresponding to the `DerivedPath`.
@ -299,9 +222,31 @@ private:
* It will be a `DerivationGoal` for a `DerivedPath::Built` or
* a `SubstitutionGoal` for a `DerivedPath::Opaque`.
*/
GoalPtr makeGoal(const DerivedPath & req, BuildMode buildMode = bmNormal) override;
GoalPtr makeGoal(const DerivedPath & req, BuildMode buildMode = bmNormal);
/**
* Remove a dead goal.
*/
void removeGoal(GoalPtr goal);
/**
* Return the number of local build processes currently running (but not
* remote builds via the build hook).
*/
unsigned int getNrLocalBuilds();
/**
* Return the number of substitution processes currently running.
*/
unsigned int getNrSubstitutions();
/**
* Registers a running child process. `inBuildSlot` means that
* the process counts towards the jobs limit.
*/
void childStarted(GoalPtr goal, const std::set<int> & fds,
bool inBuildSlot, bool respectTimeouts);
public:
/**
* Unregisters a running child process.
*/
@ -310,7 +255,7 @@ public:
/**
* Loop until the specified top-level goals have finished.
*/
Goals run(std::function<Goals (GoalFactory &)> req);
void run(const Goals & topGoals);
/***
* The exit status in case of failure.

View file

@ -1,5 +1,4 @@
#include "buildenv.hh"
#include "strings.hh"
#include <sys/stat.h>
#include <sys/types.h>

View file

@ -3,7 +3,6 @@
#include "store-api.hh"
#include "archive.hh"
#include "compression.hh"
#include "strings.hh"
namespace nix {

View file

@ -20,7 +20,6 @@ namespace nix {
{ \
return LengthPrefixedProtoHelper<CommonProto, T >::read(store, conn); \
} \
/* NOLINTNEXTLINE(bugprone-macro-parentheses) */ \
TEMPLATE [[nodiscard]] WireFormatGenerator CommonProto::Serialise< T >::write(const Store & store, CommonProto::WriteConn conn, const T & t) \
{ \
return LengthPrefixedProtoHelper<CommonProto, T >::write(store, conn, t); \

View file

@ -1,7 +1,6 @@
#include "args.hh"
#include "content-address.hh"
#include "split.hh"
#include "strings.hh"
namespace nix {

View file

@ -2,7 +2,7 @@
#include "monitor-fd.hh"
#include "worker-protocol.hh"
#include "worker-protocol-impl.hh"
#include "build-result.hh" // IWYU pragma: keep
#include "build-result.hh"
#include "store-api.hh"
#include "store-cast.hh"
#include "gc-store.hh"
@ -12,7 +12,6 @@
#include "finally.hh"
#include "archive.hh"
#include "derivations.hh"
#include "strings.hh"
#include "args.hh"
#include <sstream>

View file

@ -3,12 +3,11 @@
#include "store-api.hh"
#include "globals.hh"
#include "types.hh"
#include "split.hh"
#include "common-protocol.hh"
#include "common-protocol-impl.hh"
#include "fs-accessor.hh"
#include "json-utils.hh"
#include "strings.hh"
#include "backed-string-view.hh"
#include <boost/container/small_vector.hpp>
#include <nlohmann/json.hpp>

View file

@ -5,7 +5,6 @@
#include "path.hh"
#include "outputs-spec.hh"
#include "comparator.hh"
#include "ref.hh"
#include <variant>
@ -79,12 +78,10 @@ struct SingleDerivedPathBuilt {
DECLARE_CMP(SingleDerivedPathBuilt);
};
namespace derived_path::detail {
using SingleDerivedPathRaw = std::variant<
using _SingleDerivedPathRaw = std::variant<
DerivedPathOpaque,
SingleDerivedPathBuilt
>;
}
/**
* A "derived path" is a very simple sort of expression (not a Nix
@ -97,8 +94,8 @@ using SingleDerivedPathRaw = std::variant<
* - built, in which case it is a pair of a derivation path and an
* output name.
*/
struct SingleDerivedPath : derived_path::detail::SingleDerivedPathRaw {
using Raw = derived_path::detail::SingleDerivedPathRaw;
struct SingleDerivedPath : _SingleDerivedPathRaw {
using Raw = _SingleDerivedPathRaw;
using Raw::Raw;
using Opaque = DerivedPathOpaque;
@ -204,12 +201,10 @@ struct DerivedPathBuilt {
DECLARE_CMP(DerivedPathBuilt);
};
namespace derived_path::detail {
using DerivedPathRaw = std::variant<
using _DerivedPathRaw = std::variant<
DerivedPathOpaque,
DerivedPathBuilt
>;
}
/**
* A "derived path" is a very simple sort of expression that evaluates
@ -221,8 +216,8 @@ using DerivedPathRaw = std::variant<
* - built, in which case it is a pair of a derivation path and some
* output names.
*/
struct DerivedPath : derived_path::detail::DerivedPathRaw {
using Raw = derived_path::detail::DerivedPathRaw;
struct DerivedPath : _DerivedPathRaw {
using Raw = _DerivedPathRaw;
using Raw::Raw;
using Opaque = DerivedPathOpaque;
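
A likely motivation for the rename in the hunk above (the diff itself does not state one): identifiers such as _DerivedPathRaw — an underscore followed by an uppercase letter — are reserved for the implementation in C++ ([lex.name]), so defining them in user code is formally off-limits; moving the alias into a detail namespace expresses the same "internal" intent with a conforming name. A standalone sketch with made-up example names:

#include <string>
#include <variant>

// Reserved in any scope: identifiers beginning with an underscore followed by
// an uppercase letter; defining one is formally undefined behaviour.
// using _ExampleRaw = std::variant<int, std::string>;

// Conforming alternative mirroring the hunk: an ordinary name inside a
// namespace that is understood, by convention, as implementation detail.
namespace example::detail {
    using ExampleRaw = std::variant<int, std::string>;
}

struct Example : example::detail::ExampleRaw {
    using Raw = example::detail::ExampleRaw;
    using Raw::Raw;
};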

View file

@ -1,4 +1,3 @@
#include "dummy-store.hh"
#include "store-api.hh"
namespace nix {
@ -74,8 +73,6 @@ struct DummyStore : public virtual DummyStoreConfig, public virtual Store
{ unsupported("getFSAccessor"); }
};
void registerDummyStore() {
StoreImplementations::add<DummyStore, DummyStoreConfig>();
}
static RegisterStoreImplementation<DummyStore, DummyStoreConfig> regDummyStore;
}
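
Both registration styles appear in this file: a file-scope RegisterStoreImplementation<...> object, and an explicit registerDummyStore() function invoked from initLibStore() (see the globals.cc hunks below). A simplified sketch of the static-registrar idiom, with an assumed registry API rather than the real Lix one:

#include <functional>
#include <map>
#include <memory>
#include <string>

struct Store { virtual ~Store() = default; };

using StoreFactory = std::function<std::unique_ptr<Store>()>;

// Meyers singleton: constructed on first use, so registrars may run in any order.
inline std::map<std::string, StoreFactory> & storeRegistry()
{
    static std::map<std::string, StoreFactory> registry;
    return registry;
}

template<typename T>
struct RegisterStoreImplementation
{
    explicit RegisterStoreImplementation(std::string scheme)
    {
        storeRegistry().emplace(std::move(scheme), [] { return std::make_unique<T>(); });
    }
};

struct DummyStore : Store {};

// One file-scope instance is enough to make the store type discoverable...
static RegisterStoreImplementation<DummyStore> regDummyStore{"dummy"};

One common reason to prefer the explicit-function variant: static registrars only run if their translation unit is actually linked in, which is not guaranteed when the stores live in a static library, whereas a register function called from a known init path is predictable.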

View file

@ -1,8 +0,0 @@
#pragma once
///@file
namespace nix {
void registerDummyStore();
}

View file

@ -5,7 +5,6 @@
#include "s3.hh"
#include "signals.hh"
#include "compression.hh"
#include "strings.hh"
#if ENABLE_S3
#include <aws/core/client/ClientConfiguration.h>
@ -337,7 +336,7 @@ struct curlFileTransfer : public FileTransfer
// wrapping user `callback`s instead is not possible because the
// Callback api expects std::functions, and copying Callbacks is
// not possible due the promises they hold.
if (code == CURLE_OK && !dataCallback && result.data.length() > 0) {
if (code == CURLE_OK && !dataCallback) {
result.data = decompress(encoding, result.data);
}

View file

@ -2,7 +2,6 @@
///@file
#include "box_ptr.hh"
#include "ref.hh"
#include "logging.hh"
#include "serialise.hh"
#include "types.hh"

View file

@ -5,7 +5,6 @@
#include "signals.hh"
#include "finally.hh"
#include "unix-domain-socket.hh"
#include "strings.hh"
#include <queue>
#include <regex>

View file

@ -33,16 +33,6 @@
#include <sys/sysctl.h>
#endif
// All built-in store implementations.
#include "dummy-store.hh"
#include "http-binary-cache-store.hh"
#include "legacy-ssh-store.hh"
#include "local-binary-cache-store.hh"
#include "local-store.hh"
#include "s3-binary-cache-store.hh"
#include "ssh-store.hh"
#include "uds-remote-store.hh"
namespace nix {
@ -126,30 +116,29 @@ Settings::Settings()
void loadConfFile()
{
auto applyConfigFile = [&](const ApplyConfigOptions & options) {
auto applyConfigFile = [&](const Path & path) {
try {
std::string contents = readFile(*options.path);
globalConfig.applyConfig(contents, options);
} catch (SysError &) {
}
std::string contents = readFile(path);
globalConfig.applyConfig(contents, path);
} catch (SysError &) { }
};
applyConfigFile(ApplyConfigOptions{.path = settings.nixConfDir + "/nix.conf"});
applyConfigFile(settings.nixConfDir + "/nix.conf");
/* We only want to send overrides to the daemon, i.e. stuff from
~/.nix/nix.conf or the command line. */
globalConfig.resetOverridden();
auto files = settings.nixUserConfFiles;
auto home = getHome();
for (auto file = files.rbegin(); file != files.rend(); file++) {
applyConfigFile(ApplyConfigOptions{.path = *file, .home = home});
applyConfigFile(*file);
}
auto nixConfEnv = getEnv("NIX_CONFIG");
if (nixConfEnv.has_value()) {
globalConfig.applyConfig(nixConfEnv.value(), ApplyConfigOptions{.fromEnvVar = true});
globalConfig.applyConfig(nixConfEnv.value(), "NIX_CONFIG");
}
}
std::vector<Path> getUserConfigFiles()
@ -269,33 +258,13 @@ Path Settings::getDefaultSSLCertFile()
const std::string nixVersion = PACKAGE_VERSION;
void to_json(nlohmann::json & j, const SandboxMode & e)
{
if (e == SandboxMode::smEnabled) {
j = true;
} else if (e == SandboxMode::smRelaxed) {
j = "relaxed";
} else if (e == SandboxMode::smDisabled) {
j = false;
} else {
abort();
}
}
NLOHMANN_JSON_SERIALIZE_ENUM(SandboxMode, {
{SandboxMode::smEnabled, true},
{SandboxMode::smRelaxed, "relaxed"},
{SandboxMode::smDisabled, false},
});
void from_json(const nlohmann::json & j, SandboxMode & e)
{
if (j == true) {
e = SandboxMode::smEnabled;
} else if (j == "relaxed") {
e = SandboxMode::smRelaxed;
} else if (j == false) {
e = SandboxMode::smDisabled;
} else {
throw Error("Invalid sandbox mode '%s'", std::string(j));
}
}
template<> SandboxMode BaseSetting<SandboxMode>::parse(const std::string & str, const ApplyConfigOptions & options) const
template<> SandboxMode BaseSetting<SandboxMode>::parse(const std::string & str) const
{
if (str == "true") return smEnabled;
else if (str == "relaxed") return smRelaxed;
@ -338,7 +307,7 @@ template<> void BaseSetting<SandboxMode>::convertToArg(Args & args, const std::s
});
}
unsigned int MaxBuildJobsSetting::parse(const std::string & str, const ApplyConfigOptions & options) const
unsigned int MaxBuildJobsSetting::parse(const std::string & str) const
{
if (str == "auto") return std::max(1U, std::thread::hardware_concurrency());
else {
@ -346,15 +315,15 @@ unsigned int MaxBuildJobsSetting::parse(const std::string & str, const ApplyConf
return *n;
else
throw UsageError("configuration setting '%s' should be 'auto' or an integer", name);
}
}
}
Paths PluginFilesSetting::parse(const std::string & str, const ApplyConfigOptions & options) const
Paths PluginFilesSetting::parse(const std::string & str) const
{
if (pluginsLoaded)
throw UsageError("plugin-files set after plugins were loaded, you may need to move the flag before the subcommand");
return BaseSetting<Paths>::parse(str, options);
return BaseSetting<Paths>::parse(str);
}
@ -427,17 +396,6 @@ static void preloadNSS()
});
}
static void registerStoreImplementations() {
registerDummyStore();
registerHttpBinaryCacheStore();
registerLegacySSHStore();
registerLocalBinaryCacheStore();
registerLocalStore();
registerS3BinaryCacheStore();
registerSSHStore();
registerUDSRemoteStore();
}
static bool initLibStoreDone = false;
void assertLibStoreInitialized() {
@ -475,8 +433,6 @@ void initLibStore() {
unsetenv("TMPDIR");
#endif
registerStoreImplementations();
initLibStoreDone = true;
}
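
The SandboxMode hunk earlier in this file swaps between hand-written to_json/from_json overloads and nlohmann's NLOHMANN_JSON_SERIALIZE_ENUM macro. Below is a small, standalone usage example of the macro, with the enum re-declared for illustration. One behavioural difference worth noting: the macro maps unrecognised input to the first listed pair when deserialising, whereas the hand-written from_json above rejects it with an error.

#include <cassert>
#include <nlohmann/json.hpp>

enum SandboxMode { smEnabled, smRelaxed, smDisabled };

NLOHMANN_JSON_SERIALIZE_ENUM(SandboxMode, {
    {smEnabled, true},
    {smRelaxed, "relaxed"},
    {smDisabled, false},
});

int main()
{
    nlohmann::json j = smRelaxed;           // serialises to "relaxed"
    assert(j.get<SandboxMode>() == smRelaxed);

    nlohmann::json off = false;             // deserialises to smDisabled
    assert(off.get<SandboxMode>() == smDisabled);
}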

View file

@ -14,9 +14,6 @@ namespace nix {
typedef enum { smEnabled, smRelaxed, smDisabled } SandboxMode;
void to_json(nlohmann::json & j, const SandboxMode & e);
void from_json(const nlohmann::json & j, SandboxMode & e);
struct MaxBuildJobsSetting : public BaseSetting<unsigned int>
{
MaxBuildJobsSetting(Config * options,
@ -29,7 +26,7 @@ struct MaxBuildJobsSetting : public BaseSetting<unsigned int>
options->addSetting(this);
}
unsigned int parse(const std::string & str, const ApplyConfigOptions & options) const override;
unsigned int parse(const std::string & str) const override;
};
struct PluginFilesSetting : public BaseSetting<Paths>
@ -46,7 +43,7 @@ struct PluginFilesSetting : public BaseSetting<Paths>
options->addSetting(this);
}
Paths parse(const std::string & str, const ApplyConfigOptions & options) const override;
Paths parse(const std::string & str) const override;
};
const uint32_t maxIdsPerBuild =
@ -637,13 +634,13 @@ public:
line.
)"};
PathsSetting<std::optional<Path>> diffHook{
OptionalPathSetting diffHook{
this, std::nullopt, "diff-hook",
R"(
Path to an executable capable of diffing build results. The hook is
executed if `run-diff-hook` is true, and the output of a build is
known to not be the same. This program is not executed to determine
if two results are the same.
Absolute path to an executable capable of diffing build
results. The hook is executed if `run-diff-hook` is true, and the
output of a build is known to not be the same. This program is not
executed to determine if two results are the same.
The diff hook is executed by the same user and group who ran the
build. However, the diff hook does not have write access to the
@ -1091,7 +1088,6 @@ void loadConfFile();
// Used by the Settings constructor
std::vector<Path> getUserConfigFiles();
std::vector<Path> getHomeConfigFile();
extern const std::string nixVersion;

View file

@ -1,4 +1,3 @@
#include "http-binary-cache-store.hh"
#include "binary-cache-store.hh"
#include "filetransfer.hh"
#include "globals.hh"
@ -195,8 +194,6 @@ protected:
}
};
void registerHttpBinaryCacheStore() {
StoreImplementations::add<HttpBinaryCacheStore, HttpBinaryCacheStoreConfig>();
}
static RegisterStoreImplementation<HttpBinaryCacheStore, HttpBinaryCacheStoreConfig> regHttpBinaryCacheStore;
}

View file

@ -1,8 +0,0 @@
#pragma once
///@file
namespace nix {
void registerHttpBinaryCacheStore();
}

View file

@ -1,4 +1,4 @@
#include "legacy-ssh-store.hh"
#include "ssh-store-config.hh"
#include "archive.hh"
#include "pool.hh"
#include "remote-store.hh"
@ -8,8 +8,6 @@
#include "store-api.hh"
#include "path-with-outputs.hh"
#include "ssh.hh"
#include "ssh-store.hh"
#include "strings.hh"
#include "derivations.hh"
namespace nix {
@ -414,8 +412,6 @@ public:
{ unsupported("queryRealisation"); }
};
void registerLegacySSHStore() {
StoreImplementations::add<LegacySSHStore, LegacySSHStoreConfig>();
}
static RegisterStoreImplementation<LegacySSHStore, LegacySSHStoreConfig> regLegacySSHStore;
}

View file

@ -1,8 +0,0 @@
#pragma once
///@file
namespace nix {
void registerLegacySSHStore();
}

View file

@ -61,9 +61,9 @@ template<class Inner, typename... Ts>
LENGTH_PREFIXED_PROTO_HELPER(Inner, std::tuple<Ts...>);
template<class Inner, typename K, typename V>
#define DONT_SUBSTITUTE_KV_TYPE std::map<K, V>
LENGTH_PREFIXED_PROTO_HELPER(Inner, DONT_SUBSTITUTE_KV_TYPE);
#undef DONT_SUBSTITUTE_KV_TYPE
#define _X std::map<K, V>
LENGTH_PREFIXED_PROTO_HELPER(Inner, _X);
#undef _X
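
The _X / DONT_SUBSTITUTE_KV_TYPE indirection above exists because of general preprocessor behaviour: macro arguments are split on top-level commas, so passing std::map<K, V> directly would be read as two arguments. A standalone illustration (the macro and names here are made up for the example):

#include <cstddef>
#include <map>
#include <string>

#define DECLARE_SIZE_FN(T) std::size_t sizeOf(const T & v) { return v.size(); }

// DECLARE_SIZE_FN(std::map<std::string, int>)   // error: macro expects 1 argument, got 2

// Hiding the comma behind another macro (or a type alias) makes it a single
// argument again; it is only expanded after the arguments have been separated.
#define STRING_INT_MAP std::map<std::string, int>
DECLARE_SIZE_FN(STRING_INT_MAP)
#undef STRING_INT_MAP

The #define/#undef form is used above, rather than a using alias, presumably because the K and V template parameters must stay in scope between the template<> head and the declaration it prefixes.
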
template<class Inner, typename T>
std::vector<T>

View file

@ -1,4 +1,3 @@
#include "local-binary-cache-store.hh"
#include "binary-cache-store.hh"
#include "globals.hh"
#include "nar-info-disk-cache.hh"
@ -125,8 +124,6 @@ std::set<std::string> LocalBinaryCacheStore::uriSchemes()
return {"file"};
}
void registerLocalBinaryCacheStore() {
StoreImplementations::add<LocalBinaryCacheStore, LocalBinaryCacheStoreConfig>();
}
static RegisterStoreImplementation<LocalBinaryCacheStore, LocalBinaryCacheStoreConfig> regLocalBinaryCacheStore;
}

View file

@ -1,8 +0,0 @@
#pragma once
///@file
namespace nix {
void registerLocalBinaryCacheStore();
}

View file

@ -11,21 +11,21 @@ struct LocalFSStoreConfig : virtual StoreConfig
{
using StoreConfig::StoreConfig;
const PathsSetting<std::optional<Path>> rootDir{this, std::nullopt,
const OptionalPathSetting rootDir{this, std::nullopt,
"root",
"Directory prefixed to all other paths."};
const PathsSetting<Path> stateDir{this,
const PathSetting stateDir{this,
rootDir.get() ? *rootDir.get() + "/nix/var/nix" : settings.nixStateDir,
"state",
"Directory where Lix will store state."};
const PathsSetting<Path> logDir{this,
const PathSetting logDir{this,
rootDir.get() ? *rootDir.get() + "/nix/var/log/nix" : settings.nixLogDir,
"log",
"directory where Lix will store log files."};
const PathsSetting<Path> realStoreDir{this,
const PathSetting realStoreDir{this,
rootDir.get() ? *rootDir.get() + "/nix/store" : storeDir, "real",
"Physical path of the Nix store."};
};

View file

@ -10,7 +10,6 @@
#include "signals.hh"
#include "finally.hh"
#include "compression.hh"
#include "strings.hh"
#include <algorithm>
#include <cstring>
@ -664,20 +663,6 @@ static void canonicalisePathMetaData_(
if (!(S_ISREG(st.st_mode) || S_ISDIR(st.st_mode) || S_ISLNK(st.st_mode)))
throw Error("file '%1%' has an unsupported type", path);
/* Fail if the file is not owned by the build user. This prevents
us from messing up the ownership/permissions of files
hard-linked into the output (e.g. "ln /etc/shadow $out/foo").
However, ignore files that we chown'ed ourselves previously to
ensure that we don't fail on hard links within the same build
(i.e. "touch $out/foo; ln $out/foo $out/bar"). */
if (uidRange && (st.st_uid < uidRange->first || st.st_uid > uidRange->second)) {
if (S_ISDIR(st.st_mode) || !inodesSeen.count(Inode(st.st_dev, st.st_ino)))
throw BuildError("invalid ownership on file '%1%'", path);
mode_t mode = st.st_mode & ~S_IFMT;
assert(S_ISLNK(st.st_mode) || (st.st_uid == geteuid() && (mode == 0444 || mode == 0555) && st.st_mtime == mtimeStore));
return;
}
#if __linux__
/* Remove extended attributes / ACLs. */
ssize_t eaSize = llistxattr(path.c_str(), nullptr, 0);
@ -691,8 +676,6 @@ static void canonicalisePathMetaData_(
if ((eaSize = llistxattr(path.c_str(), eaBuf.data(), eaBuf.size())) < 0)
throw SysError("querying extended attributes of '%s'", path);
if (S_ISREG(st.st_mode) || S_ISDIR(st.st_mode))
chmod(path.c_str(), st.st_mode | S_IWUSR);
for (auto & eaName: tokenizeString<Strings>(std::string(eaBuf.data(), eaSize), std::string("\000", 1))) {
if (settings.ignoredAcls.get().count(eaName)) continue;
if (lremovexattr(path.c_str(), eaName.c_str()) == -1)
@ -701,6 +684,20 @@ static void canonicalisePathMetaData_(
}
#endif
/* Fail if the file is not owned by the build user. This prevents
us from messing up the ownership/permissions of files
hard-linked into the output (e.g. "ln /etc/shadow $out/foo").
However, ignore files that we chown'ed ourselves previously to
ensure that we don't fail on hard links within the same build
(i.e. "touch $out/foo; ln $out/foo $out/bar"). */
if (uidRange && (st.st_uid < uidRange->first || st.st_uid > uidRange->second)) {
if (S_ISDIR(st.st_mode) || !inodesSeen.count(Inode(st.st_dev, st.st_ino)))
throw BuildError("invalid ownership on file '%1%'", path);
mode_t mode = st.st_mode & ~S_IFMT;
assert(S_ISLNK(st.st_mode) || (st.st_uid == geteuid() && (mode == 0444 || mode == 0555) && st.st_mtime == mtimeStore));
return;
}
inodesSeen.insert(Inode(st.st_dev, st.st_ino));
canonicaliseTimestampAndPermissions(path, st);
@ -1218,7 +1215,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
bool narRead = false;
Finally cleanup = [&]() {
if (!narRead) {
NARParseVisitor sink;
ParseSink sink;
try {
parseDump(sink, source);
} catch (...) {

Some files were not shown because too many files have changed in this diff.