forked from lix-project/lix
Compare commits (repl-overl... against main)
44 commits
Author | SHA1
---|---
Maximilian Bosch | 31954b5136
jade | 8a6b84df14
Maximilian Bosch | eccbe9586a
alois31 | 5f298f74c9
jade | 79246a3733
jade | 789b19a0cf
Rebecca Turner | 0943b214c9
alois31 | 2afdf1ed66
jade | ed381cd58a
jade | 4046e019ca
Rebecca Turner | 8ab5743904
Rebecca Turner | 7ae0409989
Maximilian Bosch | 80202e3ca3
jade | 727258241f
jade | 5246cea6c8
jade | 8f88590d13
alois31 | 3f07c65510
jade | b7fc37b015
jade | ca1dc3f70b
alois31 | b2fc007811
alois31 | 82aa1ccab4
Rebecca Turner | df0137226d
jade | 81c2e0ac8e
Rebecca Turner | 24db81eaf2
Rebecca Turner | cc183fdbc1
Rebecca Turner | f5ae72d445
Rebecca Turner | 6de6cae3e7
alois31 | 8f7ab26f96
eldritch horrors | c14486ae8d
alois31 | e9505dcc5a
eldritch horrors | f2a49032a6
eldritch horrors | 92eccfbd68
alois31 | 4715d557ef
alois31 | 991d8ce275
Rebecca Turner | 72589e7032
Rebecca Turner | 644176a631
alois31 | 63ee2cdda3
alois31 | d7c37324bb
Rebecca Turner | 75c0de3e3c
Rebecca Turner | fc4a160878
Rebecca Turner | b7b1b9723f
Rebecca Turner | 9d8f433246
Rebecca Turner | 742303dc3a
alois31 | de552c42cb
@@ -2,7 +2,7 @@
 name: Missing or incorrect documentation
 about: Help us improve the reference manual
 title: ''
-labels: documentation
+labels: docs
 assignees: ''

 ---
@@ -19,10 +19,10 @@ assignees: ''

 <!-- make sure this issue is not redundant or obsolete -->

-- [ ] checked [latest Lix manual] \([source]\)
+- [ ] checked [latest Lix manual] or its [source code]
 - [ ] checked [documentation issues] and [recent documentation changes] for possible duplicates

-[latest Nix manual]: https://docs.lix.systems/manual/lix/nightly
-[source]: https://git.lix.systems/lix-project/lix/src/main/doc/manual/src
+[latest Lix manual]: https://docs.lix.systems/manual/lix/nightly
+[source code]: https://git.lix.systems/lix-project/lix/src/main/doc/manual/src
 [documentation issues]: https://git.lix.systems/lix-project/lix/issues?labels=151&state=all
 [recent documentation changes]: https://gerrit.lix.systems/q/p:lix+path:%22%5Edoc/manual/.*%22
doc/manual/rl-next/alt-left-and-alt-right-in-repl.md | 10 (new file)

@@ -0,0 +1,10 @@
---
synopsis: "`Alt+Left` and `Alt+Right` go back/forwards by words in `nix repl`"
issues: [fj#501]
cls: [1883]
category: Fixes
credits: 9999years
---

`nix repl` now recognizes `Alt+Left` and `Alt+Right` for navigating by words
when entering input in `nix repl` on more terminals/platforms.
doc/manual/rl-next/readline-support-removed.md | 17 (new file)

@@ -0,0 +1,17 @@
---
synopsis: readline support removed
cls: [1885]
category: Packaging
credits: [9999years]
---

Support for building Lix with [`readline`][readline] instead of
[`editline`][editline] has been removed. `readline` support hasn't worked for a
long time (attempting to use it would lead to build errors) and would make Lix
subject to the GPL if it did work. In the future, we're hoping to replace
`editline` with [`rustyline`][rustyline] for improved ergonomics in the `nix
repl`.

[readline]: https://en.wikipedia.org/wiki/GNU_Readline
[editline]: https://github.com/troglobit/editline
[rustyline]: https://github.com/kkawakam/rustyline
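For readers unfamiliar with the `rustyline` crate mentioned in the note above, the following is a minimal, purely illustrative sketch of a rustyline-based prompt loop. It is not part of this changeset and says nothing about how Lix would actually integrate the library; the prompt string and the echoing behavior are made-up placeholders.

```rust
use rustyline::error::ReadlineError;
use rustyline::DefaultEditor;

fn main() -> rustyline::Result<()> {
    // Line editor with rustyline's default keybindings,
    // including word-wise cursor movement.
    let mut rl = DefaultEditor::new()?;
    loop {
        match rl.readline("nix-repl> ") {
            Ok(line) => {
                // Remember the line so Up/Down history navigation works.
                let _ = rl.add_history_entry(line.as_str());
                println!("read: {line}");
            }
            // Ctrl-C / Ctrl-D end the loop instead of aborting the process.
            Err(ReadlineError::Interrupted) | Err(ReadlineError::Eof) => break,
            Err(err) => return Err(err),
        }
    }
    Ok(())
}
```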
flake.nix | 13

@@ -99,9 +99,10 @@
 ];

 stdenvs = [
-"gccStdenv"
+# see assertion in package.nix why these two are disabled
+# "stdenv"
+# "gccStdenv"
 "clangStdenv"
-"stdenv"
 "libcxxStdenv"
 "ccacheStdenv"
 ];

@@ -121,7 +122,11 @@
 name = "${stdenvName}Packages";
 value = f stdenvName;
 }) stdenvs
-);
+)
+// {
+# TODO delete this and reënable gcc stdenvs once gcc compiles kj coros correctly
+stdenvPackages = f "clangStdenv";
+};

 # Memoize nixpkgs for different platforms for efficiency.
 nixpkgsFor = forAllSystems (

@@ -212,7 +217,7 @@

 # A Nixpkgs overlay that overrides the 'nix' and
 # 'nix.perl-bindings' packages.
-overlays.default = overlayFor (p: p.stdenv);
+overlays.default = overlayFor (p: p.clangStdenv);

 hydraJobs = {
 # Binary package for various platforms.
meson.build | 11

@@ -167,10 +167,18 @@ endif
 # frees one would expect when the objects are unique_ptrs. these problems
 # often show up as memory corruption when nesting generators (since we do
 # treat generators like owned memory) and will cause inexplicable crashs.
+#
+# gcc 13 does not compile capnp coroutine code correctly. a newer version
+# may fix this. (cf. https://gcc.gnu.org/bugzilla/show_bug.cgi?id=102051)
+# we allow gcc 13 here anyway because CI uses it for clang-tidy, and when
+# the compiler crashes outright if won't produce any bad binaries either.
 assert(
 cxx.get_id() != 'gcc' or cxx.version().version_compare('>=13'),
-'GCC 12 and earlier are known to miscompile lix coroutines, use GCC 13 or clang.'
+'GCC is known to miscompile coroutines, use clang.'
 )
+if cxx.get_id() == 'gcc'
+warning('GCC is known to crash while building coroutines, use clang.')
+endif


 # Translate some historical and Mesony CPU names to Lixy CPU names.

@@ -229,6 +237,7 @@ configdata += {
 }

 boost = dependency('boost', required : true, modules : ['container'], include_type : 'system')
+kj = dependency('kj-async', required : true, include_type : 'system')

 # cpuid only makes sense on x86_64
 cpuid_required = is_x64 ? get_option('cpuid') : false
@@ -14,7 +14,7 @@ function _nix_complete
 # But the variable also misses the current token so it cancels out.
 set -l nix_arg_to_complete (count $nix_args)

-env NIX_GET_COMPLETIONS=$nix_arg_to_complete $nix_args $current_token
+env NIX_GET_COMPLETIONS=$nix_arg_to_complete $nix_args $current_token 2>/dev/null
 end

 function _nix_accepts_files
nix-support/editline.patch | 106 (new file)

@@ -0,0 +1,106 @@
From d0f2a5bc2300b96b2434c7838184c1dfd6a639f5 Mon Sep 17 00:00:00 2001
From: Rebecca Turner <rbt@sent.as>
Date: Sun, 8 Sep 2024 15:42:42 -0700
Subject: [PATCH 1/2] Recognize Meta+Left and Meta+Right

Recognize `Alt-Left` and `Alt-Right` for navigating by words in more
terminals/shells/platforms.

I'm not sure exactly where to find canonical documentation for these
codes, but this seems to match what my terminal produces (macOS + iTerm2
+ Fish + Tmux).

It might also be nice to have some more support for editing the bindings
for these characters; sequences of more than one character are not
supported by `el_bind_key` and similar.

Originally from: https://github.com/troglobit/editline/pull/70
This patch is applied upstream: https://gerrit.lix.systems/c/lix/+/1883

---
 src/editline.c | 29 +++++++++++++++++++++++++++--
 1 file changed, 27 insertions(+), 2 deletions(-)

diff --git a/src/editline.c b/src/editline.c
index 5ec9afb..d1cfbbc 100644
--- a/src/editline.c
+++ b/src/editline.c
@@ -1034,6 +1034,30 @@ static el_status_t meta(void)
 return CSeof;

 #ifdef CONFIG_ANSI_ARROWS
+ /* See: https://en.wikipedia.org/wiki/ANSI_escape_code */
+ /* Recognize ANSI escapes for `Meta+Left` and `Meta+Right`. */
+ if (c == '\e') {
+ switch (tty_get()) {
+ case '[':
+ {
+ switch (tty_get()) {
+ /* \e\e[C = Meta+Left */
+ case 'C': return fd_word();
+ /* \e\e[D = Meta+Right */
+ case 'D': return bk_word();
+ default:
+ break;
+ }
+
+ return el_ring_bell();
+ }
+ default:
+ break;
+ }
+
+ return el_ring_bell();
+ }
+
 /* Also include VT-100 arrows. */
 if (c == '[' || c == 'O') {
 switch (tty_get()) {
@@ -1043,6 +1067,7 @@ static el_status_t meta(void)
 char seq[4] = { 0 };
 seq[0] = tty_get();

+ /* \e[1~ */
 if (seq[0] == '~')
 return beg_line(); /* Home */

@@ -1050,9 +1075,9 @@ static el_status_t meta(void)
 seq[c] = tty_get();

 if (!strncmp(seq, ";5C", 3))
- return fd_word(); /* Ctrl+Right */
+ return fd_word(); /* \e[1;5C = Ctrl+Right */
 if (!strncmp(seq, ";5D", 3))
- return bk_word(); /* Ctrl+Left */
+ return bk_word(); /* \e[1;5D = Ctrl+Left */

 break;
 }

From 4c4455353a0a88bee09d5f27c28f81f747682fed Mon Sep 17 00:00:00 2001
From: Rebecca Turner <rbt@sent.as>
Date: Mon, 9 Sep 2024 09:44:44 -0700
Subject: [PATCH 2/2] Add support for \e[1;3C and \e[1;3D

---
 src/editline.c | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/src/editline.c b/src/editline.c
index d1cfbbc..350b5cb 100644
--- a/src/editline.c
+++ b/src/editline.c
@@ -1074,9 +1074,11 @@ static el_status_t meta(void)
 for (c = 1; c < 3; c++)
 seq[c] = tty_get();

- if (!strncmp(seq, ";5C", 3))
+ if (!strncmp(seq, ";5C", 3)
+ || !strncmp(seq, ";3C", 3))
 return fd_word(); /* \e[1;5C = Ctrl+Right */
- if (!strncmp(seq, ";5D", 3))
+ if (!strncmp(seq, ";5D", 3)
+ || !strncmp(seq, ";3D", 3))
 return bk_word(); /* \e[1;5D = Ctrl+Left */

 break;
 }
package.nix | 31

@@ -15,6 +15,8 @@
 brotli,
 bzip2,
 callPackage,
+capnproto-lix ? __forDefaults.capnproto-lix,
+capnproto,
 cmake,
 curl,
 doxygen,

@@ -36,6 +38,7 @@
 mercurial,
 meson,
 ninja,
+ncurses,
 openssl,
 pegtl,
 pkg-config,

@@ -79,12 +82,36 @@
 boehmgc-nix = boehmgc.override { enableLargeConfig = true; };

 editline-lix = editline.overrideAttrs (prev: {
-configureFlags = prev.configureFlags or [ ] ++ [ (lib.enableFeature true "sigstop") ];
+patches = (prev.patches or [ ]) ++ [
+# Recognize `Alt-Left` and `Alt-Right` for navigating by words in more
+# terminals/shells/platforms.
+#
+# See: https://github.com/troglobit/editline/pull/70
+./nix-support/editline.patch
+];
+
+configureFlags = (prev.configureFlags or [ ]) ++ [
+# Enable SIGSTOP (Ctrl-Z) behavior.
+(lib.enableFeature true "sigstop")
+# Enable ANSI arrow keys.
+(lib.enableFeature true "arrow-keys")
+# Use termcap library to query terminal size.
+(lib.enableFeature (ncurses != null) "termcap")
+];
+
+buildInputs = (prev.buildInputs or [ ]) ++ [ ncurses ];
 });

 build-release-notes = callPackage ./maintainers/build-release-notes.nix { };

+# needs explicit c++20 to enable coroutine support
+capnproto-lix = capnproto.overrideAttrs { CXXFLAGS = "-std=c++20"; };
 },
 }:

+# gcc miscompiles coroutines at least until 13.2, possibly longer
+assert stdenv.cc.isClang || lintInsteadOfBuild;
+
 let
 inherit (__forDefaults) canRunInstalled;
 inherit (lib) fileset;

@@ -220,6 +247,7 @@ stdenv.mkDerivation (finalAttrs: {
 ninja
 cmake
 rustc
+capnproto-lix
 ]
 ++ [
 (lib.getBin lowdown)

@@ -260,6 +288,7 @@ stdenv.mkDerivation (finalAttrs: {
 libsodium
 toml11
 pegtl
+capnproto-lix
 ]
 ++ lib.optionals hostPlatform.isLinux [
 libseccomp
@@ -9,8 +9,24 @@
 #include "store-api.hh"
 #include "command.hh"

+#include <regex>
+
 namespace nix {

+static std::regex const identifierRegex("^[A-Za-z_][A-Za-z0-9_'-]*$");
+static void warnInvalidNixIdentifier(const std::string & name)
+{
+std::smatch match;
+if (!std::regex_match(name, match, identifierRegex)) {
+warn("This Nix invocation specifies a value for argument '%s' which isn't a valid \
+Nix identifier. The project is considering to drop support for this \
+or to require quotes around args that aren't valid Nix identifiers. \
+If you depend on this behvior, please reach out in \
+https://git.lix.systems/lix-project/lix/issues/496 so we can discuss \
+your use-case.", name);
+}
+}
+
 MixEvalArgs::MixEvalArgs()
 {
 addFlag({

@@ -18,7 +34,10 @@ MixEvalArgs::MixEvalArgs()
 .description = "Pass the value *expr* as the argument *name* to Nix functions.",
 .category = category,
 .labels = {"name", "expr"},
-.handler = {[&](std::string name, std::string expr) { autoArgs[name] = 'E' + expr; }}
+.handler = {[&](std::string name, std::string expr) {
+warnInvalidNixIdentifier(name);
+autoArgs[name] = 'E' + expr;
+}}
 });

 addFlag({

@@ -26,7 +45,10 @@ MixEvalArgs::MixEvalArgs()
 .description = "Pass the string *string* as the argument *name* to Nix functions.",
 .category = category,
 .labels = {"name", "string"},
-.handler = {[&](std::string name, std::string s) { autoArgs[name] = 'S' + s; }},
+.handler = {[&](std::string name, std::string s) {
+warnInvalidNixIdentifier(name);
+autoArgs[name] = 'S' + s;
+}},
 });

 addFlag({
@@ -8,10 +8,6 @@
 #include <string_view>
 #include <cerrno>

-#ifdef READLINE
-#include <readline/history.h>
-#include <readline/readline.h>
-#else
 // editline < 1.15.2 don't wrap their API for C++ usage
 // (added in https://github.com/troglobit/editline/commit/91398ceb3427b730995357e9d120539fb9bb7461).
 // This results in linker errors due to to name-mangling of editline C symbols.

@@ -20,7 +16,6 @@
 extern "C" {
 #include <editline.h>
 }
-#endif

 #include "finally.hh"
 #include "repl-interacter.hh"

@@ -115,17 +110,13 @@ ReadlineLikeInteracter::Guard ReadlineLikeInteracter::init(detail::ReplCompleter
 } catch (SysError & e) {
 logWarning(e.info());
 }
-#ifndef READLINE
 el_hist_size = 1000;
-#endif
 read_history(historyFile.c_str());
 auto oldRepl = curRepl;
 curRepl = repl;
 Guard restoreRepl([oldRepl] { curRepl = oldRepl; });
-#ifndef READLINE
 rl_set_complete_func(completionCallback);
 rl_set_list_possib_func(listPossibleCallback);
-#endif
 return restoreRepl;
 }
@@ -185,6 +185,54 @@ struct EvalSettings : Config
 else
 { }
 ```

+Here's a more elaborate `repl-overlay`, which provides the following
+variables:
+- The original, unmodified variables are aliased to `original`.
+- `legacyPackages.${system}` (if it exists) or `packages.${system}`
+(otherwise) is aliased to `pkgs`.
+- All attribute set variables with a `${system}` attribute are
+abbreviated in the same manner; e.g. `devShells.${system}` is
+shortened to `devShells`.
+
+For example, the following attribute set:
+
+```nix
+info: final: attrs: let
+# Equivalent to nixpkgs `lib.optionalAttrs`.
+optionalAttrs = predicate: attrs:
+if predicate
+then attrs
+else {};
+
+# If `attrs.${oldName}.${info.currentSystem}` exists, alias `${newName}` to
+# it.
+collapseRenamed = oldName: newName:
+optionalAttrs (builtins.hasAttr oldName attrs
+&& builtins.hasAttr info.currentSystem attrs.${oldName})
+{
+${newName} = attrs.${oldName}.${info.currentSystem};
+};
+
+# Alias `attrs.${oldName}.${info.currentSystem} to `${newName}`.
+collapse = name: collapseRenamed name name;
+
+# Alias all `attrs` keys with an `${info.currentSystem}` attribute.
+collapseAll =
+builtins.foldl'
+(prev: name: prev // collapse name)
+{}
+(builtins.attrNames attrs);
+in
+# Preserve the original bindings as `original`.
+(optionalAttrs (! attrs ? original)
+{
+original = attrs;
+})
+// (collapseRenamed "packages" "pkgs")
+// (collapseRenamed "legacyPackages" "pkgs")
+// collapseAll
+```
 )"};
 };
@@ -7,6 +7,32 @@

 namespace nix {

+void to_json(nlohmann::json & j, const AcceptFlakeConfig & e)
+{
+if (e == AcceptFlakeConfig::False) {
+j = false;
+} else if (e == AcceptFlakeConfig::Ask) {
+j = "ask";
+} else if (e == AcceptFlakeConfig::True) {
+j = true;
+} else {
+abort();
+}
+}
+
+void from_json(const nlohmann::json & j, AcceptFlakeConfig & e)
+{
+if (j == false) {
+e = AcceptFlakeConfig::False;
+} else if (j == "ask") {
+e = AcceptFlakeConfig::Ask;
+} else if (j == true) {
+e = AcceptFlakeConfig::True;
+} else {
+throw Error("Invalid accept-flake-config value '%s'", std::string(j));
+}
+}
+
 template<> AcceptFlakeConfig BaseSetting<AcceptFlakeConfig>::parse(const std::string & str, const ApplyConfigOptions & options) const
 {
 if (str == "true") return AcceptFlakeConfig::True;
@@ -13,6 +13,9 @@ namespace nix {

 enum class AcceptFlakeConfig { False, Ask, True };

+void to_json(nlohmann::json & j, const AcceptFlakeConfig & e);
+void from_json(const nlohmann::json & j, AcceptFlakeConfig & e);
+
 struct FetchSettings : public Config
 {
 FetchSettings();
|
|||
LogFormat defaultLogFormat = LogFormat::raw;
|
||||
|
||||
LogFormat parseLogFormat(const std::string & logFormatStr) {
|
||||
if (logFormatStr == "raw" || getEnv("NIX_GET_COMPLETIONS"))
|
||||
if (logFormatStr == "raw")
|
||||
return LogFormat::raw;
|
||||
else if (logFormatStr == "raw-with-logs")
|
||||
return LogFormat::rawWithLogs;
|
||||
|
|
|
@@ -92,7 +92,7 @@ void ProgressBar::resume()
 nextWakeup = draw(*state, {});
 state.wait_for(quitCV, std::chrono::milliseconds(50));
 }
-writeLogsToStderr("\r\e[K");
+eraseProgressDisplay(*state);
 });
 }

@@ -558,7 +558,8 @@ std::optional<char> ProgressBar::ask(std::string_view msg)
 {
 auto state(state_.lock());
 if (state->paused > 0 || !isatty(STDIN_FILENO)) return {};
-std::cerr << fmt("\r\e[K%s ", msg);
+eraseProgressDisplay(*state);
+std::cerr << msg;
 auto s = trim(readLine(STDIN_FILENO));
 if (s.size() != 1) return {};
 draw(*state, {});
@@ -131,7 +131,7 @@ Goal::Finished DerivationGoal::timedOut(Error && ex)
 }


-Goal::WorkResult DerivationGoal::work(bool inBuildSlot)
+kj::Promise<Result<Goal::WorkResult>> DerivationGoal::work(bool inBuildSlot) noexcept
 {
 return (this->*state)(inBuildSlot);
 }

@@ -157,8 +157,8 @@ void DerivationGoal::addWantedOutputs(const OutputsSpec & outputs)
 }


-Goal::WorkResult DerivationGoal::getDerivation(bool inBuildSlot)
-{
+kj::Promise<Result<Goal::WorkResult>> DerivationGoal::getDerivation(bool inBuildSlot) noexcept
+try {
 trace("init");

 /* The first thing to do is to make sure that the derivation

@@ -170,16 +170,22 @@ Goal::WorkResult DerivationGoal::getDerivation(bool inBuildSlot)


 state = &DerivationGoal::loadDerivation;
-return WaitForGoals{{worker.goalFactory().makePathSubstitutionGoal(drvPath)}};
+return {WaitForGoals{{worker.goalFactory().makePathSubstitutionGoal(drvPath)}}};
+} catch (...) {
+return {std::current_exception()};
 }


-Goal::WorkResult DerivationGoal::loadDerivation(bool inBuildSlot)
-{
+kj::Promise<Result<Goal::WorkResult>> DerivationGoal::loadDerivation(bool inBuildSlot) noexcept
+try {
 trace("loading derivation");

 if (nrFailed != 0) {
-return done(BuildResult::MiscFailure, {}, Error("cannot build missing derivation '%s'", worker.store.printStorePath(drvPath)));
+return {done(
+BuildResult::MiscFailure,
+{},
+Error("cannot build missing derivation '%s'", worker.store.printStorePath(drvPath))
+)};
 }

 /* `drvPath' should already be a root, but let's be on the safe

@@ -202,11 +208,13 @@ Goal::WorkResult DerivationGoal::loadDerivation(bool inBuildSlot)
 assert(drv);

 return haveDerivation(inBuildSlot);
+} catch (...) {
+return {std::current_exception()};
 }


-Goal::WorkResult DerivationGoal::haveDerivation(bool inBuildSlot)
-{
+kj::Promise<Result<Goal::WorkResult>> DerivationGoal::haveDerivation(bool inBuildSlot) noexcept
+try {
 trace("have derivation");

 parsedDrv = std::make_unique<ParsedDerivation>(drvPath, *drv);

@@ -255,7 +263,7 @@ Goal::WorkResult DerivationGoal::haveDerivation(bool inBuildSlot)

 /* If they are all valid, then we're done. */
 if (allValid && buildMode == bmNormal) {
-return done(BuildResult::AlreadyValid, std::move(validOutputs));
+return {done(BuildResult::AlreadyValid, std::move(validOutputs))};
 }

 /* We are first going to try to create the invalid output paths

@@ -290,20 +298,29 @@ Goal::WorkResult DerivationGoal::haveDerivation(bool inBuildSlot)
 return outputsSubstitutionTried(inBuildSlot);
 } else {
 state = &DerivationGoal::outputsSubstitutionTried;
-return result;
+return {std::move(result)};
 }
+} catch (...) {
+return {std::current_exception()};
 }

-Goal::WorkResult DerivationGoal::outputsSubstitutionTried(bool inBuildSlot)
-{
+kj::Promise<Result<Goal::WorkResult>> DerivationGoal::outputsSubstitutionTried(bool inBuildSlot) noexcept
+try {
 trace("all outputs substituted (maybe)");

 assert(drv->type().isPure());

-if (nrFailed > 0 && nrFailed > nrNoSubstituters + nrIncompleteClosure && !settings.tryFallback) {
-return done(BuildResult::TransientFailure, {},
-Error("some substitutes for the outputs of derivation '%s' failed (usually happens due to networking issues); try '--fallback' to build derivation from source ",
-worker.store.printStorePath(drvPath)));
+if (nrFailed > 0 && nrFailed > nrNoSubstituters + nrIncompleteClosure && !settings.tryFallback)
+{
+return {done(
+BuildResult::TransientFailure,
+{},
+Error(
+"some substitutes for the outputs of derivation '%s' failed (usually happens due "
+"to networking issues); try '--fallback' to build derivation from source ",
+worker.store.printStorePath(drvPath)
+)
+)};
 }

 /* If the substitutes form an incomplete closure, then we should

@@ -343,7 +360,7 @@ Goal::WorkResult DerivationGoal::outputsSubstitutionTried(bool inBuildSlot)
 auto [allValid, validOutputs] = checkPathValidity();

 if (buildMode == bmNormal && allValid) {
-return done(BuildResult::Substituted, std::move(validOutputs));
+return {done(BuildResult::Substituted, std::move(validOutputs))};
 }
 if (buildMode == bmRepair && allValid) {
 return repairClosure();

@@ -354,13 +371,15 @@ Goal::WorkResult DerivationGoal::outputsSubstitutionTried(bool inBuildSlot)

 /* Nothing to wait for; tail call */
 return gaveUpOnSubstitution(inBuildSlot);
+} catch (...) {
+return {std::current_exception()};
 }


 /* At least one of the output paths could not be
 produced using a substitute. So we have to build instead. */
-Goal::WorkResult DerivationGoal::gaveUpOnSubstitution(bool inBuildSlot)
-{
+kj::Promise<Result<Goal::WorkResult>> DerivationGoal::gaveUpOnSubstitution(bool inBuildSlot) noexcept
+try {
 WaitForGoals result;

 /* At this point we are building all outputs, so if more are wanted there

@@ -426,13 +445,15 @@ Goal::WorkResult DerivationGoal::gaveUpOnSubstitution(bool inBuildSlot)
 return inputsRealised(inBuildSlot);
 } else {
 state = &DerivationGoal::inputsRealised;
-return result;
+return {result};
 }
+} catch (...) {
+return {std::current_exception()};
 }


-Goal::WorkResult DerivationGoal::repairClosure()
-{
+kj::Promise<Result<Goal::WorkResult>> DerivationGoal::repairClosure() noexcept
+try {
 assert(drv->type().isPure());

 /* If we're repairing, we now know that our own outputs are valid.

@@ -486,34 +507,44 @@ Goal::WorkResult DerivationGoal::repairClosure()
 }

 if (result.goals.empty()) {
-return done(BuildResult::AlreadyValid, assertPathValidity());
+return {done(BuildResult::AlreadyValid, assertPathValidity())};
 }

 state = &DerivationGoal::closureRepaired;
-return result;
+return {result};
+} catch (...) {
+return {std::current_exception()};
 }


-Goal::WorkResult DerivationGoal::closureRepaired(bool inBuildSlot)
-{
+kj::Promise<Result<Goal::WorkResult>> DerivationGoal::closureRepaired(bool inBuildSlot) noexcept
+try {
 trace("closure repaired");
 if (nrFailed > 0)
 throw Error("some paths in the output closure of derivation '%s' could not be repaired",
 worker.store.printStorePath(drvPath));
-return done(BuildResult::AlreadyValid, assertPathValidity());
+return {done(BuildResult::AlreadyValid, assertPathValidity())};
+} catch (...) {
+return {std::current_exception()};
 }


-Goal::WorkResult DerivationGoal::inputsRealised(bool inBuildSlot)
-{
+kj::Promise<Result<Goal::WorkResult>> DerivationGoal::inputsRealised(bool inBuildSlot) noexcept
+try {
 trace("all inputs realised");

 if (nrFailed != 0) {
 if (!useDerivation)
 throw Error("some dependencies of '%s' are missing", worker.store.printStorePath(drvPath));
-return done(BuildResult::DependencyFailed, {}, Error(
+return {done(
+BuildResult::DependencyFailed,
+{},
+Error(
 "%s dependencies of derivation '%s' failed to build",
-nrFailed, worker.store.printStorePath(drvPath)));
+nrFailed,
+worker.store.printStorePath(drvPath)
+)
+)};
 }

 if (retrySubstitution == RetrySubstitution::YesNeed) {

@@ -584,7 +615,7 @@ Goal::WorkResult DerivationGoal::inputsRealised(bool inBuildSlot)
 pathResolved, wantedOutputs, buildMode);

 state = &DerivationGoal::resolvedFinished;
-return WaitForGoals{{resolvedDrvGoal}};
+return {WaitForGoals{{resolvedDrvGoal}}};
 }

 std::function<void(const StorePath &, const DerivedPathMap<StringSet>::ChildNode &)> accumInputPaths;

@@ -650,6 +681,8 @@ Goal::WorkResult DerivationGoal::inputsRealised(bool inBuildSlot)
 build hook. */
 state = &DerivationGoal::tryToBuild;
 return tryToBuild(inBuildSlot);
+} catch (...) {
+return {std::current_exception()};
 }

 void DerivationGoal::started()

@@ -665,8 +698,8 @@ void DerivationGoal::started()
 mcRunningBuilds = worker.runningBuilds.addTemporarily(1);
 }

-Goal::WorkResult DerivationGoal::tryToBuild(bool inBuildSlot)
-{
+kj::Promise<Result<Goal::WorkResult>> DerivationGoal::tryToBuild(bool inBuildSlot) noexcept
+try {
 trace("trying to build");

 /* Obtain locks on all output paths, if the paths are known a priori.

@@ -700,7 +733,7 @@ Goal::WorkResult DerivationGoal::tryToBuild(bool inBuildSlot)
 if (!actLock)
 actLock = std::make_unique<Activity>(*logger, lvlWarn, actBuildWaiting,
 fmt("waiting for lock on %s", Magenta(showPaths(lockFiles))));
-return WaitForAWhile{};
+return {WaitForAWhile{}};
 }

 actLock.reset();

@@ -717,7 +750,7 @@ Goal::WorkResult DerivationGoal::tryToBuild(bool inBuildSlot)
 if (buildMode != bmCheck && allValid) {
 debug("skipping build of derivation '%s', someone beat us to it", worker.store.printStorePath(drvPath));
 outputLocks.setDeletion(true);
-return done(BuildResult::AlreadyValid, std::move(validOutputs));
+return {done(BuildResult::AlreadyValid, std::move(validOutputs))};
 }

 /* If any of the outputs already exist but are not valid, delete

@@ -765,7 +798,7 @@ Goal::WorkResult DerivationGoal::tryToBuild(bool inBuildSlot)
 },
 hookReply);
 if (result) {
-return std::move(*result);
+return {std::move(*result)};
 }
 }

@@ -773,13 +806,18 @@ Goal::WorkResult DerivationGoal::tryToBuild(bool inBuildSlot)

 state = &DerivationGoal::tryLocalBuild;
 return tryLocalBuild(inBuildSlot);
+} catch (...) {
+return {std::current_exception()};
 }

-Goal::WorkResult DerivationGoal::tryLocalBuild(bool inBuildSlot) {
+kj::Promise<Result<Goal::WorkResult>> DerivationGoal::tryLocalBuild(bool inBuildSlot) noexcept
+try {
 throw Error(
 "unable to build with a primary store that isn't a local store; "
 "either pass a different '--store' or enable remote builds."
 "\nhttps://docs.lix.systems/manual/lix/stable/advanced-topics/distributed-builds.html");
+} catch (...) {
+return {std::current_exception()};
 }


@@ -935,8 +973,8 @@ void runPostBuildHook(
 proc.getStdout()->drainInto(sink);
 }

-Goal::WorkResult DerivationGoal::buildDone(bool inBuildSlot)
-{
+kj::Promise<Result<Goal::WorkResult>> DerivationGoal::buildDone(bool inBuildSlot) noexcept
+try {
 trace("build done");

 Finally releaseBuildUser([&](){ this->cleanupHookFinally(); });

@@ -1030,7 +1068,7 @@ Goal::WorkResult DerivationGoal::buildDone(bool inBuildSlot)
 outputLocks.setDeletion(true);
 outputLocks.unlock();

-return done(BuildResult::Built, std::move(builtOutputs));
+return {done(BuildResult::Built, std::move(builtOutputs))};
 } catch (BuildError & e) {
 outputLocks.unlock();

@@ -1051,12 +1089,14 @@ Goal::WorkResult DerivationGoal::buildDone(bool inBuildSlot)
 BuildResult::PermanentFailure;
 }

-return done(st, {}, std::move(e));
+return {done(st, {}, std::move(e))};
 }
+} catch (...) {
+return {std::current_exception()};
 }

-Goal::WorkResult DerivationGoal::resolvedFinished(bool inBuildSlot)
-{
+kj::Promise<Result<Goal::WorkResult>> DerivationGoal::resolvedFinished(bool inBuildSlot) noexcept
+try {
 trace("resolved derivation finished");

 assert(resolvedDrvGoal);

@@ -1123,7 +1163,9 @@ Goal::WorkResult DerivationGoal::resolvedFinished(bool inBuildSlot)
 if (status == BuildResult::AlreadyValid)
 status = BuildResult::ResolvesToAlreadyValid;

-return done(status, std::move(builtOutputs));
+return {done(status, std::move(builtOutputs))};
+} catch (...) {
+return {std::current_exception()};
 }

 HookReply DerivationGoal::tryBuildHook(bool inBuildSlot)
@@ -213,7 +213,7 @@ struct DerivationGoal : public Goal
 */
 std::optional<DerivationType> derivationType;

-typedef WorkResult (DerivationGoal::*GoalState)(bool inBuildSlot);
+typedef kj::Promise<Result<WorkResult>> (DerivationGoal::*GoalState)(bool inBuildSlot) noexcept;
 GoalState state;

 BuildMode buildMode;

@@ -246,7 +246,7 @@ struct DerivationGoal : public Goal

 std::string key() override;

-WorkResult work(bool inBuildSlot) override;
+kj::Promise<Result<WorkResult>> work(bool inBuildSlot) noexcept override;

 /**
 * Add wanted outputs to an already existing derivation goal.

@@ -256,18 +256,18 @@ struct DerivationGoal : public Goal
 /**
 * The states.
 */
-WorkResult getDerivation(bool inBuildSlot);
-WorkResult loadDerivation(bool inBuildSlot);
-WorkResult haveDerivation(bool inBuildSlot);
-WorkResult outputsSubstitutionTried(bool inBuildSlot);
-WorkResult gaveUpOnSubstitution(bool inBuildSlot);
-WorkResult closureRepaired(bool inBuildSlot);
-WorkResult inputsRealised(bool inBuildSlot);
-WorkResult tryToBuild(bool inBuildSlot);
-virtual WorkResult tryLocalBuild(bool inBuildSlot);
-WorkResult buildDone(bool inBuildSlot);
+kj::Promise<Result<WorkResult>> getDerivation(bool inBuildSlot) noexcept;
+kj::Promise<Result<WorkResult>> loadDerivation(bool inBuildSlot) noexcept;
+kj::Promise<Result<WorkResult>> haveDerivation(bool inBuildSlot) noexcept;
+kj::Promise<Result<WorkResult>> outputsSubstitutionTried(bool inBuildSlot) noexcept;
+kj::Promise<Result<WorkResult>> gaveUpOnSubstitution(bool inBuildSlot) noexcept;
+kj::Promise<Result<WorkResult>> closureRepaired(bool inBuildSlot) noexcept;
+kj::Promise<Result<WorkResult>> inputsRealised(bool inBuildSlot) noexcept;
+kj::Promise<Result<WorkResult>> tryToBuild(bool inBuildSlot) noexcept;
+virtual kj::Promise<Result<WorkResult>> tryLocalBuild(bool inBuildSlot) noexcept;
+kj::Promise<Result<WorkResult>> buildDone(bool inBuildSlot) noexcept;

-WorkResult resolvedFinished(bool inBuildSlot);
+kj::Promise<Result<WorkResult>> resolvedFinished(bool inBuildSlot) noexcept;

 /**
 * Is the build hook willing to perform the build?

@@ -346,7 +346,7 @@ struct DerivationGoal : public Goal
 */
 virtual void killChild();

-WorkResult repairClosure();
+kj::Promise<Result<WorkResult>> repairClosure() noexcept;

 void started();
@@ -22,25 +22,27 @@ DrvOutputSubstitutionGoal::DrvOutputSubstitutionGoal(
 }


-Goal::WorkResult DrvOutputSubstitutionGoal::init(bool inBuildSlot)
-{
+kj::Promise<Result<Goal::WorkResult>> DrvOutputSubstitutionGoal::init(bool inBuildSlot) noexcept
+try {
 trace("init");

 /* If the derivation already exists, we’re done */
 if (worker.store.queryRealisation(id)) {
-return Finished{ecSuccess, std::move(buildResult)};
+return {Finished{ecSuccess, std::move(buildResult)}};
 }

 subs = settings.useSubstitutes ? getDefaultSubstituters() : std::list<ref<Store>>();
 return tryNext(inBuildSlot);
+} catch (...) {
+return {std::current_exception()};
 }

-Goal::WorkResult DrvOutputSubstitutionGoal::tryNext(bool inBuildSlot)
-{
+kj::Promise<Result<Goal::WorkResult>> DrvOutputSubstitutionGoal::tryNext(bool inBuildSlot) noexcept
+try {
 trace("trying next substituter");

 if (!inBuildSlot) {
-return WaitForSlot{};
+return {WaitForSlot{}};
 }

 maintainRunningSubstitutions = worker.runningSubstitutions.addTemporarily(1);

@@ -57,7 +59,7 @@ Goal::WorkResult DrvOutputSubstitutionGoal::tryNext(bool inBuildSlot)
 /* Hack: don't indicate failure if there were no substituters.
 In that case the calling derivation should just do a
 build. */
-return Finished{substituterFailed ? ecFailed : ecNoSubstituters, std::move(buildResult)};
+return {Finished{substituterFailed ? ecFailed : ecNoSubstituters, std::move(buildResult)}};
 }

 sub = subs.front();

@@ -77,11 +79,13 @@ Goal::WorkResult DrvOutputSubstitutionGoal::tryNext(bool inBuildSlot)
 });

 state = &DrvOutputSubstitutionGoal::realisationFetched;
-return WaitForWorld{{downloadState->outPipe.readSide.get()}, true};
+return {WaitForWorld{{downloadState->outPipe.readSide.get()}, true}};
+} catch (...) {
+return {std::current_exception()};
 }

-Goal::WorkResult DrvOutputSubstitutionGoal::realisationFetched(bool inBuildSlot)
-{
+kj::Promise<Result<Goal::WorkResult>> DrvOutputSubstitutionGoal::realisationFetched(bool inBuildSlot) noexcept
+try {
 worker.childTerminated(this);
 maintainRunningSubstitutions.reset();

@@ -122,31 +126,37 @@ Goal::WorkResult DrvOutputSubstitutionGoal::realisationFetched(bool inBuildSlot)
 return outPathValid(inBuildSlot);
 } else {
 state = &DrvOutputSubstitutionGoal::outPathValid;
-return result;
+return {std::move(result)};
 }
+} catch (...) {
+return {std::current_exception()};
 }

-Goal::WorkResult DrvOutputSubstitutionGoal::outPathValid(bool inBuildSlot)
-{
+kj::Promise<Result<Goal::WorkResult>> DrvOutputSubstitutionGoal::outPathValid(bool inBuildSlot) noexcept
+try {
 assert(outputInfo);
 trace("output path substituted");

 if (nrFailed > 0) {
 debug("The output path of the derivation output '%s' could not be substituted", id.to_string());
-return Finished{
+return {Finished{
 nrNoSubstituters > 0 || nrIncompleteClosure > 0 ? ecIncompleteClosure : ecFailed,
 std::move(buildResult),
-};
+}};
 }

 worker.store.registerDrvOutput(*outputInfo);
 return finished();
+} catch (...) {
+return {std::current_exception()};
 }

-Goal::WorkResult DrvOutputSubstitutionGoal::finished()
-{
+kj::Promise<Result<Goal::WorkResult>> DrvOutputSubstitutionGoal::finished() noexcept
+try {
 trace("finished");
-return Finished{ecSuccess, std::move(buildResult)};
+return {Finished{ecSuccess, std::move(buildResult)}};
+} catch (...) {
+return {std::current_exception()};
 }

 std::string DrvOutputSubstitutionGoal::key()

@@ -156,7 +166,7 @@ std::string DrvOutputSubstitutionGoal::key()
 return "a$" + std::string(id.to_string());
 }

-Goal::WorkResult DrvOutputSubstitutionGoal::work(bool inBuildSlot)
+kj::Promise<Result<Goal::WorkResult>> DrvOutputSubstitutionGoal::work(bool inBuildSlot) noexcept
 {
 return (this->*state)(inBuildSlot);
 }
@@ -65,20 +65,20 @@ public:
 std::optional<ContentAddress> ca = std::nullopt
 );

-typedef WorkResult (DrvOutputSubstitutionGoal::*GoalState)(bool inBuildSlot);
+typedef kj::Promise<Result<WorkResult>> (DrvOutputSubstitutionGoal::*GoalState)(bool inBuildSlot) noexcept;
 GoalState state;

-WorkResult init(bool inBuildSlot);
-WorkResult tryNext(bool inBuildSlot);
-WorkResult realisationFetched(bool inBuildSlot);
-WorkResult outPathValid(bool inBuildSlot);
-WorkResult finished();
+kj::Promise<Result<WorkResult>> init(bool inBuildSlot) noexcept;
+kj::Promise<Result<WorkResult>> tryNext(bool inBuildSlot) noexcept;
+kj::Promise<Result<WorkResult>> realisationFetched(bool inBuildSlot) noexcept;
+kj::Promise<Result<WorkResult>> outPathValid(bool inBuildSlot) noexcept;
+kj::Promise<Result<WorkResult>> finished() noexcept;

 Finished timedOut(Error && ex) override { abort(); };

 std::string key() override;

-WorkResult work(bool inBuildSlot) override;
+kj::Promise<Result<WorkResult>> work(bool inBuildSlot) noexcept override;

 JobCategory jobCategory() const override {
 return JobCategory::Substitution;
@@ -6,11 +6,17 @@

 namespace nix {

+static auto runWorker(Worker & worker, auto mkGoals)
+{
+return worker.run(mkGoals);
+}
+
 void Store::buildPaths(const std::vector<DerivedPath> & reqs, BuildMode buildMode, std::shared_ptr<Store> evalStore)
 {
-Worker worker(*this, evalStore ? *evalStore : *this);
+auto aio = kj::setupAsyncIo();
+Worker worker(*this, evalStore ? *evalStore : *this, aio);

-auto goals = worker.run([&](GoalFactory & gf) {
+auto goals = runWorker(worker, [&](GoalFactory & gf) {
 Goals goals;
 for (auto & br : reqs)
 goals.insert(gf.makeGoal(br, buildMode));

@@ -48,10 +54,12 @@ std::vector<KeyedBuildResult> Store::buildPathsWithResults(
 BuildMode buildMode,
 std::shared_ptr<Store> evalStore)
 {
-Worker worker(*this, evalStore ? *evalStore : *this);
+auto aio = kj::setupAsyncIo();
+Worker worker(*this, evalStore ? *evalStore : *this, aio);

 std::vector<std::pair<const DerivedPath &, GoalPtr>> state;

-auto goals = worker.run([&](GoalFactory & gf) {
+auto goals = runWorker(worker, [&](GoalFactory & gf) {
 Goals goals;
 for (const auto & req : reqs) {
 auto goal = gf.makeGoal(req, buildMode);

@@ -72,10 +80,11 @@ std::vector<KeyedBuildResult> Store::buildPathsWithResults(
 BuildResult Store::buildDerivation(const StorePath & drvPath, const BasicDerivation & drv,
 BuildMode buildMode)
 {
-Worker worker(*this, *this);
+auto aio = kj::setupAsyncIo();
+Worker worker(*this, *this, aio);

 try {
-auto goals = worker.run([&](GoalFactory & gf) -> Goals {
+auto goals = runWorker(worker, [&](GoalFactory & gf) -> Goals {
 return Goals{gf.makeBasicDerivationGoal(drvPath, drv, OutputsSpec::All{}, buildMode)};
 });
 auto goal = *goals.begin();

@@ -97,10 +106,12 @@ void Store::ensurePath(const StorePath & path)
 /* If the path is already valid, we're done. */
 if (isValidPath(path)) return;

-Worker worker(*this, *this);
+auto aio = kj::setupAsyncIo();
+Worker worker(*this, *this, aio);

-auto goals =
-worker.run([&](GoalFactory & gf) { return Goals{gf.makePathSubstitutionGoal(path)}; });
+auto goals = runWorker(worker, [&](GoalFactory & gf) {
+return Goals{gf.makePathSubstitutionGoal(path)};
+});
 auto goal = *goals.begin();

 if (goal->exitCode != Goal::ecSuccess) {

@@ -115,9 +126,10 @@ void Store::ensurePath(const StorePath & path)

 void Store::repairPath(const StorePath & path)
 {
-Worker worker(*this, *this);
+auto aio = kj::setupAsyncIo();
+Worker worker(*this, *this, aio);

-auto goals = worker.run([&](GoalFactory & gf) {
+auto goals = runWorker(worker, [&](GoalFactory & gf) {
 return Goals{gf.makePathSubstitutionGoal(path, Repair)};
 });
 auto goal = *goals.begin();
@@ -1,9 +1,11 @@
 #pragma once
 ///@file

+#include "result.hh"
 #include "types.hh"
 #include "store-api.hh"
 #include "build-result.hh"
+#include <kj/async.h>

 namespace nix {

@@ -161,7 +163,7 @@ public:
 trace("goal destroyed");
 }

-virtual WorkResult work(bool inBuildSlot) = 0;
+virtual kj::Promise<Result<WorkResult>> work(bool inBuildSlot) noexcept = 0;

 virtual void waiteeDone(GoalPtr waitee) { }
@@ -149,8 +149,8 @@ void LocalDerivationGoal::killSandbox(bool getStats)
 }


-Goal::WorkResult LocalDerivationGoal::tryLocalBuild(bool inBuildSlot)
-{
+kj::Promise<Result<Goal::WorkResult>> LocalDerivationGoal::tryLocalBuild(bool inBuildSlot) noexcept
+try {
 #if __APPLE__
 additionalSandboxProfile = parsedDrv->getStringAttr("__sandboxProfile").value_or("");
 #endif

@@ -159,7 +159,7 @@ Goal::WorkResult LocalDerivationGoal::tryLocalBuild(bool inBuildSlot)
 state = &DerivationGoal::tryToBuild;
 outputLocks.unlock();
 if (0U != settings.maxBuildJobs) {
-return WaitForSlot{};
+return {WaitForSlot{}};
 }
 if (getMachines().empty()) {
 throw Error(

@@ -214,7 +214,7 @@ Goal::WorkResult LocalDerivationGoal::tryLocalBuild(bool inBuildSlot)
 if (!actLock)
 actLock = std::make_unique<Activity>(*logger, lvlWarn, actBuildWaiting,
 fmt("waiting for a free build user ID for '%s'", Magenta(worker.store.printStorePath(drvPath))));
-return WaitForAWhile{};
+return {WaitForAWhile{}};
 }
 }

@@ -250,15 +250,17 @@ Goal::WorkResult LocalDerivationGoal::tryLocalBuild(bool inBuildSlot)
 state = &DerivationGoal::buildDone;

 started();
-return WaitForWorld{std::move(fds), true};
+return {WaitForWorld{std::move(fds), true}};

 } catch (BuildError & e) {
 outputLocks.unlock();
 buildUser.reset();
 auto report = done(BuildResult::InputRejected, {}, std::move(e));
 report.permanentFailure = true;
-return report;
+return {std::move(report)};
 }
+} catch (...) {
+return {std::current_exception()};
 }
@@ -213,7 +213,7 @@ struct LocalDerivationGoal : public DerivationGoal
 /**
 * The additional states.
 */
-WorkResult tryLocalBuild(bool inBuildSlot) override;
+kj::Promise<Result<WorkResult>> tryLocalBuild(bool inBuildSlot) noexcept override;

 /**
 * Start building a derivation.
@@ -45,21 +45,21 @@ Goal::Finished PathSubstitutionGoal::done(
 }


-Goal::WorkResult PathSubstitutionGoal::work(bool inBuildSlot)
+kj::Promise<Result<Goal::WorkResult>> PathSubstitutionGoal::work(bool inBuildSlot) noexcept
 {
 return (this->*state)(inBuildSlot);
 }


-Goal::WorkResult PathSubstitutionGoal::init(bool inBuildSlot)
-{
+kj::Promise<Result<Goal::WorkResult>> PathSubstitutionGoal::init(bool inBuildSlot) noexcept
+try {
 trace("init");

 worker.store.addTempRoot(storePath);

 /* If the path already exists we're done. */
 if (!repair && worker.store.isValidPath(storePath)) {
-return done(ecSuccess, BuildResult::AlreadyValid);
+return {done(ecSuccess, BuildResult::AlreadyValid)};
 }

 if (settings.readOnlyMode)

@@ -68,11 +68,13 @@ Goal::WorkResult PathSubstitutionGoal::init(bool inBuildSlot)
 subs = settings.useSubstitutes ? getDefaultSubstituters() : std::list<ref<Store>>();

 return tryNext(inBuildSlot);
+} catch (...) {
+return {std::current_exception()};
 }


-Goal::WorkResult PathSubstitutionGoal::tryNext(bool inBuildSlot)
-{
+kj::Promise<Result<Goal::WorkResult>> PathSubstitutionGoal::tryNext(bool inBuildSlot) noexcept
+try {
 trace("trying next substituter");

 cleanup();

@@ -87,10 +89,10 @@ Goal::WorkResult PathSubstitutionGoal::tryNext(bool inBuildSlot)
 /* Hack: don't indicate failure if there were no substituters.
 In that case the calling derivation should just do a
 build. */
-return done(
+return {done(
 substituterFailed ? ecFailed : ecNoSubstituters,
 BuildResult::NoSubstituters,
-fmt("path '%s' is required, but there is no substituter that can build it", worker.store.printStorePath(storePath)));
+fmt("path '%s' is required, but there is no substituter that can build it", worker.store.printStorePath(storePath)))};
 }

 sub = subs.front();

@@ -167,20 +169,22 @@ Goal::WorkResult PathSubstitutionGoal::tryNext(bool inBuildSlot)
 return referencesValid(inBuildSlot);
 } else {
 state = &PathSubstitutionGoal::referencesValid;
-return result;
+return {std::move(result)};
 }
+} catch (...) {
+return {std::current_exception()};
 }


-Goal::WorkResult PathSubstitutionGoal::referencesValid(bool inBuildSlot)
-{
+kj::Promise<Result<Goal::WorkResult>> PathSubstitutionGoal::referencesValid(bool inBuildSlot) noexcept
+try {
 trace("all references realised");

 if (nrFailed > 0) {
-return done(
+return {done(
 nrNoSubstituters > 0 || nrIncompleteClosure > 0 ? ecIncompleteClosure : ecFailed,
 BuildResult::DependencyFailed,
-fmt("some references of path '%s' could not be realised", worker.store.printStorePath(storePath)));
+fmt("some references of path '%s' could not be realised", worker.store.printStorePath(storePath)))};
 }

 for (auto & i : info->references)

@@ -189,15 +193,17 @@ Goal::WorkResult PathSubstitutionGoal::referencesValid(bool inBuildSlot)

 state = &PathSubstitutionGoal::tryToRun;
 return tryToRun(inBuildSlot);
+} catch (...) {
+return {std::current_exception()};
 }


-Goal::WorkResult PathSubstitutionGoal::tryToRun(bool inBuildSlot)
-{
+kj::Promise<Result<Goal::WorkResult>> PathSubstitutionGoal::tryToRun(bool inBuildSlot) noexcept
+try {
 trace("trying to run");

 if (!inBuildSlot) {
-return WaitForSlot{};
+return {WaitForSlot{}};
 }

 maintainRunningSubstitutions = worker.runningSubstitutions.addTemporarily(1);

@@ -228,12 +234,14 @@ Goal::WorkResult PathSubstitutionGoal::tryToRun(bool inBuildSlot)
 });

 state = &PathSubstitutionGoal::finished;
-return WaitForWorld{{outPipe.readSide.get()}, true};
+return {WaitForWorld{{outPipe.readSide.get()}, true}};
+} catch (...) {
+return {std::current_exception()};
 }


-Goal::WorkResult PathSubstitutionGoal::finished(bool inBuildSlot)
-{
+kj::Promise<Result<Goal::WorkResult>> PathSubstitutionGoal::finished(bool inBuildSlot) noexcept
+try {
 trace("substitute finished");

 worker.childTerminated(this);

@@ -274,7 +282,9 @@ Goal::WorkResult PathSubstitutionGoal::finished(bool inBuildSlot)
 worker.doneNarSize += maintainExpectedNar.delta();
 maintainExpectedNar.reset();

-return done(ecSuccess, BuildResult::Substituted);
+return {done(ecSuccess, BuildResult::Substituted)};
+} catch (...) {
+return {std::current_exception()};
 }
@@ -67,7 +67,7 @@ struct PathSubstitutionGoal : public Goal
 NotifyingCounter<uint64_t>::Bump maintainExpectedSubstitutions,
 maintainRunningSubstitutions, maintainExpectedNar, maintainExpectedDownload;

-typedef WorkResult (PathSubstitutionGoal::*GoalState)(bool inBuildSlot);
+typedef kj::Promise<Result<WorkResult>> (PathSubstitutionGoal::*GoalState)(bool inBuildSlot) noexcept;
 GoalState state;

 /**

@@ -101,16 +101,16 @@ public:
 return "a$" + std::string(storePath.name()) + "$" + worker.store.printStorePath(storePath);
 }

-WorkResult work(bool inBuildSlot) override;
+kj::Promise<Result<WorkResult>> work(bool inBuildSlot) noexcept override;

 /**
 * The states.
 */
-WorkResult init(bool inBuildSlot);
-WorkResult tryNext(bool inBuildSlot);
-WorkResult referencesValid(bool inBuildSlot);
-WorkResult tryToRun(bool inBuildSlot);
-WorkResult finished(bool inBuildSlot);
+kj::Promise<Result<WorkResult>> init(bool inBuildSlot) noexcept;
+kj::Promise<Result<WorkResult>> tryNext(bool inBuildSlot) noexcept;
+kj::Promise<Result<WorkResult>> referencesValid(bool inBuildSlot) noexcept;
+kj::Promise<Result<WorkResult>> tryToRun(bool inBuildSlot) noexcept;
+kj::Promise<Result<WorkResult>> finished(bool inBuildSlot) noexcept;

 /**
 * Callback used by the worker to write to the log.
@@ -11,12 +11,13 @@

 namespace nix {

-Worker::Worker(Store & store, Store & evalStore)
+Worker::Worker(Store & store, Store & evalStore, kj::AsyncIoContext & aio)
 : act(*logger, actRealise)
 , actDerivations(*logger, actBuilds)
 , actSubstitutions(*logger, actCopyPaths)
 , store(store)
 , evalStore(evalStore)
+, aio(aio)
 {
 /* Debugging: prevent recursive workers. */
 nrLocalBuilds = 0;

@@ -379,7 +380,7 @@ Goals Worker::run(std::function<Goals (GoalFactory &)> req)
 const bool inSlot = goal->jobCategory() == JobCategory::Substitution
 ? nrSubstitutions < std::max(1U, (unsigned int) settings.maxSubstitutionJobs)
 : nrLocalBuilds < settings.maxBuildJobs;
-handleWorkResult(goal, goal->work(inSlot));
+handleWorkResult(goal, goal->work(inSlot).wait(aio.waitScope).value());
 updateStatistics();

 if (topGoals.empty()) break; // stuff may have been cancelled
@@ -9,6 +9,7 @@
 #include "realisation.hh"

 #include <future>
+#include <kj/async-io.h>
 #include <thread>

 namespace nix {

@@ -237,6 +238,7 @@ public:

 Store & store;
 Store & evalStore;
+kj::AsyncIoContext & aio;

 struct HookState {
 std::unique_ptr<HookInstance> instance;

@@ -264,7 +266,7 @@ public:
 NotifyingCounter<uint64_t> expectedNarSize{[this] { updateStatisticsLater(); }};
 NotifyingCounter<uint64_t> doneNarSize{[this] { updateStatisticsLater(); }};

-Worker(Store & store, Store & evalStore);
+Worker(Store & store, Store & evalStore, kj::AsyncIoContext & aio);
 ~Worker();

 /**
@@ -337,7 +337,7 @@ struct curlFileTransfer : public FileTransfer
 // wrapping user `callback`s instead is not possible because the
 // Callback api expects std::functions, and copying Callbacks is
 // not possible due the promises they hold.
-if (code == CURLE_OK && !dataCallback) {
+if (code == CURLE_OK && !dataCallback && result.data.length() > 0) {
 result.data = decompress(encoding, result.data);
 }
@ -269,11 +269,31 @@ Path Settings::getDefaultSSLCertFile()

const std::string nixVersion = PACKAGE_VERSION;

NLOHMANN_JSON_SERIALIZE_ENUM(SandboxMode, {
{SandboxMode::smEnabled, true},
{SandboxMode::smRelaxed, "relaxed"},
{SandboxMode::smDisabled, false},
});
void to_json(nlohmann::json & j, const SandboxMode & e)
{
if (e == SandboxMode::smEnabled) {
j = true;
} else if (e == SandboxMode::smRelaxed) {
j = "relaxed";
} else if (e == SandboxMode::smDisabled) {
j = false;
} else {
abort();
}
}

void from_json(const nlohmann::json & j, SandboxMode & e)
{
if (j == true) {
e = SandboxMode::smEnabled;
} else if (j == "relaxed") {
e = SandboxMode::smRelaxed;
} else if (j == false) {
e = SandboxMode::smDisabled;
} else {
throw Error("Invalid sandbox mode '%s'", std::string(j));
}
}

template<> SandboxMode BaseSetting<SandboxMode>::parse(const std::string & str, const ApplyConfigOptions & options) const
{
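For illustration only, here is a standalone sketch (not part of this change) that mirrors the enum and the two free functions above, to show how the explicit to_json/from_json pair round-trips a SandboxMode through nlohmann::json using the boolean and string spellings:

#include <nlohmann/json.hpp>
#include <cassert>
#include <iostream>

// Local mirror of the enum and serializers from the diff above (sketch only).
typedef enum { smEnabled, smRelaxed, smDisabled } SandboxMode;

void to_json(nlohmann::json & j, const SandboxMode & e)
{
    if (e == smEnabled) j = true;
    else if (e == smRelaxed) j = "relaxed";
    else j = false;
}

void from_json(const nlohmann::json & j, SandboxMode & e)
{
    if (j == true) e = smEnabled;
    else if (j == "relaxed") e = smRelaxed;
    else e = smDisabled;      // error handling omitted in this sketch
}

int main()
{
    nlohmann::json j = smRelaxed;          // found via ADL, like the nix:: versions
    std::cout << j << "\n";                // prints "relaxed"

    SandboxMode m = j.get<SandboxMode>();  // uses from_json
    assert(m == smRelaxed);
}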
@ -14,6 +14,9 @@ namespace nix {

typedef enum { smEnabled, smRelaxed, smDisabled } SandboxMode;

void to_json(nlohmann::json & j, const SandboxMode & e);
void from_json(const nlohmann::json & j, SandboxMode & e);

struct MaxBuildJobsSetting : public BaseSetting<unsigned int>
{
MaxBuildJobsSetting(Config * options,

@ -637,10 +640,10 @@ public:
PathsSetting<std::optional<Path>> diffHook{
this, std::nullopt, "diff-hook",
R"(
Absolute path to an executable capable of diffing build
results. The hook is executed if `run-diff-hook` is true, and the
output of a build is known to not be the same. This program is not
executed to determine if two results are the same.
Path to an executable capable of diffing build results. The hook is
executed if `run-diff-hook` is true, and the output of a build is
known to not be the same. This program is not executed to determine
if two results are the same.

The diff hook is executed by the same user and group who ran the
build. However, the diff hook does not have write access to the
@ -664,27 +664,6 @@ static void canonicalisePathMetaData_(
if (!(S_ISREG(st.st_mode) || S_ISDIR(st.st_mode) || S_ISLNK(st.st_mode)))
throw Error("file '%1%' has an unsupported type", path);

#if __linux__
/* Remove extended attributes / ACLs. */
ssize_t eaSize = llistxattr(path.c_str(), nullptr, 0);

if (eaSize < 0) {
if (errno != ENOTSUP && errno != ENODATA)
throw SysError("querying extended attributes of '%s'", path);
} else if (eaSize > 0) {
std::vector<char> eaBuf(eaSize);

if ((eaSize = llistxattr(path.c_str(), eaBuf.data(), eaBuf.size())) < 0)
throw SysError("querying extended attributes of '%s'", path);

for (auto & eaName: tokenizeString<Strings>(std::string(eaBuf.data(), eaSize), std::string("\000", 1))) {
if (settings.ignoredAcls.get().count(eaName)) continue;
if (lremovexattr(path.c_str(), eaName.c_str()) == -1)
throw SysError("removing extended attribute '%s' from '%s'", eaName, path);
}
}
#endif

/* Fail if the file is not owned by the build user. This prevents
us from messing up the ownership/permissions of files
hard-linked into the output (e.g. "ln /etc/shadow $out/foo").

@ -699,6 +678,29 @@ static void canonicalisePathMetaData_(
return;
}

#if __linux__
/* Remove extended attributes / ACLs. */
ssize_t eaSize = llistxattr(path.c_str(), nullptr, 0);

if (eaSize < 0) {
if (errno != ENOTSUP && errno != ENODATA)
throw SysError("querying extended attributes of '%s'", path);
} else if (eaSize > 0) {
std::vector<char> eaBuf(eaSize);

if ((eaSize = llistxattr(path.c_str(), eaBuf.data(), eaBuf.size())) < 0)
throw SysError("querying extended attributes of '%s'", path);

if (S_ISREG(st.st_mode) || S_ISDIR(st.st_mode))
chmod(path.c_str(), st.st_mode | S_IWUSR);
for (auto & eaName: tokenizeString<Strings>(std::string(eaBuf.data(), eaSize), std::string("\000", 1))) {
if (settings.ignoredAcls.get().count(eaName)) continue;
if (lremovexattr(path.c_str(), eaName.c_str()) == -1)
throw SysError("removing extended attribute '%s' from '%s'", eaName, path);
}
}
#endif

inodesSeen.insert(Inode(st.st_dev, st.st_ino));

canonicaliseTimestampAndPermissions(path, st);
@ -1216,7 +1218,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
bool narRead = false;
Finally cleanup = [&]() {
if (!narRead) {
ParseSink sink;
NARParseVisitor sink;
try {
parseDump(sink, source);
} catch (...) {
@ -73,8 +73,16 @@ struct SimpleUserLock : UserLock
debug("trying user '%s'", i);

struct passwd * pw = getpwnam(i.c_str());
if (!pw)
throw Error("the user '%s' in the group '%s' does not exist", i, settings.buildUsersGroup);
if (!pw) {
#ifdef __APPLE__
#define APPLE_HINT "\n\nhint: this may be caused by an update to macOS Sequoia breaking existing Lix installations.\n" \
"See the macOS Sequoia page on the Lix wiki for detailed repair instructions: https://wiki.lix.systems/link/81"
#else
#define APPLE_HINT
#endif
throw Error("the user '%s' in the group '%s' does not exist" APPLE_HINT, i, settings.buildUsersGroup);
#undef APPLE_HINT
}

auto fnUserLock = fmt("%s/userpool/%s", settings.nixStateDir,pw->pw_uid);
@ -221,6 +221,7 @@ dependencies = [
aws_s3,
aws_sdk_transfer,
nlohmann_json,
kj,
]

if host_machine.system() == 'freebsd'
@ -2,6 +2,7 @@
#include "archive.hh"

#include <map>
#include <memory>
#include <stack>
#include <algorithm>

@ -33,7 +34,7 @@ struct NarAccessor : public FSAccessor

NarMember root;

struct NarIndexer : ParseSink, Source
struct NarIndexer : NARParseVisitor, Source
{
NarAccessor & acc;
Source & source;

@ -44,11 +45,12 @@ struct NarAccessor : public FSAccessor

uint64_t pos = 0;

public:
NarIndexer(NarAccessor & acc, Source & source)
: acc(acc), source(source)
{ }

void createMember(const Path & path, NarMember member)
NarMember & createMember(const Path & path, NarMember member)
{
size_t level = std::count(path.begin(), path.end(), '/');
while (parents.size() > level) parents.pop();

@ -62,6 +64,8 @@ struct NarAccessor : public FSAccessor
auto result = parents.top()->children.emplace(baseNameOf(path), std::move(member));
parents.push(&result.first->second);
}

return *parents.top();
}

void createDirectory(const Path & path) override

@ -69,28 +73,17 @@ struct NarAccessor : public FSAccessor
createMember(path, {FSAccessor::Type::tDirectory, false, 0, 0});
}

void createRegularFile(const Path & path) override
std::unique_ptr<FileHandle> createRegularFile(const Path & path, uint64_t size, bool executable) override
{
createMember(path, {FSAccessor::Type::tRegular, false, 0, 0});
}
auto & memb = createMember(path, {FSAccessor::Type::tRegular, false, 0, 0});

void closeRegularFile() override
{ }

void isExecutable() override
{
parents.top()->isExecutable = true;
}

void preallocateContents(uint64_t size) override
{
assert(size <= std::numeric_limits<uint64_t>::max());
parents.top()->size = (uint64_t) size;
parents.top()->start = pos;
}
memb.size = (uint64_t) size;
memb.start = pos;
memb.isExecutable = executable;

void receiveContents(std::string_view data) override
{ }
return std::make_unique<FileHandle>();
}

void createSymlink(const Path & path, const std::string & target) override
{
@ -379,6 +379,48 @@ void Store::addMultipleToStore(
}
}

namespace {
/**
* If the NAR archive contains a single file at top-level, then save
* the contents of the file to `s`. Otherwise assert.
*/
struct RetrieveRegularNARVisitor : NARParseVisitor
{
struct MyFileHandle : public FileHandle
{
Sink & sink;

void receiveContents(std::string_view data) override
{
sink(data);
}

private:
MyFileHandle(Sink & sink) : sink(sink) {}

friend struct RetrieveRegularNARVisitor;
};

Sink & sink;

RetrieveRegularNARVisitor(Sink & sink) : sink(sink) { }

std::unique_ptr<FileHandle> createRegularFile(const Path & path, uint64_t size, bool executable) override
{
return std::unique_ptr<MyFileHandle>(new MyFileHandle{sink});
}

void createDirectory(const Path & path) override
{
assert(false && "RetrieveRegularNARVisitor::createDirectory must not be called");
}

void createSymlink(const Path & path, const std::string & target) override
{
assert(false && "RetrieveRegularNARVisitor::createSymlink must not be called");
}
};
}

/*
The aim of this function is to compute in one pass the correct ValidPathInfo for

@ -413,7 +455,7 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
/* Note that fileSink and unusualHashTee must be mutually exclusive, since
they both write to caHashSink. Note that that requisite is currently true
because the former is only used in the flat case. */
RetrieveRegularNARSink fileSink { caHashSink };
RetrieveRegularNARVisitor fileSink { caHashSink };
TeeSink unusualHashTee { narHashSink, caHashSink };

auto & narSink = method == FileIngestionMethod::Recursive && hashAlgo != HashType::SHA256

@ -429,7 +471,7 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
information to narSink. */
TeeSource tapped { fileSource, narSink };

ParseSink blank;
NARParseVisitor blank;
auto & parseSink = method == FileIngestionMethod::Flat
? fileSink
: blank;
@ -334,7 +334,7 @@ Generator<Entry> parse(Source & source)
}

static WireFormatGenerator restore(ParseSink & sink, Generator<nar::Entry> nar)
static WireFormatGenerator restore(NARParseVisitor & sink, Generator<nar::Entry> nar)
{
while (auto entry = nar.next()) {
co_yield std::visit(

@ -347,16 +347,13 @@ static WireFormatGenerator restore(ParseSink & sink, Generator<nar::Entry> nar)
},
[&](nar::File f) {
return [](auto f, auto & sink) -> WireFormatGenerator {
sink.createRegularFile(f.path);
sink.preallocateContents(f.size);
if (f.executable) {
sink.isExecutable();
}
auto handle = sink.createRegularFile(f.path, f.size, f.executable);

while (auto block = f.contents.next()) {
sink.receiveContents(std::string_view{block->data(), block->size()});
handle->receiveContents(std::string_view{block->data(), block->size()});
co_yield *block;
}
sink.closeRegularFile();
handle->close();
}(std::move(f), sink);
},
[&](nar::Symlink sl) {

@ -377,12 +374,12 @@ static WireFormatGenerator restore(ParseSink & sink, Generator<nar::Entry> nar)
}
}

WireFormatGenerator parseAndCopyDump(ParseSink & sink, Source & source)
WireFormatGenerator parseAndCopyDump(NARParseVisitor & sink, Source & source)
{
return restore(sink, nar::parse(source));
}

void parseDump(ParseSink & sink, Source & source)
void parseDump(NARParseVisitor & sink, Source & source)
{
auto parser = parseAndCopyDump(sink, source);
while (parser.next()) {
@ -390,11 +387,99 @@ void parseDump(ParseSink & sink, Source & source)
}
}

struct RestoreSink : ParseSink
/*
* Note [NAR restoration security]:
* It's *critical* that NAR restoration will never overwrite anything even if
* duplicate filenames are passed in. It is inevitable that not all NARs are
* fit to actually successfully restore to the target filesystem; errors may
* occur due to collisions, and this *must* cause the NAR to be rejected.
*
* Although the filenames are blocked from being *the same bytes* by a higher
* layer, filesystems have other ideas on every platform:
* - The store may be on a case-insensitive filesystem like APFS, ext4 with
* casefold directories, zfs with casesensitivity=insensitive
* - The store may be on a Unicode normalizing (or normalization-insensitive)
* filesystem like APFS (where files are looked up by
* hash(normalize(fname))), HFS+ (where file names are always normalized to
* approximately NFD), or zfs with normalization=formC, etc.
*
* It is impossible to know the version of Unicode being used by the underlying
* filesystem, thus it is *impossible* to stop these collisions.
*
* Overwriting files as a result of invalid NARs will cause a security bug like
* CppNix's CVE-2024-45593 (GHSA-h4vv-h3jq-v493)
*/

/**
* This code restores NARs from disk.
*
* See Note [NAR restoration security] for security invariants in this procedure.
*
*/
struct NARRestoreVisitor : NARParseVisitor
{
Path dstPath;
AutoCloseFD fd;

private:
class MyFileHandle : public FileHandle
{
AutoCloseFD fd;

MyFileHandle(AutoCloseFD && fd, uint64_t size, bool executable) : FileHandle(), fd(std::move(fd))
{
if (executable) {
makeExecutable();
}

maybePreallocateContents(size);
}

void makeExecutable()
{
struct stat st;
if (fstat(fd.get(), &st) == -1)
throw SysError("fstat");
if (fchmod(fd.get(), st.st_mode | (S_IXUSR | S_IXGRP | S_IXOTH)) == -1)
throw SysError("fchmod");
}

void maybePreallocateContents(uint64_t len)
{
if (!archiveSettings.preallocateContents)
return;

#if HAVE_POSIX_FALLOCATE
if (len) {
errno = posix_fallocate(fd.get(), 0, len);
/* Note that EINVAL may indicate that the underlying
filesystem doesn't support preallocation (e.g. on
OpenSolaris). Since preallocation is just an
optimisation, ignore it. */
if (errno && errno != EINVAL && errno != EOPNOTSUPP && errno != ENOSYS)
throw SysError("preallocating file of %1% bytes", len);
}
#endif
}

public:

~MyFileHandle() = default;

virtual void close() override
{
/* Call close explicitly to make sure the error is checked */
fd.close();
}

void receiveContents(std::string_view data) override
{
writeFull(fd.get(), data);
}

friend struct NARRestoreVisitor;
};

public:
void createDirectory(const Path & path) override
{
Path p = dstPath + path;
@ -402,49 +487,13 @@ struct RestoreSink : ParseSink
throw SysError("creating directory '%1%'", p);
};

void createRegularFile(const Path & path) override
std::unique_ptr<FileHandle> createRegularFile(const Path & path, uint64_t size, bool executable) override
{
Path p = dstPath + path;
fd = AutoCloseFD{open(p.c_str(), O_CREAT | O_EXCL | O_WRONLY | O_CLOEXEC, 0666)};
AutoCloseFD fd = AutoCloseFD{open(p.c_str(), O_CREAT | O_EXCL | O_WRONLY | O_CLOEXEC, 0666)};
if (!fd) throw SysError("creating file '%1%'", p);
}

void closeRegularFile() override
{
/* Call close explicitly to make sure the error is checked */
fd.close();
}

void isExecutable() override
{
struct stat st;
if (fstat(fd.get(), &st) == -1)
throw SysError("fstat");
if (fchmod(fd.get(), st.st_mode | (S_IXUSR | S_IXGRP | S_IXOTH)) == -1)
throw SysError("fchmod");
}

void preallocateContents(uint64_t len) override
{
if (!archiveSettings.preallocateContents)
return;

#if HAVE_POSIX_FALLOCATE
if (len) {
errno = posix_fallocate(fd.get(), 0, len);
/* Note that EINVAL may indicate that the underlying
filesystem doesn't support preallocation (e.g. on
OpenSolaris). Since preallocation is just an
optimisation, ignore it. */
if (errno && errno != EINVAL && errno != EOPNOTSUPP && errno != ENOSYS)
throw SysError("preallocating file of %1% bytes", len);
}
#endif
}

void receiveContents(std::string_view data) override
{
writeFull(fd.get(), data);
return std::unique_ptr<MyFileHandle>(new MyFileHandle(std::move(fd), size, executable));
}

void createSymlink(const Path & path, const std::string & target) override

@ -457,7 +506,7 @@ struct RestoreSink : ParseSink

void restorePath(const Path & path, Source & source)
{
RestoreSink sink;
NARRestoreVisitor sink;
sink.dstPath = path;
parseDump(sink, source);
}

@ -468,10 +517,9 @@ WireFormatGenerator copyNAR(Source & source)
// FIXME: if 'source' is the output of dumpPath() followed by EOF,
// we should just forward all data directly without parsing.

static ParseSink parseSink; /* null sink; just parse the NAR */
static NARParseVisitor parseSink; /* null sink; just parse the NAR */

return parseAndCopyDump(parseSink, source);
}

}
@ -76,45 +76,47 @@ WireFormatGenerator dumpString(std::string_view s);

/**
* \todo Fix this API, it sucks.
* A visitor for NAR parsing that performs filesystem (or virtual-filesystem)
* actions to restore a NAR.
*
* Methods of this may arbitrarily fail due to filename collisions.
*/
struct ParseSink
struct NARParseVisitor
{
virtual void createDirectory(const Path & path) { };

virtual void createRegularFile(const Path & path) { };
virtual void closeRegularFile() { };
virtual void isExecutable() { };
virtual void preallocateContents(uint64_t size) { };
virtual void receiveContents(std::string_view data) { };

virtual void createSymlink(const Path & path, const std::string & target) { };
};

/**
* If the NAR archive contains a single file at top-level, then save
* the contents of the file to `s`. Otherwise barf.
*/
struct RetrieveRegularNARSink : ParseSink
{
bool regular = true;
Sink & sink;

RetrieveRegularNARSink(Sink & sink) : sink(sink) { }

void createDirectory(const Path & path) override
/**
* A type-erased file handle specific to this particular NARParseVisitor.
*/
struct FileHandle
{
regular = false;
FileHandle() {}
FileHandle(FileHandle const &) = delete;
FileHandle & operator=(FileHandle &) = delete;

/** Puts one block of data into the file */
virtual void receiveContents(std::string_view data) { }

/**
* Explicitly closes the file. Further operations may throw an assert.
* This exists so that closing can fail and throw an exception without doing so in a destructor.
*/
virtual void close() { }

virtual ~FileHandle() = default;
};

virtual void createDirectory(const Path & path) { }

/**
* Creates a regular file in the extraction output with the given size and executable flag.
* The size is guaranteed to be the true size of the file.
*/
[[nodiscard]]
virtual std::unique_ptr<FileHandle> createRegularFile(const Path & path, uint64_t size, bool executable)
{
return std::make_unique<FileHandle>();
}

void receiveContents(std::string_view data) override
{
sink(data);
}

void createSymlink(const Path & path, const std::string & target) override
{
regular = false;
}
virtual void createSymlink(const Path & path, const std::string & target) { }
};

namespace nar {
@ -160,8 +162,8 @@ Generator<Entry> parse(Source & source);

}

WireFormatGenerator parseAndCopyDump(ParseSink & sink, Source & source);
void parseDump(ParseSink & sink, Source & source);
WireFormatGenerator parseAndCopyDump(NARParseVisitor & sink, Source & source);
void parseDump(NARParseVisitor & sink, Source & source);

void restorePath(const Path & path, Source & source);
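As a rough sketch of how a consumer might implement the new visitor interface declared above (not code from this change; TallyingVisitor is a hypothetical name, and only archive.hh is assumed), a visitor that merely tallies what it sees could look like this:

#include "archive.hh"

#include <cstdint>
#include <memory>
#include <string_view>

namespace nix {

/* Sketch: counts entries and bytes while a NAR is parsed, never touching disk. */
struct TallyingVisitor : NARParseVisitor
{
    uint64_t files = 0, dirs = 0, symlinks = 0, bytes = 0;

    struct CountingHandle : FileHandle
    {
        uint64_t & bytes;
        CountingHandle(uint64_t & bytes) : bytes(bytes) {}
        void receiveContents(std::string_view data) override { bytes += data.size(); }
    };

    void createDirectory(const Path & path) override { dirs++; }

    std::unique_ptr<FileHandle> createRegularFile(const Path & path, uint64_t size, bool executable) override
    {
        files++;
        return std::make_unique<CountingHandle>(bytes);
    }

    void createSymlink(const Path & path, const std::string & target) override { symlinks++; }
};

}

Feeding such a visitor to parseDump(sink, source) walks a NAR without writing anything, much like the null NARParseVisitor that copyNAR() uses above.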
@ -144,6 +144,7 @@ struct BrotliDecompressionSource : Source
std::unique_ptr<char[]> buf;
size_t avail_in = 0;
const uint8_t * next_in;
std::exception_ptr inputEofException = nullptr;

Source * inner;
std::unique_ptr<BrotliDecoderState, void (*)(BrotliDecoderState *)> state;

@ -167,23 +168,42 @@ struct BrotliDecompressionSource : Source
while (len && !BrotliDecoderIsFinished(state.get())) {
checkInterrupt();

while (avail_in == 0) {
while (avail_in == 0 && inputEofException == nullptr) {
try {
avail_in = inner->read(buf.get(), BUF_SIZE);
} catch (EndOfFile &) {
// No more data, but brotli may still have output remaining
// from the last call.
inputEofException = std::current_exception();
break;
}
next_in = charptr_cast<const uint8_t *>(buf.get());
}

if (!BrotliDecoderDecompressStream(
state.get(), &avail_in, &next_in, &len, &out, nullptr
))
{
BrotliDecoderResult res = BrotliDecoderDecompressStream(
state.get(), &avail_in, &next_in, &len, &out, nullptr
);

switch (res) {
case BROTLI_DECODER_RESULT_SUCCESS:
// We're done here!
goto finish;
case BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT:
// Grab more input. Don't try if we already have exhausted our input stream.
if (inputEofException != nullptr) {
std::rethrow_exception(inputEofException);
} else {
continue;
}
case BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT:
// Need more output space: we can only get another buffer by someone calling us again, so get out.
goto finish;
case BROTLI_DECODER_RESULT_ERROR:
throw CompressionError("error while decompressing brotli file");
}
}

finish:
if (begin != out) {
return out - begin;
} else {
@ -247,7 +247,7 @@ constexpr std::array<ExperimentalFeatureDetails, numXpFeatures> xpFeatureDetails
.tag = Xp::ReplAutomation,
.name = "repl-automation",
.description = R"(
Makes the repl not use readline/editline, print ENQ (U+0005) when ready for a command, and take commands followed by newline.
Makes the repl not use editline, print ENQ (U+0005) when ready for a command, and take commands followed by newline.
)",
},
}};
@ -18,15 +18,19 @@ namespace fs = std::filesystem;

namespace nix {

Path getCwd() {
char buf[PATH_MAX];
if (!getcwd(buf, sizeof(buf))) {
throw SysError("cannot get cwd");
}
return Path(buf);
}

Path absPath(Path path, std::optional<PathView> dir, bool resolveSymlinks)
{
if (path.empty() || path[0] != '/') {
if (!dir) {
char buf[PATH_MAX];
if (!getcwd(buf, sizeof(buf))) {
throw SysError("cannot get cwd");
}
path = concatStrings(buf, "/", path);
path = concatStrings(getCwd(), "/", path);
} else {
path = concatStrings(*dir, "/", path);
}
@ -29,6 +29,13 @@ namespace nix {
struct Sink;
struct Source;

/**
* Get the current working directory.
*
* Throw an error if the current directory cannot get got.
*/
Path getCwd();

/**
* @return An absolutized path, resolving paths relative to the
* specified directory, or the current directory otherwise. The path

@ -203,7 +210,7 @@ inline Paths createDirs(PathView path)
}

/**
* Create a symlink.
* Create a symlink. Throws if the symlink exists.
*/
void createSymlink(const Path & target, const Path & link);
@ -105,6 +105,7 @@ libutil_headers = files(
'regex-combinators.hh',
'regex.hh',
'repair-flag.hh',
'result.hh',
'serialise.hh',
'shlex.hh',
'signals.hh',

24  src/libutil/result.hh  Normal file
@ -0,0 +1,24 @@
#pragma once
/// @file

#include <boost/outcome/std_outcome.hpp>
#include <boost/outcome/std_result.hpp>
#include <boost/outcome/success_failure.hpp>
#include <exception>

namespace nix {

template<typename T, typename E = std::exception_ptr>
using Result = boost::outcome_v2::std_result<T, E>;

template<typename T, typename D, typename E = std::exception_ptr>
using Outcome = boost::outcome_v2::std_outcome<T, D, E>;

namespace result {

using boost::outcome_v2::success;
using boost::outcome_v2::failure;

}

}
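A hedged usage sketch for the new Result alias (the function name parsePositive is hypothetical; only result.hh and Boost.Outcome as included above are assumed):

#include "result.hh"

#include <exception>
#include <iostream>
#include <stdexcept>

using namespace nix;

// Sketch: wrap a fallible computation into a Result instead of throwing directly.
Result<int> parsePositive(int n)
{
    if (n > 0) return n;
    return result::failure(std::make_exception_ptr(std::invalid_argument("not positive")));
}

int main()
{
    auto ok = parsePositive(7);
    if (ok.has_value()) std::cout << ok.value() << "\n";

    auto bad = parsePositive(-1);
    if (!bad.has_value()) {
        // The error channel carries an exception_ptr, so callers decide when to rethrow,
        // much like Worker::run() calling .value() on the awaited Result above.
        try { std::rethrow_exception(bad.error()); }
        catch (std::exception & e) { std::cout << "error: " << e.what() << "\n"; }
    }
}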
@ -77,6 +77,11 @@ struct Source
* Store up to ‘len’ in the buffer pointed to by ‘data’, and
* return the number of bytes stored. It blocks until at least
* one byte is available.
*
* Should not return 0 (generally you want to throw EndOfFile), but nothing
* stops that.
*
* \throws EndOfFile if there is no more data.
*/
virtual size_t read(char * data, size_t len) = 0;
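To illustrate the documented contract above (a sketch, not part of this change; FixedStringSource is a hypothetical name), a Source over an in-memory string would return at least one byte per call and throw EndOfFile instead of returning 0:

#include "serialise.hh"

#include <algorithm>
#include <cstring>
#include <string>

namespace nix {

/* Sketch: hands out a fixed string and then signals exhaustion by throwing
   EndOfFile, never by returning 0. */
struct FixedStringSource : Source
{
    std::string data;
    size_t pos = 0;

    FixedStringSource(std::string data) : data(std::move(data)) {}

    size_t read(char * buf, size_t len) override
    {
        if (pos >= data.size())
            throw EndOfFile("fixed string source exhausted");
        size_t n = std::min(len, data.size() - pos);
        memcpy(buf, data.data() + pos, n);
        pos += n;
        return n;
    }
};

}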
@ -200,8 +200,18 @@ std::string showBytes(uint64_t bytes);

/**
* Provide an addition operator between strings and string_views
* Provide an addition operator between `std::string` and `std::string_view`
* inexplicably omitted from the standard library.
*
* > The reason for this is given in n3512 string_ref: a non-owning reference
* to a string, revision 2 by Jeffrey Yasskin:
* >
* > > I also omitted operator+(basic_string, basic_string_ref) because LLVM
* > > returns a lightweight object from this overload and only performs the
* > > concatenation lazily. If we define this overload, we'll have a hard time
* > > introducing that lightweight concatenation later.
*
* See: https://stackoverflow.com/a/47735624
*/
inline std::string operator + (const std::string & s1, std::string_view s2)
{
@ -353,6 +353,9 @@ void mainWrapped(int argc, char * * argv)
argv++; argc--;
}

// Clean up the progress bar if shown using --log-format in a legacy command too.
// Otherwise, this is a harmless no-op.
Finally f([] { logger->pause(); });
{
auto legacy = (*RegisterLegacyCommand::commands)[programName];
if (legacy) return legacy(argc, argv);

@ -361,7 +364,6 @@ void mainWrapped(int argc, char * * argv)
evalSettings.pureEval = true;

setLogFormat(LogFormat::bar);
Finally f([] { logger->pause(); });
settings.verboseBuild = false;
// FIXME: stop messing about with log verbosity depending on if it is interactive use
if (isatty(STDERR_FILENO)) {
@ -82,6 +82,10 @@ struct CmdPathInfo : StorePathsCommand, MixJSON

void run(ref<Store> store, StorePaths && storePaths) override
{
// Wipe the progress bar to prevent interference with the output.
// It's not needed any more because expensive evaluation or builds are already done here.
logger->pause();

size_t pathLen = 0;
for (auto & storePath : storePaths)
pathLen = std::max(pathLen, store->printStorePath(storePath).size());
@ -9,17 +9,10 @@
#include <filesystem>

#include "types.hh"
#include "test-data.hh"

namespace nix {

/**
* The path to the unit test data directory. See the contributing guide
* in the manual for further details.
*/
static Path getUnitTestData() {
return getEnv("_NIX_TEST_UNIT_DATA").value();
}

/**
* Whether we should update "golden masters" instead of running tests
* against them. See the contributing guide in the manual for further

16  tests/unit/libutil-support/tests/test-data.cc  Normal file
@ -0,0 +1,16 @@
#include "test-data.hh"
#include "strings.hh"

namespace nix {

Path getUnitTestData()
{
return getEnv("_NIX_TEST_UNIT_DATA").value();
}

Path getUnitTestDataPath(std::string_view path)
{
return absPath(getUnitTestData() + "/" + path);
}

}
@ -1,25 +1,20 @@
#pragma once

#include "environment-variables.hh"
#include "types.hh"
#include "environment-variables.hh"
#include "file-system.hh"

namespace nix {

// TODO: These helpers should be available in all unit tests.

/**
* The path to the unit test data directory. See the contributing guide
* in the manual for further details.
*/
static Path getUnitTestData() {
return getEnv("_NIX_TEST_UNIT_DATA").value();
}
Path getUnitTestData();

/**
* Resolve a path under the unit test data directory to an absolute path.
*/
static Path getUnitTestDataPath(std::string_view path) {
return absPath(getUnitTestData() + "/" + path);
}
Path getUnitTestDataPath(std::string_view path);

}
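A minimal sketch of how a test might use these helpers (hypothetical test, not part of this change; it assumes _NIX_TEST_UNIT_DATA is set as described in the contributing guide):

#include "tests/test-data.hh"

#include <gtest/gtest.h>

namespace nix {

// Sketch: getUnitTestDataPath() resolves a relative name to an absolute path
// under the unit test data directory.
TEST(TestData, resolvesRelativePaths)
{
    auto p = getUnitTestDataPath("nix.conf");
    ASSERT_FALSE(p.empty());
    ASSERT_EQ(p[0], '/'); // absPath() always yields an absolute path
}

}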
@ -3,105 +3,161 @@

namespace nix {

/* ----------------------------------------------------------------------------
* compress / decompress
* --------------------------------------------------------------------------*/
/* ----------------------------------------------------------------------------
* compress / decompress
* --------------------------------------------------------------------------*/

TEST(compress, compressWithUnknownMethod) {
ASSERT_THROW(compress("invalid-method", "something-to-compress"), UnknownCompressionMethod);
}

TEST(compress, noneMethodDoesNothingToTheInput) {
auto o = compress("none", "this-is-a-test");

ASSERT_EQ(o, "this-is-a-test");
}

TEST(decompress, decompressNoneCompressed) {
auto method = "none";
auto str = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf";
auto o = decompress(method, str);

ASSERT_EQ(o, str);
}

TEST(decompress, decompressEmptyCompressed) {
// Empty-method decompression used e.g. by S3 store
// (Content-Encoding == "").
auto method = "";
auto str = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf";
auto o = decompress(method, str);

ASSERT_EQ(o, str);
}

TEST(decompress, decompressXzCompressed) {
auto method = "xz";
auto str = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf";
auto o = decompress(method, compress(method, str));

ASSERT_EQ(o, str);
}

TEST(decompress, decompressBzip2Compressed) {
auto method = "bzip2";
auto str = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf";
auto o = decompress(method, compress(method, str));

ASSERT_EQ(o, str);
}

TEST(decompress, decompressBrCompressed) {
auto method = "br";
auto str = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf";
auto o = decompress(method, compress(method, str));

ASSERT_EQ(o, str);
}

TEST(decompress, decompressInvalidInputThrowsCompressionError) {
auto method = "bzip2";
auto str = "this is a string that does not qualify as valid bzip2 data";

ASSERT_THROW(decompress(method, str), CompressionError);
}

TEST(decompress, veryLongBrotli) {
auto method = "br";
auto str = std::string(65536, 'a');
auto o = decompress(method, compress(method, str));

// This is just to not print 64k of "a" for most failures
ASSERT_EQ(o.length(), str.length());
ASSERT_EQ(o, str);
}

/* ----------------------------------------------------------------------------
* compression sinks
* --------------------------------------------------------------------------*/

TEST(makeCompressionSink, noneSinkDoesNothingToInput) {
StringSink strSink;
auto inputString = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf";
auto sink = makeCompressionSink("none", strSink);
(*sink)(inputString);
sink->finish();

ASSERT_STREQ(strSink.s.c_str(), inputString);
}

TEST(makeCompressionSink, compressAndDecompress) {
auto inputString = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf";

StringSink strSink;
auto sink = makeCompressionSink("bzip2", strSink);
(*sink)(inputString);
sink->finish();

StringSource strSource{strSink.s};
auto decompressionSource = makeDecompressionSource("bzip2", strSource);

ASSERT_STREQ(decompressionSource->drain().c_str(), inputString);
TEST(compress, compressWithUnknownMethod)
{
ASSERT_THROW(compress("invalid-method", "something-to-compress"), UnknownCompressionMethod);
}

TEST(compress, noneMethodDoesNothingToTheInput)
{
auto o = compress("none", "this-is-a-test");

ASSERT_EQ(o, "this-is-a-test");
}

TEST(decompress, decompressEmptyString)
{
// Empty-method decompression used e.g. by S3 store
// (Content-Encoding == "").
auto o = decompress("", "this-is-a-test");

ASSERT_EQ(o, "this-is-a-test");
}

/* ----------------------------------------------------------------------------
* compression sinks
* --------------------------------------------------------------------------*/

TEST(makeCompressionSink, noneSinkDoesNothingToInput)
{
auto method = "none";
StringSink strSink;
auto inputString = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf";
auto sink = makeCompressionSink(method, strSink);
(*sink)(inputString);
sink->finish();

ASSERT_STREQ(strSink.s.c_str(), inputString);
}

/** Tests applied to all compression types */
class PerTypeCompressionTest : public testing::TestWithParam<const char *>
{};

/** Tests applied to non-passthrough compression types */
class PerTypeNonNullCompressionTest : public testing::TestWithParam<const char *>
{};

constexpr const char * COMPRESSION_TYPES_NONNULL[] = {
// libarchive
"bzip2",
"compress",
"gzip",
"lzip",
"lzma",
"xz",
"zstd",
// Uses external program via libarchive so cannot be used :(
/*
"grzip",
"lrzip",
"lzop",
"lz4",
*/
// custom
"br",
};

INSTANTIATE_TEST_SUITE_P(
compressionNonNull, PerTypeNonNullCompressionTest, testing::ValuesIn(COMPRESSION_TYPES_NONNULL)
);
INSTANTIATE_TEST_SUITE_P(
compressionNonNull, PerTypeCompressionTest, testing::ValuesIn(COMPRESSION_TYPES_NONNULL)
);

INSTANTIATE_TEST_SUITE_P(
compressionNull, PerTypeCompressionTest, testing::Values("none")
);

/* ---------------------------------------
* All compression types
* --------------------------------------- */

TEST_P(PerTypeCompressionTest, roundTrips)
{
auto method = GetParam();
auto str = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf";
auto o = decompress(method, compress(method, str));

ASSERT_EQ(o, str);
}

TEST_P(PerTypeCompressionTest, longerThanBuffer)
{
// This is targeted originally at regression testing a brotli bug, but we might as well do it to
// everything
auto method = GetParam();
auto str = std::string(65536, 'a');
auto o = decompress(method, compress(method, str));

// This is just to not print 64k of "a" for most failures
ASSERT_EQ(o.length(), str.length());
ASSERT_EQ(o, str);
}

TEST_P(PerTypeCompressionTest, sinkAndSource)
{
auto method = GetParam();
auto inputString = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf";

StringSink strSink;
auto sink = makeCompressionSink(method, strSink);
(*sink)(inputString);
sink->finish();

StringSource strSource{strSink.s};
auto decompressionSource = makeDecompressionSource(method, strSource);

ASSERT_STREQ(decompressionSource->drain().c_str(), inputString);
}

/* ---------------------------------------
* Non null compression types
* --------------------------------------- */

TEST_P(PerTypeNonNullCompressionTest, bogusInputDecompression)
{
auto param = GetParam();

auto bogus = "this data is bogus and should throw when decompressing";
ASSERT_THROW(decompress(param, bogus), CompressionError);
}

TEST_P(PerTypeNonNullCompressionTest, truncatedValidInput)
{
auto method = GetParam();

auto inputString = "the quick brown fox jumps over the lazy doggos";
auto compressed = compress(method, inputString);

/* n.b. This also tests zero-length input, which is also invalid.
* As of the writing of this comment, it returns empty output, but is
* allowed to throw a compression error instead. */
for (int i = 0; i < compressed.length(); ++i) {
auto newCompressed = compressed.substr(compressed.length() - i);
try {
decompress(method, newCompressed);
// Success is acceptable as well, even though it is corrupt data.
// The compression method is not expected to provide integrity,
// just, not break explosively on bad input.
} catch (CompressionError &) {
// Acceptable
}
}
}

}
@ -1,5 +1,9 @@
#include "config.hh"
#include "args.hh"
#include "file-system.hh"
#include "environment-variables.hh"
#include "logging.hh"
#include "tests/test-data.hh"

#include <sstream>
#include <gtest/gtest.h>

@ -287,6 +291,35 @@ namespace nix {
), Error);
}

TEST(Config, includeRelativePath) {
Config config;
Setting<std::string> setting{&config, "", "puppy", "description"};

config.applyConfig("include puppy.conf", {
.path = getUnitTestDataPath("nix.conf")
});

std::map<std::string, Config::SettingInfo> settings;
config.getSettings(settings);
ASSERT_FALSE(settings.empty());
ASSERT_EQ(settings["puppy"].value, "doggy");
}

TEST(Config, includeTildePath) {
Config config;
Setting<std::string> setting{&config, "", "puppy", "description"};

config.applyConfig("include ~/puppy.conf", {
.path = "/doesnt-exist",
.home = getUnitTestData()
});

std::map<std::string, Config::SettingInfo> settings;
config.getSettings(settings);
ASSERT_FALSE(settings.empty());
ASSERT_EQ(settings["puppy"].value, "doggy");
}

TEST(Config, applyConfigInvalidThrows) {
Config config;
ASSERT_THROW(config.applyConfig("value == key"), UsageError);
1  tests/unit/libutil/data/puppy.conf  Normal file

@ -0,0 +1 @@
puppy = doggy
@ -19,6 +19,7 @@ libutil_test_support_sources = files(
'libutil-support/tests/cli-literate-parser.cc',
'libutil-support/tests/hash.cc',
'libutil-support/tests/terminal-code-eater.cc',
'libutil-support/tests/test-data.cc',
)
libutil_test_support = library(
'lixutil-test-support',

@ -95,7 +96,6 @@ libstore_test_support_sources = files(
'libstore-support/tests/derived-path.cc',
'libstore-support/tests/outputs-spec.cc',
'libstore-support/tests/path.cc',
'libstore-support/tests/test-data.hh',
)

libstore_test_support = library(