forked from lix-project/lix
Merge branch 'path-info' into ca-drv-exotic
commit 08b8657978
.github/workflows/backport.yml | 2 (vendored)

@@ -8,7 +8,7 @@ jobs:
  if: github.repository_owner == 'NixOS' && github.event.pull_request.merged == true && (github.event_name != 'labeled' || startsWith('backport', github.event.label.name))
  runs-on: ubuntu-latest
  steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
  with:
  ref: ${{ github.event.pull_request.head.sha }}
  # required to find all branches
.github/workflows/ci.yml | 16 (vendored)

@@ -14,10 +14,10 @@ jobs:
  runs-on: ${{ matrix.os }}
  timeout-minutes: 60
  steps:
- - uses: actions/checkout@v2.4.0
+ - uses: actions/checkout@v3
  with:
  fetch-depth: 0
- - uses: cachix/install-nix-action@v16
+ - uses: cachix/install-nix-action@v17
  - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
  - uses: cachix/cachix-action@v10
  if: needs.check_cachix.outputs.secret == 'true'

@@ -46,11 +46,11 @@ jobs:
  outputs:
  installerURL: ${{ steps.prepare-installer.outputs.installerURL }}
  steps:
- - uses: actions/checkout@v2.4.0
+ - uses: actions/checkout@v3
  with:
  fetch-depth: 0
  - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
- - uses: cachix/install-nix-action@v16
+ - uses: cachix/install-nix-action@v17
  - uses: cachix/cachix-action@v10
  with:
  name: '${{ env.CACHIX_NAME }}'

@@ -67,9 +67,9 @@ jobs:
  os: [ubuntu-latest, macos-latest]
  runs-on: ${{ matrix.os }}
  steps:
- - uses: actions/checkout@v2.4.0
+ - uses: actions/checkout@v3
  - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
- - uses: cachix/install-nix-action@v16
+ - uses: cachix/install-nix-action@v17
  with:
  install_url: '${{needs.installer.outputs.installerURL}}'
  install_options: "--tarball-url-prefix https://${{ env.CACHIX_NAME }}.cachix.org/serve"

@@ -83,10 +83,10 @@ jobs:
  needs.check_cachix.outputs.secret == 'true'
  runs-on: ubuntu-latest
  steps:
- - uses: actions/checkout@v2.4.0
+ - uses: actions/checkout@v3
  with:
  fetch-depth: 0
- - uses: cachix/install-nix-action@v16
+ - uses: cachix/install-nix-action@v17
  - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
  - run: echo NIX_VERSION="$(nix-instantiate --eval -E '(import ./default.nix).defaultPackage.${builtins.currentSystem}.version' | tr -d \")" >> $GITHUB_ENV
  - uses: cachix/cachix-action@v10
.github/workflows/hydra_status.yml | 2 (vendored)

@@ -9,7 +9,7 @@ jobs:
  if: github.repository_owner == 'NixOS'
  runs-on: ubuntu-latest
  steps:
- - uses: actions/checkout@v2.4.0
+ - uses: actions/checkout@v3
  with:
  fetch-depth: 0
  - run: bash scripts/check-hydra-status.sh
.gitignore | 1 (vendored)

@@ -90,6 +90,7 @@ perl/Makefile.config
  /misc/systemd/nix-daemon.service
  /misc/systemd/nix-daemon.socket
+ /misc/systemd/nix-daemon.conf
  /misc/upstart/nix-daemon.conf

  /src/resolve-system-dependencies/resolve-system-dependencies
@@ -6,7 +6,8 @@ options:
  concatStrings (map
  (name:
  let option = options.${name}; in
- " - `${name}` \n\n"
+ " - [`${name}`](#conf-${name})"
+ + "<p id=\"conf-${name}\"></p>\n\n"
  + concatStrings (map (s: " ${s}\n") (splitLines option.description)) + "\n\n"
  + (if option.documentDefault
  then " **Default:** " + (
@@ -72,6 +72,7 @@
  - [CLI guideline](contributing/cli-guideline.md)
  - [Release Notes](release-notes/release-notes.md)
  - [Release X.Y (202?-??-??)](release-notes/rl-next.md)
+ - [Release 2.8 (2022-04-19)](release-notes/rl-2.8.md)
  - [Release 2.7 (2022-03-07)](release-notes/rl-2.7.md)
  - [Release 2.6 (2022-01-24)](release-notes/rl-2.6.md)
  - [Release 2.5 (2021-12-13)](release-notes/rl-2.5.md)
@@ -1,4 +1,4 @@
- # Release X.Y (2022-03-07)
+ # Release 2.7 (2022-03-07)

  * Nix will now make some helpful suggestions when you mistype
  something on the command line. For instance, if you type `nix build
doc/manual/src/release-notes/rl-2.8.md | 53 (new file)

@@ -0,0 +1,53 @@
# Release 2.8 (2022-04-19)

* New experimental command: `nix fmt`, which applies a formatter
  defined by the `formatter.<system>` flake output to the Nix
  expressions in a flake.

* Various Nix commands can now read expressions from standard input
  using `--file -`.

* New experimental builtin function `builtins.fetchClosure` that
  copies a closure from a binary cache at evaluation time and rewrites
  it to content-addressed form (if it isn't already). Like
  `builtins.storePath`, this allows importing pre-built store paths;
  the difference is that it doesn't require the user to configure
  binary caches and trusted public keys.

  This function is only available if you enable the experimental
  feature `fetch-closure`.

* New experimental feature: *impure derivations*. These are
  derivations that can produce a different result every time they're
  built. Here is an example:

  ```nix
  stdenv.mkDerivation {
    name = "impure";
    __impure = true; # marks this derivation as impure
    buildCommand = "date > $out";
  }
  ```

  Running `nix build` twice on this expression will build the
  derivation twice, producing two different content-addressed store
  paths. Like fixed-output derivations, impure derivations have access
  to the network. Only fixed-output derivations and impure derivations
  can depend on an impure derivation.

* `nix store make-content-addressable` has been renamed to `nix store
  make-content-addressed`.

* The `nixosModule` flake output attribute has been renamed consistent
  with the `.default` renames in Nix 2.7.

  * `nixosModule` → `nixosModules.default`

  As before, the old output will continue to work, but `nix flake check` will
  issue a warning about it.

* `nix run` is now stricter in what it accepts: members of the `apps`
  flake output are now required to be apps (as defined in [the
  manual](https://nixos.org/manual/nix/stable/command-ref/new-cli/nix3-run.html#apps)),
  and members of `packages` or `legacyPackages` must be derivations
  (not apps).
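A minimal sketch of what the `nix fmt` note above relies on: a flake that exposes a `formatter.<system>` output. This example is illustrative and not part of the commit; the choice of `nixpkgs-fmt` and the nixpkgs branch are assumptions, and any formatter package would do.

```nix
{
  inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-21.11";

  outputs = { self, nixpkgs }: {
    # `nix fmt` looks up the formatter for the current system and runs it
    # on the flake's Nix files.
    formatter.x86_64-linux = nixpkgs.legacyPackages.x86_64-linux.nixpkgs-fmt;
  };
}
```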
@@ -1,16 +1 @@
  # Release X.Y (202?-??-??)
-
- * Various nix commands can now read expressions from stdin with `--file -`.
-
- * `nix store make-content-addressable` has been renamed to `nix store
-   make-content-addressed`.
-
- * New experimental builtin function `builtins.fetchClosure` that
-   copies a closure from a binary cache at evaluation time and rewrites
-   it to content-addressed form (if it isn't already). Like
-   `builtins.storePath`, this allows importing pre-built store paths;
-   the difference is that it doesn't require the user to configure
-   binary caches and trusted public keys.
-
-   This function is only available if you enable the experimental
-   feature `fetch-closure`.
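For the `builtins.fetchClosure` entry moved into the 2.8 notes above, a minimal usage sketch looks like the following. It is illustrative only: the store path is a placeholder rather than a real hash, and the `fetch-closure` experimental feature must be enabled for it to evaluate.

```nix
builtins.fetchClosure {
  # Binary cache to copy the closure from at evaluation time.
  fromStore = "https://cache.nixos.org";
  # Placeholder path; in practice this is a concrete store path that
  # exists in the cache above.
  fromPath = /nix/store/00000000000000000000000000000000-hello-2.10;
}
```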
@@ -18,11 +18,11 @@
  },
  "nixpkgs": {
  "locked": {
- "lastModified": 1632864508,
- "narHash": "sha256-d127FIvGR41XbVRDPVvozUPQ/uRHbHwvfyKHwEt5xFM=",
+ "lastModified": 1645296114,
+ "narHash": "sha256-y53N7TyIkXsjMpOG7RhvqJFGDacLs9HlyHeSTBioqYU=",
  "owner": "NixOS",
  "repo": "nixpkgs",
- "rev": "82891b5e2c2359d7e58d08849e4c89511ab94234",
+ "rev": "530a53dcbc9437363471167a5e4762c5fcfa34a1",
  "type": "github"
  },
  "original": {
@@ -15,7 +15,7 @@ function _complete_nix {
  else
  COMPREPLY+=("$completion")
  fi
- done < <(NIX_GET_COMPLETIONS=$cword "${words[@]/#\~/$HOME}" 2>/dev/null)
+ done < <(NIX_GET_COMPLETIONS=$cword "${words[@]}" 2>/dev/null)
  __ltrim_colon_completions "$cur"
  }

@@ -3,6 +3,7 @@ Description=Nix Daemon
  Documentation=man:nix-daemon https://nixos.org/manual
  RequiresMountsFor=@storedir@
  RequiresMountsFor=@localstatedir@
+ RequiresMountsFor=@localstatedir@/nix/db
  ConditionPathIsReadWrite=@localstatedir@/nix/daemon-socket

  [Service]
@@ -14,7 +14,7 @@ curl -sS -H 'Accept: application/json' https://hydra.nixos.org/jobset/nix/master
  someBuildFailed=0

  for buildId in $BUILDS_FOR_LATEST_EVAL; do
- buildInfo=$(curl -sS -H 'Accept: application/json' "https://hydra.nixos.org/build/$buildId")
+ buildInfo=$(curl --fail -sS -H 'Accept: application/json' "https://hydra.nixos.org/build/$buildId")

  finished=$(echo "$buildInfo" | jq -r '.finished')

@@ -423,6 +423,18 @@ EOF
  fi
  done

+ if [ "$(uname -s)" = "Linux" ] && [ ! -e /run/systemd/system ]; then
+ warning <<EOF
+ We did not detect systemd on your system. With a multi-user install
+ without systemd you will have to manually configure your init system to
+ launch the Nix daemon after installation.
+ EOF
+ if ! ui_confirm "Do you want to proceed with a multi-user installation?"; then
+ failure <<EOF
+ You have aborted the installation.
+ EOF
+ fi
+ fi
  }

  setup_report() {
@@ -90,7 +90,7 @@ poly_configure_nix_daemon_service() {
  ln -sfn /nix/var/nix/profiles/default/$TMPFILES_SRC $TMPFILES_DEST

  _sudo "to run systemd-tmpfiles once to pick that path up" \
- sytemd-tmpfiles create --prefix=/nix/var/nix
+ systemd-tmpfiles --create --prefix=/nix/var/nix

  _sudo "to set up the nix-daemon service" \
  systemctl link "/nix/var/nix/profiles/default$SERVICE_SRC"
@@ -82,7 +82,7 @@ if [ "$(uname -s)" != "Darwin" ]; then
  fi

  if command -v curl > /dev/null 2>&1; then
- fetch() { curl -L "$1" -o "$2"; }
+ fetch() { curl --fail -L "$1" -o "$2"; }
  elif command -v wget > /dev/null 2>&1; then
  fetch() { wget "$1" -O "$2"; }
  else
@@ -85,11 +85,12 @@ struct SourceExprCommand : virtual Args, MixFlakeOptions
  {
  std::optional<Path> file;
  std::optional<std::string> expr;
+ bool readOnlyMode = false;

  // FIXME: move this; not all commands (e.g. 'nix run') use it.
  OperateOn operateOn = OperateOn::Output;

- SourceExprCommand();
+ SourceExprCommand(bool supportReadOnlyMode = false);

  std::vector<std::shared_ptr<Installable>> parseInstallables(
  ref<Store> store, std::vector<std::string> ss);

@@ -128,13 +129,13 @@ struct InstallableCommand : virtual Args, SourceExprCommand
  {
  std::shared_ptr<Installable> installable;

- InstallableCommand();
+ InstallableCommand(bool supportReadOnlyMode = false);

  void prepare() override;

  std::optional<FlakeRef> getFlakeRefForCompletion() override
  {
- return parseFlakeRef(_installable, absPath("."));
+ return parseFlakeRefWithFragment(_installable, absPath(".")).first;
  }

  private:
@@ -7,6 +7,7 @@
  #include "registry.hh"
  #include "flake/flakeref.hh"
  #include "store-api.hh"
+ #include "command.hh"

  namespace nix {

@@ -59,6 +60,9 @@ MixEvalArgs::MixEvalArgs()
  fetchers::Attrs extraAttrs;
  if (to.subdir != "") extraAttrs["dir"] = to.subdir;
  fetchers::overrideRegistry(from.input, to.input, extraAttrs);
  }},
+ .completer = {[&](size_t, std::string_view prefix) {
+ completeFlakeRef(openStore(), prefix);
+ }}
  });
@@ -1,4 +1,6 @@
+ #include "globals.hh"
  #include "installables.hh"
+ #include "util.hh"
  #include "command.hh"
  #include "attr-path.hh"
  #include "common-eval-args.hh"
@@ -99,6 +101,14 @@ MixFlakeOptions::MixFlakeOptions()
  lockFlags.inputOverrides.insert_or_assign(
  flake::parseInputPath(inputPath),
  parseFlakeRef(flakeRef, absPath("."), true));
  }},
+ .completer = {[&](size_t n, std::string_view prefix) {
+ if (n == 0) {
+ if (auto flakeRef = getFlakeRefForCompletion())
+ completeFlakeInputPath(getEvalState(), *flakeRef, prefix);
+ } else if (n == 1) {
+ completeFlakeRef(getEvalState()->store, prefix);
+ }
+ }}
  });

@@ -129,7 +139,7 @@ MixFlakeOptions::MixFlakeOptions()
  });
  }

- SourceExprCommand::SourceExprCommand()
+ SourceExprCommand::SourceExprCommand(bool supportReadOnlyMode)
  {
  addFlag({
  .longName = "file",

@@ -157,6 +167,17 @@ SourceExprCommand::SourceExprCommand()
  .category = installablesCategory,
  .handler = {&operateOn, OperateOn::Derivation},
  });

+ if (supportReadOnlyMode) {
+ addFlag({
+ .longName = "read-only",
+ .description =
+ "Do not instantiate each evaluated derivation. "
+ "This improves performance, but can cause errors when accessing "
+ "store paths of derivations during evaluation.",
+ .handler = {&readOnlyMode, true},
+ });
+ }
  }

  Strings SourceExprCommand::getDefaultFlakeAttrPaths()
@@ -182,6 +203,8 @@ Strings SourceExprCommand::getDefaultFlakeAttrPathPrefixes()
  void SourceExprCommand::completeInstallable(std::string_view prefix)
  {
  if (file) {
+ completionType = ctAttrs;
+
  evalSettings.pureEval = false;
  auto state = getEvalState();
  Expr *e = state->parseExprFromFile(

@@ -210,13 +233,14 @@ void SourceExprCommand::completeInstallable(std::string_view prefix)
  Value v2;
  state->autoCallFunction(*autoArgs, v1, v2);

- completionType = ctAttrs;
-
  if (v2.type() == nAttrs) {
  for (auto & i : *v2.attrs) {
  std::string name = i.name;
  if (name.find(searchWord) == 0) {
- completions->add(i.name);
+ if (prefix_ == "")
+ completions->add(name);
+ else
+ completions->add(prefix_ + "." + name);
  }
  }
  }
@@ -244,10 +268,11 @@ void completeFlakeRefWithFragment(
  if (hash == std::string::npos) {
  completeFlakeRef(evalState->store, prefix);
  } else {
+ completionType = ctAttrs;
+
  auto fragment = prefix.substr(hash + 1);
  auto flakeRefS = std::string(prefix.substr(0, hash));
- // FIXME: do tilde expansion.
- auto flakeRef = parseFlakeRef(flakeRefS, absPath("."));
+ auto flakeRef = parseFlakeRef(expandTilde(flakeRefS), absPath("."));

  auto evalCache = openEvalCache(*evalState,
  std::make_shared<flake::LockedFlake>(lockFlake(*evalState, flakeRef, lockFlags)));

@@ -259,8 +284,6 @@
  flake. */
  attrPathPrefixes.push_back("");

- completionType = ctAttrs;
-
  for (auto & attrPathPrefixS : attrPathPrefixes) {
  auto attrPathPrefix = parseAttrPath(*evalState, attrPathPrefixS);
  auto attrPathS = attrPathPrefixS + std::string(fragment);
@@ -334,16 +357,16 @@ DerivedPath Installable::toDerivedPath()
  return std::move(buildables[0]);
  }

- std::vector<std::pair<std::shared_ptr<eval_cache::AttrCursor>, std::string>>
+ std::vector<ref<eval_cache::AttrCursor>>
  Installable::getCursors(EvalState & state)
  {
  auto evalCache =
  std::make_shared<nix::eval_cache::EvalCache>(std::nullopt, state,
  [&]() { return toValue(state).first; });
- return {{evalCache->getRoot(), ""}};
+ return {evalCache->getRoot()};
  }

- std::pair<std::shared_ptr<eval_cache::AttrCursor>, std::string>
+ ref<eval_cache::AttrCursor>
  Installable::getCursor(EvalState & state)
  {
  auto cursors = getCursors(state);

@@ -566,43 +589,21 @@ InstallableFlake::InstallableFlake(

  std::tuple<std::string, FlakeRef, InstallableValue::DerivationInfo> InstallableFlake::toDerivation()
  {
- auto lockedFlake = getLockedFlake();
+ auto attr = getCursor(*state);

- auto cache = openEvalCache(*state, lockedFlake);
- auto root = cache->getRoot();
+ auto attrPath = attr->getAttrPathStr();

- Suggestions suggestions;
+ if (!attr->isDerivation())
+ throw Error("flake output attribute '%s' is not a derivation", attrPath);

- for (auto & attrPath : getActualAttrPaths()) {
- debug("trying flake output attribute '%s'", attrPath);
+ auto drvPath = attr->forceDerivation();

- auto attrOrSuggestions = root->findAlongAttrPath(
- parseAttrPath(*state, attrPath),
- true
- );
+ auto drvInfo = DerivationInfo {
+ std::move(drvPath),
+ attr->getAttr(state->sOutputName)->getString()
+ };

- if (!attrOrSuggestions) {
- suggestions += attrOrSuggestions.getSuggestions();
- continue;
- }
-
- auto attr = *attrOrSuggestions;
-
- if (!attr->isDerivation())
- throw Error("flake output attribute '%s' is not a derivation", attrPath);
-
- auto drvPath = attr->forceDerivation();
-
- auto drvInfo = DerivationInfo {
- std::move(drvPath),
- attr->getAttr(state->sOutputName)->getString()
- };
-
- return {attrPath, lockedFlake->flake.lockedRef, std::move(drvInfo)};
- }
-
- throw Error(suggestions, "flake '%s' does not provide attribute %s",
- flakeRef, showAttrPaths(getActualAttrPaths()));
+ return {attrPath, getLockedFlake()->flake.lockedRef, std::move(drvInfo)};
  }

  std::vector<InstallableValue::DerivationInfo> InstallableFlake::toDerivations()
@@ -614,33 +615,10 @@ std::vector<InstallableValue::DerivationInfo> InstallableFlake::toDerivations()

  std::pair<Value *, Pos> InstallableFlake::toValue(EvalState & state)
  {
- auto lockedFlake = getLockedFlake();
-
- auto vOutputs = getFlakeOutputs(state, *lockedFlake);
-
- auto emptyArgs = state.allocBindings(0);
-
- Suggestions suggestions;
-
- for (auto & attrPath : getActualAttrPaths()) {
- try {
- auto [v, pos] = findAlongAttrPath(state, attrPath, *emptyArgs, *vOutputs);
- state.forceValue(*v, pos);
- return {v, pos};
- } catch (AttrPathNotFound & e) {
- suggestions += e.info().suggestions;
- }
- }
-
- throw Error(
- suggestions,
- "flake '%s' does not provide attribute %s",
- flakeRef,
- showAttrPaths(getActualAttrPaths())
- );
+ return {&getCursor(state)->forceValue(), noPos};
  }

- std::vector<std::pair<std::shared_ptr<eval_cache::AttrCursor>, std::string>>
+ std::vector<ref<eval_cache::AttrCursor>>
  InstallableFlake::getCursors(EvalState & state)
  {
  auto evalCache = openEvalCache(state,
@@ -648,21 +626,55 @@ InstallableFlake::getCursors(EvalState & state)

  auto root = evalCache->getRoot();

- std::vector<std::pair<std::shared_ptr<eval_cache::AttrCursor>, std::string>> res;
+ std::vector<ref<eval_cache::AttrCursor>> res;

  for (auto & attrPath : getActualAttrPaths()) {
  auto attr = root->findAlongAttrPath(parseAttrPath(state, attrPath));
- if (attr) res.push_back({*attr, attrPath});
+ if (attr) res.push_back(ref(*attr));
  }

  return res;
  }

+ ref<eval_cache::AttrCursor> InstallableFlake::getCursor(EvalState & state)
+ {
+ auto lockedFlake = getLockedFlake();
+
+ auto cache = openEvalCache(state, lockedFlake);
+ auto root = cache->getRoot();
+
+ Suggestions suggestions;
+
+ auto attrPaths = getActualAttrPaths();
+
+ for (auto & attrPath : attrPaths) {
+ debug("trying flake output attribute '%s'", attrPath);
+
+ auto attrOrSuggestions = root->findAlongAttrPath(
+ parseAttrPath(state, attrPath),
+ true
+ );
+
+ if (!attrOrSuggestions) {
+ suggestions += attrOrSuggestions.getSuggestions();
+ continue;
+ }
+
+ return *attrOrSuggestions;
+ }
+
+ throw Error(
+ suggestions,
+ "flake '%s' does not provide attribute %s",
+ flakeRef,
+ showAttrPaths(attrPaths));
+ }
+
  std::shared_ptr<flake::LockedFlake> InstallableFlake::getLockedFlake() const
  {
- flake::LockFlags lockFlagsApplyConfig = lockFlags;
- lockFlagsApplyConfig.applyNixConfig = true;
  if (!_lockedFlake) {
+ flake::LockFlags lockFlagsApplyConfig = lockFlags;
+ lockFlagsApplyConfig.applyNixConfig = true;
  _lockedFlake = std::make_shared<flake::LockedFlake>(lockFlake(*state, flakeRef, lockFlagsApplyConfig));
  }
  return _lockedFlake;
@@ -687,6 +699,10 @@ std::vector<std::shared_ptr<Installable>> SourceExprCommand::parseInstallables(
  {
  std::vector<std::shared_ptr<Installable>> result;

+ if (readOnlyMode) {
+ settings.readOnlyMode = true;
+ }
+
  if (file || expr) {
  if (file && expr)
  throw UsageError("'--file' and '--expr' are exclusive");

@@ -756,55 +772,20 @@ std::shared_ptr<Installable> SourceExprCommand::parseInstallable(
  return installables.front();
  }

- BuiltPaths getBuiltPaths(ref<Store> evalStore, ref<Store> store, const DerivedPaths & hopefullyBuiltPaths)
+ BuiltPaths Installable::build(
+ ref<Store> evalStore,
+ ref<Store> store,
+ Realise mode,
+ const std::vector<std::shared_ptr<Installable>> & installables,
+ BuildMode bMode)
  {
  BuiltPaths res;
- for (const auto & b : hopefullyBuiltPaths)
- std::visit(
- overloaded{
- [&](const DerivedPath::Opaque & bo) {
- res.push_back(BuiltPath::Opaque{bo.path});
- },
- [&](const DerivedPath::Built & bfd) {
- OutputPathMap outputs;
- auto drv = evalStore->readDerivation(bfd.drvPath);
- auto outputHashes = staticOutputHashes(*evalStore, drv); // FIXME: expensive
- auto drvOutputs = drv.outputsAndOptPaths(*store);
- for (auto & output : bfd.outputs) {
- if (!outputHashes.count(output))
- throw Error(
- "the derivation '%s' doesn't have an output named '%s'",
- store->printStorePath(bfd.drvPath), output);
- if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations)) {
- auto outputId =
- DrvOutput{outputHashes.at(output), output};
- auto realisation =
- store->queryRealisation(outputId);
- if (!realisation)
- throw Error(
- "cannot operate on an output of unbuilt "
- "content-addressed derivation '%s'",
- outputId.to_string());
- outputs.insert_or_assign(
- output, realisation->outPath);
- } else {
- // If ca-derivations isn't enabled, assume that
- // the output path is statically known.
- assert(drvOutputs.count(output));
- assert(drvOutputs.at(output).second);
- outputs.insert_or_assign(
- output, *drvOutputs.at(output).second);
- }
- }
- res.push_back(BuiltPath::Built{bfd.drvPath, outputs});
- },
- },
- b.raw());
-
+ for (auto & [_, builtPath] : build2(evalStore, store, mode, installables, bMode))
+ res.push_back(builtPath);
  return res;
  }

- BuiltPaths Installable::build(
+ std::vector<std::pair<std::shared_ptr<Installable>, BuiltPath>> Installable::build2(
  ref<Store> evalStore,
  ref<Store> store,
  Realise mode,
@@ -815,39 +796,93 @@ BuiltPaths Installable::build(
  settings.readOnlyMode = true;

  std::vector<DerivedPath> pathsToBuild;
+ std::map<DerivedPath, std::vector<std::shared_ptr<Installable>>> backmap;

  for (auto & i : installables) {
- auto b = i->toDerivedPaths();
- pathsToBuild.insert(pathsToBuild.end(), b.begin(), b.end());
+ for (auto b : i->toDerivedPaths()) {
+ pathsToBuild.push_back(b);
+ backmap[b].push_back(i);
+ }
  }

+ std::vector<std::pair<std::shared_ptr<Installable>, BuiltPath>> res;
+
  switch (mode) {

  case Realise::Nothing:
  case Realise::Derivation:
  printMissing(store, pathsToBuild, lvlError);
- return getBuiltPaths(evalStore, store, pathsToBuild);
+
+ for (auto & path : pathsToBuild) {
+ for (auto & installable : backmap[path]) {
+ std::visit(overloaded {
+ [&](const DerivedPath::Built & bfd) {
+ OutputPathMap outputs;
+ auto drv = evalStore->readDerivation(bfd.drvPath);
+ auto outputHashes = staticOutputHashes(*evalStore, drv); // FIXME: expensive
+ auto drvOutputs = drv.outputsAndOptPaths(*store);
+ for (auto & output : bfd.outputs) {
+ if (!outputHashes.count(output))
+ throw Error(
+ "the derivation '%s' doesn't have an output named '%s'",
+ store->printStorePath(bfd.drvPath), output);
+ if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations)) {
+ DrvOutput outputId { outputHashes.at(output), output };
+ auto realisation = store->queryRealisation(outputId);
+ if (!realisation)
+ throw Error(
+ "cannot operate on an output of unbuilt "
+ "content-addressed derivation '%s'",
+ outputId.to_string());
+ outputs.insert_or_assign(output, realisation->outPath);
+ } else {
+ // If ca-derivations isn't enabled, assume that
+ // the output path is statically known.
+ assert(drvOutputs.count(output));
+ assert(drvOutputs.at(output).second);
+ outputs.insert_or_assign(
+ output, *drvOutputs.at(output).second);
+ }
+ }
+ res.push_back({installable, BuiltPath::Built { bfd.drvPath, outputs }});
+ },
+ [&](const DerivedPath::Opaque & bo) {
+ res.push_back({installable, BuiltPath::Opaque { bo.path }});
+ },
+ }, path.raw());
+ }
+ }
+
+ break;

  case Realise::Outputs: {
- BuiltPaths res;
  for (auto & buildResult : store->buildPathsWithResults(pathsToBuild, bMode, evalStore)) {
  if (!buildResult.success())
  buildResult.rethrow();
- std::visit(overloaded {
- [&](const DerivedPath::Built & bfd) {
- std::map<std::string, StorePath> outputs;
- for (auto & path : buildResult.builtOutputs)
- outputs.emplace(path.first.outputName, path.second.outPath);
- res.push_back(BuiltPath::Built { bfd.drvPath, outputs });
- },
- [&](const DerivedPath::Opaque & bo) {
- res.push_back(BuiltPath::Opaque { bo.path });
- },
- }, buildResult.path.raw());
+
+ for (auto & installable : backmap[buildResult.path]) {
+ std::visit(overloaded {
+ [&](const DerivedPath::Built & bfd) {
+ std::map<std::string, StorePath> outputs;
+ for (auto & path : buildResult.builtOutputs)
+ outputs.emplace(path.first.outputName, path.second.outPath);
+ res.push_back({installable, BuiltPath::Built { bfd.drvPath, outputs }});
+ },
+ [&](const DerivedPath::Opaque & bo) {
+ res.push_back({installable, BuiltPath::Opaque { bo.path }});
+ },
+ }, buildResult.path.raw());
+ }
  }
- return res;
+
+ break;
  }

  default:
  assert(false);
  }

+ return res;
  }

  BuiltPaths Installable::toBuiltPaths(
@@ -935,7 +970,7 @@ InstallablesCommand::InstallablesCommand()
  void InstallablesCommand::prepare()
  {
  if (_installables.empty() && useDefaultInstallables())
- // FIXME: commands like "nix install" should not have a
+ // FIXME: commands like "nix profile install" should not have a
  // default, probably.
  _installables.push_back(".");
  installables = parseInstallables(getStore(), _installables);

@@ -945,13 +980,14 @@ std::optional<FlakeRef> InstallablesCommand::getFlakeRefForCompletion()
  {
  if (_installables.empty()) {
  if (useDefaultInstallables())
- return parseFlakeRef(".", absPath("."));
+ return parseFlakeRefWithFragment(".", absPath(".")).first;
  return {};
  }
- return parseFlakeRef(_installables.front(), absPath("."));
+ return parseFlakeRefWithFragment(_installables.front(), absPath(".")).first;
  }

- InstallableCommand::InstallableCommand()
+ InstallableCommand::InstallableCommand(bool supportReadOnlyMode)
+ : SourceExprCommand(supportReadOnlyMode)
  {
  expectArgs({
  .label = "installable",
@@ -80,10 +80,10 @@ struct Installable
  return {};
  }

- virtual std::vector<std::pair<std::shared_ptr<eval_cache::AttrCursor>, std::string>>
+ virtual std::vector<ref<eval_cache::AttrCursor>>
  getCursors(EvalState & state);

- std::pair<std::shared_ptr<eval_cache::AttrCursor>, std::string>
+ virtual ref<eval_cache::AttrCursor>
  getCursor(EvalState & state);

  virtual FlakeRef nixpkgsFlakeRef() const

@@ -98,6 +98,13 @@ struct Installable
  const std::vector<std::shared_ptr<Installable>> & installables,
  BuildMode bMode = bmNormal);

+ static std::vector<std::pair<std::shared_ptr<Installable>, BuiltPath>> build2(
+ ref<Store> evalStore,
+ ref<Store> store,
+ Realise mode,
+ const std::vector<std::shared_ptr<Installable>> & installables,
+ BuildMode bMode = bmNormal);
+
  static std::set<StorePath> toStorePaths(
  ref<Store> evalStore,
  ref<Store> store,

@@ -173,9 +180,15 @@ struct InstallableFlake : InstallableValue

  std::pair<Value *, Pos> toValue(EvalState & state) override;

- std::vector<std::pair<std::shared_ptr<eval_cache::AttrCursor>, std::string>>
+ /* Get a cursor to every attrpath in getActualAttrPaths() that
+ exists. */
+ std::vector<ref<eval_cache::AttrCursor>>
  getCursors(EvalState & state) override;

+ /* Get a cursor to the first attrpath in getActualAttrPaths() that
+ exists, or throw an exception with suggestions if none exists. */
+ ref<eval_cache::AttrCursor> getCursor(EvalState & state) override;
+
  std::shared_ptr<flake::LockedFlake> getLockedFlake() const;

  FlakeRef nixpkgsFlakeRef() const override;

@@ -185,9 +198,4 @@ ref<eval_cache::EvalCache> openEvalCache(
  EvalState & state,
  std::shared_ptr<flake::LockedFlake> lockedFlake);

- BuiltPaths getBuiltPaths(
- ref<Store> evalStore,
- ref<Store> store,
- const DerivedPaths & hopefullyBuiltPaths);
-
  }
@@ -306,9 +306,9 @@ Value * EvalCache::getRootValue()
  return *value;
  }

- std::shared_ptr<AttrCursor> EvalCache::getRoot()
+ ref<AttrCursor> EvalCache::getRoot()
  {
- return std::make_shared<AttrCursor>(ref(shared_from_this()), std::nullopt);
+ return make_ref<AttrCursor>(ref(shared_from_this()), std::nullopt);
  }

  AttrCursor::AttrCursor(
@@ -33,7 +33,7 @@ public:
  EvalState & state,
  RootLoader rootLoader);

- std::shared_ptr<AttrCursor> getRoot();
+ ref<AttrCursor> getRoot();
  };

  enum AttrType {

@@ -104,6 +104,8 @@ public:

  ref<AttrCursor> getAttr(std::string_view name);

+ /* Get an attribute along a chain of attrsets. Note that this does
+ not auto-call functors or functions. */
  OrSuggestions<ref<AttrCursor>> findAlongAttrPath(const std::vector<Symbol> & attrPath, bool force = false);

  std::string getString();
@@ -436,6 +436,7 @@ EvalState::EvalState(
  , sBuilder(symbols.create("builder"))
  , sArgs(symbols.create("args"))
  , sContentAddressed(symbols.create("__contentAddressed"))
+ , sImpure(symbols.create("__impure"))
  , sOutputHash(symbols.create("outputHash"))
  , sOutputHashAlgo(symbols.create("outputHashAlgo"))
  , sOutputHashMode(symbols.create("outputHashMode"))
@@ -78,7 +78,7 @@ public:
  sSystem, sOverrides, sOutputs, sOutputName, sIgnoreNulls,
  sFile, sLine, sColumn, sFunctor, sToString,
  sRight, sWrong, sStructuredAttrs, sBuilder, sArgs,
- sContentAddressed,
+ sContentAddressed, sImpure,
  sOutputHash, sOutputHashAlgo, sOutputHashMode,
  sRecurseForDerivations,
  sDescription, sSelf, sEpsilon, sStartSet, sOperator, sKey, sPath,
@@ -989,9 +989,10 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
  PathSet context;

  bool contentAddressed = false;
+ bool isImpure = false;
  std::optional<std::string> outputHash;
  std::string outputHashAlgo;
- ContentAddressMethod ingestionMethod = FileIngestionMethod::Flat;
+ std::optional<ContentAddressMethod> ingestionMethod;

  StringSet outputs;
  outputs.insert("out");

@@ -1052,6 +1053,12 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
  settings.requireExperimentalFeature(Xp::CaDerivations);
  }

+ else if (i->name == state.sImpure) {
+ isImpure = state.forceBool(*i->value, pos);
+ if (isImpure)
+ settings.requireExperimentalFeature(Xp::ImpureDerivations);
+ }
+
  /* The `args' attribute is special: it supplies the
  command-line arguments to the builder. */
  else if (i->name == state.sArgs) {

@@ -1187,12 +1194,12 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
  .errPos = posDrvName
  });

- std::optional<HashType> ht = parseHashTypeOpt(outputHashAlgo);
- Hash h = newHashAllowEmpty(*outputHash, ht);
+ auto h = newHashAllowEmpty(*outputHash, parseHashTypeOpt(outputHashAlgo));

+ auto method = ingestionMethod.value_or(FileIngestionMethod::Flat);
  // FIXME non-trivial fixed refs set
  auto ca = contentAddressFromMethodHashAndRefs(
- ingestionMethod,
+ method,
  std::move(h),
  {});

@@ -1202,15 +1209,30 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
  drv.outputs.insert_or_assign("out", dof);
  }

- else if (contentAddressed) {
- HashType ht = parseHashType(outputHashAlgo);
+ else if (contentAddressed || isImpure) {
+ if (contentAddressed && isImpure)
+ throw EvalError({
+ .msg = hintfmt("derivation cannot be both content-addressed and impure"),
+ .errPos = posDrvName
+ });
+
+ auto ht = parseHashTypeOpt(outputHashAlgo).value_or(htSHA256);
+ auto method = ingestionMethod.value_or(FileIngestionMethod::Recursive);
+
  for (auto & i : outputs) {
  drv.env[i] = hashPlaceholder(i);
- drv.outputs.insert_or_assign(i,
- DerivationOutput::CAFloating {
- .method = ingestionMethod,
- .hashType = ht,
- });
+ if (isImpure)
+ drv.outputs.insert_or_assign(i,
+ DerivationOutput::Impure {
+ .method = method,
+ .hashType = ht,
+ });
+ else
+ drv.outputs.insert_or_assign(i,
+ DerivationOutput::CAFloating {
+ .method = method,
+ .hashType = ht,
+ });
  }
  }

@@ -1227,34 +1249,26 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
  DerivationOutput::Deferred { });
  }

- // Regular, non-CA derivation should always return a single hash and not
- // hash per output.
- auto hashModulo = hashDerivationModulo(*state.store, drv, true);
- std::visit(overloaded {
- [&](const DrvHash & drvHash) {
- auto & h = drvHash.hash;
- switch (drvHash.kind) {
- case DrvHash::Kind::Deferred:
- /* Outputs already deferred, nothing to do */
- break;
- case DrvHash::Kind::Regular:
- for (auto & [outputName, output] : drv.outputs) {
- auto outPath = state.store->makeOutputPath(outputName, h, drvName);
- drv.env[outputName] = state.store->printStorePath(outPath);
- output = DerivationOutput::InputAddressed {
- .path = std::move(outPath),
- };
- }
- break;
- }
- },
- [&](const CaOutputHashes &) {
- // Shouldn't happen as the toplevel derivation is not CA.
- assert(false);
- },
- },
- hashModulo.raw());
+ auto hashModulo = hashDerivationModulo(*state.store, Derivation(drv), true);
+ switch (hashModulo.kind) {
+ case DrvHash::Kind::Regular:
+ for (auto & i : outputs) {
+ auto h = hashModulo.hashes.at(i);
+ auto outPath = state.store->makeOutputPath(i, h, drvName);
+ drv.env[i] = state.store->printStorePath(outPath);
+ drv.outputs.insert_or_assign(
+ i,
+ DerivationOutputInputAddressed {
+ .path = std::move(outPath),
+ });
+ }
+ break;
+ ;
+ case DrvHash::Kind::Deferred:
+ for (auto & i : outputs) {
+ drv.outputs.insert_or_assign(i, DerivationOutputDeferred {});
+ }
+ }
  }

  /* Write the resulting term into the Nix store directory. */
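The new `__impure` handling above also adds a guard against combining the two experimental output kinds. A short illustrative Nix sketch of the rejected combination (not part of the commit; `stdenv` is assumed to be in scope as in the release-notes example):

```nix
stdenv.mkDerivation {
  name = "conflicting";
  __impure = true;
  __contentAddressed = true;
  # Per the check added in prim_derivationStrict, evaluation now fails with:
  #   error: derivation cannot be both content-addressed and impure
  buildCommand = "date > $out";
}
```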
@@ -61,6 +61,12 @@ static void prim_fetchClosure(EvalState & state, const Pos & pos, Value * * args
  .errPos = pos
  });

+ if (!parsedURL.query.empty())
+ throw Error({
+ .msg = hintfmt("'fetchClosure' does not support URL query parameters (in '%s')", *fromStoreUrl),
+ .errPos = pos
+ });
+
  auto fromStore = openStore(parsedURL.to_string());

  if (toCA) {

@@ -87,7 +93,8 @@ static void prim_fetchClosure(EvalState & state, const Pos & pos, Value * * args
  });
  }
  } else {
- copyClosure(*fromStore, *state.store, RealisedPath::Set { *fromPath });
+ if (!state.store->isValidPath(*fromPath))
+ copyClosure(*fromStore, *state.store, RealisedPath::Set { *fromPath });
  toPath = fromPath;
  }

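To illustrate the new query-parameter check above, here is a hypothetical call that would now be rejected. This is a sketch only: the cache URL and store path are placeholders, not real endpoints or hashes.

```nix
builtins.fetchClosure {
  # URL query parameters in `fromStore` are now rejected with:
  #   error: 'fetchClosure' does not support URL query parameters (in '...')
  fromStore = "https://cache.example.org?priority=10";
  fromPath = /nix/store/00000000000000000000000000000000-example;
}
```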
@@ -84,7 +84,8 @@ void printValueAsJSON(EvalState & state, bool strict,
  .msg = hintfmt("cannot convert %1% to JSON", showType(v)),
  .errPos = v.determinePos(pos)
  });
- throw e.addTrace(pos, hintfmt("message for the trace"));
+ e.addTrace(pos, hintfmt("message for the trace"));
+ throw e;
  }
  }

@@ -244,9 +244,18 @@ std::optional<std::string> Input::getRef() const

  std::optional<Hash> Input::getRev() const
  {
- if (auto s = maybeGetStrAttr(attrs, "rev"))
- return Hash::parseAny(*s, htSHA1);
- return {};
+ std::optional<Hash> hash = {};
+
+ if (auto s = maybeGetStrAttr(attrs, "rev")) {
+ try {
+ hash = Hash::parseAnyPrefixed(*s);
+ } catch (BadHash &e) {
+ // Default to sha1 for backwards compatibility with existing flakes
+ hash = Hash::parseAny(*s, htSHA1);
+ }
+ }
+
+ return hash;
  }

  std::optional<uint64_t> Input::getRevCount() const
@@ -28,9 +28,7 @@ static std::string readHead(const Path & path)

  static bool isNotDotGitDirectory(const Path & path)
  {
- static const std::regex gitDirRegex("^(?:.*/)?\\.git$");
-
- return not std::regex_match(path, gitDirRegex);
+ return baseNameOf(path) != ".git";
  }

  struct GitInputScheme : InputScheme

@@ -189,8 +187,16 @@ struct GitInputScheme : InputScheme
  if (submodules) cacheType += "-submodules";
  if (allRefs) cacheType += "-all-refs";

+ auto checkHashType = [&](const std::optional<Hash> & hash)
+ {
+ if (hash.has_value() && !(hash->type == htSHA1 || hash->type == htSHA256))
+ throw Error("Hash '%s' is not supported by Git. Supported types are sha1 and sha256.", hash->to_string(Base16, true));
+ };
+
  auto getLockedAttrs = [&]()
  {
+ checkHashType(input.getRev());
+
  return Attrs({
  {"type", cacheType},
  {"name", name},

@@ -285,9 +291,11 @@ struct GitInputScheme : InputScheme
  auto files = tokenizeString<std::set<std::string>>(
  runProgram("git", true, gitOpts), "\0"s);

+ Path actualPath(absPath(actualUrl));
+
  PathFilter filter = [&](const Path & p) -> bool {
- assert(hasPrefix(p, actualUrl));
- std::string file(p, actualUrl.size() + 1);
+ assert(hasPrefix(p, actualPath));
+ std::string file(p, actualPath.size() + 1);

  auto st = lstat(p);

@@ -300,13 +308,13 @@ struct GitInputScheme : InputScheme
  return files.count(file);
  };

- auto storePath = store->addToStore(input.getName(), actualUrl, FileIngestionMethod::Recursive, htSHA256, filter);
+ auto storePath = store->addToStore(input.getName(), actualPath, FileIngestionMethod::Recursive, htSHA256, filter);

  // FIXME: maybe we should use the timestamp of the last
  // modified dirty file?
  input.attrs.insert_or_assign(
  "lastModified",
- hasHead ? std::stoull(runProgram("git", true, { "-C", actualUrl, "log", "-1", "--format=%ct", "--no-show-signature", "HEAD" })) : 0);
+ hasHead ? std::stoull(runProgram("git", true, { "-C", actualPath, "log", "-1", "--format=%ct", "--no-show-signature", "HEAD" })) : 0);

  return {std::move(storePath), input};
  }
@@ -178,9 +178,11 @@ struct MercurialInputScheme : InputScheme
  auto files = tokenizeString<std::set<std::string>>(
  runHg({ "status", "-R", actualUrl, "--clean", "--modified", "--added", "--no-status", "--print0" }), "\0"s);

+ Path actualPath(absPath(actualUrl));
+
  PathFilter filter = [&](const Path & p) -> bool {
- assert(hasPrefix(p, actualUrl));
- std::string file(p, actualUrl.size() + 1);
+ assert(hasPrefix(p, actualPath));
+ std::string file(p, actualPath.size() + 1);

  auto st = lstat(p);

@@ -193,7 +195,7 @@ struct MercurialInputScheme : InputScheme
  return files.count(file);
  };

- auto storePath = store->addToStore(input.getName(), actualUrl, FileIngestionMethod::Recursive, htSHA256, filter);
+ auto storePath = store->addToStore(input.getName(), actualPath, FileIngestionMethod::Recursive, htSHA256, filter);

  return {std::move(storePath), input};
  }

@@ -201,8 +203,17 @@ struct MercurialInputScheme : InputScheme

  if (!input.getRef()) input.attrs.insert_or_assign("ref", "default");

+ auto checkHashType = [&](const std::optional<Hash> & hash)
+ {
+ if (hash.has_value() && hash->type != htSHA1)
+ throw Error("Hash '%s' is not supported by Mercurial. Only sha1 is supported.", hash->to_string(Base16, true));
+ };
+
+
  auto getLockedAttrs = [&]()
  {
+ checkHashType(input.getRev());
+
  return Attrs({
  {"type", "hg"},
  {"name", name},
@@ -1,6 +1,7 @@
  #pragma once

+ #include "realisation.hh"
  #include "derived-path.hh"

  #include <string>
  #include <chrono>

@@ -30,6 +31,8 @@ struct BuildResult
  ResolvesToAlreadyValid,
  NoSubstituters,
  } status = MiscFailure;
+
+ // FIXME: include entire ErrorInfo object.
  std::string errorMsg;

  std::string toString() const {
@@ -204,9 +204,34 @@ void DerivationGoal::haveDerivation()
  {
  trace("have derivation");

+ parsedDrv = std::make_unique<ParsedDerivation>(drvPath, *drv);
+
  if (!drv->type().hasKnownOutputPaths())
  settings.requireExperimentalFeature(Xp::CaDerivations);

+ if (!drv->type().isPure()) {
+ settings.requireExperimentalFeature(Xp::ImpureDerivations);
+
+ for (auto & [outputName, output] : drv->outputs) {
+ auto randomPath = StorePath::random(outputPathName(drv->name, outputName));
+ assert(!worker.store.isValidPath(randomPath));
+ initialOutputs.insert({
+ outputName,
+ InitialOutput {
+ .wanted = true,
+ .outputHash = impureOutputHash,
+ .known = InitialOutputStatus {
+ .path = randomPath,
+ .status = PathStatus::Absent
+ }
+ }
+ });
+ }
+
+ gaveUpOnSubstitution();
+ return;
+ }
+
  for (auto & i : drv->outputsAndOptPaths(worker.store))
  if (i.second.second)
  worker.store.addTempRoot(*i.second.second);
@@ -230,9 +255,6 @@ void DerivationGoal::haveDerivation()
  return;
  }

- parsedDrv = std::make_unique<ParsedDerivation>(drvPath, *drv);
-
-
  /* We are first going to try to create the invalid output paths
  through substitutes. If that doesn't work, we'll build
  them. */

@@ -266,6 +288,8 @@ void DerivationGoal::outputsSubstitutionTried()
  {
  trace("all outputs substituted (maybe)");

+ assert(drv->type().isPure());
+
  if (nrFailed > 0 && nrFailed > nrNoSubstituters + nrIncompleteClosure && !settings.tryFallback) {
  done(BuildResult::TransientFailure, {},
  Error("some substitutes for the outputs of derivation '%s' failed (usually happens due to networking issues); try '--fallback' to build derivation from source ",
@@ -315,9 +339,21 @@ void DerivationGoal::outputsSubstitutionTried()
  void DerivationGoal::gaveUpOnSubstitution()
  {
  /* The inputs must be built before we can build this goal. */
+ inputDrvOutputs.clear();
  if (useDerivation)
- for (auto & i : dynamic_cast<Derivation *>(drv.get())->inputDrvs)
+ for (auto & i : dynamic_cast<Derivation *>(drv.get())->inputDrvs) {
+ /* Ensure that pure, non-fixed-output derivations don't
+ depend on impure derivations. */
+ if (drv->type().isPure() && !drv->type().isFixed()) {
+ auto inputDrv = worker.evalStore.readDerivation(i.first);
+ if (!inputDrv.type().isPure())
+ throw Error("pure derivation '%s' depends on impure derivation '%s'",
+ worker.store.printStorePath(drvPath),
+ worker.store.printStorePath(i.first));
+ }
+
  addWaitee(worker.makeDerivationGoal(i.first, i.second, buildMode == bmRepair ? bmRepair : bmNormal));
+ }

  /* Copy the input sources from the eval store to the build
  store. */

@@ -345,6 +381,8 @@ void DerivationGoal::gaveUpOnSubstitution()

  void DerivationGoal::repairClosure()
  {
+ assert(drv->type().isPure());
+
  /* If we're repairing, we now know that our own outputs are valid.
  Now check whether the other paths in the outputs closure are
  good. If not, then start derivation goals for the derivations
@@ -452,22 +490,24 @@ void DerivationGoal::inputsRealised()
  drvs. */
  : true);
  },
+ [&](const DerivationType::Impure &) {
+ return true;
+ }
  }, drvType.raw());

- if (resolveDrv)
- {
+ if (resolveDrv && !fullDrv.inputDrvs.empty()) {
  settings.requireExperimentalFeature(Xp::CaDerivations);

  /* We are be able to resolve this derivation based on the
- now-known results of dependencies. If so, we become a stub goal
- aliasing that resolved derivation goal */
- std::optional attempt = fullDrv.tryResolve(worker.store);
+ now-known results of dependencies. If so, we become a
+ stub goal aliasing that resolved derivation goal. */
+ std::optional attempt = fullDrv.tryResolve(worker.store, inputDrvOutputs);
  assert(attempt);
  Derivation drvResolved { *std::move(attempt) };

  auto pathResolved = writeDerivation(worker.store, drvResolved);

- auto msg = fmt("Resolved derivation: '%s' -> '%s'",
+ auto msg = fmt("resolved derivation: '%s' -> '%s'",
  worker.store.printStorePath(drvPath),
  worker.store.printStorePath(pathResolved));
  act = std::make_unique<Activity>(*logger, lvlInfo, actBuildWaiting, msg,

@@ -488,21 +528,13 @@ void DerivationGoal::inputsRealised()
  /* Add the relevant output closures of the input derivation
  `i' as input paths. Only add the closures of output paths
  that are specified as inputs. */
- assert(worker.evalStore.isValidPath(drvPath));
- auto outputs = worker.evalStore.queryPartialDerivationOutputMap(depDrvPath);
- for (auto & j : wantedDepOutputs) {
- if (outputs.count(j) > 0) {
- auto optRealizedInput = outputs.at(j);
- if (!optRealizedInput)
- throw Error(
- "derivation '%s' requires output '%s' from input derivation '%s', which is supposedly realized already, yet we still don't know what path corresponds to that output",
- worker.store.printStorePath(drvPath), j, worker.store.printStorePath(depDrvPath));
- worker.store.computeFSClosure(*optRealizedInput, inputPaths);
- } else
+ for (auto & j : wantedDepOutputs)
+ if (auto outPath = get(inputDrvOutputs, { depDrvPath, j }))
+ worker.store.computeFSClosure(*outPath, inputPaths);
+ else
  throw Error(
  "derivation '%s' requires non-existent output '%s' from input derivation '%s'",
  worker.store.printStorePath(drvPath), j, worker.store.printStorePath(depDrvPath));
- }
  }
  }

@@ -923,7 +955,7 @@ void DerivationGoal::buildDone()
  st =
  dynamic_cast<NotDeterministic*>(&e) ? BuildResult::NotDeterministic :
  statusOk(status) ? BuildResult::OutputRejected :
- derivationType.isImpure() || diskFull ? BuildResult::TransientFailure :
+ !derivationType.isSandboxed() || diskFull ? BuildResult::TransientFailure :
  BuildResult::PermanentFailure;
  }

@@ -934,60 +966,53 @@ void DerivationGoal::buildDone()

  void DerivationGoal::resolvedFinished()
  {
  trace("resolved derivation finished");

  assert(resolvedDrvGoal);
  auto resolvedDrv = *resolvedDrvGoal->drv;

- auto resolvedHashes = staticOutputHashes(worker.store, resolvedDrv);
-
- StorePathSet outputPaths;
-
- // `wantedOutputs` might be empty, which means “all the outputs”
- auto realWantedOutputs = wantedOutputs;
- if (realWantedOutputs.empty())
- realWantedOutputs = resolvedDrv.outputNames();
+ auto & resolvedResult = resolvedDrvGoal->buildResult;

  DrvOutputs builtOutputs;

- for (auto & wantedOutput : realWantedOutputs) {
- assert(initialOutputs.count(wantedOutput) != 0);
- assert(resolvedHashes.count(wantedOutput) != 0);
- auto realisation = worker.store.queryRealisation(
- DrvOutput{resolvedHashes.at(wantedOutput), wantedOutput}
- );
- // We've just built it, but maybe the build failed, in which case the
- // realisation won't be there
- if (realisation) {
- auto newRealisation = *realisation;
- newRealisation.id = DrvOutput{initialOutputs.at(wantedOutput).outputHash, wantedOutput};
- newRealisation.signatures.clear();
- newRealisation.dependentRealisations = drvOutputReferences(worker.store, *drv, realisation->outPath);
- signRealisation(newRealisation);
- worker.store.registerDrvOutput(newRealisation);
- outputPaths.insert(realisation->outPath);
- builtOutputs.emplace(realisation->id, *realisation);
- } else {
- // If we don't have a realisation, then it must mean that something
- // failed when building the resolved drv
- assert(!buildResult.success());
+ if (resolvedResult.success()) {
+ auto resolvedHashes = staticOutputHashes(worker.store, resolvedDrv);
+
+ StorePathSet outputPaths;
+
+ // `wantedOutputs` might be empty, which means “all the outputs”
+ auto realWantedOutputs = wantedOutputs;
+ if (realWantedOutputs.empty())
+ realWantedOutputs = resolvedDrv.outputNames();
+
+ for (auto & wantedOutput : realWantedOutputs) {
+ assert(initialOutputs.count(wantedOutput) != 0);
+ assert(resolvedHashes.count(wantedOutput) != 0);
+ auto realisation = resolvedResult.builtOutputs.at(
+ DrvOutput { resolvedHashes.at(wantedOutput), wantedOutput });
+ if (drv->type().isPure()) {
+ auto newRealisation = realisation;
+ newRealisation.id = DrvOutput { initialOutputs.at(wantedOutput).outputHash, wantedOutput };
+ newRealisation.signatures.clear();
+ if (!drv->type().isFixed())
+ newRealisation.dependentRealisations = drvOutputReferences(worker.store, *drv, realisation.outPath);
+ signRealisation(newRealisation);
+ worker.store.registerDrvOutput(newRealisation);
+ }
+ outputPaths.insert(realisation.outPath);
+ builtOutputs.emplace(realisation.id, realisation);
+ }
+
+ runPostBuildHook(
+ worker.store,
+ *logger,
+ drvPath,
+ outputPaths
+ );
  }

- runPostBuildHook(
- worker.store,
- *logger,
- drvPath,
- outputPaths
- );
-
- auto status = [&]() {
- auto & resolvedResult = resolvedDrvGoal->buildResult;
- switch (resolvedResult.status) {
- case BuildResult::AlreadyValid:
- return BuildResult::ResolvesToAlreadyValid;
- default:
- return resolvedResult.status;
- }
- }();
+ auto status = resolvedResult.status;
+ if (status == BuildResult::AlreadyValid)
+ status = BuildResult::ResolvesToAlreadyValid;

  done(status, std::move(builtOutputs));
  }
@@ -1236,6 +1261,7 @@ void DerivationGoal::flushLine()

  std::map<std::string, std::optional<StorePath>> DerivationGoal::queryPartialDerivationOutputMap()
  {
+ assert(drv->type().isPure());
  if (!useDerivation || drv->type().hasKnownOutputPaths()) {
  std::map<std::string, std::optional<StorePath>> res;
  for (auto & [name, output] : drv->outputs)

@@ -1248,6 +1274,7 @@ std::map<std::string, std::optional<StorePath>> DerivationGoal::queryPartialDeri

  OutputPathMap DerivationGoal::queryDerivationOutputMap()
  {
+ assert(drv->type().isPure());
  if (!useDerivation || drv->type().hasKnownOutputPaths()) {
  OutputPathMap res;
  for (auto & [name, output] : drv->outputsAndOptPaths(worker.store))

@@ -1261,6 +1288,8 @@ OutputPathMap DerivationGoal::queryDerivationOutputMap()

  std::pair<bool, DrvOutputs> DerivationGoal::checkPathValidity()
  {
+ if (!drv->type().isPure()) return { false, {} };
+
  bool checkHash = buildMode == bmRepair;
  auto wantedOutputsLeft = wantedOutputs;
  DrvOutputs validOutputs;

@@ -1304,6 +1333,7 @@ std::pair<bool, DrvOutputs> DerivationGoal::checkPathValidity()
  if (info.wanted && info.known && info.known->isValid())
  validOutputs.emplace(drvOutput, Realisation { drvOutput, info.known->path });
  }
+
  // If we requested all the outputs via the empty set, we are always fine.
  // If we requested specific elements, the loop above removes all the valid
  // ones, so any that are left must be invalid.
@ -1341,9 +1371,7 @@ void DerivationGoal::done(
|
|||
{
|
||||
buildResult.status = status;
|
||||
if (ex)
|
||||
// FIXME: strip: "error: "
|
||||
buildResult.errorMsg = ex->what();
|
||||
amDone(buildResult.success() ? ecSuccess : ecFailed, ex);
|
||||
buildResult.errorMsg = fmt("%s", normaltxt(ex->info().msg));
|
||||
if (buildResult.status == BuildResult::TimedOut)
|
||||
worker.timedOut = true;
|
||||
if (buildResult.status == BuildResult::PermanentFailure)
|
||||
|
@ -1370,7 +1398,21 @@ void DerivationGoal::done(
|
|||
fs.open(traceBuiltOutputsFile, std::fstream::out);
|
||||
fs << worker.store.printStorePath(drvPath) << "\t" << buildResult.toString() << std::endl;
|
||||
}
|
||||
|
||||
amDone(buildResult.success() ? ecSuccess : ecFailed, ex);
|
||||
}
|
||||
|
||||
|
||||
void DerivationGoal::waiteeDone(GoalPtr waitee, ExitCode result)
|
||||
{
|
||||
Goal::waiteeDone(waitee, result);
|
||||
|
||||
if (waitee->buildResult.success())
|
||||
if (auto bfd = std::get_if<DerivedPath::Built>(&waitee->buildResult.path))
|
||||
for (auto & [output, realisation] : waitee->buildResult.builtOutputs)
|
||||
inputDrvOutputs.insert_or_assign(
|
||||
{ bfd->drvPath, output.outputName },
|
||||
realisation.outPath);
|
||||
}
|
||||
|
||||
}
|
||||
@ -57,6 +57,11 @@ struct DerivationGoal : public Goal
|
|||
them. */
|
||||
StringSet wantedOutputs;
|
||||
|
||||
/* Mapping from input derivations + output names to actual store
|
||||
paths. This is filled in by waiteeDone() as each dependency
|
||||
finishes, before inputsRealised() is reached. */
|
||||
std::map<std::pair<StorePath, std::string>, StorePath> inputDrvOutputs;
|
||||
|
||||
/* Whether additional wanted outputs have been added. */
|
||||
bool needRestart = false;
|
||||
|
||||
|
@ -224,6 +229,8 @@ struct DerivationGoal : public Goal
|
|||
DrvOutputs builtOutputs = {},
|
||||
std::optional<Error> ex = {});
|
||||
|
||||
void waiteeDone(GoalPtr waitee, ExitCode result) override;
|
||||
|
||||
StorePathSet exportReferences(const StorePathSet & storePaths);
|
||||
};
|
||||
|
||||
@ -41,7 +41,7 @@ void DrvOutputSubstitutionGoal::tryNext()
|
|||
if (subs.size() == 0) {
|
||||
/* None left. Terminate this goal and let someone else deal
|
||||
with it. */
|
||||
debug("drv output '%s' is required, but there is no substituter that can provide it", id.to_string());
|
||||
debug("derivation output '%s' is required, but there is no substituter that can provide it", id.to_string());
|
||||
|
||||
/* Hack: don't indicate failure if there were no substituters.
|
||||
In that case the calling derivation should just do a
|
||||
@ -395,7 +395,7 @@ void LocalDerivationGoal::startBuilder()
|
|||
else if (settings.sandboxMode == smDisabled)
|
||||
useChroot = false;
|
||||
else if (settings.sandboxMode == smRelaxed)
|
||||
useChroot = !(derivationType.isImpure()) && !noChroot;
|
||||
useChroot = derivationType.isSandboxed() && !noChroot;
|
||||
}
|
||||
|
||||
auto & localStore = getLocalStore();
|
||||
|
@ -608,7 +608,7 @@ void LocalDerivationGoal::startBuilder()
|
|||
"nogroup:x:65534:\n", sandboxGid()));
|
||||
|
||||
/* Create /etc/hosts with localhost entry. */
|
||||
if (!(derivationType.isImpure()))
|
||||
if (derivationType.isSandboxed())
|
||||
writeFile(chrootRootDir + "/etc/hosts", "127.0.0.1 localhost\n::1 localhost\n");
|
||||
|
||||
/* Make the closure of the inputs available in the chroot,
|
||||
|
@ -704,6 +704,9 @@ void LocalDerivationGoal::startBuilder()
|
|||
|
||||
/* Run the builder. */
|
||||
printMsg(lvlChatty, "executing builder '%1%'", drv->builder);
|
||||
printMsg(lvlChatty, "using builder args '%1%'", concatStringsSep(" ", drv->args));
|
||||
for (auto & i : drv->env)
|
||||
printMsg(lvlVomit, "setting builder env variable '%1%'='%2%'", i.first, i.second);
|
||||
|
||||
/* Create the log file. */
|
||||
Path logFile = openLogFile();
|
||||
|
@ -796,7 +799,7 @@ void LocalDerivationGoal::startBuilder()
|
|||
us.
|
||||
*/
|
||||
|
||||
if (!(derivationType.isImpure()))
|
||||
if (derivationType.isSandboxed())
|
||||
privateNetwork = true;
|
||||
|
||||
userNamespaceSync.create();
|
||||
|
@ -1060,7 +1063,7 @@ void LocalDerivationGoal::initEnv()
|
|||
to the builder is generally impure, but the output of
|
||||
fixed-output derivations is by definition pure (since we
|
||||
already know the cryptographic hash of the output). */
|
||||
if (derivationType.isImpure()) {
|
||||
if (!derivationType.isSandboxed()) {
|
||||
for (auto & i : parsedDrv->getStringsAttr("impureEnvVars").value_or(Strings()))
|
||||
env[i] = getEnv(i).value_or("");
|
||||
}
|
||||
|
@ -1674,7 +1677,7 @@ void LocalDerivationGoal::runChild()
|
|||
/* Fixed-output derivations typically need to access the
|
||||
network, so give them access to /etc/resolv.conf and so
|
||||
on. */
|
||||
if (derivationType.isImpure()) {
|
||||
if (!derivationType.isSandboxed()) {
|
||||
// Only use nss functions to resolve hosts and
|
||||
// services. Don’t use it for anything else that may
|
||||
// be configured for this system. This limits the
|
||||
|
@ -1918,7 +1921,7 @@ void LocalDerivationGoal::runChild()
|
|||
|
||||
sandboxProfile += "(import \"sandbox-defaults.sb\")\n";
|
||||
|
||||
if (derivationType.isImpure())
|
||||
if (!derivationType.isSandboxed())
|
||||
sandboxProfile += "(import \"sandbox-network.sb\")\n";
|
||||
|
||||
/* Add the output paths we'll use at build-time to the chroot */
|
||||
|
@ -2402,6 +2405,13 @@ DrvOutputs LocalDerivationGoal::registerOutputs()
|
|||
assert(false);
|
||||
},
|
||||
|
||||
[&](const DerivationOutput::Impure & doi) {
|
||||
return newInfoFromCA(DerivationOutput::CAFloating {
|
||||
.method = doi.method,
|
||||
.hashType = doi.hashType,
|
||||
});
|
||||
},
|
||||
|
||||
}, output.raw());
|
||||
|
||||
/* FIXME: set proper permissions in restorePath() so
|
||||
|
@ -2612,7 +2622,9 @@ DrvOutputs LocalDerivationGoal::registerOutputs()
|
|||
},
|
||||
.outPath = newInfo.path
|
||||
};
|
||||
if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations)) {
|
||||
if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations)
|
||||
&& drv->type().isPure())
|
||||
{
|
||||
signRealisation(thisRealisation);
|
||||
worker.store.registerDrvOutput(thisRealisation);
|
||||
}
|
||||
@ -24,9 +24,16 @@ PathSubstitutionGoal::~PathSubstitutionGoal()
|
|||
}
|
||||
|
||||
|
||||
void PathSubstitutionGoal::done(ExitCode result, BuildResult::Status status)
|
||||
void PathSubstitutionGoal::done(
|
||||
ExitCode result,
|
||||
BuildResult::Status status,
|
||||
std::optional<std::string> errorMsg)
|
||||
{
|
||||
buildResult.status = status;
|
||||
if (errorMsg) {
|
||||
debug(*errorMsg);
|
||||
buildResult.errorMsg = *errorMsg;
|
||||
}
|
||||
amDone(result);
|
||||
}
|
||||
|
||||
|
@ -67,12 +74,14 @@ void PathSubstitutionGoal::tryNext()
|
|||
if (subs.size() == 0) {
|
||||
/* None left. Terminate this goal and let someone else deal
|
||||
with it. */
|
||||
debug("path '%s' is required, but there is no substituter that can build it", worker.store.printStorePath(storePath));
|
||||
|
||||
/* Hack: don't indicate failure if there were no substituters.
|
||||
In that case the calling derivation should just do a
|
||||
build. */
|
||||
done(substituterFailed ? ecFailed : ecNoSubstituters, BuildResult::NoSubstituters);
|
||||
done(
|
||||
substituterFailed ? ecFailed : ecNoSubstituters,
|
||||
BuildResult::NoSubstituters,
|
||||
fmt("path '%s' is required, but there is no substituter that can build it", worker.store.printStorePath(storePath)));
|
||||
|
||||
if (substituterFailed) {
|
||||
worker.failedSubstitutions++;
|
||||
|
@ -171,10 +180,10 @@ void PathSubstitutionGoal::referencesValid()
|
|||
trace("all references realised");
|
||||
|
||||
if (nrFailed > 0) {
|
||||
debug("some references of path '%s' could not be realised", worker.store.printStorePath(storePath));
|
||||
done(
|
||||
nrNoSubstituters > 0 || nrIncompleteClosure > 0 ? ecIncompleteClosure : ecFailed,
|
||||
BuildResult::DependencyFailed);
|
||||
BuildResult::DependencyFailed,
|
||||
fmt("some references of path '%s' could not be realised", worker.store.printStorePath(storePath)));
|
||||
return;
|
||||
}
|
||||
|
||||
@ -53,7 +53,10 @@ struct PathSubstitutionGoal : public Goal
|
|||
/* Content address for recomputing store path */
|
||||
std::optional<ContentAddress> ca;
|
||||
|
||||
void done(ExitCode result, BuildResult::Status status);
|
||||
void done(
|
||||
ExitCode result,
|
||||
BuildResult::Status status,
|
||||
std::optional<std::string> errorMsg = {});
|
||||
|
||||
public:
|
||||
PathSubstitutionGoal(const StorePath & storePath, Worker & worker, RepairFlag repair = NoRepair, std::optional<ContentAddress> ca = std::nullopt);
|
||||
@ -26,11 +26,15 @@ std::optional<StorePath> DerivationOutput::path(const Store & store, std::string
|
|||
[](const DerivationOutput::Deferred &) -> std::optional<StorePath> {
|
||||
return std::nullopt;
|
||||
},
|
||||
[](const DerivationOutput::Impure &) -> std::optional<StorePath> {
|
||||
return std::nullopt;
|
||||
},
|
||||
}, raw());
|
||||
}
|
||||
|
||||
|
||||
StorePath DerivationOutput::CAFixed::path(const Store & store, std::string_view drvName, std::string_view outputName) const {
|
||||
StorePath DerivationOutput::CAFixed::path(const Store & store, std::string_view drvName, std::string_view outputName) const
|
||||
{
|
||||
return store.makeFixedOutputPathFromCA(StorePathDescriptor {
|
||||
.name = outputPathName(drvName, outputName),
|
||||
.info = ca,
|
||||
|
@ -38,15 +42,27 @@ StorePath DerivationOutput::CAFixed::path(const Store & store, std::string_view
|
|||
}
|
||||
|
||||
|
||||
bool DerivationType::isCA() const {
|
||||
bool DerivationType::isCA() const
|
||||
{
|
||||
/* Normally we do the full `std::visit` to make sure we have
|
||||
exhaustively handled all variants, but so long as there is a
|
||||
variant called `ContentAddressed`, it must be the only one for
|
||||
which `isCA` is true for this to make sense. */
|
||||
return std::holds_alternative<ContentAddressed>(raw());
|
||||
return std::visit(overloaded {
|
||||
[](const InputAddressed & ia) {
|
||||
return false;
|
||||
},
|
||||
[](const ContentAddressed & ca) {
|
||||
return true;
|
||||
},
|
||||
[](const Impure &) {
|
||||
return true;
|
||||
},
|
||||
}, raw());
|
||||
}
|
||||
|
||||
bool DerivationType::isFixed() const {
|
||||
bool DerivationType::isFixed() const
|
||||
{
|
||||
return std::visit(overloaded {
|
||||
[](const InputAddressed & ia) {
|
||||
return false;
|
||||
|
@ -54,10 +70,14 @@ bool DerivationType::isFixed() const {
|
|||
[](const ContentAddressed & ca) {
|
||||
return ca.fixed;
|
||||
},
|
||||
[](const Impure &) {
|
||||
return false;
|
||||
},
|
||||
}, raw());
|
||||
}
|
||||
|
||||
bool DerivationType::hasKnownOutputPaths() const {
|
||||
bool DerivationType::hasKnownOutputPaths() const
|
||||
{
|
||||
return std::visit(overloaded {
|
||||
[](const InputAddressed & ia) {
|
||||
return !ia.deferred;
|
||||
|
@ -65,17 +85,40 @@ bool DerivationType::hasKnownOutputPaths() const {
|
|||
[](const ContentAddressed & ca) {
|
||||
return ca.fixed;
|
||||
},
|
||||
[](const Impure &) {
|
||||
return false;
|
||||
},
|
||||
}, raw());
|
||||
}
|
||||
|
||||
|
||||
bool DerivationType::isImpure() const {
|
||||
bool DerivationType::isSandboxed() const
|
||||
{
|
||||
return std::visit(overloaded {
|
||||
[](const InputAddressed & ia) {
|
||||
return false;
|
||||
return true;
|
||||
},
|
||||
[](const ContentAddressed & ca) {
|
||||
return !ca.pure;
|
||||
return ca.sandboxed;
|
||||
},
|
||||
[](const Impure &) {
|
||||
return false;
|
||||
},
|
||||
}, raw());
|
||||
}
|
||||
|
||||
|
||||
bool DerivationType::isPure() const
|
||||
{
|
||||
return std::visit(overloaded {
|
||||
[](const InputAddressed & ia) {
|
||||
return true;
|
||||
},
|
||||
[](const ContentAddressed & ca) {
|
||||
return true;
|
||||
},
|
||||
[](const Impure &) {
|
||||
return false;
|
||||
},
|
||||
}, raw());
|
||||
}
|
||||
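A minimal, self-contained sketch of the `std::visit` + `overloaded` idiom that all of the predicates above rely on (the variant alternatives here are illustrative stand-ins, not the real `DerivationType`; `overloaded` is assumed to be the usual aggregate-of-lambdas helper):

#include <iostream>
#include <variant>

// Illustrative stand-ins for the real alternatives.
struct InputAddressed { bool deferred; };
struct ContentAddressed { bool sandboxed; bool fixed; };
struct Impure { };

// The usual "overloaded" helper: inherits operator() from each lambda.
template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;

using Type = std::variant<InputAddressed, ContentAddressed, Impure>;

// Same shape as DerivationType::isPure(): one lambda per alternative, so
// adding a new alternative (like Impure) fails to compile until handled.
bool isPure(const Type & t)
{
    return std::visit(overloaded {
        [](const InputAddressed &) { return true; },
        [](const ContentAddressed &) { return true; },
        [](const Impure &) { return false; },
    }, t);
}

int main()
{
    std::cout << isPure(Type { Impure {} }) << "\n"; // prints 0
}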
|
@ -174,7 +217,14 @@ static DerivationOutput parseDerivationOutput(const Store & store,
|
|||
if (hashAlgo != "") {
|
||||
ContentAddressMethod method = parseContentAddressingPrefix(hashAlgo);
|
||||
const auto hashType = parseHashType(hashAlgo);
|
||||
if (hashS != "") {
|
||||
if (hashS == "impure") {
|
||||
settings.requireExperimentalFeature(Xp::ImpureDerivations);
|
||||
assert(pathS == "");
|
||||
return DerivationOutput::Impure {
|
||||
.method = std::move(method),
|
||||
.hashType = std::move(hashType),
|
||||
};
|
||||
} else if (hashS != "") {
|
||||
validatePath(pathS);
|
||||
auto hash = Hash::parseNonSRIUnprefixed(hashS, hashType);
|
||||
return DerivationOutput::CAFixed {
|
||||
|
@ -343,6 +393,12 @@ std::string Derivation::unparse(const Store & store, bool maskOutputs,
|
|||
s += ','; printUnquotedString(s, "");
|
||||
s += ','; printUnquotedString(s, "");
|
||||
s += ','; printUnquotedString(s, "");
|
||||
},
|
||||
[&](const DerivationOutputImpure & doi) {
|
||||
// FIXME
|
||||
s += ','; printUnquotedString(s, "");
|
||||
s += ','; printUnquotedString(s, makeContentAddressingPrefix(doi.method) + printHashType(doi.hashType));
|
||||
s += ','; printUnquotedString(s, "impure");
|
||||
}
|
||||
}, i.second.raw());
|
||||
s += ')';
|
||||
|
@ -408,8 +464,14 @@ std::string outputPathName(std::string_view drvName, std::string_view outputName
|
|||
|
||||
DerivationType BasicDerivation::type() const
|
||||
{
|
||||
std::set<std::string_view> inputAddressedOutputs, fixedCAOutputs, floatingCAOutputs, deferredIAOutputs;
|
||||
std::set<std::string_view>
|
||||
inputAddressedOutputs,
|
||||
fixedCAOutputs,
|
||||
floatingCAOutputs,
|
||||
deferredIAOutputs,
|
||||
impureOutputs;
|
||||
std::optional<HashType> floatingHashType;
|
||||
|
||||
for (auto & i : outputs) {
|
||||
std::visit(overloaded {
|
||||
[&](const DerivationOutput::InputAddressed &) {
|
||||
|
@ -424,43 +486,78 @@ DerivationType BasicDerivation::type() const
|
|||
floatingHashType = dof.hashType;
|
||||
} else {
|
||||
if (*floatingHashType != dof.hashType)
|
||||
throw Error("All floating outputs must use the same hash type");
|
||||
throw Error("all floating outputs must use the same hash type");
|
||||
}
|
||||
},
|
||||
[&](const DerivationOutput::Deferred &) {
|
||||
deferredIAOutputs.insert(i.first);
|
||||
deferredIAOutputs.insert(i.first);
|
||||
},
|
||||
[&](const DerivationOutput::Impure &) {
|
||||
impureOutputs.insert(i.first);
|
||||
},
|
||||
}, i.second.raw());
|
||||
}
|
||||
|
||||
if (inputAddressedOutputs.empty() && fixedCAOutputs.empty() && floatingCAOutputs.empty() && deferredIAOutputs.empty()) {
|
||||
throw Error("Must have at least one output");
|
||||
} else if (! inputAddressedOutputs.empty() && fixedCAOutputs.empty() && floatingCAOutputs.empty() && deferredIAOutputs.empty()) {
|
||||
if (inputAddressedOutputs.empty()
|
||||
&& fixedCAOutputs.empty()
|
||||
&& floatingCAOutputs.empty()
|
||||
&& deferredIAOutputs.empty()
|
||||
&& impureOutputs.empty())
|
||||
throw Error("must have at least one output");
|
||||
|
||||
if (!inputAddressedOutputs.empty()
|
||||
&& fixedCAOutputs.empty()
|
||||
&& floatingCAOutputs.empty()
|
||||
&& deferredIAOutputs.empty()
|
||||
&& impureOutputs.empty())
|
||||
return DerivationType::InputAddressed {
|
||||
.deferred = false,
|
||||
};
|
||||
} else if (inputAddressedOutputs.empty() && ! fixedCAOutputs.empty() && floatingCAOutputs.empty() && deferredIAOutputs.empty()) {
|
||||
|
||||
if (inputAddressedOutputs.empty()
|
||||
&& !fixedCAOutputs.empty()
|
||||
&& floatingCAOutputs.empty()
|
||||
&& deferredIAOutputs.empty()
|
||||
&& impureOutputs.empty())
|
||||
{
|
||||
if (fixedCAOutputs.size() > 1)
|
||||
// FIXME: Experimental feature?
|
||||
throw Error("Only one fixed output is allowed for now");
|
||||
throw Error("only one fixed output is allowed for now");
|
||||
if (*fixedCAOutputs.begin() != "out")
|
||||
throw Error("Single fixed output must be named \"out\"");
|
||||
throw Error("single fixed output must be named \"out\"");
|
||||
return DerivationType::ContentAddressed {
|
||||
.pure = false,
|
||||
.sandboxed = false,
|
||||
.fixed = true,
|
||||
};
|
||||
} else if (inputAddressedOutputs.empty() && fixedCAOutputs.empty() && ! floatingCAOutputs.empty() && deferredIAOutputs.empty()) {
|
||||
}
|
||||
|
||||
if (inputAddressedOutputs.empty()
|
||||
&& fixedCAOutputs.empty()
|
||||
&& !floatingCAOutputs.empty()
|
||||
&& deferredIAOutputs.empty()
|
||||
&& impureOutputs.empty())
|
||||
return DerivationType::ContentAddressed {
|
||||
.pure = true,
|
||||
.sandboxed = true,
|
||||
.fixed = false,
|
||||
};
|
||||
} else if (inputAddressedOutputs.empty() && fixedCAOutputs.empty() && floatingCAOutputs.empty() && !deferredIAOutputs.empty()) {
|
||||
|
||||
if (inputAddressedOutputs.empty()
|
||||
&& fixedCAOutputs.empty()
|
||||
&& floatingCAOutputs.empty()
|
||||
&& !deferredIAOutputs.empty()
|
||||
&& impureOutputs.empty())
|
||||
return DerivationType::InputAddressed {
|
||||
.deferred = true,
|
||||
};
|
||||
} else {
|
||||
throw Error("Can't mix derivation output types");
|
||||
}
|
||||
|
||||
if (inputAddressedOutputs.empty()
|
||||
&& fixedCAOutputs.empty()
|
||||
&& floatingCAOutputs.empty()
|
||||
&& deferredIAOutputs.empty()
|
||||
&& !impureOutputs.empty())
|
||||
return DerivationType::Impure { };
|
||||
|
||||
throw Error("can't mix derivation output types");
|
||||
}
|
||||
|
||||
|
||||
|
@ -472,7 +569,7 @@ Sync<DrvHashes> drvHashes;
|
|||
/* Look up the derivation by value and memoize the
|
||||
`hashDerivationModulo` call.
|
||||
*/
|
||||
static const DrvHashModulo pathDerivationModulo(Store & store, const StorePath & drvPath)
|
||||
static const DrvHash pathDerivationModulo(Store & store, const StorePath & drvPath)
|
||||
{
|
||||
{
|
||||
auto hashes = drvHashes.lock();
|
||||
|
@ -507,7 +604,7 @@ static const DrvHashModulo pathDerivationModulo(Store & store, const StorePath &
|
|||
don't leak the provenance of fixed outputs, reducing pointless cache
|
||||
misses as the build itself won't know this.
|
||||
*/
|
||||
DrvHashModulo hashDerivationModulo(Store & store, const Derivation & drv, bool maskOutputs)
|
||||
DrvHash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOutputs)
|
||||
{
|
||||
auto type = drv.type();
|
||||
|
||||
|
@ -522,7 +619,20 @@ DrvHashModulo hashDerivationModulo(Store & store, const Derivation & drv, bool m
|
|||
+ store.printStorePath(dof.path(store, drv.name, i.first)));
|
||||
outputHashes.insert_or_assign(i.first, std::move(hash));
|
||||
}
|
||||
return outputHashes;
|
||||
return DrvHash {
|
||||
.hashes = outputHashes,
|
||||
.kind = DrvHash::Kind::Regular,
|
||||
};
|
||||
}
|
||||
|
||||
if (!type.isPure()) {
|
||||
std::map<std::string, Hash> outputHashes;
|
||||
for (const auto & [outputName, _] : drv.outputs)
|
||||
outputHashes.insert_or_assign(outputName, impureOutputHash);
|
||||
return DrvHash {
|
||||
.hashes = outputHashes,
|
||||
.kind = DrvHash::Kind::Deferred,
|
||||
};
|
||||
}
|
||||
|
||||
auto kind = std::visit(overloaded {
|
||||
|
@ -536,67 +646,41 @@ DrvHashModulo hashDerivationModulo(Store & store, const Derivation & drv, bool m
|
|||
? DrvHash::Kind::Regular
|
||||
: DrvHash::Kind::Deferred;
|
||||
},
|
||||
[](const DerivationType::Impure &) -> DrvHash::Kind {
|
||||
assert(false);
|
||||
}
|
||||
}, drv.type().raw());
|
||||
|
||||
/* For other derivations, replace the inputs paths with recursive
|
||||
calls to this function. */
|
||||
std::map<std::string, StringSet> inputs2;
|
||||
for (auto & [drvPath, inputOutputs0] : drv.inputDrvs) {
|
||||
// Avoid lambda capture restriction with standard / Clang
|
||||
auto & inputOutputs = inputOutputs0;
|
||||
const auto & res = pathDerivationModulo(store, drvPath);
|
||||
std::visit(overloaded {
|
||||
// Regular non-CA derivation, replace derivation
|
||||
[&](const DrvHash & drvHash) {
|
||||
kind |= drvHash.kind;
|
||||
inputs2.insert_or_assign(drvHash.hash.to_string(Base16, false), inputOutputs);
|
||||
},
|
||||
// CA derivation's output hashes
|
||||
[&](const CaOutputHashes & outputHashes) {
|
||||
std::set<std::string> justOut = { "out" };
|
||||
for (auto & output : inputOutputs) {
|
||||
/* Put each one in with a single "out" output.. */
|
||||
const auto h = outputHashes.at(output);
|
||||
inputs2.insert_or_assign(
|
||||
h.to_string(Base16, false),
|
||||
justOut);
|
||||
}
|
||||
},
|
||||
}, res.raw());
|
||||
if (res.kind == DrvHash::Kind::Deferred)
|
||||
kind = DrvHash::Kind::Deferred;
|
||||
for (auto & outputName : inputOutputs) {
|
||||
const auto h = res.hashes.at(outputName);
|
||||
inputs2[h.to_string(Base16, false)].insert(outputName);
|
||||
}
|
||||
}
|
||||
|
||||
auto hash = hashString(htSHA256, drv.unparse(store, maskOutputs, &inputs2));
|
||||
|
||||
return DrvHash { .hash = hash, .kind = kind };
|
||||
}
|
||||
|
||||
|
||||
void operator |= (DrvHash::Kind & self, const DrvHash::Kind & other) noexcept
|
||||
{
|
||||
switch (other) {
|
||||
case DrvHash::Kind::Regular:
|
||||
break;
|
||||
case DrvHash::Kind::Deferred:
|
||||
self = other;
|
||||
break;
|
||||
std::map<std::string, Hash> outputHashes;
|
||||
for (const auto & [outputName, _] : drv.outputs) {
|
||||
outputHashes.insert_or_assign(outputName, hash);
|
||||
}
|
||||
|
||||
return DrvHash {
|
||||
.hashes = outputHashes,
|
||||
.kind = kind,
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
std::map<std::string, Hash> staticOutputHashes(Store & store, const Derivation & drv)
|
||||
{
|
||||
std::map<std::string, Hash> res;
|
||||
std::visit(overloaded {
|
||||
[&](const DrvHash & drvHash) {
|
||||
for (auto & outputName : drv.outputNames()) {
|
||||
res.insert({outputName, drvHash.hash});
|
||||
}
|
||||
},
|
||||
[&](const CaOutputHashes & outputHashes) {
|
||||
res = outputHashes;
|
||||
},
|
||||
}, hashDerivationModulo(store, drv, true).raw());
|
||||
return res;
|
||||
return hashDerivationModulo(store, drv, true).hashes;
|
||||
}
|
||||
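With every `DrvHash` now carrying a per-output hash map, `staticOutputHashes` reduces to projecting out `.hashes`. A hedged sketch of a typical caller, mirroring the `drvOutputReferences` change later in this diff (assumes the usual Nix headers plus `<iostream>`; `DrvOutput`, `queryRealisation` and `printStorePath` come from the existing store API):

// Sketch only: print the realised path of each output of a derivation.
void printRealisedOutputs(Store & store, const StorePath & drvPath)
{
    auto drv = store.readDerivation(drvPath);

    // One hash per output name, even for input-addressed derivations
    // (where all the hashes coincide).
    auto outputHashes = staticOutputHashes(store, drv);

    for (auto & [outputName, outputHash] : outputHashes) {
        auto realisation = store.queryRealisation(DrvOutput { outputHash, outputName });
        if (realisation)
            std::cout << outputName << " -> "
                      << store.printStorePath(realisation->outPath) << "\n";
    }
}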
|
||||
|
||||
|
@ -623,7 +707,8 @@ StringSet BasicDerivation::outputNames() const
|
|||
return names;
|
||||
}
|
||||
|
||||
DerivationOutputsAndOptPaths BasicDerivation::outputsAndOptPaths(const Store & store) const {
|
||||
DerivationOutputsAndOptPaths BasicDerivation::outputsAndOptPaths(const Store & store) const
|
||||
{
|
||||
DerivationOutputsAndOptPaths outsAndOptPaths;
|
||||
for (auto output : outputs)
|
||||
outsAndOptPaths.insert(std::make_pair(
|
||||
|
@ -634,7 +719,8 @@ DerivationOutputsAndOptPaths BasicDerivation::outputsAndOptPaths(const Store & s
|
|||
return outsAndOptPaths;
|
||||
}
|
||||
|
||||
std::string_view BasicDerivation::nameFromPath(const StorePath & drvPath) {
|
||||
std::string_view BasicDerivation::nameFromPath(const StorePath & drvPath)
|
||||
{
|
||||
auto nameWithSuffix = drvPath.name();
|
||||
constexpr std::string_view extension = ".drv";
|
||||
assert(hasSuffix(nameWithSuffix, extension));
|
||||
|
@ -696,6 +782,11 @@ void writeDerivation(Sink & out, const Store & store, const BasicDerivation & dr
|
|||
<< ""
|
||||
<< "";
|
||||
},
|
||||
[&](const DerivationOutput::Impure & doi) {
|
||||
out << ""
|
||||
<< (makeContentAddressingPrefix(doi.method) + printHashType(doi.hashType))
|
||||
<< "impure";
|
||||
},
|
||||
}, i.second.raw());
|
||||
}
|
||||
worker_proto::write(store, out, drv.inputSrcs);
|
||||
|
@ -721,21 +812,19 @@ std::string downstreamPlaceholder(const Store & store, const StorePath & drvPath
|
|||
}
|
||||
|
||||
|
||||
static void rewriteDerivation(Store & store, BasicDerivation & drv, const StringMap & rewrites) {
|
||||
|
||||
debug("Rewriting the derivation");
|
||||
|
||||
for (auto &rewrite: rewrites) {
|
||||
static void rewriteDerivation(Store & store, BasicDerivation & drv, const StringMap & rewrites)
|
||||
{
|
||||
for (auto & rewrite : rewrites) {
|
||||
debug("rewriting %s as %s", rewrite.first, rewrite.second);
|
||||
}
|
||||
|
||||
drv.builder = rewriteStrings(drv.builder, rewrites);
|
||||
for (auto & arg: drv.args) {
|
||||
for (auto & arg : drv.args) {
|
||||
arg = rewriteStrings(arg, rewrites);
|
||||
}
|
||||
|
||||
StringPairs newEnv;
|
||||
for (auto & envVar: drv.env) {
|
||||
for (auto & envVar : drv.env) {
|
||||
auto envName = rewriteStrings(envVar.first, rewrites);
|
||||
auto envValue = rewriteStrings(envVar.second, rewrites);
|
||||
newEnv.emplace(envName, envValue);
|
||||
|
@ -745,7 +834,7 @@ static void rewriteDerivation(Store & store, BasicDerivation & drv, const String
|
|||
auto hashModulo = hashDerivationModulo(store, Derivation(drv), true);
|
||||
for (auto & [outputName, output] : drv.outputs) {
|
||||
if (std::holds_alternative<DerivationOutput::Deferred>(output.raw())) {
|
||||
auto & h = hashModulo.requireNoFixedNonDeferred();
|
||||
auto & h = hashModulo.hashes.at(outputName);
|
||||
auto outPath = store.makeOutputPath(outputName, h, drv.name);
|
||||
drv.env[outputName] = store.printStorePath(outPath);
|
||||
output = DerivationOutput::InputAddressed {
|
||||
|
@ -756,55 +845,48 @@ static void rewriteDerivation(Store & store, BasicDerivation & drv, const String
|
|||
|
||||
}
|
||||
|
||||
const Hash & DrvHashModulo::requireNoFixedNonDeferred() const {
|
||||
auto * drvHashOpt = std::get_if<DrvHash>(&raw());
|
||||
assert(drvHashOpt);
|
||||
assert(drvHashOpt->kind == DrvHash::Kind::Regular);
|
||||
return drvHashOpt->hash;
|
||||
}
|
||||
|
||||
static bool tryResolveInput(
|
||||
Store & store, StorePathSet & inputSrcs, StringMap & inputRewrites,
|
||||
const StorePath & inputDrv, const StringSet & inputOutputs)
|
||||
std::optional<BasicDerivation> Derivation::tryResolve(Store & store) const
|
||||
{
|
||||
auto inputDrvOutputs = store.queryPartialDerivationOutputMap(inputDrv);
|
||||
std::map<std::pair<StorePath, std::string>, StorePath> inputDrvOutputs;
|
||||
|
||||
auto getOutput = [&](const std::string & outputName) {
|
||||
auto & actualPathOpt = inputDrvOutputs.at(outputName);
|
||||
if (!actualPathOpt)
|
||||
warn("output %s of input %s missing, aborting the resolving",
|
||||
outputName,
|
||||
store.printStorePath(inputDrv)
|
||||
);
|
||||
return actualPathOpt;
|
||||
};
|
||||
for (auto & input : inputDrvs)
|
||||
for (auto & [outputName, outputPath] : store.queryPartialDerivationOutputMap(input.first))
|
||||
if (outputPath)
|
||||
inputDrvOutputs.insert_or_assign({input.first, outputName}, *outputPath);
|
||||
|
||||
for (auto & outputName : inputOutputs) {
|
||||
auto actualPathOpt = getOutput(outputName);
|
||||
if (!actualPathOpt) return false;
|
||||
auto actualPath = *actualPathOpt;
|
||||
inputRewrites.emplace(
|
||||
downstreamPlaceholder(store, inputDrv, outputName),
|
||||
store.printStorePath(actualPath));
|
||||
inputSrcs.insert(std::move(actualPath));
|
||||
}
|
||||
|
||||
return true;
|
||||
return tryResolve(store, inputDrvOutputs);
|
||||
}
|
||||
|
||||
std::optional<BasicDerivation> Derivation::tryResolve(Store & store) {
|
||||
std::optional<BasicDerivation> Derivation::tryResolve(
|
||||
Store & store,
|
||||
const std::map<std::pair<StorePath, std::string>, StorePath> & inputDrvOutputs) const
|
||||
{
|
||||
BasicDerivation resolved { *this };
|
||||
|
||||
// Input paths that we'll want to rewrite in the derivation
|
||||
StringMap inputRewrites;
|
||||
|
||||
for (auto & [inputDrv, inputOutputs] : inputDrvs)
|
||||
if (!tryResolveInput(store, resolved.inputSrcs, inputRewrites, inputDrv, inputOutputs))
|
||||
return std::nullopt;
|
||||
for (auto & [inputDrv, inputOutputs] : inputDrvs) {
|
||||
for (auto & outputName : inputOutputs) {
|
||||
if (auto actualPath = get(inputDrvOutputs, { inputDrv, outputName })) {
|
||||
inputRewrites.emplace(
|
||||
downstreamPlaceholder(store, inputDrv, outputName),
|
||||
store.printStorePath(*actualPath));
|
||||
resolved.inputSrcs.insert(*actualPath);
|
||||
} else {
|
||||
warn("output '%s' of input '%s' missing, aborting the resolving",
|
||||
outputName,
|
||||
store.printStorePath(inputDrv));
|
||||
return {};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
rewriteDerivation(store, resolved, inputRewrites);
|
||||
|
||||
return resolved;
|
||||
}
|
||||
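A hedged usage sketch of the new two-argument `tryResolve`, for callers that already know (or want to control) which input outputs map to which store paths; it mirrors what the single-argument overload above does internally:

// Sketch only; assumes the surrounding Nix headers.
std::optional<BasicDerivation> resolveAgainstStore(Store & store, const Derivation & drv)
{
    std::map<std::pair<StorePath, std::string>, StorePath> inputDrvOutputs;

    for (auto & input : drv.inputDrvs)
        for (auto & [outputName, outputPath] : store.queryPartialDerivationOutputMap(input.first))
            if (outputPath)
                inputDrvOutputs.insert_or_assign({input.first, outputName}, *outputPath);

    // Returns std::nullopt (after a warning) if any needed output is still missing.
    return drv.tryResolve(store, inputDrvOutputs);
}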
|
||||
const Hash impureOutputHash = hashString(htSHA256, "impure");
|
||||
|
||||
}
|
||||
@ -41,15 +41,26 @@ struct DerivationOutputCAFloating
|
|||
};
|
||||
|
||||
/* Input-addressed output which depends on a (CA) derivation whose hash isn't
|
||||
* known atm
|
||||
* known yet.
|
||||
*/
|
||||
struct DerivationOutputDeferred {};
|
||||
|
||||
/* Impure output which is moved to a content-addressed location (like
|
||||
CAFloating) but isn't registered as a realization.
|
||||
*/
|
||||
struct DerivationOutputImpure
|
||||
{
|
||||
/* information used for expected hash computation */
|
||||
ContentAddressMethod method;
|
||||
HashType hashType;
|
||||
};
|
||||
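An impure output records the same addressing information as a floating CA output; only the registration behaviour differs. A hedged construction sketch (`method` stands for whatever `ContentAddressMethod` value the output uses, e.g. recursive NAR hashing; `htSHA256` is the usual hash-type constant):

// Sketch only: declare an output whose result is impure but still gets
// moved to a content-addressed location after the build.
DerivationOutput out = DerivationOutput::Impure {
    .method = method,      // hypothetical ContentAddressMethod value
    .hashType = htSHA256,
};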
|
||||
typedef std::variant<
|
||||
DerivationOutputInputAddressed,
|
||||
DerivationOutputCAFixed,
|
||||
DerivationOutputCAFloating,
|
||||
DerivationOutputDeferred
|
||||
DerivationOutputDeferred,
|
||||
DerivationOutputImpure
|
||||
> _DerivationOutputRaw;
|
||||
|
||||
struct DerivationOutput : _DerivationOutputRaw
|
||||
|
@ -61,6 +72,7 @@ struct DerivationOutput : _DerivationOutputRaw
|
|||
using CAFixed = DerivationOutputCAFixed;
|
||||
using CAFloating = DerivationOutputCAFloating;
|
||||
using Deferred = DerivationOutputDeferred;
|
||||
using Impure = DerivationOutputImpure;
|
||||
|
||||
/* Note, when you use this function you should make sure that you're passing
|
||||
the right derivation name. When in doubt, you should use the safer
|
||||
|
@ -90,13 +102,17 @@ struct DerivationType_InputAddressed {
|
|||
};
|
||||
|
||||
struct DerivationType_ContentAddressed {
|
||||
bool pure;
|
||||
bool sandboxed;
|
||||
bool fixed;
|
||||
};
|
||||
|
||||
struct DerivationType_Impure {
|
||||
};
|
||||
|
||||
typedef std::variant<
|
||||
DerivationType_InputAddressed,
|
||||
DerivationType_ContentAddressed
|
||||
DerivationType_ContentAddressed,
|
||||
DerivationType_Impure
|
||||
> _DerivationTypeRaw;
|
||||
|
||||
struct DerivationType : _DerivationTypeRaw {
|
||||
|
@ -104,7 +120,7 @@ struct DerivationType : _DerivationTypeRaw {
|
|||
using Raw::Raw;
|
||||
using InputAddressed = DerivationType_InputAddressed;
|
||||
using ContentAddressed = DerivationType_ContentAddressed;
|
||||
|
||||
using Impure = DerivationType_Impure;
|
||||
|
||||
/* Do the outputs of the derivation have paths calculated from their content,
|
||||
or from the derivation itself? */
|
||||
|
@ -114,10 +130,18 @@ struct DerivationType : _DerivationTypeRaw {
|
|||
non-CA derivations. */
|
||||
bool isFixed() const;
|
||||
|
||||
/* Is the derivation impure and needs to access non-deterministic resources, or
|
||||
pure and can be sandboxed? Note that whether or not we actually sandbox the
|
||||
derivation is controlled separately. Never true for non-CA derivations. */
|
||||
bool isImpure() const;
|
||||
/* Whether the derivation is fully sandboxed. If false, the
|
||||
sandbox is opened up, e.g. the derivation has access to the
|
||||
network. Note that whether or not we actually sandbox the
|
||||
derivation is controlled separately. Always true for non-CA
|
||||
derivations. */
|
||||
bool isSandboxed() const;
|
||||
|
||||
/* Whether the derivation is expected to produce the same result
|
||||
every time, and therefore it only needs to be built once. This
|
||||
is only false for derivations that have the attribute '__impure
|
||||
= true'. */
|
||||
bool isPure() const;
|
||||
|
||||
/* Does the derivation know its own output paths?
|
||||
Only true when there's no floating-ca derivation involved in the
|
||||
|
@ -173,7 +197,14 @@ struct Derivation : BasicDerivation
|
|||
added directly to input sources.
|
||||
|
||||
2. Input placeholders are replaced with realized input store paths. */
|
||||
std::optional<BasicDerivation> tryResolve(Store & store);
|
||||
std::optional<BasicDerivation> tryResolve(Store & store) const;
|
||||
|
||||
/* Like the above, but instead of querying the Nix database for
|
||||
realisations, uses a given mapping from input derivation paths
|
||||
+ output names to actual output store paths. */
|
||||
std::optional<BasicDerivation> tryResolve(
|
||||
Store & store,
|
||||
const std::map<std::pair<StorePath, std::string>, StorePath> & inputDrvOutputs) const;
|
||||
|
||||
Derivation() = default;
|
||||
Derivation(const BasicDerivation & bd) : BasicDerivation(bd) { }
|
||||
|
@ -202,14 +233,16 @@ bool isDerivation(const std::string & fileName);
|
|||
the output name is "out". */
|
||||
std::string outputPathName(std::string_view drvName, std::string_view outputName);
|
||||
|
||||
// known CA drv's output hashes, currently just for fixed-output derivations
|
||||
// whose output hashes are always known since they are fixed up-front.
|
||||
typedef std::map<std::string, Hash> CaOutputHashes;
|
||||
|
||||
// The hashes modulo of a derivation.
|
||||
//
|
||||
// Each output is given a hash, although in practice only the content-addressed
|
||||
// derivations (fixed-output or not) will have a different hash for each
|
||||
// output.
|
||||
struct DrvHash {
|
||||
Hash hash;
|
||||
std::map<std::string, Hash> hashes;
|
||||
|
||||
enum struct Kind: bool {
|
||||
enum struct Kind : bool {
|
||||
// Statically determined derivations.
|
||||
// This hash will be directly used to compute the output paths
|
||||
Regular,
|
||||
|
@ -222,28 +255,6 @@ struct DrvHash {
|
|||
|
||||
void operator |= (DrvHash::Kind & self, const DrvHash::Kind & other) noexcept;
|
||||
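The `Kind` flag combines like a sticky bit: once any contributing hash is Deferred, the combined result stays Deferred (per the operator definition in derivations.cc earlier in this diff). A small illustrative sketch:

DrvHash::Kind kind = DrvHash::Kind::Regular;
kind |= DrvHash::Kind::Regular;   // still Regular
kind |= DrvHash::Kind::Deferred;  // now Deferred
kind |= DrvHash::Kind::Regular;   // stays Deferred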
|
||||
typedef std::variant<
|
||||
// Regular normalized derivation hash, and whether it was deferred (because
|
||||
// an ancestor derivation is a floating content addressed derivation).
|
||||
DrvHash,
|
||||
// Fixed-output derivation hashes
|
||||
CaOutputHashes
|
||||
> _DrvHashModuloRaw;
|
||||
|
||||
struct DrvHashModulo : _DrvHashModuloRaw {
|
||||
using Raw = _DrvHashModuloRaw;
|
||||
using Raw::Raw;
|
||||
|
||||
/* Get hash, throwing if it is per-output CA hashes or a
|
||||
deferred Drv hash.
|
||||
*/
|
||||
const Hash & requireNoFixedNonDeferred() const;
|
||||
|
||||
inline const Raw & raw() const {
|
||||
return static_cast<const Raw &>(*this);
|
||||
}
|
||||
};
|
||||
|
||||
/* Returns hashes with the details of fixed-output subderivations
|
||||
expunged.
|
||||
|
||||
|
@ -267,16 +278,18 @@ struct DrvHashModulo : _DrvHashModuloRaw {
|
|||
ATerm, after subderivations have been likewise expunged from that
|
||||
derivation.
|
||||
*/
|
||||
DrvHashModulo hashDerivationModulo(Store & store, const Derivation & drv, bool maskOutputs);
|
||||
DrvHash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOutputs);
|
||||
|
||||
/*
|
||||
Return a map associating each output to a hash that uniquely identifies its
|
||||
derivation (modulo the self-references).
|
||||
|
||||
FIXME: what is the Hash in this map?
|
||||
*/
|
||||
std::map<std::string, Hash> staticOutputHashes(Store& store, const Derivation& drv);
|
||||
std::map<std::string, Hash> staticOutputHashes(Store & store, const Derivation & drv);
|
||||
|
||||
/* Memoisation of hashDerivationModulo(). */
|
||||
typedef std::map<StorePath, DrvHashModulo> DrvHashes;
|
||||
typedef std::map<StorePath, DrvHash> DrvHashes;
|
||||
|
||||
// FIXME: global, though at least thread-safe.
|
||||
extern Sync<DrvHashes> drvHashes;
|
||||
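A hedged sketch of the memoisation pattern behind `drvHashes`, mirroring `pathDerivationModulo` earlier in this diff (`Sync::lock()` is assumed to hand back a guard with pointer-like access to the underlying map):

// Sketch only: look up a cached DrvHash, computing and caching it on a miss.
DrvHash hashFor(Store & store, const StorePath & drvPath)
{
    {
        auto hashes = drvHashes.lock();
        auto it = hashes->find(drvPath);
        if (it != hashes->end()) return it->second;
    }
    auto h = hashDerivationModulo(store, store.readDerivation(drvPath), false);
    drvHashes.lock()->insert_or_assign(drvPath, h);
    return h;
}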
|
@ -306,4 +319,6 @@ std::string hashPlaceholder(const std::string_view outputName);
|
|||
dependency which is a CA derivation. */
|
||||
std::string downstreamPlaceholder(const Store & store, const StorePath & drvPath, std::string_view outputName);
|
||||
|
||||
extern const Hash impureOutputHash;
|
||||
|
||||
}
|
||||
@ -26,6 +26,9 @@ struct DerivedPathOpaque {
|
|||
nlohmann::json toJSON(ref<Store> store) const;
|
||||
std::string to_string(const Store & store) const;
|
||||
static DerivedPathOpaque parse(const Store & store, std::string_view);
|
||||
|
||||
bool operator < (const DerivedPathOpaque & b) const
|
||||
{ return path < b.path; }
|
||||
};
|
||||
|
||||
/**
|
||||
|
@ -47,6 +50,9 @@ struct DerivedPathBuilt {
|
|||
std::string to_string(const Store & store) const;
|
||||
static DerivedPathBuilt parse(const Store & store, std::string_view);
|
||||
nlohmann::json toJSON(ref<Store> store) const;
|
||||
|
||||
bool operator < (const DerivedPathBuilt & b) const
|
||||
{ return std::make_pair(drvPath, outputs) < std::make_pair(b.drvPath, b.outputs); }
|
||||
};
|
||||
|
||||
using _DerivedPathRaw = std::variant<
|
||||
@ -31,7 +31,7 @@ struct FileTransferSettings : Config
|
|||
R"(
|
||||
The timeout (in seconds) for establishing connections in the
|
||||
binary cache substituter. It corresponds to `curl`’s
|
||||
`--connect-timeout` option.
|
||||
`--connect-timeout` option. A value of 0 means no limit.
|
||||
)"};
|
||||
|
||||
Setting<unsigned long> stalledDownloadTimeout{
|
||||
|
@ -123,8 +123,6 @@ public:
|
|||
|
||||
template<typename... Args>
|
||||
FileTransferError(FileTransfer::Error error, std::optional<std::string> response, const Args & ... args);
|
||||
|
||||
virtual const char* sname() const override { return "FileTransferError"; }
|
||||
};
|
||||
|
||||
bool isUri(std::string_view s);
|
||||
@ -695,16 +695,15 @@ void LocalStore::checkDerivationOutputs(const StorePath & drvPath, const Derivat
|
|||
// combinations that are currently prohibited.
|
||||
drv.type();
|
||||
|
||||
std::optional<Hash> h;
|
||||
std::optional<DrvHash> hashesModulo;
|
||||
for (auto & i : drv.outputs) {
|
||||
std::visit(overloaded {
|
||||
[&](const DerivationOutput::InputAddressed & doia) {
|
||||
if (!h) {
|
||||
if (!hashesModulo) {
|
||||
// somewhat expensive so we do lazily
|
||||
auto h0 = hashDerivationModulo(*this, drv, true);
|
||||
h = h0.requireNoFixedNonDeferred();
|
||||
hashesModulo = hashDerivationModulo(*this, drv, true);
|
||||
}
|
||||
StorePath recomputed = makeOutputPath(i.first, *h, drvName);
|
||||
StorePath recomputed = makeOutputPath(i.first, hashesModulo->hashes.at(i.first), drvName);
|
||||
if (doia.path != recomputed)
|
||||
throw Error("derivation '%s' has incorrect output '%s', should be '%s'",
|
||||
printStorePath(drvPath), printStorePath(doia.path), printStorePath(recomputed));
|
||||
|
@ -720,6 +719,9 @@ void LocalStore::checkDerivationOutputs(const StorePath & drvPath, const Derivat
|
|||
[&](const DerivationOutput::Deferred &) {
|
||||
/* Nothing to check */
|
||||
},
|
||||
[&](const DerivationOutput::Impure &) {
|
||||
/* Nothing to check */
|
||||
},
|
||||
}, i.second.raw());
|
||||
}
|
||||
}
|
||||
@ -288,15 +288,15 @@ std::map<DrvOutput, StorePath> drvOutputReferences(
|
|||
{
|
||||
std::set<Realisation> inputRealisations;
|
||||
|
||||
for (const auto& [inputDrv, outputNames] : drv.inputDrvs) {
|
||||
for (const auto & [inputDrv, outputNames] : drv.inputDrvs) {
|
||||
auto outputHashes =
|
||||
staticOutputHashes(store, store.readDerivation(inputDrv));
|
||||
for (const auto& outputName : outputNames) {
|
||||
for (const auto & outputName : outputNames) {
|
||||
auto thisRealisation = store.queryRealisation(
|
||||
DrvOutput{outputHashes.at(outputName), outputName});
|
||||
if (!thisRealisation)
|
||||
throw Error(
|
||||
"output '%s' of derivation '%s' isn’t built", outputName,
|
||||
"output '%s' of derivation '%s' isn't built", outputName,
|
||||
store.printStorePath(inputDrv));
|
||||
inputRealisations.insert(*thisRealisation);
|
||||
}
|
||||
|
@ -306,4 +306,5 @@ std::map<DrvOutput, StorePath> drvOutputReferences(
|
|||
|
||||
return drvOutputReferences(Realisation::closure(store, inputRealisations), info->references);
|
||||
}
|
||||
|
||||
}
|
||||
@ -1,5 +1,7 @@
|
|||
#include "store-api.hh"
|
||||
|
||||
#include <sodium.h>
|
||||
|
||||
namespace nix {
|
||||
|
||||
static void checkName(std::string_view path, std::string_view name)
|
||||
|
@ -41,6 +43,13 @@ bool StorePath::isDerivation() const
|
|||
|
||||
StorePath StorePath::dummy("ffffffffffffffffffffffffffffffff-x");
|
||||
|
||||
StorePath StorePath::random(std::string_view name)
|
||||
{
|
||||
Hash hash(htSHA1);
|
||||
randombytes_buf(hash.hash, hash.hashSize);
|
||||
return StorePath(hash, name);
|
||||
}
|
||||
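A hedged usage sketch of `StorePath::random`, handy for tests that need a syntactically valid, effectively unique path without touching a real store (`to_string()` is assumed to render the usual `<hash>-<name>` basename; libsodium is assumed to be initialised as usual):

auto p = StorePath::random("unit-test-source");
std::cerr << "using throwaway store path " << p.to_string() << "\n";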
|
||||
StorePath Store::parseStorePath(std::string_view path) const
|
||||
{
|
||||
auto p = canonPath(std::string(path));
|
||||
@ -59,6 +59,8 @@ public:
|
|||
}
|
||||
|
||||
static StorePath dummy;
|
||||
|
||||
static StorePath random(std::string_view name);
|
||||
};
|
||||
|
||||
typedef std::set<StorePath> StorePathSet;
|
||||
@ -215,7 +215,6 @@ void handleSQLiteBusy(const SQLiteBusy & e)
|
|||
if (now > lastWarned + 10) {
|
||||
lastWarned = now;
|
||||
logWarning({
|
||||
.name = "Sqlite busy",
|
||||
.msg = hintfmt(e.what())
|
||||
});
|
||||
}
|
||||
@ -127,11 +127,11 @@ bool Args::processFlag(Strings::iterator & pos, Strings::iterator end)
|
|||
if (flag.handler.arity == ArityAny) break;
|
||||
throw UsageError("flag '%s' requires %d argument(s)", name, flag.handler.arity);
|
||||
}
|
||||
if (flag.completer)
|
||||
if (auto prefix = needsCompletion(*pos)) {
|
||||
anyCompleted = true;
|
||||
if (auto prefix = needsCompletion(*pos)) {
|
||||
anyCompleted = true;
|
||||
if (flag.completer)
|
||||
flag.completer(n, *prefix);
|
||||
}
|
||||
}
|
||||
args.push_back(*pos++);
|
||||
}
|
||||
if (!anyCompleted)
|
||||
|
@ -146,6 +146,7 @@ bool Args::processFlag(Strings::iterator & pos, Strings::iterator end)
|
|||
&& hasPrefix(name, std::string(*prefix, 2)))
|
||||
completions->add("--" + name, flag->description);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
auto i = longFlags.find(std::string(*pos, 2));
|
||||
if (i == longFlags.end()) return false;
|
||||
|
@ -187,10 +188,12 @@ bool Args::processArgs(const Strings & args, bool finish)
|
|||
{
|
||||
std::vector<std::string> ss;
|
||||
for (const auto &[n, s] : enumerate(args)) {
|
||||
ss.push_back(s);
|
||||
if (exp.completer)
|
||||
if (auto prefix = needsCompletion(s))
|
||||
if (auto prefix = needsCompletion(s)) {
|
||||
ss.push_back(*prefix);
|
||||
if (exp.completer)
|
||||
exp.completer(n, *prefix);
|
||||
} else
|
||||
ss.push_back(s);
|
||||
}
|
||||
exp.handler.fun(ss);
|
||||
expectedArgs.pop_front();
|
||||
|
@ -279,21 +282,22 @@ static void _completePath(std::string_view prefix, bool onlyDirs)
|
|||
{
|
||||
completionType = ctFilenames;
|
||||
glob_t globbuf;
|
||||
int flags = GLOB_NOESCAPE | GLOB_TILDE;
|
||||
int flags = GLOB_NOESCAPE;
|
||||
#ifdef GLOB_ONLYDIR
|
||||
if (onlyDirs)
|
||||
flags |= GLOB_ONLYDIR;
|
||||
#endif
|
||||
if (glob((std::string(prefix) + "*").c_str(), flags, nullptr, &globbuf) == 0) {
|
||||
// using expandTilde here instead of GLOB_TILDE(_CHECK) so that ~<Tab> expands to /home/user/
|
||||
if (glob((expandTilde(prefix) + "*").c_str(), flags, nullptr, &globbuf) == 0) {
|
||||
for (size_t i = 0; i < globbuf.gl_pathc; ++i) {
|
||||
if (onlyDirs) {
|
||||
auto st = lstat(globbuf.gl_pathv[i]);
|
||||
auto st = stat(globbuf.gl_pathv[i]);
|
||||
if (!S_ISDIR(st.st_mode)) continue;
|
||||
}
|
||||
completions->add(globbuf.gl_pathv[i]);
|
||||
}
|
||||
globfree(&globbuf);
|
||||
}
|
||||
globfree(&globbuf);
|
||||
}
|
||||
|
||||
void completePath(size_t, std::string_view prefix)
|
||||
|
@ -322,11 +326,6 @@ MultiCommand::MultiCommand(const Commands & commands_)
|
|||
.optional = true,
|
||||
.handler = {[=](std::string s) {
|
||||
assert(!command);
|
||||
if (auto prefix = needsCompletion(s)) {
|
||||
for (auto & [name, command] : commands)
|
||||
if (hasPrefix(name, *prefix))
|
||||
completions->add(name);
|
||||
}
|
||||
auto i = commands.find(s);
|
||||
if (i == commands.end()) {
|
||||
std::set<std::string> commandNames;
|
||||
|
@ -337,6 +336,11 @@ MultiCommand::MultiCommand(const Commands & commands_)
|
|||
}
|
||||
command = {s, i->second()};
|
||||
command->second->parent = this;
|
||||
}},
|
||||
.completer = {[&](size_t, std::string_view prefix) {
|
||||
for (auto & [name, command] : commands)
|
||||
if (hasPrefix(name, prefix))
|
||||
completions->add(name);
|
||||
}}
|
||||
});
|
||||
|
||||
@ -9,10 +9,9 @@ namespace nix {
|
|||
|
||||
const std::string nativeSystem = SYSTEM;
|
||||
|
||||
BaseError & BaseError::addTrace(std::optional<ErrPos> e, hintformat hint)
|
||||
void BaseError::addTrace(std::optional<ErrPos> e, hintformat hint)
|
||||
{
|
||||
err.traces.push_front(Trace { .pos = e, .hint = hint });
|
||||
return *this;
|
||||
}
|
||||
|
||||
// c++ std::exception descendants must have a 'const char* what()' function.
|
||||
|
@ -22,12 +21,9 @@ const std::string & BaseError::calcWhat() const
|
|||
if (what_.has_value())
|
||||
return *what_;
|
||||
else {
|
||||
err.name = sname();
|
||||
|
||||
std::ostringstream oss;
|
||||
showErrorInfo(oss, err, loggerSettings.showTrace);
|
||||
what_ = oss.str();
|
||||
|
||||
return *what_;
|
||||
}
|
||||
}
|
||||
@ -109,7 +109,6 @@ struct Trace {
|
|||
|
||||
struct ErrorInfo {
|
||||
Verbosity level;
|
||||
std::string name; // FIXME: rename
|
||||
hintformat msg;
|
||||
std::optional<ErrPos> errPos;
|
||||
std::list<Trace> traces;
|
||||
|
@ -162,8 +161,6 @@ public:
|
|||
: err(e)
|
||||
{ }
|
||||
|
||||
virtual const char* sname() const { return "BaseError"; }
|
||||
|
||||
#ifdef EXCEPTION_NEEDS_THROW_SPEC
|
||||
~BaseError() throw () { };
|
||||
const char * what() const throw () { return calcWhat().c_str(); }
|
||||
|
@ -175,12 +172,12 @@ public:
|
|||
const ErrorInfo & info() const { calcWhat(); return err; }
|
||||
|
||||
template<typename... Args>
|
||||
BaseError & addTrace(std::optional<ErrPos> e, const std::string & fs, const Args & ... args)
|
||||
void addTrace(std::optional<ErrPos> e, const std::string & fs, const Args & ... args)
|
||||
{
|
||||
return addTrace(e, hintfmt(fs, args...));
|
||||
addTrace(e, hintfmt(fs, args...));
|
||||
}
|
||||
|
||||
BaseError & addTrace(std::optional<ErrPos> e, hintformat hint);
|
||||
void addTrace(std::optional<ErrPos> e, hintformat hint);
|
||||
|
||||
bool hasTrace() const { return !err.traces.empty(); }
|
||||
};
|
||||
|
@ -190,7 +187,6 @@ public:
|
|||
{ \
|
||||
public: \
|
||||
using superClass::superClass; \
|
||||
virtual const char* sname() const override { return #newClass; } \
|
||||
}
|
||||
|
||||
MakeError(Error, BaseError);
|
||||
|
@ -209,8 +205,6 @@ public:
|
|||
auto hf = hintfmt(args...);
|
||||
err.msg = hintfmt("%1%: %2%", normaltxt(hf.str()), strerror(errNo));
|
||||
}
|
||||
|
||||
virtual const char* sname() const override { return "SysError"; }
|
||||
};
|
||||
|
||||
}
|
||||
@ -7,6 +7,7 @@ namespace nix {
|
|||
|
||||
std::map<ExperimentalFeature, std::string> stringifiedXpFeatures = {
|
||||
{ Xp::CaDerivations, "ca-derivations" },
|
||||
{ Xp::ImpureDerivations, "impure-derivations" },
|
||||
{ Xp::Flakes, "flakes" },
|
||||
{ Xp::NixCommand, "nix-command" },
|
||||
{ Xp::RecursiveNix, "recursive-nix" },
|
||||
@ -16,6 +16,7 @@ namespace nix {
|
|||
enum struct ExperimentalFeature
|
||||
{
|
||||
CaDerivations,
|
||||
ImpureDerivations,
|
||||
Flakes,
|
||||
NixCommand,
|
||||
RecursiveNix,
|
||||
|
@ -48,10 +49,6 @@ public:
|
|||
ExperimentalFeature missingFeature;
|
||||
|
||||
MissingExperimentalFeature(ExperimentalFeature);
|
||||
virtual const char * sname() const override
|
||||
{
|
||||
return "MissingExperimentalFeature";
|
||||
}
|
||||
};
|
||||
|
||||
}
|
||||
@ -155,7 +155,7 @@ static std::pair<std::optional<HashType>, bool> getParsedTypeAndSRI(std::string_
|
|||
{
|
||||
bool isSRI = false;
|
||||
|
||||
// Parse the has type before the separater, if there was one.
|
||||
// Parse the hash type before the separator, if there was one.
|
||||
std::optional<HashType> optParsedType;
|
||||
{
|
||||
auto hashRaw = splitPrefixTo(rest, ':');
|
||||
@ -93,13 +93,11 @@ public:
|
|||
|
||||
std::string gitRev() const
|
||||
{
|
||||
assert(type == htSHA1);
|
||||
return to_string(Base16, false);
|
||||
}
|
||||
|
||||
std::string gitShortRev() const
|
||||
{
|
||||
assert(type == htSHA1);
|
||||
return std::string(to_string(Base16, false), 0, 7);
|
||||
}
|
||||
|
||||
@ -357,7 +357,7 @@ Sink & operator << (Sink & sink, const Error & ex)
|
|||
sink
|
||||
<< "Error"
|
||||
<< info.level
|
||||
<< info.name
|
||||
<< "Error" // removed
|
||||
<< info.msg.str()
|
||||
<< 0 // FIXME: info.errPos
|
||||
<< info.traces.size();
|
||||
|
@ -426,11 +426,10 @@ Error readError(Source & source)
|
|||
auto type = readString(source);
|
||||
assert(type == "Error");
|
||||
auto level = (Verbosity) readInt(source);
|
||||
auto name = readString(source);
|
||||
auto name = readString(source); // removed
|
||||
auto msg = readString(source);
|
||||
ErrorInfo info {
|
||||
.level = level,
|
||||
.name = name,
|
||||
.msg = hintformat(std::move(format("%s") % msg)),
|
||||
};
|
||||
auto havePos = readNum<size_t>(source);
|
||||
@ -178,7 +178,7 @@ namespace nix {
|
|||
}
|
||||
|
||||
TEST(parseURL, parseFileURLWithQueryAndFragment) {
|
||||
auto s = "file:///none/of/your/business";
|
||||
auto s = "file:///none/of//your/business";
|
||||
auto parsed = parseURL(s);
|
||||
|
||||
ParsedURL expected {
|
||||
|
@ -186,7 +186,7 @@ namespace nix {
|
|||
.base = "",
|
||||
.scheme = "file",
|
||||
.authority = "",
|
||||
.path = "/none/of/your/business",
|
||||
.path = "/none/of//your/business",
|
||||
.query = (StringMap) { },
|
||||
.fragment = "",
|
||||
};
|
||||
@ -18,7 +18,7 @@ const static std::string userRegex = "(?:(?:" + unreservedRegex + "|" + pctEncod
|
|||
const static std::string authorityRegex = "(?:" + userRegex + "@)?" + hostRegex + "(?::[0-9]+)?";
|
||||
const static std::string pcharRegex = "(?:" + unreservedRegex + "|" + pctEncoded + "|" + subdelimsRegex + "|[:@])";
|
||||
const static std::string queryRegex = "(?:" + pcharRegex + "|[/? \"])*";
|
||||
const static std::string segmentRegex = "(?:" + pcharRegex + "+)";
|
||||
const static std::string segmentRegex = "(?:" + pcharRegex + "*)";
|
||||
const static std::string absPathRegex = "(?:(?:/" + segmentRegex + ")*/?)";
|
||||
const static std::string pathRegex = "(?:" + segmentRegex + "(?:/" + segmentRegex + ")*/?)";
|
||||
|
||||
@ -71,13 +71,11 @@ void clearEnv()
|
|||
unsetenv(name.first.c_str());
|
||||
}
|
||||
|
||||
void replaceEnv(std::map<std::string, std::string> newEnv)
|
||||
void replaceEnv(const std::map<std::string, std::string> & newEnv)
|
||||
{
|
||||
clearEnv();
|
||||
for (auto newEnvVar : newEnv)
|
||||
{
|
||||
for (auto & newEnvVar : newEnv)
|
||||
setenv(newEnvVar.first.c_str(), newEnvVar.second.c_str(), 1);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
@ -200,6 +198,17 @@ std::string_view baseNameOf(std::string_view path)
|
|||
}
|
||||
|
||||
|
||||
std::string expandTilde(std::string_view path)
|
||||
{
|
||||
// TODO: expand ~user ?
|
||||
auto tilde = path.substr(0, 2);
|
||||
if (tilde == "~/" || tilde == "~")
|
||||
return getHome() + std::string(path.substr(1));
|
||||
else
|
||||
return std::string(path);
|
||||
}
|
||||
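Given the implementation above, the behaviour is roughly (sketch, assuming `getHome()` returns `/home/alice`):

expandTilde("~/src/nix");  // -> "/home/alice/src/nix"
expandTilde("~");          // -> "/home/alice"
expandTilde("~alice/src"); // -> unchanged; "~user" expansion is the TODO above
expandTilde("/etc/hosts"); // -> unchanged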
|
||||
|
||||
bool isInDir(std::string_view path, std::string_view dir)
|
||||
{
|
||||
return path.substr(0, 1) == "/"
|
||||
|
@ -215,6 +224,15 @@ bool isDirOrInDir(std::string_view path, std::string_view dir)
|
|||
}
|
||||
|
||||
|
||||
struct stat stat(const Path & path)
|
||||
{
|
||||
struct stat st;
|
||||
if (stat(path.c_str(), &st))
|
||||
throw SysError("getting status of '%1%'", path);
|
||||
return st;
|
||||
}
|
||||
|
||||
|
||||
struct stat lstat(const Path & path)
|
||||
{
|
||||
struct stat st;
|
||||
|
@ -1261,9 +1279,9 @@ template<class C> C tokenizeString(std::string_view s, std::string_view separato
|
|||
{
|
||||
C result;
|
||||
auto pos = s.find_first_not_of(separators, 0);
|
||||
while (pos != std::string::npos) {
|
||||
while (pos != std::string_view::npos) {
|
||||
auto end = s.find_first_of(separators, pos + 1);
|
||||
if (end == std::string::npos) end = s.size();
|
||||
if (end == std::string_view::npos) end = s.size();
|
||||
result.insert(result.end(), std::string(s, pos, end - pos));
|
||||
pos = s.find_first_not_of(separators, end);
|
||||
}
|
||||
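A hedged usage sketch of the tokenizer above (any container supporting `insert(end, value)` works as `C`; `separators` is treated as a set of single characters, and empty tokens are skipped):

// Splitting a path-like string; yields {"usr", "bin", "nix"}.
auto parts = tokenizeString<std::vector<std::string>>("/usr//bin/nix", "/");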
|
@ -1473,6 +1491,7 @@ constexpr char base64Chars[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuv
|
|||
std::string base64Encode(std::string_view s)
|
||||
{
|
||||
std::string res;
|
||||
res.reserve((s.size() + 2) / 3 * 4);
|
||||
int data = 0, nbits = 0;
|
||||
|
||||
for (char c : s) {
|
||||
|
@ -1504,6 +1523,9 @@ std::string base64Decode(std::string_view s)
|
|||
}();
|
||||
|
||||
std::string res;
|
||||
// Some sequences are missing the padding consisting of up to two '='.
|
||||
// vvv
|
||||
res.reserve((s.size() + 2) / 4 * 3);
|
||||
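// Worked example (illustrative): 16 raw bytes encode to 22 base64 characters
// once the trailing "==" padding is dropped; (22 + 2) / 4 * 3 = 18 >= 16, so
// the reservation above is sufficient whether or not the padding is present.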
unsigned int d = 0, bits = 0;
|
||||
|
||||
for (char c : s) {
|
||||
|
@ -1690,7 +1712,9 @@ void setStackSize(size_t stackSize)
|
|||
#endif
|
||||
}
|
||||
|
||||
#if __linux__
|
||||
static AutoCloseFD fdSavedMountNamespace;
|
||||
#endif
|
||||
|
||||
void saveMountNamespace()
|
||||
{
|
||||
|
@ -1709,8 +1733,13 @@ void restoreMountNamespace()
|
|||
{
|
||||
#if __linux__
|
||||
try {
|
||||
auto savedCwd = absPath(".");
|
||||
|
||||
if (fdSavedMountNamespace && setns(fdSavedMountNamespace.get(), CLONE_NEWNS) == -1)
|
||||
throw SysError("restoring parent mount namespace");
|
||||
if (chdir(savedCwd.c_str()) == -1) {
|
||||
throw SysError("restoring cwd");
|
||||
}
|
||||
} catch (Error & e) {
|
||||
debug(e.msg());
|
||||
}
|
||||
@ -68,6 +68,9 @@ Path dirOf(const PathView path);
|
|||
following the final `/' (trailing slashes are removed). */
|
||||
std::string_view baseNameOf(std::string_view path);
|
||||
|
||||
/* Perform tilde expansion on a path. */
|
||||
std::string expandTilde(std::string_view path);
|
||||
|
||||
/* Check whether 'path' is a descendant of 'dir'. Both paths must be
|
||||
canonicalized. */
|
||||
bool isInDir(std::string_view path, std::string_view dir);
|
||||
|
@ -77,6 +80,7 @@ bool isInDir(std::string_view path, std::string_view dir);
|
|||
bool isDirOrInDir(std::string_view path, std::string_view dir);
|
||||
|
||||
/* Get status of `path'. */
|
||||
struct stat stat(const Path & path);
|
||||
struct stat lstat(const Path & path);
|
||||
|
||||
/* Return true iff the given path exists. */
|
||||
@ -35,7 +35,7 @@ struct InstallableDerivedPath : Installable
|
|||
|
||||
/**
|
||||
* Return the rewrites that are needed to resolve a string whose context is
|
||||
* included in `dependencies`
|
||||
* included in `dependencies`.
|
||||
*/
|
||||
StringPairs resolveRewrites(Store & store, const BuiltPaths dependencies)
|
||||
{
|
||||
|
@ -51,7 +51,7 @@ StringPairs resolveRewrites(Store & store, const BuiltPaths dependencies)
|
|||
}
|
||||
|
||||
/**
|
||||
* Resolve the given string assuming the given context
|
||||
* Resolve the given string assuming the given context.
|
||||
*/
|
||||
std::string resolveString(Store & store, const std::string & toResolve, const BuiltPaths dependencies)
|
||||
{
|
||||
|
@ -61,14 +61,18 @@ std::string resolveString(Store & store, const std::string & toResolve, const Bu
|
|||
|
||||
UnresolvedApp Installable::toApp(EvalState & state)
|
||||
{
|
||||
auto [cursor, attrPath] = getCursor(state);
|
||||
auto cursor = getCursor(state);
|
||||
auto attrPath = cursor->getAttrPath();
|
||||
|
||||
auto type = cursor->getAttr("type")->getString();
|
||||
|
||||
std::string expected = !attrPath.empty() && attrPath[0] == "apps" ? "app" : "derivation";
|
||||
if (type != expected)
|
||||
throw Error("attribute '%s' should have type '%s'", cursor->getAttrPathStr(), expected);
|
||||
|
||||
if (type == "app") {
|
||||
auto [program, context] = cursor->getAttr("program")->getStringWithContext();
|
||||
|
||||
|
||||
std::vector<StorePathWithOutputs> context2;
|
||||
for (auto & [path, name] : context)
|
||||
context2.push_back({path, {name}});
|
||||
|
@ -101,7 +105,7 @@ UnresolvedApp Installable::toApp(EvalState & state)
|
|||
}
|
||||
|
||||
else
|
||||
throw Error("attribute '%s' has unsupported type '%s'", attrPath, type);
|
||||
throw Error("attribute '%s' has unsupported type '%s'", cursor->getAttrPathStr(), type);
|
||||
}
|
||||
|
||||
// FIXME: move to libcmd
|
||||
@ -9,7 +9,7 @@ using namespace nix;
|
|||
|
||||
struct CmdBundle : InstallableCommand
|
||||
{
|
||||
std::string bundler = "github:matthewbauer/nix-bundle";
|
||||
std::string bundler = "github:NixOS/bundlers";
|
||||
std::optional<Path> outLink;
|
||||
|
||||
CmdBundle()
|
||||
@ -204,10 +204,10 @@ static StorePath getDerivationEnvironment(ref<Store> store, ref<Store> evalStore
output.second = DerivationOutput::Deferred { };
drv.env[output.first] = "";
}
auto h0 = hashDerivationModulo(*evalStore, drv, true);
const Hash & h = h0.requireNoFixedNonDeferred();
auto hashesModulo = hashDerivationModulo(*evalStore, drv, true);

for (auto & output : drv.outputs) {
Hash h = hashesModulo.hashes.at(output.first);
auto outPath = store->makeOutputPath(output.first, h, drv.name);
output.second = DerivationOutput::InputAddressed {
.path = outPath,
@ -24,12 +24,12 @@ std::string formatProtocol(unsigned int proto)
}

bool checkPass(const std::string & msg) {
logger->log(ANSI_GREEN "[PASS] " ANSI_NORMAL + msg);
notice(ANSI_GREEN "[PASS] " ANSI_NORMAL + msg);
return true;
}

bool checkFail(const std::string & msg) {
logger->log(ANSI_RED "[FAIL] " ANSI_NORMAL + msg);
notice(ANSI_RED "[FAIL] " ANSI_NORMAL + msg);
return false;
}
@ -16,7 +16,7 @@ struct CmdEval : MixJSON, InstallableCommand
std::optional<std::string> apply;
std::optional<Path> writeTo;

CmdEval()
CmdEval() : InstallableCommand(true /* supportReadOnlyMode */)
{
addFlag({
.longName = "raw",
@ -463,7 +463,7 @@ struct CmdFlakeCheck : FlakeCommand

for (auto & attr : *v.attrs) {
std::string name(attr.name);
if (name != "path" && name != "description")
if (name != "path" && name != "description" && name != "welcomeText")
throw Error("template '%s' has unsupported attribute '%s'", attrPath, name);
}
} catch (Error & e) {

@ -508,6 +508,7 @@ struct CmdFlakeCheck : FlakeCommand
name == "defaultBundler" ? "bundlers.<system>.default" :
name == "overlay" ? "overlays.default" :
name == "devShell" ? "devShells.<system>.default" :
name == "nixosModule" ? "nixosModules.default" :
"";
if (replacement != "")
warn("flake output attribute '%s' is deprecated; use '%s' instead", name, replacement);

@ -527,6 +528,16 @@ struct CmdFlakeCheck : FlakeCommand
}
}

else if (name == "formatter") {
state->forceAttrs(vOutput, pos);
for (auto & attr : *vOutput.attrs) {
checkSystemName(attr.name, *attr.pos);
checkApp(
fmt("%s.%s", name, attr.name),
*attr.value, *attr.pos);
}
}

else if (name == "packages" || name == "devShells") {
state->forceAttrs(vOutput, pos);
for (auto & attr : *vOutput.attrs) {

@ -704,7 +715,7 @@ struct CmdFlakeInitCommon : virtual Args, EvalCommand
defaultTemplateAttrPathsPrefixes,
lockFlags);

auto [cursor, attrPath] = installable.getCursor(*evalState);
auto cursor = installable.getCursor(*evalState);

auto templateDirAttr = cursor->getAttr("path");
auto templateDir = templateDirAttr->getString();

@ -1010,6 +1021,7 @@ struct CmdFlakeShow : FlakeCommand, MixJSON
|| (attrPath.size() == 1 && (
attrPath[0] == "defaultPackage"
|| attrPath[0] == "devShell"
|| attrPath[0] == "formatter"
|| attrPath[0] == "nixosConfigurations"
|| attrPath[0] == "nixosModules"
|| attrPath[0] == "defaultApp"

@ -1026,7 +1038,7 @@ struct CmdFlakeShow : FlakeCommand, MixJSON
}

else if (
(attrPath.size() == 2 && (attrPath[0] == "defaultPackage" || attrPath[0] == "devShell"))
(attrPath.size() == 2 && (attrPath[0] == "defaultPackage" || attrPath[0] == "devShell" || attrPath[0] == "formatter"))
|| (attrPath.size() == 3 && (attrPath[0] == "checks" || attrPath[0] == "packages" || attrPath[0] == "devShells"))
)
{
@ -177,8 +177,8 @@ Currently the `type` attribute can be one of the following:
attribute `url`.

In URL form, the schema must be `http://`, `https://` or `file://`
URLs and the extension must be `.zip`, `.tar`, `.tar.gz`, `.tar.xz`,
`.tar.bz2` or `.tar.zst`.
URLs and the extension must be `.zip`, `.tar`, `.tgz`, `.tar.gz`,
`.tar.xz`, `.tar.bz2` or `.tar.zst`.

* `github`: A more efficient way to fetch repositories from
GitHub. The following attributes are required:
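A minimal illustration of the tarball URL form described above, assuming a hypothetical server that publishes a flake as a `.tgz` archive (one of the newly accepted extensions); the URLs are made up:

```bash
# Bare URL form: recognized as a tarball flake by its extension.
nix flake metadata "https://example.org/src/myflake-1.0.tgz"

# Explicit form using the `tarball` type prefix.
nix flake metadata "tarball+https://example.org/src/myflake-1.0.tgz"
```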
53
src/nix/fmt.cc
Normal file

@ -0,0 +1,53 @@
#include "command.hh"
#include "run.hh"

using namespace nix;

struct CmdFmt : SourceExprCommand {
std::vector<std::string> args;

CmdFmt() { expectArgs({.label = "args", .handler = {&args}}); }

std::string description() override {
return "reformat your code in the standard style";
}

std::string doc() override {
return
#include "fmt.md"
;
}

Category category() override { return catSecondary; }

Strings getDefaultFlakeAttrPaths() override {
return Strings{"formatter." + settings.thisSystem.get()};
}

Strings getDefaultFlakeAttrPathPrefixes() override { return Strings{}; }

void run(ref<Store> store) {
auto evalState = getEvalState();
auto evalStore = getEvalStore();

auto installable = parseInstallable(store, ".");
auto app = installable->toApp(*evalState).resolve(evalStore, store);

Strings programArgs{app.program};

// Propagate arguments from the CLI
if (args.empty()) {
// Format the current flake out of the box
programArgs.push_back(".");
} else {
// User wants more power, let them decide which paths to include/exclude
for (auto &i : args) {
programArgs.push_back(i);
}
}

runProgramInStore(store, app.program, programArgs);
};
};

static auto r2 = registerCommand<CmdFmt>("fmt");
53
src/nix/fmt.md
Normal file

@ -0,0 +1,53 @@
R""(

# Examples

With [nixpkgs-fmt](https://github.com/nix-community/nixpkgs-fmt):

```nix
# flake.nix
{
outputs = { nixpkgs, self }: {
formatter.x86_64-linux = nixpkgs.legacyPackages.x86_64-linux.nixpkgs-fmt;
};
}
```

- Format the current flake: `$ nix fmt`

- Format a specific folder or file: `$ nix fmt ./folder ./file.nix`

With [nixfmt](https://github.com/serokell/nixfmt):

```nix
# flake.nix
{
outputs = { nixpkgs, self }: {
formatter.x86_64-linux = nixpkgs.legacyPackages.x86_64-linux.nixfmt;
};
}
```

- Format specific files: `$ nix fmt ./file1.nix ./file2.nix`

With [Alejandra](https://github.com/kamadorueda/alejandra):

```nix
# flake.nix
{
outputs = { nixpkgs, self }: {
formatter.x86_64-linux = nixpkgs.legacyPackages.x86_64-linux.alejandra;
};
}
```

- Format the current flake: `$ nix fmt`

- Format a specific folder or file: `$ nix fmt ./folder ./file.nix`

# Description

`nix fmt` will rewrite all Nix files (\*.nix) to a canonical format
using the formatter specified in your flake.

)""
@ -62,22 +62,21 @@ struct ProfileElement
return std::tuple(describe(), storePaths) < std::tuple(other.describe(), other.storePaths);
}

void updateStorePaths(ref<Store> evalStore, ref<Store> store, Installable & installable)
void updateStorePaths(
ref<Store> evalStore,
ref<Store> store,
const BuiltPaths & builtPaths)
{
// FIXME: respect meta.outputsToInstall
storePaths.clear();
for (auto & buildable : getBuiltPaths(evalStore, store, installable.toDerivedPaths())) {
for (auto & buildable : builtPaths) {
std::visit(overloaded {
[&](const BuiltPath::Opaque & bo) {
storePaths.insert(bo.path);
},
[&](const BuiltPath::Built & bfd) {
// TODO: Why are we querying if we know the output
// names already? Is it just to figure out what the
// default one is?
for (auto & output : store->queryDerivationOutputMap(bfd.drvPath)) {
for (auto & output : bfd.outputs)
storePaths.insert(output.second);
}
},
}, buildable.raw());
}

@ -98,19 +97,30 @@ struct ProfileManifest
auto json = nlohmann::json::parse(readFile(manifestPath));

auto version = json.value("version", 0);
if (version != 1)
throw Error("profile manifest '%s' has unsupported version %d", manifestPath, version);
std::string sUrl;
std::string sOriginalUrl;
switch(version){
case 1:
sUrl = "uri";
sOriginalUrl = "originalUri";
break;
case 2:
sUrl = "url";
sOriginalUrl = "originalUrl";
break;
default:
throw Error("profile manifest '%s' has unsupported version %d", manifestPath, version);
}

for (auto & e : json["elements"]) {
ProfileElement element;
for (auto & p : e["storePaths"])
element.storePaths.insert(state.store->parseStorePath((std::string) p));
element.active = e["active"];
if (e.value("uri", "") != "") {
auto originalUrl = e.value("originalUrl", e["originalUri"]);
if (e.value(sUrl,"") != "") {
element.source = ProfileElementSource{
parseFlakeRef(originalUrl),
parseFlakeRef(e["uri"]),
parseFlakeRef(e[sOriginalUrl]),
parseFlakeRef(e[sUrl]),
e["attrPath"]
};
}

@ -145,13 +155,13 @@ struct ProfileManifest
obj["active"] = element.active;
if (element.source) {
obj["originalUrl"] = element.source->originalRef.to_string();
obj["uri"] = element.source->resolvedRef.to_string();
obj["url"] = element.source->resolvedRef.to_string();
obj["attrPath"] = element.source->attrPath;
}
array.push_back(obj);
}
nlohmann::json json;
json["version"] = 1;
json["version"] = 2;
json["elements"] = array;
return json.dump();
}
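A hedged sketch of the on-disk effect of this change: a version-2 `manifest.json` stores `url`/`originalUrl` where version 1 used `uri`/`originalUri`. Assuming `jq` is installed and `~/.nix-profile` points at a profile managed by `nix profile`, the renamed fields can be inspected like this:

```bash
# Prints the manifest version and the source fields of the first element.
# ~/.nix-profile is an assumption; substitute the path to your profile.
jq '.version, .elements[0].originalUrl, .elements[0].url, .elements[0].attrPath' \
  ~/.nix-profile/manifest.json
```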
@ -245,6 +255,16 @@ struct ProfileManifest
}
};

static std::map<Installable *, BuiltPaths>
builtPathsPerInstallable(
const std::vector<std::pair<std::shared_ptr<Installable>, BuiltPath>> & builtPaths)
{
std::map<Installable *, BuiltPaths> res;
for (auto & [installable, builtPath] : builtPaths)
res[installable.get()].push_back(builtPath);
return res;
}

struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile
{
std::string description() override

@ -263,7 +283,9 @@ struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile
{
ProfileManifest manifest(*getEvalState(), *profile);

auto builtPaths = Installable::build(getEvalStore(), store, Realise::Outputs, installables, bmNormal);
auto builtPaths = builtPathsPerInstallable(
Installable::build2(
getEvalStore(), store, Realise::Outputs, installables, bmNormal));

for (auto & installable : installables) {
ProfileElement element;

@ -278,7 +300,7 @@ struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile
};
}

element.updateStorePaths(getEvalStore(), store, *installable);
element.updateStorePaths(getEvalStore(), store, builtPaths[installable.get()]);

manifest.elements.push_back(std::move(element));
}

@ -466,12 +488,14 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf
warn ("Use 'nix profile list' to see the current profile.");
}

auto builtPaths = Installable::build(getEvalStore(), store, Realise::Outputs, installables, bmNormal);
auto builtPaths = builtPathsPerInstallable(
Installable::build2(
getEvalStore(), store, Realise::Outputs, installables, bmNormal));

for (size_t i = 0; i < installables.size(); ++i) {
auto & installable = installables.at(i);
auto & element = manifest.elements[indices.at(i)];
element.updateStorePaths(getEvalStore(), store, *installable);
element.updateStorePaths(getEvalStore(), store, builtPaths[installable.get()]);
}

updateProfile(manifest.build(store));
@ -38,9 +38,12 @@ void runProgramInStore(ref<Store> store,
unshare(CLONE_NEWUSER) doesn't work in a multithreaded program
(which "nix" is), so we exec() a single-threaded helper program
(chrootHelper() below) to do the work. */
auto store2 = store.dynamic_pointer_cast<LocalStore>();
auto store2 = store.dynamic_pointer_cast<LocalFSStore>();

if (store2 && store->storeDir != store2->getRealStoreDir()) {
if (!store2)
throw Error("store '%s' is not a local store so it does not support command execution", store->getUri());

if (store->storeDir != store2->getRealStoreDir()) {
Strings helperArgs = { chrootHelperName, store->storeDir, store2->getRealStoreDir(), program };
for (auto & arg : args) helperArgs.push_back(arg);
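A hedged usage sketch of the case this code handles: running a program from a store whose physical location differs from the logical `/nix/store`, which is when the chroot helper described in the comment above is used. The store location below is an arbitrary example:

```bash
# Paths are still logically /nix/store/..., but the data lives under ~/alt-store,
# so the single-threaded chroot helper remaps the store before exec'ing the program.
nix run --store ~/alt-store nixpkgs#hello

# A store that is not backed by a local filesystem (for example a plain binary
# cache) is expected to fail with "... does not support command execution".
```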
@ -179,6 +182,7 @@ struct CmdRun : InstallableCommand
{
auto state = getEvalState();

lockFlags.applyNixConfig = true;
auto app = installable->toApp(*state).resolve(getEvalStore(), store);

Strings allArgs{app.program};
@ -165,8 +165,8 @@ struct CmdSearch : InstallableCommand, MixJSON
}
};

for (auto & [cursor, prefix] : installable->getCursors(*state))
visit(*cursor, parseAttrPath(*state, prefix), true);
for (auto & cursor : installable->getCursors(*state))
visit(*cursor, cursor->getAttrPath(), true);

if (!json && !results)
throw Error("no results for the given search term(s)!");
@ -78,6 +78,10 @@ struct CmdShowDerivation : InstallablesCommand
outputObj.attr("hashAlgo", makeContentAddressingPrefix(dof.method) + printHashType(dof.hashType));
},
[&](const DerivationOutput::Deferred &) {},
[&](const DerivationOutput::Impure & doi) {
outputObj.attr("hashAlgo", makeContentAddressingPrefix(doi.method) + printHashType(doi.hashType));
outputObj.attr("impure", true);
},
}, output.raw());
}
}
@ -64,8 +64,7 @@ rec {
dependentFixedOutput = mkDerivation {
name = "dependent-fixed-output";
outputHashMode = "recursive";
outputHashAlgo = "sha256";
outputHash = "sha256-QvtAMbUl/uvi+LCObmqOhvNOapHdA2raiI4xG5zI5pA=";
outputHash = "sha512-7aJcmSuEuYP5tGKcmGY8bRr/lrCjJlOxP2mIUjO/vMQeg6gx/65IbzRWES8EKiPDOs9z+wF30lEfcwxM/cT4pw==";
buildCommand = ''
cat ${dependentCA}/dep
echo foo > $out
@ -56,3 +56,15 @@ nix copy --to file://$cacheDir $caPath
fromPath = $caPath;
}
") = $caPath ]]

# Check that URL query parameters aren't allowed.
clearStore
narCache=$TEST_ROOT/nar-cache
rm -rf $narCache
(! nix eval -v --raw --expr "
builtins.fetchClosure {
fromStore = \"file://$cacheDir?local-nar-cache=$narCache\";
fromPath = $caPath;
}
")
(! [ -e $narCache ])
@ -7,7 +7,9 @@ fi

clearStore

repo=$TEST_ROOT/git
# Intentionally not in a canonical form
# See https://github.com/NixOS/nix/issues/6195
repo=$TEST_ROOT/./git

export _NIX_FORCE_HTTP=1
@ -7,7 +7,9 @@ fi

clearStore

repo=$TEST_ROOT/hg
# Intentionally not in a canonical form
# See https://github.com/NixOS/nix/issues/6195
repo=$TEST_ROOT/./hg

rm -rf $repo ${repo}-tmp $TEST_HOME/.cache/nix

@ -28,6 +30,12 @@ echo world > $repo/hello
hg commit --cwd $repo -m 'Bla2'
rev2=$(hg log --cwd $repo -r tip --template '{node}')

# Fetch an unclean branch.
echo unclean > $repo/hello
path=$(nix eval --impure --raw --expr "(builtins.fetchMercurial file://$repo).outPath")
[[ $(cat $path/hello) = unclean ]]
hg revert --cwd $repo --all

# Fetch the default branch.
path=$(nix eval --impure --raw --expr "(builtins.fetchMercurial file://$repo).outPath")
[[ $(cat $path/hello) = world ]]
@ -1,6 +1,6 @@
source common.sh

touch foo -t 202211111111
touch $TEST_ROOT/foo -t 202211111111
# We only check whether 2022-11-1* **:**:** is the last modified date since
# `lastModified` is transformed into UTC in `builtins.fetchTarball`.
[[ "$(nix eval --impure --raw --expr "(builtins.fetchTree \"path://$PWD/foo\").lastModifiedDate")" =~ 2022111.* ]]
[[ "$(nix eval --impure --raw --expr "(builtins.fetchTree \"path://$TEST_ROOT/foo\").lastModifiedDate")" =~ 2022111.* ]]
29
tests/flakes-run.sh
Normal file

@ -0,0 +1,29 @@
source common.sh

clearStore
rm -rf $TEST_HOME/.cache $TEST_HOME/.config $TEST_HOME/.local
cp ./shell-hello.nix ./config.nix $TEST_HOME
cd $TEST_HOME

cat <<EOF > flake.nix
{
outputs = {self}: {
packages.$system.pkgAsPkg = (import ./shell-hello.nix).hello;
packages.$system.appAsApp = self.packages.$system.appAsApp;

apps.$system.pkgAsApp = self.packages.$system.pkgAsPkg;
apps.$system.appAsApp = {
type = "app";
program = "\${(import ./shell-hello.nix).hello}/bin/hello";
};
};
}
EOF
nix run --no-write-lock-file .#appAsApp
nix run --no-write-lock-file .#pkgAsPkg

! nix run --no-write-lock-file .#pkgAsApp || fail "'nix run' shouldn’t accept an 'app' defined under 'packages'"
! nix run --no-write-lock-file .#appAsPkg || fail "elements of 'apps' should be of type 'app'"

clearStore
@ -376,6 +376,9 @@ cat > $templatesDir/flake.nix <<EOF
trivial = {
path = ./trivial;
description = "A trivial flake";
welcomeText = ''
Welcome to my trivial flake
'';
};
default = trivial;
};
30
tests/fmt.sh
Normal file

@ -0,0 +1,30 @@
source common.sh

set -o pipefail

clearStore
rm -rf $TEST_HOME/.cache $TEST_HOME/.config $TEST_HOME/.local

cp ./simple.nix ./simple.builder.sh ./fmt.simple.sh ./config.nix $TEST_HOME

cd $TEST_HOME

nix fmt --help | grep "Format"

cat << EOF > flake.nix
{
outputs = _: {
formatter.$system =
with import ./config.nix;
mkDerivation {
name = "formatter";
buildCommand = "mkdir -p \$out/bin; cp \${./fmt.simple.sh} \$out/bin/formatter";
};
};
}
EOF
nix fmt ./file ./folder | grep 'Formatting: ./file ./folder'
nix flake check
nix flake show | grep -P "package 'formatter'"

clearStore
1
tests/fmt.simple.sh
Executable file

@ -0,0 +1 @@
echo Formatting: "${@}"
63
tests/impure-derivations.nix
Normal file

@ -0,0 +1,63 @@
with import ./config.nix;

rec {

impure = mkDerivation {
name = "impure";
outputs = [ "out" "stuff" ];
buildCommand =
''
echo impure
x=$(< $TEST_ROOT/counter)
mkdir $out $stuff
echo $x > $out/n
ln -s $out/n $stuff/bla
printf $((x + 1)) > $TEST_ROOT/counter
'';
__impure = true;
impureEnvVars = [ "TEST_ROOT" ];
};

impureOnImpure = mkDerivation {
name = "impure-on-impure";
buildCommand =
''
echo impure-on-impure
x=$(< ${impure}/n)
mkdir $out
printf X$x > $out/n
ln -s ${impure.stuff} $out/symlink
ln -s $out $out/self
'';
__impure = true;
};

# This is not allowed.
inputAddressed = mkDerivation {
name = "input-addressed";
buildCommand =
''
cat ${impure} > $out
'';
};

contentAddressed = mkDerivation {
name = "content-addressed";
buildCommand =
''
echo content-addressed
x=$(< ${impureOnImpure}/n)
printf ''${x:0:1} > $out
'';
outputHashMode = "recursive";
outputHash = "sha256-eBYxcgkuWuiqs4cKNgKwkb3vY/HR0vVsJnqe8itJGcQ=";
};

inputAddressedAfterCA = mkDerivation {
name = "input-addressed-after-ca";
buildCommand =
''
cat ${contentAddressed} > $out
'';
};
}
57
tests/impure-derivations.sh
Normal file

@ -0,0 +1,57 @@
source common.sh

requireDaemonNewerThan "2.8pre20220311"

enableFeatures "ca-derivations ca-references impure-derivations"
restartDaemon

set -o pipefail

clearStore

# Basic test of impure derivations: building one a second time should not use the previous result.
printf 0 > $TEST_ROOT/counter

json=$(nix build -L --no-link --json --file ./impure-derivations.nix impure.all)
path1=$(echo $json | jq -r .[].outputs.out)
path1_stuff=$(echo $json | jq -r .[].outputs.stuff)
[[ $(< $path1/n) = 0 ]]
[[ $(< $path1_stuff/bla) = 0 ]]

[[ $(nix path-info --json $path1 | jq .[].ca) =~ fixed:r:sha256: ]]

path2=$(nix build -L --no-link --json --file ./impure-derivations.nix impure | jq -r .[].outputs.out)
[[ $(< $path2/n) = 1 ]]

# Test impure derivations that depend on impure derivations.
path3=$(nix build -L --no-link --json --file ./impure-derivations.nix impureOnImpure | jq -r .[].outputs.out)
[[ $(< $path3/n) = X2 ]]

path4=$(nix build -L --no-link --json --file ./impure-derivations.nix impureOnImpure | jq -r .[].outputs.out)
[[ $(< $path4/n) = X3 ]]

# Test that (self-)references work.
[[ $(< $path4/symlink/bla) = 3 ]]
[[ $(< $path4/self/n) = X3 ]]

# Input-addressed derivations cannot depend on impure derivations directly.
(! nix build -L --no-link --json --file ./impure-derivations.nix inputAddressed 2>&1) | grep 'depends on impure derivation'

drvPath=$(nix eval --json --file ./impure-derivations.nix impure.drvPath | jq -r .)
[[ $(nix show-derivation $drvPath | jq ".[\"$drvPath\"].outputs.out.impure") = true ]]
[[ $(nix show-derivation $drvPath | jq ".[\"$drvPath\"].outputs.stuff.impure") = true ]]

# Fixed-output derivations *can* depend on impure derivations.
path5=$(nix build -L --no-link --json --file ./impure-derivations.nix contentAddressed | jq -r .[].outputs.out)
[[ $(< $path5) = X ]]
[[ $(< $TEST_ROOT/counter) = 5 ]]

# And they should not be rebuilt.
path5=$(nix build -L --no-link --json --file ./impure-derivations.nix contentAddressed | jq -r .[].outputs.out)
[[ $(< $path5) = X ]]
[[ $(< $TEST_ROOT/counter) = 5 ]]

# Input-addressed derivations can depend on fixed-output derivations that depend on impure derivations.
path6=$(nix build -L --no-link --json --file ./impure-derivations.nix inputAddressedAfterCA | jq -r .[].outputs.out)
[[ $(< $path6) = X ]]
[[ $(< $TEST_ROOT/counter) = 5 ]]
@ -1,5 +1,6 @@
nix_tests = \
flakes.sh \
flakes-run.sh \
ca/gc.sh \
gc.sh \
remote-store.sh \

@ -79,6 +80,7 @@ nix_tests = \
post-hook.sh \
function-trace.sh \
flake-local-settings.sh \
fmt.sh \
eval-store.sh \
why-depends.sh \
import-derivation.sh \

@ -98,7 +100,8 @@ nix_tests = \
nix-profile.sh \
suggestions.sh \
store-ping.sh \
fetchClosure.sh
fetchClosure.sh \
impure-derivations.sh

ifeq ($(HAVE_LIBCPUID), 1)
nix_tests += compute-levels.sh