Forked from lix-project/lix

Commit b0d52855dc: Merge remote-tracking branch 'upstream/master' into trustless-remote-builder-simple
.github/STALE-BOT.md (vendored, new file, 35 lines)

@@ -0,0 +1,35 @@
# Stale bot information

- Thanks for your contribution!
- To remove the stale label, just leave a new comment.
- _How to find the right people to ping?_ → [`git blame`](https://git-scm.com/docs/git-blame) to the rescue! (or GitHub's history and blame buttons.)
- You can always ask for help on [our Discourse Forum](https://discourse.nixos.org/) or on the [#nixos IRC channel](https://webchat.freenode.net/#nixos).

## Suggestions for PRs

1. GitHub sometimes doesn't notify people who previously commented on or reviewed a PR when you (force-)push new commits. If you have addressed the review comments, you can [officially ask for a review](https://docs.github.com/en/free-pro-team@latest/github/collaborating-with-issues-and-pull-requests/requesting-a-pull-request-review) from those reviewers or from anyone else.
2. If it is unfinished but you plan to finish it, please mark it as a draft.
3. If you don't expect to work on it any time soon, closing it with a short comment may encourage someone else to pick up your work.
4. To get things rolling again, rebase the PR against the target branch and address valid comments.
5. If you need a review to move forward, ask in [the Discourse thread for PRs that need help](https://discourse.nixos.org/t/prs-in-distress/3604).
6. If all you need is a merge, check the git history to find and [request reviews](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/requesting-a-pull-request-review) from people who usually merge related contributions.

## Suggestions for issues

1. If it is resolved (either for you personally, or in general), please consider closing it.
2. If this might still be an issue, but you are not interested in promoting its resolution, please consider closing it while encouraging others to take over and reopen an issue if they care enough.
3. If you still have interest in resolving it, try to ping somebody who you believe might have an interest in the topic. Consider discussing the problem in [our Discourse Forum](https://discourse.nixos.org/).
4. As with all open source projects, your best option is to submit a Pull Request that addresses this issue. We :heart: this attitude!

**Memorandum on closing issues**

Don't be afraid to close an issue that holds valuable information. Closed issues stay in the system for people to search, read, cross-reference, or even reopen--nothing is lost! Closing obsolete issues is an important way to help maintainers focus their time and effort.

## Useful GitHub search queries

- [Open PRs with any stale-bot interaction](https://github.com/NixOS/nix/pulls?q=is%3Apr+is%3Aopen+commenter%3Aapp%2Fstale+)
- [Open PRs with any stale-bot interaction and `stale`](https://github.com/NixOS/nix/pulls?q=is%3Apr+is%3Aopen+commenter%3Aapp%2Fstale+label%3A%22stale%22)
- [Open PRs with any stale-bot interaction and NOT `stale`](https://github.com/NixOS/nix/pulls?q=is%3Apr+is%3Aopen+commenter%3Aapp%2Fstale+-label%3A%22stale%22+)
- [Open Issues with any stale-bot interaction](https://github.com/NixOS/nix/issues?q=is%3Aissue+is%3Aopen+commenter%3Aapp%2Fstale+)
- [Open Issues with any stale-bot interaction and `stale`](https://github.com/NixOS/nix/issues?q=is%3Aissue+is%3Aopen+commenter%3Aapp%2Fstale+label%3A%22stale%22+)
- [Open Issues with any stale-bot interaction and NOT `stale`](https://github.com/NixOS/nix/issues?q=is%3Aissue+is%3Aopen+commenter%3Aapp%2Fstale+-label%3A%22stale%22+)
.github/stale.yml (vendored, new file, 10 lines)

@@ -0,0 +1,10 @@
# Configuration for probot-stale - https://github.com/probot/stale
daysUntilStale: 180
daysUntilClose: 365
exemptLabels:
  - "critical"
staleLabel: "stale"
markComment: |
  I marked this as stale due to inactivity. → [More info](https://github.com/NixOS/nix/blob/master/.github/STALE-BOT.md)
closeComment: |
  I closed this issue due to inactivity. → [More info](https://github.com/NixOS/nix/blob/master/.github/STALE-BOT.md)
Makefile (1 line changed)

@@ -7,6 +7,7 @@ makefiles = \
src/libfetchers/local.mk \
src/libmain/local.mk \
src/libexpr/local.mk \
src/libcmd/local.mk \
src/nix/local.mk \
src/resolve-system-dependencies/local.mk \
scripts/local.mk \
@@ -13,41 +13,64 @@ let
+ showSynopsis { inherit command; args = def.args; }
+ (if def.commands or {} != {}
then
let
categories = sort (x: y: x.id < y.id) (unique (map (cmd: cmd.category) (attrValues def.commands)));
listCommands = cmds:
concatStrings (map (name:
"* [`${command} ${name}`](./${appendName filename name}.md) - ${cmds.${name}.description}\n")
(attrNames cmds));
in
"where *subcommand* is one of the following:\n\n"
# FIXME: group by category
+ concatStrings (map (name:
"* [`${command} ${name}`](./${appendName filename name}.md) - ${def.commands.${name}.description}\n")
(attrNames def.commands))
+ "\n"
+ (if length categories > 1
then
concatStrings (map
(cat:
"**${toString cat.description}:**\n\n"
+ listCommands (filterAttrs (n: v: v.category == cat) def.commands)
+ "\n"
) categories)
+ "\n"
else
listCommands def.commands
+ "\n")
else "")
+ (if def ? doc
then def.doc + "\n\n"
else "")
+ (let s = showFlags def.flags; in
+ (let s = showOptions def.flags; in
if s != ""
then "# Flags\n\n${s}"
then "# Options\n\n${s}"
else "")
;

appendName = filename: name: (if filename == "nix" then "nix3" else filename) + "-" + name;

showFlags = flags:
concatStrings
(map (longName:
let flag = flags.${longName}; in
if flag.category or "" != "config"
then
" - `--${longName}`"
+ (if flag ? shortName then " / `-${flag.shortName}`" else "")
+ (if flag ? labels then " " + (concatStringsSep " " (map (s: "*${s}*") flag.labels)) else "")
+ " \n"
+ " " + flag.description + "\n\n"
else "")
(attrNames flags));
showOptions = flags:
let
categories = sort builtins.lessThan (unique (map (cmd: cmd.category) (attrValues flags)));
in
concatStrings (map
(cat:
(if cat != ""
then "**${cat}:**\n\n"
else "")
+ concatStrings
(map (longName:
let
flag = flags.${longName};
in
" - `--${longName}`"
+ (if flag ? shortName then " / `-${flag.shortName}`" else "")
+ (if flag ? labels then " " + (concatStringsSep " " (map (s: "*${s}*") flag.labels)) else "")
+ " \n"
+ " " + flag.description + "\n\n"
) (attrNames (filterAttrs (n: v: v.category == cat) flags))))
categories);

showSynopsis =
{ command, args }:
"`${command}` [*flags*...] ${concatStringsSep " "
"`${command}` [*option*...] ${concatStringsSep " "
(map (arg: "*${arg.label}*" + (if arg ? arity then "" else "...")) args)}\n\n";

processCommand = { command, def, filename }:
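To make the documentation-generator change above concrete, here is a minimal, hypothetical evaluation sketch (not part of the diff) of the `showSynopsis` helper; the command definition and its values are made up for illustration only:

```nix
# Illustrative input only; `showSynopsis` is the helper defined in the hunk above.
showSynopsis {
  command = "nix build";
  args = [ { label = "installable"; } ];
}
# old rendering: "`nix build` [*flags*...] *installable*...\n\n"
# new rendering: "`nix build` [*option*...] *installable*...\n\n"
```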
@@ -25,5 +25,4 @@ order of precedence (from strongest to weakest binding).
| Inequality | *e1* `!=` *e2* | none | Inequality. | 11 |
| Logical AND | *e1* `&&` *e2* | left | Logical AND. | 12 |
| Logical OR | *e1* `\|\|` *e2* | left | Logical OR. | 13 |
| Logical Implication | *e1* `->` *e2* | none | Logical implication (equivalent to `!e1 \|\|
e2`). | 14 |
| Logical Implication | *e1* `->` *e2* | none | Logical implication (equivalent to `!e1 \|\| e2`). | 14 |
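As a quick aside on the operator documented in this hunk, the following sketch (not part of the diff) shows that `->` evaluates exactly like `!e1 || e2`; the `implies` helper is a made-up name used only for comparison:

```nix
let
  implies = e1: e2: !e1 || e2;   # hypothetical helper, for comparison only
in [
  (true -> false)       # false
  (implies true false)  # false
  (false -> true)       # true
  (implies false true)  # true
]
```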
@@ -1,7 +1,15 @@
with builtins;

{
rec {
splitLines = s: filter (x: !isList x) (split "\n" s);

concatStrings = concatStringsSep "";

# FIXME: O(n^2)
unique = foldl' (acc: e: if elem e acc then acc else acc ++ [ e ]) [];

nameValuePair = name: value: { inherit name value; };

filterAttrs = pred: set:
listToAttrs (concatMap (name: let v = set.${name}; in if pred name v then [(nameValuePair name v)] else []) (attrNames set));
}
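A small usage sketch of the helpers defined above (assuming the file can be imported as `utils.nix`; path and values are illustrative, not from the diff):

```nix
let
  utils = import ./utils.nix;                               # path is an assumption
in {
  ex1 = utils.unique [ 1 1 2 3 2 ];                         # => [ 1 2 3 ]
  ex2 = utils.filterAttrs (n: v: v > 1) { a = 1; b = 2; };  # => { b = 2; }
  ex3 = utils.splitLines "foo\nbar";                        # => [ "foo" "bar" ]
}
```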
flake.lock (17 lines changed)

@@ -1,21 +1,5 @@
{
  "nodes": {
    "lowdown-src": {
      "flake": false,
      "locked": {
        "lastModified": 1598695561,
        "narHash": "sha256-gyH/5j+h/nWw0W8AcR2WKvNBUsiQ7QuxqSJNXAwV+8E=",
        "owner": "kristapsdz",
        "repo": "lowdown",
        "rev": "1705b4a26fbf065d9574dce47a94e8c7c79e052f",
        "type": "github"
      },
      "original": {
        "owner": "kristapsdz",
        "repo": "lowdown",
        "type": "github"
      }
    },
    "nixpkgs": {
      "locked": {
        "lastModified": 1602702596,

@@ -33,7 +17,6 @@
    },
    "root": {
      "inputs": {
        "lowdown-src": "lowdown-src",
        "nixpkgs": "nixpkgs"
      }
    }
flake.nix (21 lines changed)

@@ -2,9 +2,9 @@
description = "The purely functional package manager";

inputs.nixpkgs.url = "nixpkgs/nixos-20.09-small";
inputs.lowdown-src = { url = "github:kristapsdz/lowdown"; flake = false; };
#inputs.lowdown-src = { url = "github:kristapsdz/lowdown"; flake = false; };

outputs = { self, nixpkgs, lowdown-src }:
outputs = { self, nixpkgs }:

let

@@ -115,7 +115,7 @@
# 'nix.perl-bindings' packages.
overlay = final: prev: {

nix = with final; with commonDeps pkgs; (stdenv.mkDerivation {
nix = with final; with commonDeps pkgs; stdenv.mkDerivation {
name = "nix-${version}";
inherit version;

@@ -163,9 +163,8 @@
installCheckFlags = "sysconfdir=$(out)/etc";

separateDebugInfo = true;
}) // {

perl-bindings = with final; stdenv.mkDerivation {
passthru.perl-bindings = with final; stdenv.mkDerivation {
name = "nix-perl-${version}";

src = self;

@@ -199,17 +198,15 @@

};

lowdown = with final; stdenv.mkDerivation {
name = "lowdown-0.7.1";
lowdown = with final; stdenv.mkDerivation rec {
name = "lowdown-0.8.0";

/*
src = fetchurl {
url = https://kristaps.bsd.lv/lowdown/snapshots/lowdown-0.7.1.tar.gz;
hash = "sha512-1daoAQfYD0LdhK6aFhrSQvadjc5GsSPBZw0fJDb+BEHYMBLjqiUl2A7H8N+l0W4YfGKqbsPYSrCy4vct+7U6FQ==";
url = "https://kristaps.bsd.lv/lowdown/snapshots/${name}.tar.gz";
hash = "sha512-U9WeGoInT9vrawwa57t6u9dEdRge4/P+0wLxmQyOL9nhzOEUU2FRz2Be9H0dCjYE7p2v3vCXIYk40M+jjULATw==";
};
*/

src = lowdown-src;
#src = lowdown-src;

outputs = [ "out" "bin" "dev" ];
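The switch to `mkDerivation rec` in this hunk matters because the tarball URL now refers to `name`. A minimal sketch of what `rec` enables (values illustrative, not from the diff):

```nix
# With `rec`, attributes may refer to their siblings:
rec {
  name = "lowdown-0.8.0";
  url = "https://kristaps.bsd.lv/lowdown/snapshots/${name}.tar.gz";
}
# => url evaluates to ".../lowdown-0.8.0.tar.gz"
```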
@@ -166,9 +166,15 @@ fi
mkdir -p $dest/store

printf "copying Nix to %s..." "${dest}/store" >&2
# Insert a newline if no progress is shown.
if [ ! -t 0 ]; then
echo ""
fi

for i in $(cd "$self/store" >/dev/null && echo ./*); do
printf "." >&2
if [ -t 0 ]; then
printf "." >&2
fi
i_tmp="$dest/store/$i.$$"
if [ -e "$i_tmp" ]; then
rm -rf "$i_tmp"
@@ -17,7 +17,7 @@
#include "store-api.hh"
#include "derivations.hh"
#include "local-store.hh"
#include "../nix/legacy.hh"
#include "legacy.hh"

using namespace nix;
using std::cin;

@@ -71,11 +71,15 @@ static int main_build_remote(int argc, char * * argv)

initPlugins();

auto store = openStore().cast<LocalStore>();
auto store = openStore();

/* It would be more appropriate to use $XDG_RUNTIME_DIR, since
that gets cleared on reboot, but it wouldn't work on macOS. */
currentLoad = store->stateDir + "/current-load";
auto currentLoadName = "/current-load";
if (auto localStore = store.dynamic_pointer_cast<LocalFSStore>())
currentLoad = std::string { localStore->stateDir } + currentLoadName;
else
currentLoad = settings.nixStateDir + currentLoadName;

std::shared_ptr<Store> sshStore;
AutoCloseFD bestSlotLock;

@@ -172,13 +176,14 @@ static int main_build_remote(int argc, char * * argv)
else
{
// build the hint template.
string hintstring = "derivation: %s\nrequired (system, features): (%s, %s)";
hintstring += "\n%s available machines:";
hintstring += "\n(systems, maxjobs, supportedFeatures, mandatoryFeatures)";
string errorText =
"Failed to find a machine for remote build!\n"
"derivation: %s\nrequired (system, features): (%s, %s)";
errorText += "\n%s available machines:";
errorText += "\n(systems, maxjobs, supportedFeatures, mandatoryFeatures)";

for (unsigned int i = 0; i < machines.size(); ++i) {
hintstring += "\n(%s, %s, %s, %s)";
}
for (unsigned int i = 0; i < machines.size(); ++i)
errorText += "\n(%s, %s, %s, %s)";

// add the template values.
string drvstr;

@@ -187,25 +192,21 @@ static int main_build_remote(int argc, char * * argv)
else
drvstr = "<unknown>";

auto hint = hintformat(hintstring);
hint
% drvstr
% neededSystem
% concatStringsSep<StringSet>(", ", requiredFeatures)
% machines.size();
auto error = hintformat(errorText);
error
% drvstr
% neededSystem
% concatStringsSep<StringSet>(", ", requiredFeatures)
% machines.size();

for (auto & m : machines) {
hint % concatStringsSep<vector<string>>(", ", m.systemTypes)
% m.maxJobs
% concatStringsSep<StringSet>(", ", m.supportedFeatures)
% concatStringsSep<StringSet>(", ", m.mandatoryFeatures);
}
for (auto & m : machines)
error
% concatStringsSep<vector<string>>(", ", m.systemTypes)
% m.maxJobs
% concatStringsSep<StringSet>(", ", m.supportedFeatures)
% concatStringsSep<StringSet>(", ", m.mandatoryFeatures);

logErrorInfo(canBuildLocally ? lvlChatty : lvlWarn, {
.name = "Remote build",
.description = "Failed to find a machine for remote build!",
.hint = hint
});
printMsg(canBuildLocally ? lvlChatty : lvlWarn, error);

std::cerr << "# decline\n";
}

@@ -230,12 +231,9 @@ static int main_build_remote(int argc, char * * argv)

} catch (std::exception & e) {
auto msg = chomp(drainFD(5, false));
logError({
.name = "Remote build",
.hint = hintfmt("cannot build on '%s': %s%s",
bestMachine->storeUri, e.what(),
(msg.empty() ? "" : ": " + msg))
});
printError("cannot build on '%s': %s%s",
bestMachine->storeUri, e.what(),
msg.empty() ? "" : ": " + msg);
bestMachine->enabled = false;
continue;
}

@@ -298,10 +296,11 @@ connected:

if (!missing.empty()) {
Activity act(*logger, lvlTalkative, actUnknown, fmt("copying outputs from '%s'", storeUri));
for (auto & i : missing)
store->locksHeld.insert(store->printStorePath(i)); /* FIXME: ugly */
if (auto localStore = store.dynamic_pointer_cast<LocalStore>())
for (auto & i : missing)
localStore->locksHeld.insert(store->printStorePath(i)); /* FIXME: ugly */
/* No `copyStorePathImpl` because we always trust ourselves. */
copyPaths(sshStore2, store, missing, NoRepair, NoCheckSigs, NoSubstitute);
copyPaths(ref<Store>(sshStore), store, missing, NoRepair, NoCheckSigs, NoSubstitute);
}

return 0;
@ -27,11 +27,6 @@ nix::Commands RegisterCommand::getCommandsFor(const std::vector<std::string> & p
|
|||
return res;
|
||||
}
|
||||
|
||||
void NixMultiCommand::printHelp(const string & programName, std::ostream & out)
|
||||
{
|
||||
MultiCommand::printHelp(programName, out);
|
||||
}
|
||||
|
||||
nlohmann::json NixMultiCommand::toJSON()
|
||||
{
|
||||
// FIXME: use Command::toJSON() as well.
|
||||
|
@ -59,13 +54,14 @@ void StoreCommand::run()
|
|||
run(getStore());
|
||||
}
|
||||
|
||||
StorePathsCommand::StorePathsCommand(bool recursive)
|
||||
RealisedPathsCommand::RealisedPathsCommand(bool recursive)
|
||||
: recursive(recursive)
|
||||
{
|
||||
if (recursive)
|
||||
addFlag({
|
||||
.longName = "no-recursive",
|
||||
.description = "Apply operation to specified paths only.",
|
||||
.category = installablesCategory,
|
||||
.handler = {&this->recursive, false},
|
||||
});
|
||||
else
|
||||
|
@ -73,36 +69,52 @@ StorePathsCommand::StorePathsCommand(bool recursive)
|
|||
.longName = "recursive",
|
||||
.shortName = 'r',
|
||||
.description = "Apply operation to closure of the specified paths.",
|
||||
.category = installablesCategory,
|
||||
.handler = {&this->recursive, true},
|
||||
});
|
||||
|
||||
mkFlag(0, "all", "Apply the operation to every store path.", &all);
|
||||
addFlag({
|
||||
.longName = "all",
|
||||
.description = "Apply the operation to every store path.",
|
||||
.category = installablesCategory,
|
||||
.handler = {&all, true},
|
||||
});
|
||||
}
|
||||
|
||||
void StorePathsCommand::run(ref<Store> store)
|
||||
void RealisedPathsCommand::run(ref<Store> store)
|
||||
{
|
||||
StorePaths storePaths;
|
||||
|
||||
std::vector<RealisedPath> paths;
|
||||
if (all) {
|
||||
if (installables.size())
|
||||
throw UsageError("'--all' does not expect arguments");
|
||||
// XXX: Only uses opaque paths, ignores all the realisations
|
||||
for (auto & p : store->queryAllValidPaths())
|
||||
storePaths.push_back(p);
|
||||
}
|
||||
|
||||
else {
|
||||
for (auto & p : toStorePaths(store, realiseMode, operateOn, installables))
|
||||
storePaths.push_back(p);
|
||||
|
||||
paths.push_back(p);
|
||||
} else {
|
||||
auto pathSet = toRealisedPaths(store, realiseMode, operateOn, installables);
|
||||
if (recursive) {
|
||||
StorePathSet closure;
|
||||
store->computeFSClosure(StorePathSet(storePaths.begin(), storePaths.end()), closure, false, false);
|
||||
storePaths.clear();
|
||||
for (auto & p : closure)
|
||||
storePaths.push_back(p);
|
||||
auto roots = std::move(pathSet);
|
||||
pathSet = {};
|
||||
RealisedPath::closure(*store, roots, pathSet);
|
||||
}
|
||||
for (auto & path : pathSet)
|
||||
paths.push_back(path);
|
||||
}
|
||||
|
||||
run(store, std::move(paths));
|
||||
}
|
||||
|
||||
StorePathsCommand::StorePathsCommand(bool recursive)
|
||||
: RealisedPathsCommand(recursive)
|
||||
{
|
||||
}
|
||||
|
||||
void StorePathsCommand::run(ref<Store> store, std::vector<RealisedPath> paths)
|
||||
{
|
||||
StorePaths storePaths;
|
||||
for (auto & p : paths)
|
||||
storePaths.push_back(p.path());
|
||||
|
||||
run(store, std::move(storePaths));
|
||||
}
|
||||
|
|
@ -23,10 +23,10 @@ static constexpr Command::Category catSecondary = 100;
|
|||
static constexpr Command::Category catUtility = 101;
|
||||
static constexpr Command::Category catNixInstallation = 102;
|
||||
|
||||
static constexpr auto installablesCategory = "Options that change the interpretation of installables";
|
||||
|
||||
struct NixMultiCommand : virtual MultiCommand, virtual Command
|
||||
{
|
||||
void printHelp(const string & programName, std::ostream & out) override;
|
||||
|
||||
nlohmann::json toJSON() override;
|
||||
};
|
||||
|
||||
|
@ -141,7 +141,7 @@ private:
|
|||
};
|
||||
|
||||
/* A command that operates on zero or more store paths. */
|
||||
struct StorePathsCommand : public InstallablesCommand
|
||||
struct RealisedPathsCommand : public InstallablesCommand
|
||||
{
|
||||
private:
|
||||
|
||||
|
@ -154,17 +154,28 @@ protected:
|
|||
|
||||
public:
|
||||
|
||||
StorePathsCommand(bool recursive = false);
|
||||
RealisedPathsCommand(bool recursive = false);
|
||||
|
||||
using StoreCommand::run;
|
||||
|
||||
virtual void run(ref<Store> store, std::vector<StorePath> storePaths) = 0;
|
||||
virtual void run(ref<Store> store, std::vector<RealisedPath> paths) = 0;
|
||||
|
||||
void run(ref<Store> store) override;
|
||||
|
||||
bool useDefaultInstallables() override { return !all; }
|
||||
};
|
||||
|
||||
struct StorePathsCommand : public RealisedPathsCommand
|
||||
{
|
||||
StorePathsCommand(bool recursive = false);
|
||||
|
||||
using RealisedPathsCommand::run;
|
||||
|
||||
virtual void run(ref<Store> store, std::vector<StorePath> storePaths) = 0;
|
||||
|
||||
void run(ref<Store> store, std::vector<RealisedPath> paths) override;
|
||||
};
|
||||
|
||||
/* A command that operates on exactly one store path. */
|
||||
struct StorePathCommand : public InstallablesCommand
|
||||
{
|
||||
|
@ -218,6 +229,12 @@ std::set<StorePath> toDerivations(ref<Store> store,
|
|||
std::vector<std::shared_ptr<Installable>> installables,
|
||||
bool useDeriver = false);
|
||||
|
||||
std::set<RealisedPath> toRealisedPaths(
|
||||
ref<Store> store,
|
||||
Realise mode,
|
||||
OperateOn operateOn,
|
||||
std::vector<std::shared_ptr<Installable>> installables);
|
||||
|
||||
/* Helper function to generate args that invoke $EDITOR on
|
||||
filename:lineno. */
|
||||
Strings editorFor(const Pos & pos);
|
|
@ -58,39 +58,47 @@ void completeFlakeInputPath(
|
|||
|
||||
MixFlakeOptions::MixFlakeOptions()
|
||||
{
|
||||
auto category = "Common flake-related options";
|
||||
|
||||
addFlag({
|
||||
.longName = "recreate-lock-file",
|
||||
.description = "Recreate the flake's lock file from scratch.",
|
||||
.category = category,
|
||||
.handler = {&lockFlags.recreateLockFile, true}
|
||||
});
|
||||
|
||||
addFlag({
|
||||
.longName = "no-update-lock-file",
|
||||
.description = "Do not allow any updates to the flake's lock file.",
|
||||
.category = category,
|
||||
.handler = {&lockFlags.updateLockFile, false}
|
||||
});
|
||||
|
||||
addFlag({
|
||||
.longName = "no-write-lock-file",
|
||||
.description = "Do not write the flake's newly generated lock file.",
|
||||
.category = category,
|
||||
.handler = {&lockFlags.writeLockFile, false}
|
||||
});
|
||||
|
||||
addFlag({
|
||||
.longName = "no-registries",
|
||||
.description = "Don't allow lookups in the flake registries.",
|
||||
.category = category,
|
||||
.handler = {&lockFlags.useRegistries, false}
|
||||
});
|
||||
|
||||
addFlag({
|
||||
.longName = "commit-lock-file",
|
||||
.description = "Commit changes to the flake's lock file.",
|
||||
.category = category,
|
||||
.handler = {&lockFlags.commitLockFile, true}
|
||||
});
|
||||
|
||||
addFlag({
|
||||
.longName = "update-input",
|
||||
.description = "Update a specific flake input (ignoring its previous entry in the lock file).",
|
||||
.category = category,
|
||||
.labels = {"input-path"},
|
||||
.handler = {[&](std::string s) {
|
||||
lockFlags.inputUpdates.insert(flake::parseInputPath(s));
|
||||
|
@ -104,6 +112,7 @@ MixFlakeOptions::MixFlakeOptions()
|
|||
addFlag({
|
||||
.longName = "override-input",
|
||||
.description = "Override a specific flake input (e.g. `dwarffs/nixpkgs`).",
|
||||
.category = category,
|
||||
.labels = {"input-path", "flake-url"},
|
||||
.handler = {[&](std::string inputPath, std::string flakeRef) {
|
||||
lockFlags.inputOverrides.insert_or_assign(
|
||||
|
@ -115,6 +124,7 @@ MixFlakeOptions::MixFlakeOptions()
|
|||
addFlag({
|
||||
.longName = "inputs-from",
|
||||
.description = "Use the inputs of the specified flake as registry entries.",
|
||||
.category = category,
|
||||
.labels = {"flake-url"},
|
||||
.handler = {[&](std::string flakeRef) {
|
||||
auto evalState = getEvalState();
|
||||
|
@ -144,6 +154,7 @@ SourceExprCommand::SourceExprCommand()
|
|||
.longName = "file",
|
||||
.shortName = 'f',
|
||||
.description = "Interpret installables as attribute paths relative to the Nix expression stored in *file*.",
|
||||
.category = installablesCategory,
|
||||
.labels = {"file"},
|
||||
.handler = {&file},
|
||||
.completer = completePath
|
||||
|
@ -152,6 +163,7 @@ SourceExprCommand::SourceExprCommand()
|
|||
addFlag({
|
||||
.longName = "expr",
|
||||
.description = "Interpret installables as attribute paths relative to the Nix expression *expr*.",
|
||||
.category = installablesCategory,
|
||||
.labels = {"expr"},
|
||||
.handler = {&expr}
|
||||
});
|
||||
|
@ -159,6 +171,7 @@ SourceExprCommand::SourceExprCommand()
|
|||
addFlag({
|
||||
.longName = "derivation",
|
||||
.description = "Operate on the store derivation rather than its outputs.",
|
||||
.category = installablesCategory,
|
||||
.handler = {&operateOn, OperateOn::Derivation},
|
||||
});
|
||||
}
|
||||
|
@ -691,23 +704,42 @@ Buildables build(ref<Store> store, Realise mode,
|
|||
return buildables;
|
||||
}
|
||||
|
||||
StorePathSet toStorePaths(ref<Store> store,
|
||||
Realise mode, OperateOn operateOn,
|
||||
std::set<RealisedPath> toRealisedPaths(
|
||||
ref<Store> store,
|
||||
Realise mode,
|
||||
OperateOn operateOn,
|
||||
std::vector<std::shared_ptr<Installable>> installables)
|
||||
{
|
||||
StorePathSet outPaths;
|
||||
|
||||
std::set<RealisedPath> res;
|
||||
if (operateOn == OperateOn::Output) {
|
||||
for (auto & b : build(store, mode, installables))
|
||||
std::visit(overloaded {
|
||||
[&](BuildableOpaque bo) {
|
||||
outPaths.insert(bo.path);
|
||||
res.insert(bo.path);
|
||||
},
|
||||
[&](BuildableFromDrv bfd) {
|
||||
auto drv = store->readDerivation(bfd.drvPath);
|
||||
auto outputHashes = staticOutputHashes(*store, drv);
|
||||
for (auto & output : bfd.outputs) {
|
||||
if (!output.second)
|
||||
throw Error("Cannot operate on output of unbuilt CA drv");
|
||||
outPaths.insert(*output.second);
|
||||
if (settings.isExperimentalFeatureEnabled("ca-derivations")) {
|
||||
if (!outputHashes.count(output.first))
|
||||
throw Error(
|
||||
"the derivation '%s' doesn't have an output named '%s'",
|
||||
store->printStorePath(bfd.drvPath),
|
||||
output.first);
|
||||
auto outputId = DrvOutput{outputHashes.at(output.first), output.first};
|
||||
auto realisation = store->queryRealisation(outputId);
|
||||
if (!realisation)
|
||||
throw Error("cannot operate on an output of unbuilt content-addresed derivation '%s'", outputId.to_string());
|
||||
res.insert(RealisedPath{*realisation});
|
||||
}
|
||||
else {
|
||||
// If ca-derivations isn't enabled, behave as if
|
||||
// all the paths are opaque to keep the default
|
||||
// behavior
|
||||
assert(output.second);
|
||||
res.insert(*output.second);
|
||||
}
|
||||
}
|
||||
},
|
||||
}, b);
|
||||
|
@ -718,9 +750,19 @@ StorePathSet toStorePaths(ref<Store> store,
|
|||
for (auto & i : installables)
|
||||
for (auto & b : i->toBuildables())
|
||||
if (auto bfd = std::get_if<BuildableFromDrv>(&b))
|
||||
outPaths.insert(bfd->drvPath);
|
||||
res.insert(bfd->drvPath);
|
||||
}
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
StorePathSet toStorePaths(ref<Store> store,
|
||||
Realise mode, OperateOn operateOn,
|
||||
std::vector<std::shared_ptr<Installable>> installables)
|
||||
{
|
||||
StorePathSet outPaths;
|
||||
for (auto & path : toRealisedPaths(store, mode, operateOn, installables))
|
||||
outPaths.insert(path.path());
|
||||
return outPaths;
|
||||
}
|
||||
|
src/libcmd/local.mk (new file, 15 lines)

@@ -0,0 +1,15 @@
libraries += libcmd

libcmd_NAME = libnixcmd

libcmd_DIR := $(d)

libcmd_SOURCES := $(wildcard $(d)/*.cc)

libcmd_CXXFLAGS += -I src/libutil -I src/libstore -I src/libexpr -I src/libmain -I src/libfetchers

libcmd_LDFLAGS = -llowdown

libcmd_LIBS = libstore libutil libexpr libmain libfetchers

$(eval $(call install-file-in, $(d)/nix-cmd.pc, $(prefix)/lib/pkgconfig, 0644))
@@ -3,9 +3,7 @@
#include "finally.hh"

#include <sys/queue.h>
extern "C" {
#include <lowdown.h>
}

namespace nix {

@@ -42,7 +40,9 @@ std::string renderMarkdownToTerminal(std::string_view markdown)
throw Error("cannot allocate Markdown output buffer");
Finally freeBuffer([&]() { lowdown_buf_free(buf); });

lowdown_term_rndr(buf, nullptr, renderer, node);
int rndr_res = lowdown_term_rndr(buf, nullptr, renderer, node);
if (!rndr_res)
throw Error("allocation error while rendering Markdown");

return std::string(buf->data, buf->size);
}
src/libcmd/nix-cmd.pc.in (new file, 9 lines)

@@ -0,0 +1,9 @@
prefix=@prefix@
libdir=@libdir@
includedir=@includedir@

Name: Nix
Description: Nix Package Manager
Version: @PACKAGE_VERSION@
Libs: -L${libdir} -lnixcmd
Cflags: -I${includedir}/nix -std=c++17
@ -77,7 +77,7 @@ public:
|
|||
auto a = get(name);
|
||||
if (!a)
|
||||
throw Error({
|
||||
.hint = hintfmt("attribute '%s' missing", name),
|
||||
.msg = hintfmt("attribute '%s' missing", name),
|
||||
.errPos = pos
|
||||
});
|
||||
|
||||
|
|
|
@ -12,9 +12,12 @@ namespace nix {
|
|||
|
||||
MixEvalArgs::MixEvalArgs()
|
||||
{
|
||||
auto category = "Common evaluation options";
|
||||
|
||||
addFlag({
|
||||
.longName = "arg",
|
||||
.description = "Pass the value *expr* as the argument *name* to Nix functions.",
|
||||
.category = category,
|
||||
.labels = {"name", "expr"},
|
||||
.handler = {[&](std::string name, std::string expr) { autoArgs[name] = 'E' + expr; }}
|
||||
});
|
||||
|
@ -22,6 +25,7 @@ MixEvalArgs::MixEvalArgs()
|
|||
addFlag({
|
||||
.longName = "argstr",
|
||||
.description = "Pass the string *string* as the argument *name* to Nix functions.",
|
||||
.category = category,
|
||||
.labels = {"name", "string"},
|
||||
.handler = {[&](std::string name, std::string s) { autoArgs[name] = 'S' + s; }},
|
||||
});
|
||||
|
@ -30,6 +34,7 @@ MixEvalArgs::MixEvalArgs()
|
|||
.longName = "include",
|
||||
.shortName = 'I',
|
||||
.description = "Add *path* to the list of locations used to look up `<...>` file names.",
|
||||
.category = category,
|
||||
.labels = {"path"},
|
||||
.handler = {[&](std::string s) { searchPath.push_back(s); }}
|
||||
});
|
||||
|
@ -37,6 +42,7 @@ MixEvalArgs::MixEvalArgs()
|
|||
addFlag({
|
||||
.longName = "impure",
|
||||
.description = "Allow access to mutable paths and repositories.",
|
||||
.category = category,
|
||||
.handler = {[&]() {
|
||||
evalSettings.pureEval = false;
|
||||
}},
|
||||
|
@ -45,6 +51,7 @@ MixEvalArgs::MixEvalArgs()
|
|||
addFlag({
|
||||
.longName = "override-flake",
|
||||
.description = "Override the flake registries, redirecting *original-ref* to *resolved-ref*.",
|
||||
.category = category,
|
||||
.labels = {"original-ref", "resolved-ref"},
|
||||
.handler = {[&](std::string _from, std::string _to) {
|
||||
auto from = parseFlakeRef(_from, absPath("."));
|
||||
|
|
|
@ -10,7 +10,7 @@ namespace nix {
|
|||
LocalNoInlineNoReturn(void throwEvalError(const Pos & pos, const char * s))
|
||||
{
|
||||
throw EvalError({
|
||||
.hint = hintfmt(s),
|
||||
.msg = hintfmt(s),
|
||||
.errPos = pos
|
||||
});
|
||||
}
|
||||
|
@ -24,7 +24,7 @@ LocalNoInlineNoReturn(void throwTypeError(const char * s, const Value & v))
|
|||
LocalNoInlineNoReturn(void throwTypeError(const Pos & pos, const char * s, const Value & v))
|
||||
{
|
||||
throw TypeError({
|
||||
.hint = hintfmt(s, showType(v)),
|
||||
.msg = hintfmt(s, showType(v)),
|
||||
.errPos = pos
|
||||
});
|
||||
}
|
||||
|
|
|
@ -622,7 +622,7 @@ LocalNoInlineNoReturn(void throwEvalError(const char * s, const string & s2))
|
|||
LocalNoInlineNoReturn(void throwEvalError(const Pos & pos, const char * s, const string & s2))
|
||||
{
|
||||
throw EvalError({
|
||||
.hint = hintfmt(s, s2),
|
||||
.msg = hintfmt(s, s2),
|
||||
.errPos = pos
|
||||
});
|
||||
}
|
||||
|
@ -635,7 +635,7 @@ LocalNoInlineNoReturn(void throwEvalError(const char * s, const string & s2, con
|
|||
LocalNoInlineNoReturn(void throwEvalError(const Pos & pos, const char * s, const string & s2, const string & s3))
|
||||
{
|
||||
throw EvalError({
|
||||
.hint = hintfmt(s, s2, s3),
|
||||
.msg = hintfmt(s, s2, s3),
|
||||
.errPos = pos
|
||||
});
|
||||
}
|
||||
|
@ -644,7 +644,7 @@ LocalNoInlineNoReturn(void throwEvalError(const Pos & p1, const char * s, const
|
|||
{
|
||||
// p1 is where the error occurred; p2 is a position mentioned in the message.
|
||||
throw EvalError({
|
||||
.hint = hintfmt(s, sym, p2),
|
||||
.msg = hintfmt(s, sym, p2),
|
||||
.errPos = p1
|
||||
});
|
||||
}
|
||||
|
@ -652,7 +652,7 @@ LocalNoInlineNoReturn(void throwEvalError(const Pos & p1, const char * s, const
|
|||
LocalNoInlineNoReturn(void throwTypeError(const Pos & pos, const char * s))
|
||||
{
|
||||
throw TypeError({
|
||||
.hint = hintfmt(s),
|
||||
.msg = hintfmt(s),
|
||||
.errPos = pos
|
||||
});
|
||||
}
|
||||
|
@ -660,7 +660,7 @@ LocalNoInlineNoReturn(void throwTypeError(const Pos & pos, const char * s))
|
|||
LocalNoInlineNoReturn(void throwTypeError(const Pos & pos, const char * s, const ExprLambda & fun, const Symbol & s2))
|
||||
{
|
||||
throw TypeError({
|
||||
.hint = hintfmt(s, fun.showNamePos(), s2),
|
||||
.msg = hintfmt(s, fun.showNamePos(), s2),
|
||||
.errPos = pos
|
||||
});
|
||||
}
|
||||
|
@ -668,7 +668,7 @@ LocalNoInlineNoReturn(void throwTypeError(const Pos & pos, const char * s, const
|
|||
LocalNoInlineNoReturn(void throwAssertionError(const Pos & pos, const char * s, const string & s1))
|
||||
{
|
||||
throw AssertionError({
|
||||
.hint = hintfmt(s, s1),
|
||||
.msg = hintfmt(s, s1),
|
||||
.errPos = pos
|
||||
});
|
||||
}
|
||||
|
@ -676,7 +676,7 @@ LocalNoInlineNoReturn(void throwAssertionError(const Pos & pos, const char * s,
|
|||
LocalNoInlineNoReturn(void throwUndefinedVarError(const Pos & pos, const char * s, const string & s1))
|
||||
{
|
||||
throw UndefinedVarError({
|
||||
.hint = hintfmt(s, s1),
|
||||
.msg = hintfmt(s, s1),
|
||||
.errPos = pos
|
||||
});
|
||||
}
|
||||
|
@ -684,7 +684,7 @@ LocalNoInlineNoReturn(void throwUndefinedVarError(const Pos & pos, const char *
|
|||
LocalNoInlineNoReturn(void throwMissingArgumentError(const Pos & pos, const char * s, const string & s1))
|
||||
{
|
||||
throw MissingArgumentError({
|
||||
.hint = hintfmt(s, s1),
|
||||
.msg = hintfmt(s, s1),
|
||||
.errPos = pos
|
||||
});
|
||||
}
|
||||
|
@ -2057,7 +2057,7 @@ void EvalState::printStats()
|
|||
string ExternalValueBase::coerceToString(const Pos & pos, PathSet & context, bool copyMore, bool copyToStore) const
|
||||
{
|
||||
throw TypeError({
|
||||
.hint = hintfmt("cannot coerce %1% to a string", showType()),
|
||||
.msg = hintfmt("cannot coerce %1% to a string", showType()),
|
||||
.errPos = pos
|
||||
});
|
||||
}
|
||||
|
|
|
@ -298,284 +298,298 @@ LockedFlake lockFlake(
|
|||
|
||||
auto flake = getFlake(state, topRef, lockFlags.useRegistries, flakeCache);
|
||||
|
||||
// FIXME: symlink attack
|
||||
auto oldLockFile = LockFile::read(
|
||||
flake.sourceInfo->actualPath + "/" + flake.lockedRef.subdir + "/flake.lock");
|
||||
try {
|
||||
|
||||
debug("old lock file: %s", oldLockFile);
|
||||
// FIXME: symlink attack
|
||||
auto oldLockFile = LockFile::read(
|
||||
flake.sourceInfo->actualPath + "/" + flake.lockedRef.subdir + "/flake.lock");
|
||||
|
||||
// FIXME: check whether all overrides are used.
|
||||
std::map<InputPath, FlakeInput> overrides;
|
||||
std::set<InputPath> overridesUsed, updatesUsed;
|
||||
debug("old lock file: %s", oldLockFile);
|
||||
|
||||
for (auto & i : lockFlags.inputOverrides)
|
||||
overrides.insert_or_assign(i.first, FlakeInput { .ref = i.second });
|
||||
// FIXME: check whether all overrides are used.
|
||||
std::map<InputPath, FlakeInput> overrides;
|
||||
std::set<InputPath> overridesUsed, updatesUsed;
|
||||
|
||||
LockFile newLockFile;
|
||||
for (auto & i : lockFlags.inputOverrides)
|
||||
overrides.insert_or_assign(i.first, FlakeInput { .ref = i.second });
|
||||
|
||||
std::vector<FlakeRef> parents;
|
||||
LockFile newLockFile;
|
||||
|
||||
std::function<void(
|
||||
const FlakeInputs & flakeInputs,
|
||||
std::shared_ptr<Node> node,
|
||||
const InputPath & inputPathPrefix,
|
||||
std::shared_ptr<const Node> oldNode)>
|
||||
computeLocks;
|
||||
std::vector<FlakeRef> parents;
|
||||
|
||||
computeLocks = [&](
|
||||
const FlakeInputs & flakeInputs,
|
||||
std::shared_ptr<Node> node,
|
||||
const InputPath & inputPathPrefix,
|
||||
std::shared_ptr<const Node> oldNode)
|
||||
{
|
||||
debug("computing lock file node '%s'", printInputPath(inputPathPrefix));
|
||||
std::function<void(
|
||||
const FlakeInputs & flakeInputs,
|
||||
std::shared_ptr<Node> node,
|
||||
const InputPath & inputPathPrefix,
|
||||
std::shared_ptr<const Node> oldNode)>
|
||||
computeLocks;
|
||||
|
||||
/* Get the overrides (i.e. attributes of the form
|
||||
'inputs.nixops.inputs.nixpkgs.url = ...'). */
|
||||
// FIXME: check this
|
||||
for (auto & [id, input] : flake.inputs) {
|
||||
for (auto & [idOverride, inputOverride] : input.overrides) {
|
||||
computeLocks = [&](
|
||||
const FlakeInputs & flakeInputs,
|
||||
std::shared_ptr<Node> node,
|
||||
const InputPath & inputPathPrefix,
|
||||
std::shared_ptr<const Node> oldNode)
|
||||
{
|
||||
debug("computing lock file node '%s'", printInputPath(inputPathPrefix));
|
||||
|
||||
/* Get the overrides (i.e. attributes of the form
|
||||
'inputs.nixops.inputs.nixpkgs.url = ...'). */
|
||||
// FIXME: check this
|
||||
for (auto & [id, input] : flake.inputs) {
|
||||
for (auto & [idOverride, inputOverride] : input.overrides) {
|
||||
auto inputPath(inputPathPrefix);
|
||||
inputPath.push_back(id);
|
||||
inputPath.push_back(idOverride);
|
||||
overrides.insert_or_assign(inputPath, inputOverride);
|
||||
}
|
||||
}
|
||||
|
||||
/* Go over the flake inputs, resolve/fetch them if
|
||||
necessary (i.e. if they're new or the flakeref changed
|
||||
from what's in the lock file). */
|
||||
for (auto & [id, input2] : flakeInputs) {
|
||||
auto inputPath(inputPathPrefix);
|
||||
inputPath.push_back(id);
|
||||
inputPath.push_back(idOverride);
|
||||
overrides.insert_or_assign(inputPath, inputOverride);
|
||||
}
|
||||
}
|
||||
auto inputPathS = printInputPath(inputPath);
|
||||
debug("computing input '%s'", inputPathS);
|
||||
|
||||
/* Go over the flake inputs, resolve/fetch them if
|
||||
necessary (i.e. if they're new or the flakeref changed
|
||||
from what's in the lock file). */
|
||||
for (auto & [id, input2] : flakeInputs) {
|
||||
auto inputPath(inputPathPrefix);
|
||||
inputPath.push_back(id);
|
||||
auto inputPathS = printInputPath(inputPath);
|
||||
debug("computing input '%s'", inputPathS);
|
||||
try {
|
||||
|
||||
/* Do we have an override for this input from one of the
|
||||
ancestors? */
|
||||
auto i = overrides.find(inputPath);
|
||||
bool hasOverride = i != overrides.end();
|
||||
if (hasOverride) overridesUsed.insert(inputPath);
|
||||
auto & input = hasOverride ? i->second : input2;
|
||||
/* Do we have an override for this input from one of the
|
||||
ancestors? */
|
||||
auto i = overrides.find(inputPath);
|
||||
bool hasOverride = i != overrides.end();
|
||||
if (hasOverride) overridesUsed.insert(inputPath);
|
||||
auto & input = hasOverride ? i->second : input2;
|
||||
|
||||
/* Resolve 'follows' later (since it may refer to an input
|
||||
path we haven't processed yet. */
|
||||
if (input.follows) {
|
||||
InputPath target;
|
||||
if (hasOverride || input.absolute)
|
||||
/* 'follows' from an override is relative to the
|
||||
root of the graph. */
|
||||
target = *input.follows;
|
||||
else {
|
||||
/* Otherwise, it's relative to the current flake. */
|
||||
target = inputPathPrefix;
|
||||
for (auto & i : *input.follows) target.push_back(i);
|
||||
}
|
||||
debug("input '%s' follows '%s'", inputPathS, printInputPath(target));
|
||||
node->inputs.insert_or_assign(id, target);
|
||||
continue;
|
||||
}
|
||||
/* Resolve 'follows' later (since it may refer to an input
|
||||
path we haven't processed yet. */
|
||||
if (input.follows) {
|
||||
InputPath target;
|
||||
if (hasOverride || input.absolute)
|
||||
/* 'follows' from an override is relative to the
|
||||
root of the graph. */
|
||||
target = *input.follows;
|
||||
else {
|
||||
/* Otherwise, it's relative to the current flake. */
|
||||
target = inputPathPrefix;
|
||||
for (auto & i : *input.follows) target.push_back(i);
|
||||
}
|
||||
debug("input '%s' follows '%s'", inputPathS, printInputPath(target));
|
||||
node->inputs.insert_or_assign(id, target);
|
||||
continue;
|
||||
}
|
||||
|
||||
assert(input.ref);
|
||||
assert(input.ref);
|
||||
|
||||
/* Do we have an entry in the existing lock file? And we
|
||||
don't have a --update-input flag for this input? */
|
||||
std::shared_ptr<LockedNode> oldLock;
|
||||
/* Do we have an entry in the existing lock file? And we
|
||||
don't have a --update-input flag for this input? */
|
||||
std::shared_ptr<LockedNode> oldLock;
|
||||
|
||||
updatesUsed.insert(inputPath);
|
||||
updatesUsed.insert(inputPath);
|
||||
|
||||
if (oldNode && !lockFlags.inputUpdates.count(inputPath))
|
||||
if (auto oldLock2 = get(oldNode->inputs, id))
|
||||
if (auto oldLock3 = std::get_if<0>(&*oldLock2))
|
||||
oldLock = *oldLock3;
|
||||
if (oldNode && !lockFlags.inputUpdates.count(inputPath))
|
||||
if (auto oldLock2 = get(oldNode->inputs, id))
|
||||
if (auto oldLock3 = std::get_if<0>(&*oldLock2))
|
||||
oldLock = *oldLock3;
|
||||
|
||||
if (oldLock
|
||||
&& oldLock->originalRef == *input.ref
|
||||
&& !hasOverride)
|
||||
{
|
||||
debug("keeping existing input '%s'", inputPathS);
|
||||
if (oldLock
|
||||
&& oldLock->originalRef == *input.ref
|
||||
&& !hasOverride)
|
||||
{
|
||||
debug("keeping existing input '%s'", inputPathS);
|
||||
|
||||
/* Copy the input from the old lock since its flakeref
|
||||
didn't change and there is no override from a
|
||||
higher level flake. */
|
||||
auto childNode = std::make_shared<LockedNode>(
|
||||
oldLock->lockedRef, oldLock->originalRef, oldLock->isFlake);
|
||||
/* Copy the input from the old lock since its flakeref
|
||||
didn't change and there is no override from a
|
||||
higher level flake. */
|
||||
auto childNode = std::make_shared<LockedNode>(
|
||||
oldLock->lockedRef, oldLock->originalRef, oldLock->isFlake);
|
||||
|
||||
node->inputs.insert_or_assign(id, childNode);
|
||||
node->inputs.insert_or_assign(id, childNode);
|
||||
|
||||
/* If we have an --update-input flag for an input
|
||||
of this input, then we must fetch the flake to
|
||||
update it. */
|
||||
auto lb = lockFlags.inputUpdates.lower_bound(inputPath);
|
||||
/* If we have an --update-input flag for an input
|
||||
of this input, then we must fetch the flake to
|
||||
update it. */
|
||||
auto lb = lockFlags.inputUpdates.lower_bound(inputPath);
|
||||
|
||||
auto hasChildUpdate =
|
||||
lb != lockFlags.inputUpdates.end()
|
||||
&& lb->size() > inputPath.size()
|
||||
&& std::equal(inputPath.begin(), inputPath.end(), lb->begin());
|
||||
auto hasChildUpdate =
|
||||
lb != lockFlags.inputUpdates.end()
|
||||
&& lb->size() > inputPath.size()
|
||||
&& std::equal(inputPath.begin(), inputPath.end(), lb->begin());
|
||||
|
||||
if (hasChildUpdate) {
|
||||
auto inputFlake = getFlake(
|
||||
state, oldLock->lockedRef, false, flakeCache);
|
||||
computeLocks(inputFlake.inputs, childNode, inputPath, oldLock);
|
||||
} else {
|
||||
/* No need to fetch this flake, we can be
|
||||
lazy. However there may be new overrides on the
|
||||
inputs of this flake, so we need to check
|
||||
those. */
|
||||
FlakeInputs fakeInputs;
|
||||
if (hasChildUpdate) {
|
||||
auto inputFlake = getFlake(
|
||||
state, oldLock->lockedRef, false, flakeCache);
|
||||
computeLocks(inputFlake.inputs, childNode, inputPath, oldLock);
|
||||
} else {
|
||||
/* No need to fetch this flake, we can be
|
||||
lazy. However there may be new overrides on the
|
||||
inputs of this flake, so we need to check
|
||||
those. */
|
||||
FlakeInputs fakeInputs;
|
||||
|
||||
for (auto & i : oldLock->inputs) {
|
||||
if (auto lockedNode = std::get_if<0>(&i.second)) {
|
||||
fakeInputs.emplace(i.first, FlakeInput {
|
||||
.ref = (*lockedNode)->originalRef,
|
||||
.isFlake = (*lockedNode)->isFlake,
|
||||
});
|
||||
} else if (auto follows = std::get_if<1>(&i.second)) {
|
||||
fakeInputs.emplace(i.first, FlakeInput {
|
||||
.follows = *follows,
|
||||
.absolute = true
|
||||
});
|
||||
for (auto & i : oldLock->inputs) {
|
||||
if (auto lockedNode = std::get_if<0>(&i.second)) {
|
||||
fakeInputs.emplace(i.first, FlakeInput {
|
||||
.ref = (*lockedNode)->originalRef,
|
||||
.isFlake = (*lockedNode)->isFlake,
|
||||
});
|
||||
} else if (auto follows = std::get_if<1>(&i.second)) {
|
||||
fakeInputs.emplace(i.first, FlakeInput {
|
||||
.follows = *follows,
|
||||
.absolute = true
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
computeLocks(fakeInputs, childNode, inputPath, oldLock);
|
||||
}
|
||||
|
||||
} else {
|
||||
/* We need to create a new lock file entry. So fetch
|
||||
this input. */
|
||||
debug("creating new input '%s'", inputPathS);
|
||||
|
||||
if (!lockFlags.allowMutable && !input.ref->input.isImmutable())
|
||||
throw Error("cannot update flake input '%s' in pure mode", inputPathS);
|
||||
|
||||
if (input.isFlake) {
|
||||
auto inputFlake = getFlake(state, *input.ref, lockFlags.useRegistries, flakeCache);
|
||||
|
||||
/* Note: in case of an --override-input, we use
|
||||
the *original* ref (input2.ref) for the
|
||||
"original" field, rather than the
|
||||
override. This ensures that the override isn't
|
||||
nuked the next time we update the lock
|
||||
file. That is, overrides are sticky unless you
|
||||
use --no-write-lock-file. */
|
||||
auto childNode = std::make_shared<LockedNode>(
|
||||
inputFlake.lockedRef, input2.ref ? *input2.ref : *input.ref);
|
||||
|
||||
node->inputs.insert_or_assign(id, childNode);
|
||||
|
||||
/* Guard against circular flake imports. */
|
||||
for (auto & parent : parents)
|
||||
if (parent == *input.ref)
|
||||
throw Error("found circular import of flake '%s'", parent);
|
||||
parents.push_back(*input.ref);
|
||||
Finally cleanup([&]() { parents.pop_back(); });
|
||||
|
||||
/* Recursively process the inputs of this
|
||||
flake. Also, unless we already have this flake
|
||||
in the top-level lock file, use this flake's
|
||||
own lock file. */
|
||||
computeLocks(
|
||||
inputFlake.inputs, childNode, inputPath,
|
||||
oldLock
|
||||
? std::dynamic_pointer_cast<const Node>(oldLock)
|
||||
: LockFile::read(
|
||||
inputFlake.sourceInfo->actualPath + "/" + inputFlake.lockedRef.subdir + "/flake.lock").root);
|
||||
}
|
||||
|
||||
else {
|
||||
auto [sourceInfo, resolvedRef, lockedRef] = fetchOrSubstituteTree(
|
||||
state, *input.ref, lockFlags.useRegistries, flakeCache);
|
||||
node->inputs.insert_or_assign(id,
|
||||
std::make_shared<LockedNode>(lockedRef, *input.ref, false));
|
||||
}
|
||||
}
|
||||
|
||||
computeLocks(fakeInputs, childNode, inputPath, oldLock);
|
||||
}
|
||||
|
||||
} else {
|
||||
/* We need to create a new lock file entry. So fetch
|
||||
this input. */
|
||||
debug("creating new input '%s'", inputPathS);
|
||||
|
||||
if (!lockFlags.allowMutable && !input.ref->input.isImmutable())
|
||||
throw Error("cannot update flake input '%s' in pure mode", inputPathS);
|
||||
|
||||
if (input.isFlake) {
|
||||
auto inputFlake = getFlake(state, *input.ref, lockFlags.useRegistries, flakeCache);
|
||||
|
||||
/* Note: in case of an --override-input, we use
|
||||
the *original* ref (input2.ref) for the
|
||||
"original" field, rather than the
|
||||
override. This ensures that the override isn't
|
||||
nuked the next time we update the lock
|
||||
file. That is, overrides are sticky unless you
|
||||
use --no-write-lock-file. */
|
||||
auto childNode = std::make_shared<LockedNode>(
|
||||
inputFlake.lockedRef, input2.ref ? *input2.ref : *input.ref);
|
||||
|
||||
node->inputs.insert_or_assign(id, childNode);
|
||||
|
||||
/* Guard against circular flake imports. */
|
||||
for (auto & parent : parents)
|
||||
if (parent == *input.ref)
|
||||
throw Error("found circular import of flake '%s'", parent);
|
||||
parents.push_back(*input.ref);
|
||||
Finally cleanup([&]() { parents.pop_back(); });
|
||||
|
||||
/* Recursively process the inputs of this
|
||||
flake. Also, unless we already have this flake
|
||||
in the top-level lock file, use this flake's
|
||||
own lock file. */
|
||||
computeLocks(
|
||||
inputFlake.inputs, childNode, inputPath,
|
||||
oldLock
|
||||
? std::dynamic_pointer_cast<const Node>(oldLock)
|
||||
: LockFile::read(
|
||||
inputFlake.sourceInfo->actualPath + "/" + inputFlake.lockedRef.subdir + "/flake.lock").root);
|
||||
}
|
||||
|
||||
else {
|
||||
auto [sourceInfo, resolvedRef, lockedRef] = fetchOrSubstituteTree(
|
||||
state, *input.ref, lockFlags.useRegistries, flakeCache);
|
||||
node->inputs.insert_or_assign(id,
|
||||
std::make_shared<LockedNode>(lockedRef, *input.ref, false));
|
||||
} catch (Error & e) {
|
||||
e.addTrace({}, "while updating the flake input '%s'", inputPathS);
|
||||
throw;
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
computeLocks(
|
||||
flake.inputs, newLockFile.root, {},
|
||||
lockFlags.recreateLockFile ? nullptr : oldLockFile.root);
|
||||
computeLocks(
|
||||
flake.inputs, newLockFile.root, {},
|
||||
lockFlags.recreateLockFile ? nullptr : oldLockFile.root);
|
||||
|
||||
for (auto & i : lockFlags.inputOverrides)
|
||||
if (!overridesUsed.count(i.first))
|
||||
warn("the flag '--override-input %s %s' does not match any input",
|
||||
printInputPath(i.first), i.second);
|
||||
for (auto & i : lockFlags.inputOverrides)
|
||||
if (!overridesUsed.count(i.first))
|
||||
warn("the flag '--override-input %s %s' does not match any input",
|
||||
printInputPath(i.first), i.second);
|
||||
|
||||
for (auto & i : lockFlags.inputUpdates)
|
||||
if (!updatesUsed.count(i))
|
||||
warn("the flag '--update-input %s' does not match any input", printInputPath(i));
|
||||
for (auto & i : lockFlags.inputUpdates)
|
||||
if (!updatesUsed.count(i))
|
||||
warn("the flag '--update-input %s' does not match any input", printInputPath(i));
|
||||
|
||||
/* Check 'follows' inputs. */
|
||||
newLockFile.check();
|
||||
/* Check 'follows' inputs. */
|
||||
newLockFile.check();
|
||||
|
||||
debug("new lock file: %s", newLockFile);
|
||||
debug("new lock file: %s", newLockFile);
|
||||
|
||||
/* Check whether we need to / can write the new lock file. */
|
||||
if (!(newLockFile == oldLockFile)) {
|
||||
/* Check whether we need to / can write the new lock file. */
|
||||
if (!(newLockFile == oldLockFile)) {
|
||||
|
||||
auto diff = LockFile::diff(oldLockFile, newLockFile);
|
||||
auto diff = LockFile::diff(oldLockFile, newLockFile);
|
||||
|
||||
if (lockFlags.writeLockFile) {
|
||||
if (auto sourcePath = topRef.input.getSourcePath()) {
|
||||
if (!newLockFile.isImmutable()) {
|
||||
if (settings.warnDirty)
|
||||
warn("will not write lock file of flake '%s' because it has a mutable input", topRef);
|
||||
} else {
|
||||
if (!lockFlags.updateLockFile)
|
||||
throw Error("flake '%s' requires lock file changes but they're not allowed due to '--no-update-lock-file'", topRef);
|
||||
if (lockFlags.writeLockFile) {
|
||||
if (auto sourcePath = topRef.input.getSourcePath()) {
|
||||
if (!newLockFile.isImmutable()) {
|
||||
if (settings.warnDirty)
|
||||
warn("will not write lock file of flake '%s' because it has a mutable input", topRef);
|
||||
} else {
|
||||
if (!lockFlags.updateLockFile)
|
||||
throw Error("flake '%s' requires lock file changes but they're not allowed due to '--no-update-lock-file'", topRef);
|
||||
|
||||
auto relPath = (topRef.subdir == "" ? "" : topRef.subdir + "/") + "flake.lock";
|
||||
auto relPath = (topRef.subdir == "" ? "" : topRef.subdir + "/") + "flake.lock";
|
||||
|
||||
auto path = *sourcePath + "/" + relPath;
|
||||
auto path = *sourcePath + "/" + relPath;
|
||||
|
||||
bool lockFileExists = pathExists(path);
|
||||
bool lockFileExists = pathExists(path);
|
||||
|
||||
if (lockFileExists) {
|
||||
auto s = chomp(diff);
|
||||
if (s.empty())
|
||||
warn("updating lock file '%s'", path);
|
||||
else
|
||||
warn("updating lock file '%s':\n%s", path, s);
|
||||
} else
|
||||
warn("creating lock file '%s'", path);
|
||||
if (lockFileExists) {
|
||||
auto s = chomp(diff);
|
||||
if (s.empty())
|
||||
warn("updating lock file '%s'", path);
|
||||
else
|
||||
warn("updating lock file '%s':\n%s", path, s);
|
||||
} else
|
||||
warn("creating lock file '%s'", path);
|
||||
|
||||
newLockFile.write(path);
|
||||
newLockFile.write(path);
|
||||
|
||||
topRef.input.markChangedFile(
|
||||
(topRef.subdir == "" ? "" : topRef.subdir + "/") + "flake.lock",
|
||||
lockFlags.commitLockFile
|
||||
? std::optional<std::string>(fmt("%s: %s\n\nFlake input changes:\n\n%s",
|
||||
relPath, lockFileExists ? "Update" : "Add", diff))
|
||||
: std::nullopt);
|
||||
topRef.input.markChangedFile(
|
||||
(topRef.subdir == "" ? "" : topRef.subdir + "/") + "flake.lock",
|
||||
lockFlags.commitLockFile
|
||||
? std::optional<std::string>(fmt("%s: %s\n\nFlake input changes:\n\n%s",
|
||||
relPath, lockFileExists ? "Update" : "Add", diff))
|
||||
: std::nullopt);
|
||||
|
||||
/* Rewriting the lockfile changed the top-level
|
||||
repo, so we should re-read it. FIXME: we could
|
||||
also just clear the 'rev' field... */
|
||||
auto prevLockedRef = flake.lockedRef;
|
||||
FlakeCache dummyCache;
|
||||
flake = getFlake(state, topRef, lockFlags.useRegistries, dummyCache);
|
||||
/* Rewriting the lockfile changed the top-level
|
||||
repo, so we should re-read it. FIXME: we could
|
||||
also just clear the 'rev' field... */
|
||||
auto prevLockedRef = flake.lockedRef;
|
||||
FlakeCache dummyCache;
|
||||
flake = getFlake(state, topRef, lockFlags.useRegistries, dummyCache);
|
||||
|
||||
if (lockFlags.commitLockFile &&
|
||||
flake.lockedRef.input.getRev() &&
|
||||
prevLockedRef.input.getRev() != flake.lockedRef.input.getRev())
|
||||
warn("committed new revision '%s'", flake.lockedRef.input.getRev()->gitRev());
|
||||
if (lockFlags.commitLockFile &&
|
||||
flake.lockedRef.input.getRev() &&
|
||||
prevLockedRef.input.getRev() != flake.lockedRef.input.getRev())
|
||||
warn("committed new revision '%s'", flake.lockedRef.input.getRev()->gitRev());
|
||||
|
||||
/* Make sure that we picked up the change,
|
||||
i.e. the tree should usually be dirty
|
||||
now. Corner case: we could have reverted from a
|
||||
dirty to a clean tree! */
|
||||
if (flake.lockedRef.input == prevLockedRef.input
|
||||
&& !flake.lockedRef.input.isImmutable())
|
||||
throw Error("'%s' did not change after I updated its 'flake.lock' file; is 'flake.lock' under version control?", flake.originalRef);
|
||||
}
|
||||
/* Make sure that we picked up the change,
|
||||
i.e. the tree should usually be dirty
|
||||
now. Corner case: we could have reverted from a
|
||||
dirty to a clean tree! */
|
||||
if (flake.lockedRef.input == prevLockedRef.input
|
||||
&& !flake.lockedRef.input.isImmutable())
|
||||
throw Error("'%s' did not change after I updated its 'flake.lock' file; is 'flake.lock' under version control?", flake.originalRef);
|
||||
}
|
||||
} else
|
||||
throw Error("cannot write modified lock file of flake '%s' (use '--no-write-lock-file' to ignore)", topRef);
|
||||
} else
|
||||
throw Error("cannot write modified lock file of flake '%s' (use '--no-write-lock-file' to ignore)", topRef);
|
||||
} else
|
||||
warn("not writing modified lock file of flake '%s':\n%s", topRef, chomp(diff));
|
||||
}
|
||||
warn("not writing modified lock file of flake '%s':\n%s", topRef, chomp(diff));
|
||||
}
|
||||
|
||||
return LockedFlake { .flake = std::move(flake), .lockFile = std::move(newLockFile) };
|
||||
return LockedFlake { .flake = std::move(flake), .lockFile = std::move(newLockFile) };
|
||||
|
||||
} catch (Error & e) {
|
||||
e.addTrace({}, "while updating the lock file of flake '%s'", flake.lockedRef.to_string());
|
||||
throw;
|
||||
}
|
||||
}
|
||||
|
||||
void callFlake(EvalState & state,
|
||||
|
|
|
@@ -284,7 +284,7 @@ void ExprVar::bindVars(const StaticEnv & env)
"undefined variable" error now. */
if (withLevel == -1)
throw UndefinedVarError({
- .hint = hintfmt("undefined variable '%1%'", name),
+ .msg = hintfmt("undefined variable '%1%'", name),
.errPos = pos
});
fromWith = true;
@@ -239,7 +239,7 @@ struct ExprLambda : Expr
{
if (!arg.empty() && formals && formals->argNames.find(arg) != formals->argNames.end())
throw ParseError({
- .hint = hintfmt("duplicate formal function argument '%1%'", arg),
+ .msg = hintfmt("duplicate formal function argument '%1%'", arg),
.errPos = pos
});
};
@@ -32,7 +32,7 @@ namespace nix {
Path basePath;
Symbol file;
FileOrigin origin;
- ErrorInfo error;
+ std::optional<ErrorInfo> error;
Symbol sLetBody;
ParseData(EvalState & state)
: state(state)

@@ -66,8 +66,8 @@ namespace nix {
static void dupAttr(const AttrPath & attrPath, const Pos & pos, const Pos & prevPos)
{
throw ParseError({
- .hint = hintfmt("attribute '%1%' already defined at %2%",
-     showAttrPath(attrPath), prevPos),
+ .msg = hintfmt("attribute '%1%' already defined at %2%",
+     showAttrPath(attrPath), prevPos),
.errPos = pos
});
}

@@ -75,7 +75,7 @@ static void dupAttr(const AttrPath & attrPath, const Pos & pos, const Pos & prevPos)
static void dupAttr(Symbol attr, const Pos & pos, const Pos & prevPos)
{
throw ParseError({
- .hint = hintfmt("attribute '%1%' already defined at %2%", attr, prevPos),
+ .msg = hintfmt("attribute '%1%' already defined at %2%", attr, prevPos),
.errPos = pos
});
}

@@ -146,7 +146,7 @@ static void addFormal(const Pos & pos, Formals * formals, const Formal & formal)
{
if (!formals->argNames.insert(formal.name).second)
throw ParseError({
- .hint = hintfmt("duplicate formal function argument '%1%'",
+ .msg = hintfmt("duplicate formal function argument '%1%'",
formal.name),
.errPos = pos
});

@@ -258,7 +258,7 @@ static inline Pos makeCurPos(const YYLTYPE & loc, ParseData * data)
void yyerror(YYLTYPE * loc, yyscan_t scanner, ParseData * data, const char * error)
{
data->error = {
- .hint = hintfmt(error),
+ .msg = hintfmt(error),
.errPos = makeCurPos(*loc, data)
};
}

@@ -338,7 +338,7 @@ expr_function
| LET binds IN expr_function
{ if (!$2->dynamicAttrs.empty())
throw ParseError({
- .hint = hintfmt("dynamic attributes not allowed in let"),
+ .msg = hintfmt("dynamic attributes not allowed in let"),
.errPos = CUR_POS
});
$$ = new ExprLet($2, $4);

@@ -418,7 +418,7 @@ expr_simple
static bool noURLLiterals = settings.isExperimentalFeatureEnabled("no-url-literals");
if (noURLLiterals)
throw ParseError({
- .hint = hintfmt("URL literals are disabled"),
+ .msg = hintfmt("URL literals are disabled"),
.errPos = CUR_POS
});
$$ = new ExprString(data->symbols.create($1));

@@ -491,7 +491,7 @@ attrs
delete str;
} else
throw ParseError({
- .hint = hintfmt("dynamic attributes not allowed in inherit"),
+ .msg = hintfmt("dynamic attributes not allowed in inherit"),
.errPos = makeCurPos(@2, data)
});
}
@@ -593,7 +593,7 @@ Expr * EvalState::parse(const char * text, FileOrigin origin,
int res = yyparse(scanner, &data);
yylex_destroy(scanner);

- if (res) throw ParseError(data.error);
+ if (res) throw ParseError(data.error.value());

data.result->bindVars(staticEnv);

@@ -703,7 +703,7 @@ Path EvalState::findFile(SearchPath & searchPath, const string & path, const Pos
return corepkgsPrefix + path.substr(4);

throw ThrownError({
- .hint = hintfmt(evalSettings.pureEval
+ .msg = hintfmt(evalSettings.pureEval
? "cannot look up '<%s>' in pure evaluation mode (use '--impure' to override)"
: "file '%s' was not found in the Nix search path (add it using $NIX_PATH or -I)",
path),

@@ -725,8 +725,7 @@ std::pair<bool, std::string> EvalState::resolveSearchPathElem(const SearchPathEl
store, resolveUri(elem.second), "source", false).first.storePath) };
} catch (FileTransferError & e) {
logWarning({
- .name = "Entry download",
- .hint = hintfmt("Nix search path entry '%1%' cannot be downloaded, ignoring", elem.second)
+ .msg = hintfmt("Nix search path entry '%1%' cannot be downloaded, ignoring", elem.second)
});
res = { false, "" };
}

@@ -736,8 +735,7 @@ std::pair<bool, std::string> EvalState::resolveSearchPathElem(const SearchPathEl
res = { true, path };
else {
logWarning({
- .name = "Entry not found",
- .hint = hintfmt("warning: Nix search path entry '%1%' does not exist, ignoring", elem.second)
+ .msg = hintfmt("warning: Nix search path entry '%1%' does not exist, ignoring", elem.second)
});
res = { false, "" };
}
@@ -115,9 +115,12 @@ static void import(EvalState & state, const Pos & pos, Value & vPath, Value * vS
state.realiseContext(context);
} catch (InvalidPathError & e) {
throw EvalError({
- .hint = hintfmt("cannot import '%1%', since path '%2%' is not valid", path, e.path),
+ .msg = hintfmt("cannot import '%1%', since path '%2%' is not valid", path, e.path),
.errPos = pos
});
+ } catch (Error & e) {
+     e.addTrace(pos, "while importing '%s'", path);
+     throw e;
}

Path realPath = state.checkSourcePath(state.toRealPath(path, context));

@@ -282,7 +285,7 @@ void prim_importNative(EvalState & state, const Pos & pos, Value * * args, Value
state.realiseContext(context);
} catch (InvalidPathError & e) {
throw EvalError({
- .hint = hintfmt(
+ .msg = hintfmt(
"cannot import '%1%', since path '%2%' is not valid",
path, e.path),
.errPos = pos

@@ -322,7 +325,7 @@ void prim_exec(EvalState & state, const Pos & pos, Value * * args, Value & v)
auto count = args[0]->listSize();
if (count == 0) {
throw EvalError({
- .hint = hintfmt("at least one argument to 'exec' required"),
+ .msg = hintfmt("at least one argument to 'exec' required"),
.errPos = pos
});
}

@@ -336,7 +339,7 @@ void prim_exec(EvalState & state, const Pos & pos, Value * * args, Value & v)
state.realiseContext(context);
} catch (InvalidPathError & e) {
throw EvalError({
- .hint = hintfmt("cannot execute '%1%', since path '%2%' is not valid",
+ .msg = hintfmt("cannot execute '%1%', since path '%2%' is not valid",
program, e.path),
.errPos = pos
});

@@ -551,7 +554,7 @@ static void prim_genericClosure(EvalState & state, const Pos & pos, Value * * ar
args[0]->attrs->find(state.symbols.create("startSet"));
if (startSet == args[0]->attrs->end())
throw EvalError({
- .hint = hintfmt("attribute 'startSet' required"),
+ .msg = hintfmt("attribute 'startSet' required"),
.errPos = pos
});
state.forceList(*startSet->value, pos);

@@ -565,7 +568,7 @@ static void prim_genericClosure(EvalState & state, const Pos & pos, Value * * ar
args[0]->attrs->find(state.symbols.create("operator"));
if (op == args[0]->attrs->end())
throw EvalError({
- .hint = hintfmt("attribute 'operator' required"),
+ .msg = hintfmt("attribute 'operator' required"),
.errPos = pos
});
state.forceValue(*op->value, pos);

@@ -587,7 +590,7 @@ static void prim_genericClosure(EvalState & state, const Pos & pos, Value * * ar
e->attrs->find(state.symbols.create("key"));
if (key == e->attrs->end())
throw EvalError({
- .hint = hintfmt("attribute 'key' required"),
+ .msg = hintfmt("attribute 'key' required"),
.errPos = pos
});
state.forceValue(*key->value, pos);
@@ -693,10 +696,14 @@ static RegisterPrimOp primop_tryEval({
Try to shallowly evaluate *e*. Return a set containing the
attributes `success` (`true` if *e* evaluated successfully,
`false` if an error was thrown) and `value`, equalling *e* if
- successful and `false` otherwise. Note that this doesn't evaluate
- *e* deeply, so ` let e = { x = throw ""; }; in (builtins.tryEval
- e).success ` will be `true`. Using ` builtins.deepSeq ` one can
- get the expected result: `let e = { x = throw ""; }; in
+ successful and `false` otherwise. `tryEval` will only prevent
+ errors created by `throw` or `assert` from being thrown.
+ Errors `tryEval` will not catch are for example those created
+ by `abort` and type errors generated by builtins. Also note that
+ this doesn't evaluate *e* deeply, so `let e = { x = throw ""; };
+ in (builtins.tryEval e).success` will be `true`. Using
+ `builtins.deepSeq` one can get the expected result:
+ `let e = { x = throw ""; }; in
(builtins.tryEval (builtins.deepSeq e e)).success` will be
`false`.
)",
@@ -810,7 +817,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
Bindings::iterator attr = args[0]->attrs->find(state.sName);
if (attr == args[0]->attrs->end())
throw EvalError({
- .hint = hintfmt("required attribute 'name' missing"),
+ .msg = hintfmt("required attribute 'name' missing"),
.errPos = pos
});
string drvName;

@@ -859,7 +866,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
else if (s == "flat") ingestionMethod = FileIngestionMethod::Flat;
else
throw EvalError({
- .hint = hintfmt("invalid value '%s' for 'outputHashMode' attribute", s),
+ .msg = hintfmt("invalid value '%s' for 'outputHashMode' attribute", s),
.errPos = posDrvName
});
};

@@ -869,7 +876,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
for (auto & j : ss) {
if (outputs.find(j) != outputs.end())
throw EvalError({
- .hint = hintfmt("duplicate derivation output '%1%'", j),
+ .msg = hintfmt("duplicate derivation output '%1%'", j),
.errPos = posDrvName
});
/* !!! Check whether j is a valid attribute

@@ -879,14 +886,14 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
the resulting set. */
if (j == "drv")
throw EvalError({
- .hint = hintfmt("invalid derivation output name 'drv'" ),
+ .msg = hintfmt("invalid derivation output name 'drv'" ),
.errPos = posDrvName
});
outputs.insert(j);
}
if (outputs.empty())
throw EvalError({
- .hint = hintfmt("derivation cannot have an empty set of outputs"),
+ .msg = hintfmt("derivation cannot have an empty set of outputs"),
.errPos = posDrvName
});
};

@@ -1007,20 +1014,20 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
/* Do we have all required attributes? */
if (drv.builder == "")
throw EvalError({
- .hint = hintfmt("required attribute 'builder' missing"),
+ .msg = hintfmt("required attribute 'builder' missing"),
.errPos = posDrvName
});

if (drv.platform == "")
throw EvalError({
- .hint = hintfmt("required attribute 'system' missing"),
+ .msg = hintfmt("required attribute 'system' missing"),
.errPos = posDrvName
});

/* Check whether the derivation name is valid. */
if (isDerivation(drvName))
throw EvalError({
- .hint = hintfmt("derivation names are not allowed to end in '%s'", drvExtension),
+ .msg = hintfmt("derivation names are not allowed to end in '%s'", drvExtension),
.errPos = posDrvName
});

@@ -1031,7 +1038,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
already content addressed. */
if (outputs.size() != 1 || *(outputs.begin()) != "out")
throw Error({
- .hint = hintfmt("multiple outputs are not supported in fixed-output derivations"),
+ .msg = hintfmt("multiple outputs are not supported in fixed-output derivations"),
.errPos = posDrvName
});
@@ -1211,7 +1218,7 @@ static void prim_storePath(EvalState & state, const Pos & pos, Value * * args, V
if (!state.store->isStorePath(path)) path = canonPath(path, true);
if (!state.store->isInStore(path))
throw EvalError({
- .hint = hintfmt("path '%1%' is not in the Nix store", path),
+ .msg = hintfmt("path '%1%' is not in the Nix store", path),
.errPos = pos
});
auto path2 = state.store->toStorePath(path).first;

@@ -1247,7 +1254,7 @@ static void prim_pathExists(EvalState & state, const Pos & pos, Value * * args,
state.realiseContext(context);
} catch (InvalidPathError & e) {
throw EvalError({
- .hint = hintfmt(
+ .msg = hintfmt(
"cannot check the existence of '%1%', since path '%2%' is not valid",
path, e.path),
.errPos = pos

@@ -1324,7 +1331,7 @@ static void prim_readFile(EvalState & state, const Pos & pos, Value * * args, Va
state.realiseContext(context);
} catch (InvalidPathError & e) {
throw EvalError({
- .hint = hintfmt("cannot read '%1%', since path '%2%' is not valid", path, e.path),
+ .msg = hintfmt("cannot read '%1%', since path '%2%' is not valid", path, e.path),
.errPos = pos
});
}

@@ -1363,7 +1370,7 @@ static void prim_findFile(EvalState & state, const Pos & pos, Value * * args, Va
i = v2.attrs->find(state.symbols.create("path"));
if (i == v2.attrs->end())
throw EvalError({
- .hint = hintfmt("attribute 'path' missing"),
+ .msg = hintfmt("attribute 'path' missing"),
.errPos = pos
});

@@ -1374,7 +1381,7 @@ static void prim_findFile(EvalState & state, const Pos & pos, Value * * args, Va
state.realiseContext(context);
} catch (InvalidPathError & e) {
throw EvalError({
- .hint = hintfmt("cannot find '%1%', since path '%2%' is not valid", path, e.path),
+ .msg = hintfmt("cannot find '%1%', since path '%2%' is not valid", path, e.path),
.errPos = pos
});
}

@@ -1400,7 +1407,7 @@ static void prim_hashFile(EvalState & state, const Pos & pos, Value * * args, Va
std::optional<HashType> ht = parseHashType(type);
if (!ht)
throw Error({
- .hint = hintfmt("unknown hash type '%1%'", type),
+ .msg = hintfmt("unknown hash type '%1%'", type),
.errPos = pos
});

@@ -1430,7 +1437,7 @@ static void prim_readDir(EvalState & state, const Pos & pos, Value * * args, Val
state.realiseContext(ctx);
} catch (InvalidPathError & e) {
throw EvalError({
- .hint = hintfmt("cannot read '%1%', since path '%2%' is not valid", path, e.path),
+ .msg = hintfmt("cannot read '%1%', since path '%2%' is not valid", path, e.path),
.errPos = pos
});
}

@@ -1650,7 +1657,7 @@ static void prim_toFile(EvalState & state, const Pos & pos, Value * * args, Valu
for (auto path : context) {
if (path.at(0) != '/')
throw EvalError( {
- .hint = hintfmt(
+ .msg = hintfmt(
"in 'toFile': the file named '%1%' must not contain a reference "
"to a derivation but contains (%2%)",
name, path),

@@ -1801,14 +1808,14 @@ static void prim_filterSource(EvalState & state, const Pos & pos, Value * * args
Path path = state.coerceToPath(pos, *args[1], context);
if (!context.empty())
throw EvalError({
- .hint = hintfmt("string '%1%' cannot refer to other paths", path),
+ .msg = hintfmt("string '%1%' cannot refer to other paths", path),
.errPos = pos
});

state.forceValue(*args[0], pos);
if (args[0]->type() != nFunction)
throw TypeError({
- .hint = hintfmt(
+ .msg = hintfmt(
"first argument in call to 'filterSource' is not a function but %1%",
showType(*args[0])),
.errPos = pos
@@ -1875,7 +1882,7 @@ static void prim_path(EvalState & state, const Pos & pos, Value * * args, Value
path = state.coerceToPath(*attr.pos, *attr.value, context);
if (!context.empty())
throw EvalError({
- .hint = hintfmt("string '%1%' cannot refer to other paths", path),
+ .msg = hintfmt("string '%1%' cannot refer to other paths", path),
.errPos = *attr.pos
});
} else if (attr.name == state.sName)

@@ -1889,13 +1896,13 @@ static void prim_path(EvalState & state, const Pos & pos, Value * * args, Value
expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, *attr.pos), htSHA256);
else
throw EvalError({
- .hint = hintfmt("unsupported argument '%1%' to 'addPath'", attr.name),
+ .msg = hintfmt("unsupported argument '%1%' to 'addPath'", attr.name),
.errPos = *attr.pos
});
}
if (path.empty())
throw EvalError({
- .hint = hintfmt("'path' required"),
+ .msg = hintfmt("'path' required"),
.errPos = pos
});
if (name.empty())

@@ -2010,7 +2017,7 @@ void prim_getAttr(EvalState & state, const Pos & pos, Value * * args, Value & v)
Bindings::iterator i = args[1]->attrs->find(state.symbols.create(attr));
if (i == args[1]->attrs->end())
throw EvalError({
- .hint = hintfmt("attribute '%1%' missing", attr),
+ .msg = hintfmt("attribute '%1%' missing", attr),
.errPos = pos
});
// !!! add to stack trace?

@@ -2142,7 +2149,7 @@ static void prim_listToAttrs(EvalState & state, const Pos & pos, Value * * args,
Bindings::iterator j = v2.attrs->find(state.sName);
if (j == v2.attrs->end())
throw TypeError({
- .hint = hintfmt("'name' attribute missing in a call to 'listToAttrs'"),
+ .msg = hintfmt("'name' attribute missing in a call to 'listToAttrs'"),
.errPos = pos
});
string name = state.forceStringNoCtx(*j->value, pos);

@@ -2152,7 +2159,7 @@ static void prim_listToAttrs(EvalState & state, const Pos & pos, Value * * args,
Bindings::iterator j2 = v2.attrs->find(state.symbols.create(state.sValue));
if (j2 == v2.attrs->end())
throw TypeError({
- .hint = hintfmt("'value' attribute missing in a call to 'listToAttrs'"),
+ .msg = hintfmt("'value' attribute missing in a call to 'listToAttrs'"),
.errPos = pos
});
v.attrs->push_back(Attr(sym, j2->value, j2->pos));
@@ -2258,7 +2265,7 @@ static void prim_functionArgs(EvalState & state, const Pos & pos, Value * * args
}
if (!args[0]->isLambda())
throw TypeError({
- .hint = hintfmt("'functionArgs' requires a function"),
+ .msg = hintfmt("'functionArgs' requires a function"),
.errPos = pos
});

@@ -2352,7 +2359,7 @@ static void elemAt(EvalState & state, const Pos & pos, Value & list, int n, Valu
state.forceList(list, pos);
if (n < 0 || (unsigned int) n >= list.listSize())
throw Error({
- .hint = hintfmt("list index %1% is out of bounds", n),
+ .msg = hintfmt("list index %1% is out of bounds", n),
.errPos = pos
});
state.forceValue(*list.listElems()[n], pos);

@@ -2400,7 +2407,7 @@ static void prim_tail(EvalState & state, const Pos & pos, Value * * args, Value
state.forceList(*args[0], pos);
if (args[0]->listSize() == 0)
throw Error({
- .hint = hintfmt("'tail' called on an empty list"),
+ .msg = hintfmt("'tail' called on an empty list"),
.errPos = pos
});

@@ -2639,7 +2646,7 @@ static void prim_genList(EvalState & state, const Pos & pos, Value * * args, Val

if (len < 0)
throw EvalError({
- .hint = hintfmt("cannot create list of size %1%", len),
+ .msg = hintfmt("cannot create list of size %1%", len),
.errPos = pos
});

@@ -2890,7 +2897,7 @@ static void prim_div(EvalState & state, const Pos & pos, Value * * args, Value &
NixFloat f2 = state.forceFloat(*args[1], pos);
if (f2 == 0)
throw EvalError({
- .hint = hintfmt("division by zero"),
+ .msg = hintfmt("division by zero"),
.errPos = pos
});

@@ -2902,7 +2909,7 @@ static void prim_div(EvalState & state, const Pos & pos, Value * * args, Value &
/* Avoid division overflow as it might raise SIGFPE. */
if (i1 == std::numeric_limits<NixInt>::min() && i2 == -1)
throw EvalError({
- .hint = hintfmt("overflow in integer division"),
+ .msg = hintfmt("overflow in integer division"),
.errPos = pos
});
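As an aside on the hunk above: the guard exists because the one signed-division result that cannot be represented is INT64_MIN / -1, and on most hardware that division traps (seen as SIGFPE) rather than wrapping. A minimal, self-contained sketch of the same check, using plain std::runtime_error instead of Nix's EvalError (safeDiv is a made-up name for illustration):

```cpp
#include <cstdint>
#include <iostream>
#include <limits>
#include <stdexcept>

// Hypothetical helper, not part of Nix: guarded 64-bit signed division.
static int64_t safeDiv(int64_t a, int64_t b)
{
    if (b == 0)
        throw std::runtime_error("division by zero");
    // INT64_MIN / -1 would be INT64_MAX + 1, which does not fit; on x86 the
    // idiv instruction traps, so refuse it up front.
    if (a == std::numeric_limits<int64_t>::min() && b == -1)
        throw std::runtime_error("overflow in integer division");
    return a / b;
}

int main()
{
    try {
        std::cout << safeDiv(std::numeric_limits<int64_t>::min(), -1) << "\n";
    } catch (std::exception & e) {
        std::cerr << e.what() << "\n";  // prints "overflow in integer division"
    }
}
```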
@@ -3033,7 +3040,7 @@ static void prim_substring(EvalState & state, const Pos & pos, Value * * args, V

if (start < 0)
throw EvalError({
- .hint = hintfmt("negative start position in 'substring'"),
+ .msg = hintfmt("negative start position in 'substring'"),
.errPos = pos
});

@@ -3084,7 +3091,7 @@ static void prim_hashString(EvalState & state, const Pos & pos, Value * * args,
std::optional<HashType> ht = parseHashType(type);
if (!ht)
throw Error({
- .hint = hintfmt("unknown hash type '%1%'", type),
+ .msg = hintfmt("unknown hash type '%1%'", type),
.errPos = pos
});

@@ -3148,12 +3155,12 @@ void prim_match(EvalState & state, const Pos & pos, Value * * args, Value & v)
if (e.code() == std::regex_constants::error_space) {
// limit is _GLIBCXX_REGEX_STATE_LIMIT for libstdc++
throw EvalError({
- .hint = hintfmt("memory limit exceeded by regular expression '%s'", re),
+ .msg = hintfmt("memory limit exceeded by regular expression '%s'", re),
.errPos = pos
});
} else {
throw EvalError({
- .hint = hintfmt("invalid regular expression '%s'", re),
+ .msg = hintfmt("invalid regular expression '%s'", re),
.errPos = pos
});
}
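For context on the std::regex_error handling above: libstdc++ reports "regex too expensive to evaluate" as error_space, which the code distinguishes from a syntactically invalid pattern. A rough standalone sketch of the same distinction (the pattern and input are arbitrary, chosen only to illustrate the two branches):

```cpp
#include <iostream>
#include <regex>
#include <string>

int main()
{
    std::string re = "foo(";   // deliberately malformed, so construction throws
    try {
        std::regex r(re, std::regex::extended);
        std::smatch m;
        std::string s = "foobar";
        std::regex_match(s, m, r);
    } catch (std::regex_error & e) {
        // error_space is how libstdc++ signals hitting its internal state
        // limit; anything else is treated as a bad pattern.
        if (e.code() == std::regex_constants::error_space)
            std::cerr << "memory limit exceeded by regular expression '" << re << "'\n";
        else
            std::cerr << "invalid regular expression '" << re << "'\n";
    }
}
```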
@@ -3256,12 +3263,12 @@ static void prim_split(EvalState & state, const Pos & pos, Value * * args, Value
if (e.code() == std::regex_constants::error_space) {
// limit is _GLIBCXX_REGEX_STATE_LIMIT for libstdc++
throw EvalError({
- .hint = hintfmt("memory limit exceeded by regular expression '%s'", re),
+ .msg = hintfmt("memory limit exceeded by regular expression '%s'", re),
.errPos = pos
});
} else {
throw EvalError({
- .hint = hintfmt("invalid regular expression '%s'", re),
+ .msg = hintfmt("invalid regular expression '%s'", re),
.errPos = pos
});
}

@@ -3341,7 +3348,7 @@ static void prim_replaceStrings(EvalState & state, const Pos & pos, Value * * ar
state.forceList(*args[1], pos);
if (args[0]->listSize() != args[1]->listSize())
throw EvalError({
- .hint = hintfmt("'from' and 'to' arguments to 'replaceStrings' have different lengths"),
+ .msg = hintfmt("'from' and 'to' arguments to 'replaceStrings' have different lengths"),
.errPos = pos
});
@@ -147,7 +147,7 @@ static void prim_appendContext(EvalState & state, const Pos & pos, Value * * arg
for (auto & i : *args[1]->attrs) {
if (!state.store->isStorePath(i.name))
throw EvalError({
- .hint = hintfmt("Context key '%s' is not a store path", i.name),
+ .msg = hintfmt("Context key '%s' is not a store path", i.name),
.errPos = *i.pos
});
if (!settings.readOnlyMode)

@@ -164,7 +164,7 @@ static void prim_appendContext(EvalState & state, const Pos & pos, Value * * arg
if (state.forceBool(*iter->value, *iter->pos)) {
if (!isDerivation(i.name)) {
throw EvalError({
- .hint = hintfmt("Tried to add all-outputs context of %s, which is not a derivation, to a string", i.name),
+ .msg = hintfmt("Tried to add all-outputs context of %s, which is not a derivation, to a string", i.name),
.errPos = *i.pos
});
}

@@ -177,7 +177,7 @@ static void prim_appendContext(EvalState & state, const Pos & pos, Value * * arg
state.forceList(*iter->value, *iter->pos);
if (iter->value->listSize() && !isDerivation(i.name)) {
throw EvalError({
- .hint = hintfmt("Tried to add derivation output context of %s, which is not a derivation, to a string", i.name),
+ .msg = hintfmt("Tried to add derivation output context of %s, which is not a derivation, to a string", i.name),
.errPos = *i.pos
});
}
@@ -38,14 +38,14 @@ static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * ar
name = state.forceStringNoCtx(*attr.value, *attr.pos);
else
throw EvalError({
- .hint = hintfmt("unsupported argument '%s' to 'fetchMercurial'", attr.name),
+ .msg = hintfmt("unsupported argument '%s' to 'fetchMercurial'", attr.name),
.errPos = *attr.pos
});
}

if (url.empty())
throw EvalError({
- .hint = hintfmt("'url' argument required"),
+ .msg = hintfmt("'url' argument required"),
.errPos = pos
});
@@ -115,7 +115,7 @@ static void fetchTree(

if (!attrs.count("type"))
throw Error({
- .hint = hintfmt("attribute 'type' is missing in call to 'fetchTree'"),
+ .msg = hintfmt("attribute 'type' is missing in call to 'fetchTree'"),
.errPos = pos
});

@@ -153,6 +153,7 @@ static void prim_fetchTree(EvalState & state, const Pos & pos, Value * * args, V
fetchTree(state, pos, args, v, std::nullopt);
}

+ // FIXME: document
static RegisterPrimOp primop_fetchTree("fetchTree", 1, prim_fetchTree);

static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,

@@ -177,14 +178,14 @@ static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
name = state.forceStringNoCtx(*attr.value, *attr.pos);
else
throw EvalError({
- .hint = hintfmt("unsupported argument '%s' to '%s'", attr.name, who),
+ .msg = hintfmt("unsupported argument '%s' to '%s'", attr.name, who),
.errPos = *attr.pos
});
}

if (!url)
throw EvalError({
- .hint = hintfmt("'url' argument required"),
+ .msg = hintfmt("'url' argument required"),
.errPos = pos
});
} else
@@ -82,7 +82,7 @@ static void prim_fromTOML(EvalState & state, const Pos & pos, Value * * args, Va
visit(v, parser(tomlStream).parse());
} catch (std::runtime_error & e) {
throw EvalError({
- .hint = hintfmt("while parsing a TOML string: %s", e.what()),
+ .msg = hintfmt("while parsing a TOML string: %s", e.what()),
.errPos = pos
});
}
@@ -132,7 +132,14 @@ std::pair<Tree, Input> Input::fetch(ref<Store> store) const
}
}

- auto [tree, input] = scheme->fetch(store, *this);
+ auto [tree, input] = [&]() -> std::pair<Tree, Input> {
+     try {
+         return scheme->fetch(store, *this);
+     } catch (Error & e) {
+         e.addTrace({}, "while fetching the input '%s'", to_string());
+         throw;
+     }
+ }();

if (tree.actualPath == "")
tree.actualPath = store->toRealPath(tree.storePath);
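The hunk above uses an immediately invoked lambda so the structured binding stays a single initialisation while the call is wrapped in a try/catch that attaches context before rethrowing. A minimal sketch of the same idiom outside Nix (loadRaw and fetchResource are hypothetical names, and plain std::runtime_error stands in for nix::Error):

```cpp
#include <iostream>
#include <stdexcept>
#include <string>

// Hypothetical low-level call that may fail.
static std::string loadRaw(const std::string & url)
{
    throw std::runtime_error("connection refused");   // simulate a failure
}

static std::string fetchResource(const std::string & url)
{
    // The lambda lets `result` be initialised in one expression while the
    // inner call is still wrapped in a try/catch that adds context.
    auto result = [&]() -> std::string {
        try {
            return loadRaw(url);
        } catch (std::runtime_error & e) {
            throw std::runtime_error("while fetching '" + url + "': " + e.what());
        }
    }();
    return result;
}

int main()
{
    try {
        fetchResource("https://example.org/tarball");
    } catch (std::exception & e) {
        std::cerr << e.what() << "\n";   // prints the contextualised message
    }
}
```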
@@ -64,7 +64,6 @@ DownloadFileResult downloadFile(

if (res.cached) {
assert(cached);
- assert(request.expectedETag == res.etag);
storePath = std::move(cached->storePath);
} else {
StringSink sink;
@@ -11,18 +11,21 @@ MixCommonArgs::MixCommonArgs(const string & programName)
.longName = "verbose",
.shortName = 'v',
.description = "Increase the logging verbosity level.",
+ .category = loggingCategory,
.handler = {[]() { verbosity = (Verbosity) (verbosity + 1); }},
});

addFlag({
.longName = "quiet",
.description = "Decrease the logging verbosity level.",
+ .category = loggingCategory,
.handler = {[]() { verbosity = verbosity > lvlError ? (Verbosity) (verbosity - 1) : lvlError; }},
});

addFlag({
.longName = "debug",
.description = "Set the logging verbosity level to 'debug'.",
+ .category = loggingCategory,
.handler = {[]() { verbosity = lvlDebug; }},
});

@@ -52,6 +55,7 @@ MixCommonArgs::MixCommonArgs(const string & programName)
addFlag({
.longName = "log-format",
.description = "Set the format of log output; one of `raw`, `internal-json`, `bar` or `bar-with-logs`.",
+ .category = loggingCategory,
.labels = {"format"},
.handler = {[](std::string format) { setLogFormat(format); }},
});

@@ -66,7 +70,7 @@ MixCommonArgs::MixCommonArgs(const string & programName)
}}
});

- std::string cat = "config";
+ std::string cat = "Options to override configuration settings";
globalConfig.convertToArgs(*this, cat);

// Backward compatibility hack: nix-env already had a --system flag.
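For readers unfamiliar with the addFlag({...}) style being introduced in these hunks: each flag is a small aggregate filled in with C++20 designated initializers and a handler callback. The following is only a toy sketch of that shape, not the real nix::Args API (Flag, Args, and parse here are invented for illustration):

```cpp
#include <functional>
#include <iostream>
#include <map>
#include <string>

struct Flag {
    std::string longName;
    char shortName = 0;
    std::string description;
    std::function<void()> handler;
};

struct Args {
    std::map<std::string, Flag> flags;
    void addFlag(Flag f) { flags["--" + f.longName] = std::move(f); }
    // Very simplified: only recognises exact "--long-name" tokens.
    void parse(int argc, char * * argv) {
        for (int i = 1; i < argc; ++i)
            if (auto it = flags.find(argv[i]); it != flags.end())
                it->second.handler();
    }
};

int main(int argc, char * * argv)
{
    int verbosity = 0;
    Args args;
    args.addFlag({
        .longName = "verbose",
        .shortName = 'v',
        .description = "Increase the logging verbosity level.",
        .handler = {[&]() { ++verbosity; }},
    });
    args.parse(argc, argv);
    std::cout << "verbosity = " << verbosity << "\n";
}
```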
@@ -4,6 +4,9 @@

namespace nix {

+ //static constexpr auto commonArgsCategory = "Miscellaneous common options";
+ static constexpr auto loggingCategory = "Logging-related options";
+
struct MixCommonArgs : virtual Args
{
string programName;

@@ -16,7 +19,12 @@ struct MixDryRun : virtual Args

MixDryRun()
{
- mkFlag(0, "dry-run", "Show what this command would do without doing it.", &dryRun);
+ addFlag({
+     .longName = "dry-run",
+     .description = "Show what this command would do without doing it.",
+     //.category = commonArgsCategory,
+     .handler = {&dryRun, true},
+ });
}
};

@@ -26,7 +34,12 @@ struct MixJSON : virtual Args

MixJSON()
{
- mkFlag(0, "json", "Produce output in JSON format, suitable for consumption by another program.", &json);
+ addFlag({
+     .longName = "json",
+     .description = "Produce output in JSON format, suitable for consumption by another program.",
+     //.category = commonArgsCategory,
+     .handler = {&json, true},
+ });
}
};
@@ -229,11 +229,17 @@ LegacyArgs::LegacyArgs(const std::string & programName,
intSettingAlias(0, "max-silent-time", "Number of seconds of silence before a build is killed.", "max-silent-time");
intSettingAlias(0, "timeout", "Number of seconds before a build is killed.", "timeout");

- mkFlag(0, "readonly-mode", "Do not write to the Nix store.",
-     &settings.readOnlyMode);
+ addFlag({
+     .longName = "readonly-mode",
+     .description = "Do not write to the Nix store.",
+     .handler = {&settings.readOnlyMode, true},
+ });

- mkFlag(0, "no-gc-warning", "Disable warnings about not using `--add-root`.",
-     &gcWarning, false);
+ addFlag({
+     .longName = "no-gc-warning",
+     .description = "Disable warnings about not using `--add-root`.",
+     .handler = {&gcWarning, true},
+ });

addFlag({
.longName = "store",
@@ -108,13 +108,6 @@ public:

void narFromPath(const StorePath & path, Sink & sink) override;

- BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv,
-     BuildMode buildMode) override
- { unsupported("buildDerivation"); }
-
- void ensurePath(const StorePath & path) override
- { unsupported("ensurePath"); }
-
ref<FSAccessor> getFSAccessor() override;

void addSignatures(const StorePath & storePath, const StringSet & sigs) override;
@@ -87,8 +87,8 @@ void handleDiffHook(
printError(chomp(diffRes.second));
} catch (Error & error) {
ErrorInfo ei = error.info();
- ei.hint = hintfmt("diff hook execution failed: %s",
-     (error.info().hint.has_value() ? error.info().hint->str() : ""));
+ // FIXME: wrap errors.
+ ei.msg = hintfmt("diff hook execution failed: %s", ei.msg.str());
logError(ei);
}
}

@@ -439,12 +439,9 @@ void DerivationGoal::repairClosure()
/* Check each path (slow!). */
for (auto & i : outputClosure) {
if (worker.pathContentsGood(i)) continue;
- logError({
-     .name = "Corrupt path in closure",
-     .hint = hintfmt(
-         "found corrupted or missing path '%s' in the output closure of '%s'",
-         worker.store.printStorePath(i), worker.store.printStorePath(drvPath))
- });
+ printError(
+     "found corrupted or missing path '%s' in the output closure of '%s'",
+     worker.store.printStorePath(i), worker.store.printStorePath(drvPath));
auto drvPath2 = outputsToDrv.find(i);
if (drvPath2 == outputsToDrv.end())
addWaitee(upcast_goal(worker.makeSubstitutionGoal(i, Repair)));

@@ -539,12 +536,12 @@ void DerivationGoal::inputsRealised()
if (!optRealizedInput)
throw Error(
"derivation '%s' requires output '%s' from input derivation '%s', which is supposedly realized already, yet we still don't know what path corresponds to that output",
- worker.store.printStorePath(drvPath), j, worker.store.printStorePath(drvPath));
+ worker.store.printStorePath(drvPath), j, worker.store.printStorePath(depDrvPath));
worker.store.computeFSClosure(*optRealizedInput, inputPaths);
} else
throw Error(
"derivation '%s' requires non-existent output '%s' from input derivation '%s'",
- worker.store.printStorePath(drvPath), j, worker.store.printStorePath(drvPath));
+ worker.store.printStorePath(drvPath), j, worker.store.printStorePath(depDrvPath));
}
}
}
@@ -597,9 +594,17 @@ void DerivationGoal::tryToBuild()
PathSet lockFiles;
/* FIXME: Should lock something like the drv itself so we don't build same
   CA drv concurrently */
- for (auto & i : drv->outputsAndOptPaths(worker.store))
-     if (i.second.second)
-         lockFiles.insert(worker.store.Store::toRealPath(*i.second.second));
+ if (dynamic_cast<LocalStore *>(&worker.store))
+     /* If we aren't a local store, we might need to use the local store as
+        a build remote, but that would cause a deadlock. */
+     /* FIXME: Make it so we can use ourselves as a build remote even if we
+        are the local store (separate locking for building vs scheduling? */
+     /* FIXME: find some way to lock for scheduling for the other stores so
+        a forking daemon with --store still won't farm out redundant builds.
+     */
+     for (auto & i : drv->outputsAndOptPaths(worker.store))
+         if (i.second.second)
+             lockFiles.insert(worker.store.Store::toRealPath(*i.second.second));

if (!outputLocks.lockPaths(lockFiles, "", false)) {
if (!actLock)

@@ -681,6 +686,12 @@ void DerivationGoal::tryToBuild()

void DerivationGoal::tryLocalBuild() {
/* Make sure that we are allowed to start a build. */
+ if (!dynamic_cast<LocalStore *>(&worker.store)) {
+     throw Error(
+         "unable to build with a primary store that isn't a local store; "
+         "either pass a different '--store' or enable remote builds."
+         "\nhttps://nixos.org/nix/manual/#chap-distributed-builds");
+ }
unsigned int curBuilds = worker.getNrLocalBuilds();
if (curBuilds >= settings.maxBuildJobs) {
worker.waitForBuildSlot(shared_from_this());
@@ -849,14 +860,16 @@ void DerivationGoal::buildDone()
So instead, check if the disk is (nearly) full now. If
so, we don't mark this build as a permanent failure. */
#if HAVE_STATVFS
- uint64_t required = 8ULL * 1024 * 1024; // FIXME: make configurable
- struct statvfs st;
- if (statvfs(worker.store.realStoreDir.c_str(), &st) == 0 &&
-     (uint64_t) st.f_bavail * st.f_bsize < required)
-     diskFull = true;
- if (statvfs(tmpDir.c_str(), &st) == 0 &&
-     (uint64_t) st.f_bavail * st.f_bsize < required)
-     diskFull = true;
+ if (auto localStore = dynamic_cast<LocalStore *>(&worker.store)) {
+     uint64_t required = 8ULL * 1024 * 1024; // FIXME: make configurable
+     struct statvfs st;
+     if (statvfs(localStore->realStoreDir.c_str(), &st) == 0 &&
+         (uint64_t) st.f_bavail * st.f_bsize < required)
+         diskFull = true;
+     if (statvfs(tmpDir.c_str(), &st) == 0 &&
+         (uint64_t) st.f_bavail * st.f_bsize < required)
+         diskFull = true;
+ }
#endif

deleteTmpDir(false);
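A standalone sketch of the statvfs(3) check used in the hunk above: it computes the bytes available to unprivileged users (f_bavail, not f_bfree) and compares them against a threshold. The 8 MiB limit mirrors the hard-coded value above; the path is just an example:

```cpp
#include <sys/statvfs.h>
#include <cstdint>
#include <cstdio>

// Returns true if the filesystem containing `path` has fewer than `required`
// bytes available to unprivileged users.
static bool nearlyFull(const char * path, uint64_t required)
{
    struct statvfs st;
    if (statvfs(path, &st) != 0)
        return false;                            // treat errors as "don't know"
    return (uint64_t) st.f_bavail * st.f_bsize < required;
}

int main()
{
    uint64_t required = 8ULL * 1024 * 1024;      // 8 MiB, as in the hunk above
    std::printf("/tmp nearly full: %s\n", nearlyFull("/tmp", required) ? "yes" : "no");
}
```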
@@ -877,9 +890,14 @@ void DerivationGoal::buildDone()
statusToString(status));

if (!logger->isVerbose() && !logTail.empty()) {
- msg += (format("; last %d log lines:") % logTail.size()).str();
- for (auto & line : logTail)
-     msg += "\n  " + line;
+ msg += fmt(";\nlast %d log lines:\n", logTail.size());
+ for (auto & line : logTail) {
+     msg += "> ";
+     msg += line;
+     msg += "\n";
+ }
+ msg += fmt("For full logs, run '" ANSI_BOLD "nix log %s" ANSI_NORMAL "'.",
+     worker.store.printStorePath(drvPath));
}

if (diskFull)
@@ -1026,7 +1044,14 @@ HookReply DerivationGoal::tryBuildHook()
whether the hook wishes to perform the build. */
string reply;
while (true) {
- string s = readLine(worker.hook->fromHook.readSide.get());
+ auto s = [&]() {
+     try {
+         return readLine(worker.hook->fromHook.readSide.get());
+     } catch (Error & e) {
+         e.addTrace({}, "while reading the response from the build hook");
+         throw e;
+     }
+ }();
if (handleJSONLogMessage(s, worker.act, worker.hook->activities, true))
;
else if (string(s, 0, 2) == "# ") {

@@ -1055,12 +1080,9 @@ HookReply DerivationGoal::tryBuildHook()

} catch (SysError & e) {
if (e.errNo == EPIPE) {
- logError({
-     .name = "Build hook died",
-     .hint = hintfmt(
-         "build hook died unexpectedly: %s",
-         chomp(drainFD(worker.hook->fromHook.readSide.get())))
- });
+ printError(
+     "build hook died unexpectedly: %s",
+     chomp(drainFD(worker.hook->fromHook.readSide.get())));
worker.hook = 0;
return rpDecline;
} else

@@ -1069,7 +1091,12 @@ HookReply DerivationGoal::tryBuildHook()

hook = std::move(worker.hook);

- machineName = readLine(hook->fromHook.readSide.get());
+ try {
+     machineName = readLine(hook->fromHook.readSide.get());
+ } catch (Error & e) {
+     e.addTrace({}, "while reading the machine name from the build hook");
+     throw e;
+ }

/* Tell the hook all the inputs that have to be copied to the
remote system. */
@@ -1216,12 +1243,15 @@ void DerivationGoal::startBuilder()
useChroot = !(derivationIsImpure(derivationType)) && !noChroot;
}

- if (worker.store.storeDir != worker.store.realStoreDir) {
- #if __linux__
-     useChroot = true;
- #else
-     throw Error("building using a diverted store is not supported on this platform");
- #endif
+ if (auto localStoreP = dynamic_cast<LocalStore *>(&worker.store)) {
+     auto & localStore = *localStoreP;
+     if (localStore.storeDir != localStore.realStoreDir) {
+ #if __linux__
+         useChroot = true;
+ #else
+         throw Error("building using a diverted store is not supported on this platform");
+ #endif
+     }
}

/* Create a temporary directory where the build will take

@@ -1754,8 +1784,17 @@ void DerivationGoal::startBuilder()
worker.childStarted(shared_from_this(), {builderOut.readSide.get()}, true, true);

/* Check if setting up the build environment failed. */
+ std::vector<std::string> msgs;
while (true) {
- string msg = readLine(builderOut.readSide.get());
+ string msg = [&]() {
+     try {
+         return readLine(builderOut.readSide.get());
+     } catch (Error & e) {
+         e.addTrace({}, "while waiting for the build environment to initialize (previous messages: %s)",
+             concatStringsSep("|", msgs));
+         throw e;
+     }
+ }();
if (string(msg, 0, 1) == "\2") break;
if (string(msg, 0, 1) == "\1") {
FdSource source(builderOut.readSide.get());

@@ -1764,6 +1803,7 @@ void DerivationGoal::startBuilder()
throw ex;
}
debug("sandbox setup: " + msg);
+ msgs.push_back(std::move(msg));
}
}

@@ -2181,7 +2221,8 @@ void DerivationGoal::startDaemon()
Store::Params params;
params["path-info-cache-size"] = "0";
params["store"] = worker.store.storeDir;
- params["root"] = worker.store.rootDir;
+ if (auto localStore = dynamic_cast<LocalStore *>(&worker.store))
+     params["root"] = localStore->rootDir;
params["state"] = "/no-such-path";
params["log"] = "/no-such-path";
auto store = make_ref<RestrictedStore>(params,
@@ -3068,10 +3109,7 @@ void DerivationGoal::registerOutputs()
auto rewriteOutput = [&]() {
/* Apply hash rewriting if necessary. */
if (!outputRewrites.empty()) {
- logWarning({
-     .name = "Rewriting hashes",
-     .hint = hintfmt("rewriting hashes in '%1%'; cross fingers", actualPath),
- });
+ warn("rewriting hashes in '%1%'; cross fingers", actualPath);

/* FIXME: this is in-memory. */
StringSink sink;

@@ -3269,7 +3307,13 @@ void DerivationGoal::registerOutputs()
}
}

+ auto localStoreP = dynamic_cast<LocalStore *>(&worker.store);
+ if (!localStoreP)
+     throw Unsupported("can only register outputs with local store, but this is %s", worker.store.getUri());
+ auto & localStore = *localStoreP;
+
if (buildMode == bmCheck) {

if (!worker.store.isValidPath(newInfo.path)) continue;
ValidPathInfo oldInfo(*worker.store.queryPathInfo(newInfo.path));
if (newInfo.narHash != oldInfo.narHash) {

@@ -3294,8 +3338,8 @@ void DerivationGoal::registerOutputs()
/* Since we verified the build, it's now ultimately trusted. */
if (!oldInfo.ultimate) {
oldInfo.ultimate = true;
- worker.store.signPathInfo(oldInfo);
- worker.store.registerValidPaths({{oldInfo.path, oldInfo}});
+ localStore.signPathInfo(oldInfo);
+ localStore.registerValidPaths({{oldInfo.path, oldInfo}});
}

continue;

@@ -3311,13 +3355,13 @@ void DerivationGoal::registerOutputs()
}

if (curRound == nrRounds) {
- worker.store.optimisePath(actualPath); // FIXME: combine with scanForReferences()
+ localStore.optimisePath(actualPath); // FIXME: combine with scanForReferences()
worker.markContentsGood(newInfo.path);
}

newInfo.deriver = drvPath;
newInfo.ultimate = true;
- worker.store.signPathInfo(newInfo);
+ localStore.signPathInfo(newInfo);

finish(newInfo.path);

@@ -3325,7 +3369,7 @@ void DerivationGoal::registerOutputs()
isn't statically known so that we can safely unlock the path before
the next iteration */
if (newInfo.ca)
- worker.store.registerValidPaths({{newInfo.path, newInfo}});
+ localStore.registerValidPaths({{newInfo.path, newInfo}});

infos.emplace(outputName, std::move(newInfo));
}

@@ -3359,10 +3403,7 @@ void DerivationGoal::registerOutputs()
if (settings.enforceDeterminism)
throw NotDeterministic(hint);

- logError({
-     .name = "Output determinism error",
-     .hint = hint
- });
+ printError(hint);

curRound = nrRounds; // we know enough, bail out early
}

@@ -3398,11 +3439,16 @@ void DerivationGoal::registerOutputs()
paths referenced by each of them. If there are cycles in the
outputs, this will fail. */
{
+ auto localStoreP = dynamic_cast<LocalStore *>(&worker.store);
+ if (!localStoreP)
+     throw Unsupported("can only register outputs with local store, but this is %s", worker.store.getUri());
+ auto & localStore = *localStoreP;
+
ValidPathInfos infos2;
for (auto & [outputName, newInfo] : infos) {
infos2.insert_or_assign(newInfo.path, newInfo);
}
- worker.store.registerValidPaths(infos2);
+ localStore.registerValidPaths(infos2);
}

/* In case of a fixed-output derivation hash mismatch, throw an
@@ -3600,7 +3646,12 @@ Path DerivationGoal::openLogFile()
auto baseName = std::string(baseNameOf(worker.store.printStorePath(drvPath)));

/* Create a log file. */
- Path dir = fmt("%s/%s/%s/", worker.store.logDir, worker.store.drvsLogDir, string(baseName, 0, 2));
+ Path logDir;
+ if (auto localStore = dynamic_cast<LocalStore *>(&worker.store))
+     logDir = localStore->logDir;
+ else
+     logDir = settings.nixLogDir;
+ Path dir = fmt("%s/%s/%s/", logDir, LocalFSStore::drvsLogDir, string(baseName, 0, 2));
createDirs(dir);

Path logFileName = fmt("%s/%s%s", dir, string(baseName, 2),
@@ -5,7 +5,7 @@

namespace nix {

- void LocalStore::buildPaths(const std::vector<StorePathWithOutputs> & drvPaths, BuildMode buildMode)
+ void Store::buildPaths(const std::vector<StorePathWithOutputs> & drvPaths, BuildMode buildMode)
{
Worker worker(*this);

@@ -43,7 +43,7 @@ void LocalStore::buildPaths(const std::vector<StorePathWithOutputs> & drvPaths,
}
}

- BuildResult LocalStore::buildDerivation(const StorePath & drvPath, const BasicDerivation & drv,
+ BuildResult Store::buildDerivation(const StorePath & drvPath, const BasicDerivation & drv,
BuildMode buildMode)
{
Worker worker(*this);

@@ -63,7 +63,7 @@ BuildResult LocalStore::buildDerivation(const StorePath & drvPath, const BasicDe
}


- void LocalStore::ensurePath(const StorePath & path)
+ void Store::ensurePath(const StorePath & path)
{
/* If the path is already valid, we're done. */
if (isValidPath(path)) return;
@@ -142,15 +142,10 @@ void SubstitutionGoal::tryNext()
/* Bail out early if this substituter lacks a valid
signature. LocalStore::addToStore() also checks for this, but
only after we've downloaded the path. */
- if (worker.store.requireSigs
-     && !sub->isTrusted
-     && !info->checkSignatures(worker.store, worker.store.getPublicKeys()))
+ if (!sub->isTrusted && worker.store.pathInfoIsTrusted(*info))
{
- logWarning({
-     .name = "Invalid path signature",
-     .hint = hintfmt("substituter '%s' does not have a valid signature for path '%s'",
-         sub->getUri(), worker.store.printStorePath(storePath))
- });
+ warn("substituter '%s' does not have a valid signature for path '%s'",
+     sub->getUri(), worker.store.printStorePath(storePath));
tryNext();
return;
}
@@ -8,7 +8,7 @@

namespace nix {

- Worker::Worker(LocalStore & store)
+ Worker::Worker(Store & store)
: act(*logger, actRealise)
, actDerivations(*logger, actBuilds)
, actSubstitutions(*logger, actCopyPaths)

@@ -229,7 +229,9 @@ void Worker::run(const Goals & _topGoals)

checkInterrupt();

- store.autoGC(false);
+ // TODO GC interface?
+ if (auto localStore = dynamic_cast<LocalStore *>(&store))
+     localStore->autoGC(false);

/* Call every wake goal (in the ordering established by
CompareGoalPtrs). */

@@ -454,10 +456,7 @@ bool Worker::pathContentsGood(const StorePath & path)
}
pathContentsGoodCache.insert_or_assign(path, res);
if (!res)
- logError({
-     .name = "Corrupted path",
-     .hint = hintfmt("path '%s' is corrupted or missing!", store.printStorePath(path))
- });
+ printError("path '%s' is corrupted or missing!", store.printStorePath(path));
return res;
}

@@ -2,9 +2,12 @@

#include "types.hh"
#include "lock.hh"
- #include "local-store.hh"
+ #include "store-api.hh"
#include "goal.hh"

+ #include <future>
+ #include <thread>
+
namespace nix {

/* Forward definition. */

@@ -102,7 +105,7 @@ public:
/* Set if at least one derivation is not deterministic in check mode. */
bool checkMismatch;

- LocalStore & store;
+ Store & store;

std::unique_ptr<HookInstance> hook;

@@ -124,7 +127,7 @@ public:
it answers with "decline-permanently", we don't try again. */
bool tryBuildHook = true;

- Worker(LocalStore & store);
+ Worker(Store & store);
~Worker();

/* Make a goal (with caching). */
|
@ -22,10 +22,7 @@ static void createLinks(State & state, const Path & srcDir, const Path & dstDir,
|
|||
srcFiles = readDirectory(srcDir);
|
||||
} catch (SysError & e) {
|
||||
if (e.errNo == ENOTDIR) {
|
||||
logWarning({
|
||||
.name = "Create links - directory",
|
||||
.hint = hintfmt("not including '%s' in the user environment because it's not a directory", srcDir)
|
||||
});
|
||||
warn("not including '%s' in the user environment because it's not a directory", srcDir);
|
||||
return;
|
||||
}
|
||||
throw;
|
||||
|
@ -44,10 +41,7 @@ static void createLinks(State & state, const Path & srcDir, const Path & dstDir,
|
|||
throw SysError("getting status of '%1%'", srcFile);
|
||||
} catch (SysError & e) {
|
||||
if (e.errNo == ENOENT || e.errNo == ENOTDIR) {
|
||||
logWarning({
|
||||
.name = "Create links - skipping symlink",
|
||||
.hint = hintfmt("skipping dangling symlink '%s'", dstFile)
|
||||
});
|
||||
warn("skipping dangling symlink '%s'", dstFile);
|
||||
continue;
|
||||
}
|
||||
throw;
|
||||
|
|
|
@ -55,13 +55,6 @@ struct DummyStore : public virtual DummyStoreConfig, public virtual Store
|
|||
void narFromPath(const StorePath & path, Sink & sink) override
|
||||
{ unsupported("narFromPath"); }
|
||||
|
||||
void ensurePath(const StorePath & path) override
|
||||
{ unsupported("ensurePath"); }
|
||||
|
||||
BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv,
|
||||
BuildMode buildMode) override
|
||||
{ unsupported("buildDerivation"); }
|
||||
|
||||
std::optional<const Realisation> queryRealisation(const DrvOutput&) override
|
||||
{ unsupported("queryRealisation"); }
|
||||
};
|
||||
|
|
|
@@ -375,6 +375,13 @@ struct curlFileTransfer : public FileTransfer
else if (code == CURLE_OK && successfulStatuses.count(httpStatus))
{
result.cached = httpStatus == 304;
+
+ // In 2021, GitHub responds to If-None-Match with 304,
+ // but omits ETag. We just use the If-None-Match etag
+ // since 304 implies they are the same.
+ if (httpStatus == 304 && result.etag == "")
+     result.etag = request.expectedETag;
+
act.progress(result.bodySize, result.bodySize);
done = true;
callback(std::move(result));

@@ -632,11 +639,7 @@ struct curlFileTransfer : public FileTransfer
workerThreadMain();
} catch (nix::Interrupted & e) {
} catch (std::exception & e) {
- logError({
-     .name = "File transfer",
-     .hint = hintfmt("unexpected error in download thread: %s",
-         e.what())
- });
+ printError("unexpected error in download thread: %s", e.what());
}

{

@@ -852,11 +855,10 @@ FileTransferError::FileTransferError(FileTransfer::Error error, std::shared_ptr<
// FIXME: Due to https://github.com/NixOS/nix/issues/3841 we don't know how
// to print different messages for different verbosity levels. For now
// we add some heuristics for detecting when we want to show the response.
- if (response && (response->size() < 1024 || response->find("<html>") != string::npos)) {
-     err.hint = hintfmt("%1%\n\nresponse body:\n\n%2%", normaltxt(hf.str()), *response);
- } else {
-     err.hint = hf;
- }
+ if (response && (response->size() < 1024 || response->find("<html>") != string::npos))
+     err.msg = hintfmt("%1%\n\nresponse body:\n\n%2%", normaltxt(hf.str()), chomp(*response));
+ else
+     err.msg = hf;
}

bool isUri(const string & s)
|
@ -150,12 +150,7 @@ LocalStore::LocalStore(const Params & params)
|
|||
|
||||
struct group * gr = getgrnam(settings.buildUsersGroup.get().c_str());
|
||||
if (!gr)
|
||||
logError({
|
||||
.name = "'build-users-group' not found",
|
||||
.hint = hintfmt(
|
||||
"warning: the group '%1%' specified in 'build-users-group' does not exist",
|
||||
settings.buildUsersGroup)
|
||||
});
|
||||
printError("warning: the group '%1%' specified in 'build-users-group' does not exist", settings.buildUsersGroup);
|
||||
else {
|
||||
struct stat st;
|
||||
if (stat(realStoreDir.c_str(), &st))
|
||||
|
@ -1098,7 +1093,6 @@ void LocalStore::invalidatePath(State & state, const StorePath & path)
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
const PublicKeys & LocalStore::getPublicKeys()
|
||||
{
|
||||
auto state(_state.lock());
|
||||
|
@ -1107,11 +1101,15 @@ const PublicKeys & LocalStore::getPublicKeys()
|
|||
return *state->publicKeys;
|
||||
}
|
||||
|
||||
bool LocalStore::pathInfoIsTrusted(const ValidPathInfo & info)
|
||||
{
|
||||
return requireSigs && !info.checkSignatures(*this, getPublicKeys());
|
||||
}
|
||||
|
||||
void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
|
||||
RepairFlag repair, CheckSigsFlag checkSigs)
|
||||
{
|
||||
if (requireSigs && checkSigs && !info.checkSignatures(*this, getPublicKeys()))
|
||||
if (checkSigs && pathInfoIsTrusted(info))
|
||||
throw Error("cannot add path '%s' because it lacks a valid signature", printStorePath(info.path));
|
||||
|
||||
addTempRoot(info.path);
|
||||
|
@ -1403,12 +1401,8 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
|
|||
Path linkPath = linksDir + "/" + link.name;
|
||||
string hash = hashPath(htSHA256, linkPath).first.to_string(Base32, false);
|
||||
if (hash != link.name) {
|
||||
logError({
|
||||
.name = "Invalid hash",
|
||||
.hint = hintfmt(
|
||||
"link '%s' was modified! expected hash '%s', got '%s'",
|
||||
linkPath, link.name, hash)
|
||||
});
|
||||
printError("link '%s' was modified! expected hash '%s', got '%s'",
|
||||
linkPath, link.name, hash);
|
||||
if (repair) {
|
||||
if (unlink(linkPath.c_str()) == 0)
|
||||
printInfo("removed link '%s'", linkPath);
|
||||
|
@ -1441,11 +1435,8 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
|
|||
auto current = hashSink->finish();
|
||||
|
||||
if (info->narHash != nullHash && info->narHash != current.first) {
|
||||
logError({
|
||||
.name = "Invalid hash - path modified",
|
||||
.hint = hintfmt("path '%s' was modified! expected hash '%s', got '%s'",
|
||||
printStorePath(i), info->narHash.to_string(Base32, true), current.first.to_string(Base32, true))
|
||||
});
|
||||
printError("path '%s' was modified! expected hash '%s', got '%s'",
|
||||
printStorePath(i), info->narHash.to_string(Base32, true), current.first.to_string(Base32, true));
|
||||
if (repair) repairPath(i); else errors = true;
|
||||
} else {
|
||||
|
||||
|
@ -1496,10 +1487,7 @@ void LocalStore::verifyPath(const Path & pathS, const StringSet & store,
|
|||
if (!done.insert(pathS).second) return;
|
||||
|
||||
if (!isStorePath(pathS)) {
|
||||
logError({
|
||||
.name = "Nix path not found",
|
||||
.hint = hintfmt("path '%s' is not in the Nix store", pathS)
|
||||
});
|
||||
printError("path '%s' is not in the Nix store", pathS);
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -1522,10 +1510,7 @@ void LocalStore::verifyPath(const Path & pathS, const StringSet & store,
|
|||
auto state(_state.lock());
|
||||
invalidatePath(*state, path);
|
||||
} else {
|
||||
logError({
|
||||
.name = "Missing path with referrers",
|
||||
.hint = hintfmt("path '%s' disappeared, but it still has valid referrers!", pathS)
|
||||
});
|
||||
printError("path '%s' disappeared, but it still has valid referrers!", pathS);
|
||||
if (repair)
|
||||
try {
|
||||
repairPath(path);
|
||||
|
|
|
@ -136,6 +136,8 @@ public:
|
|||
void querySubstitutablePathInfos(const StorePathCAMap & paths,
|
||||
SubstitutablePathInfos & infos) override;
|
||||
|
||||
bool pathInfoIsTrusted(const ValidPathInfo &) override;
|
||||
|
||||
void addToStore(const ValidPathInfo & info, Source & source,
|
||||
RepairFlag repair, CheckSigsFlag checkSigs) override;
|
||||
|
||||
|
@ -145,15 +147,6 @@ public:
|
|||
StorePath addTextToStore(const string & name, const string & s,
|
||||
const StorePathSet & references, RepairFlag repair) override;
|
||||
|
||||
void buildPaths(
|
||||
const std::vector<StorePathWithOutputs> & paths,
|
||||
BuildMode buildMode) override;
|
||||
|
||||
BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv,
|
||||
BuildMode buildMode) override;
|
||||
|
||||
void ensurePath(const StorePath & path) override;
|
||||
|
||||
void addTempRoot(const StorePath & path) override;
|
||||
|
||||
void addIndirectRoot(const Path & path) override;
|
||||
|
|
|
@ -126,16 +126,13 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
|
|||
NixOS (example: $fontconfig/var/cache being modified). Skip
|
||||
those files. FIXME: check the modification time. */
|
||||
if (S_ISREG(st.st_mode) && (st.st_mode & S_IWUSR)) {
|
||||
logWarning({
|
||||
.name = "Suspicious file",
|
||||
.hint = hintfmt("skipping suspicious writable file '%1%'", path)
|
||||
});
|
||||
warn("skipping suspicious writable file '%1%'", path);
|
||||
return;
|
||||
}
|
||||
|
||||
/* This can still happen on top-level files. */
|
||||
if (st.st_nlink > 1 && inodeHash.count(st.st_ino)) {
|
||||
debug(format("'%1%' is already linked, with %2% other file(s)") % path % (st.st_nlink - 2));
|
||||
debug("'%s' is already linked, with %d other file(s)", path, st.st_nlink - 2);
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -191,10 +188,7 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
|
|||
}
|
||||
|
||||
if (st.st_size != stLink.st_size) {
|
||||
logWarning({
|
||||
.name = "Corrupted link",
|
||||
.hint = hintfmt("removing corrupted link '%1%'", linkPath)
|
||||
});
|
||||
warn("removing corrupted link '%s'", linkPath);
|
||||
unlink(linkPath.c_str());
|
||||
goto retry;
|
||||
}
|
||||
|
@ -229,10 +223,7 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
|
|||
/* Atomically replace the old file with the new hard link. */
|
||||
if (rename(tempLink.c_str(), path.c_str()) == -1) {
|
||||
if (unlink(tempLink.c_str()) == -1)
|
||||
logError({
|
||||
.name = "Unlink error",
|
||||
.hint = hintfmt("unable to unlink '%1%'", tempLink)
|
||||
});
|
||||
printError("unable to unlink '%1%'", tempLink);
|
||||
if (errno == EMLINK) {
|
||||
/* Some filesystems generate too many links on the rename,
|
||||
rather than on the original link. (Probably it
|
||||
|
|
|
@ -46,4 +46,35 @@ Realisation Realisation::fromJSON(
|
|||
};
|
||||
}
|
||||
|
||||
StorePath RealisedPath::path() const {
|
||||
return std::visit([](auto && arg) { return arg.getPath(); }, raw);
|
||||
}
|
||||
|
||||
void RealisedPath::closure(
|
||||
Store& store,
|
||||
const RealisedPath::Set& startPaths,
|
||||
RealisedPath::Set& ret)
|
||||
{
|
||||
// FIXME: This only builds the store-path closure, not the real realisation
|
||||
// closure
|
||||
StorePathSet initialStorePaths, pathsClosure;
|
||||
for (auto& path : startPaths)
|
||||
initialStorePaths.insert(path.path());
|
||||
store.computeFSClosure(initialStorePaths, pathsClosure);
|
||||
ret.insert(startPaths.begin(), startPaths.end());
|
||||
ret.insert(pathsClosure.begin(), pathsClosure.end());
|
||||
}
|
||||
|
||||
void RealisedPath::closure(Store& store, RealisedPath::Set & ret) const
|
||||
{
|
||||
RealisedPath::closure(store, {*this}, ret);
|
||||
}
|
||||
|
||||
RealisedPath::Set RealisedPath::closure(Store& store) const
|
||||
{
|
||||
RealisedPath::Set ret;
|
||||
closure(store, ret);
|
||||
return ret;
|
||||
}
|
||||
|
||||
} // namespace nix
|
||||
|
|
|
@ -2,6 +2,7 @@
|
|||
|
||||
#include "path.hh"
|
||||
#include <nlohmann/json_fwd.hpp>
|
||||
#include "comparator.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
|
@ -17,13 +18,7 @@ struct DrvOutput {
|
|||
|
||||
static DrvOutput parse(const std::string &);
|
||||
|
||||
bool operator<(const DrvOutput& other) const { return to_pair() < other.to_pair(); }
|
||||
bool operator==(const DrvOutput& other) const { return to_pair() == other.to_pair(); }
|
||||
|
||||
private:
|
||||
// Just to make comparison operators easier to write
|
||||
std::pair<Hash, std::string> to_pair() const
|
||||
{ return std::make_pair(drvHash, outputName); }
|
||||
GENERATE_CMP(DrvOutput, me->drvHash, me->outputName);
|
||||
};
|
||||
|
||||
struct Realisation {
|
||||
|
@ -32,8 +27,47 @@ struct Realisation {

    nlohmann::json toJSON() const;
    static Realisation fromJSON(const nlohmann::json& json, const std::string& whence);

    StorePath getPath() const { return outPath; }

    GENERATE_CMP(Realisation, me->id, me->outPath);
};

typedef std::map<DrvOutput, Realisation> DrvOutputs;
struct OpaquePath {
    StorePath path;

    StorePath getPath() const { return path; }

    GENERATE_CMP(OpaquePath, me->path);
};


/**
 * A store path with all the history of how it went into the store
 */
struct RealisedPath {
    /*
     * A path is either the result of the realisation of a derivation or
     * an opaque blob that has been directly added to the store
     */
    using Raw = std::variant<Realisation, OpaquePath>;
    Raw raw;

    using Set = std::set<RealisedPath>;

    RealisedPath(StorePath path) : raw(OpaquePath{path}) {}
    RealisedPath(Realisation r) : raw(r) {}

    /**
     * Get the raw store path associated to this
     */
    StorePath path() const;

    void closure(Store& store, Set& ret) const;
    static void closure(Store& store, const Set& startPaths, Set& ret);
    Set closure(Store& store) const;

    GENERATE_CMP(RealisedPath, me->raw);
};

}
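
As a rough usage sketch of the API introduced above (the Store reference, the path variable and the helper function are hypothetical, not part of this change), a caller wraps a plain store path and asks for its closure:

```
#include <iostream>
#include "realisation.hh"
#include "store-api.hh"

// Minimal sketch, assuming an already-opened Store and an existing StorePath.
void printClosure(nix::Store & store, const nix::StorePath & somePath)
{
    nix::RealisedPath realised(somePath);                // stored as an OpaquePath
    nix::RealisedPath::Set closure = realised.closure(store);
    for (auto & member : closure)                        // store-path closure only, per the FIXME in realisation.cc
        std::cout << store.printStorePath(member.path()) << "\n";
}
```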
|
||||
|
|
|
@ -211,7 +211,7 @@ void handleSQLiteBusy(const SQLiteBusy & e)
|
|||
lastWarned = now;
|
||||
logWarning({
|
||||
.name = "Sqlite busy",
|
||||
.hint = hintfmt(e.what())
|
||||
.msg = hintfmt(e.what())
|
||||
});
|
||||
}
|
||||
|
||||
|
|
|
@ -394,7 +394,7 @@ OutputPathMap Store::queryDerivationOutputMap(const StorePath & path) {
|
|||
OutputPathMap result;
|
||||
for (auto & [outName, optOutPath] : resp) {
|
||||
if (!optOutPath)
|
||||
throw Error("output '%s' has no store path mapped to it", outName);
|
||||
throw Error("output '%s' of derivation '%s' has no store path mapped to it", outName, printStorePath(path));
|
||||
result.insert_or_assign(outName, *optOutPath);
|
||||
}
|
||||
return result;
|
||||
|
@ -747,29 +747,6 @@ const Store::Stats & Store::getStats()
|
|||
}
|
||||
|
||||
|
||||
void Store::buildPaths(const std::vector<StorePathWithOutputs> & paths, BuildMode buildMode)
|
||||
{
|
||||
StorePathSet paths2;
|
||||
|
||||
for (auto & path : paths) {
|
||||
if (path.path.isDerivation()) {
|
||||
auto outPaths = queryPartialDerivationOutputMap(path.path);
|
||||
for (auto & outputName : path.outputs) {
|
||||
auto currentOutputPathIter = outPaths.find(outputName);
|
||||
if (currentOutputPathIter == outPaths.end() ||
|
||||
!currentOutputPathIter->second ||
|
||||
!isValidPath(*currentOutputPathIter->second))
|
||||
unsupported("buildPaths");
|
||||
}
|
||||
} else
|
||||
paths2.insert(path.path);
|
||||
}
|
||||
|
||||
if (queryValidPaths(paths2).size() != paths2.size())
|
||||
unsupported("buildPaths");
|
||||
}
|
||||
|
||||
|
||||
void copyStorePath(ref<Store> srcStore, ref<Store> dstStore,
|
||||
const StorePath & storePath, RepairFlag repair, CheckSigsFlag checkSigs)
|
||||
{
|
||||
|
|
|
@ -374,6 +374,21 @@ public:
    void queryPathInfo(const StorePath & path,
        Callback<ref<const ValidPathInfo>> callback) noexcept;

    /* Check whether the given valid path info is sufficiently attested, by
       either being signed by a trusted public key or content-addressed, in
       order to be included in the given store.

       These same checks would be performed in addToStore, but this allows an
       earlier failure in the case where dependencies need to be added too, but
       addToStore wouldn't fail until those dependencies are added. Also,
       we don't really want to add the dependencies listed in a nar info we
       don't trust anyway.
    */
    virtual bool pathInfoIsTrusted(const ValidPathInfo &)
    {
        return true;
    }

protected:

    virtual void queryPathInfoUncached(const StorePath & path,
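
The comment describes an up-front trust check; as a rough sketch of the calling pattern it enables (the function and variable names are hypothetical, and it assumes the hook returns true for paths the destination store would accept):

```
// Fail before copying anything if any path in the set would be rejected later.
void checkAllBeforeCopy(nix::Store & dstStore,
    const std::vector<nix::ValidPathInfo> & infos)
{
    for (auto & info : infos)
        if (!dstStore.pathInfoIsTrusted(info))
            throw nix::Error("refusing to copy untrusted path '%s'",
                dstStore.printStorePath(info.path));
    // ...only now perform the actual addToStore() calls...
}
```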
|
@ -521,17 +536,17 @@ public:
|
|||
explicitly choosing to allow it).
|
||||
*/
|
||||
virtual BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv,
|
||||
BuildMode buildMode = bmNormal) = 0;
|
||||
BuildMode buildMode = bmNormal);
|
||||
|
||||
/* Ensure that a path is valid. If it is not currently valid, it
|
||||
may be made valid by running a substitute (if defined for the
|
||||
path). */
|
||||
virtual void ensurePath(const StorePath & path) = 0;
|
||||
virtual void ensurePath(const StorePath & path);
|
||||
|
||||
/* Add a store path as a temporary root of the garbage collector.
|
||||
The root disappears as soon as we exit. */
|
||||
virtual void addTempRoot(const StorePath & path)
|
||||
{ unsupported("addTempRoot"); }
|
||||
{ warn("not creating temp root, store doesn't support GC"); }
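
For orientation, a sketch of the calling pattern this default supports (the wrapper function is hypothetical; the two calls mirror the addToStore hunk earlier in this diff):

```
// Pin the path before registering it so a concurrent GC cannot delete it mid-import.
void importPath(nix::Store & store, const nix::ValidPathInfo & info, nix::Source & source)
{
    store.addTempRoot(info.path);   // on stores without GC support this now only warns
    store.addToStore(info, source, nix::NoRepair, nix::CheckSigs);
}
```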
|
||||
|
||||
/* Add an indirect root, which is merely a symlink to `path' from
|
||||
/nix/var/nix/gcroots/auto/<hash of `path'>. `path' is supposed
|
||||
|
|
|
@ -14,6 +14,8 @@ void Args::addFlag(Flag && flag_)
|
|||
assert(flag->handler.arity == flag->labels.size());
|
||||
assert(flag->longName != "");
|
||||
longFlags[flag->longName] = flag;
|
||||
for (auto & alias : flag->aliases)
|
||||
longFlags[alias] = flag;
|
||||
if (flag->shortName) shortFlags[flag->shortName] = flag;
|
||||
}
|
||||
|
||||
|
@ -96,41 +98,6 @@ void Args::parseCmdline(const Strings & _cmdline)
|
|||
processArgs(pendingArgs, true);
|
||||
}
|
||||
|
||||
void Args::printHelp(const string & programName, std::ostream & out)
|
||||
{
|
||||
std::cout << fmt(ANSI_BOLD "Usage:" ANSI_NORMAL " %s " ANSI_ITALIC "FLAGS..." ANSI_NORMAL, programName);
|
||||
for (auto & exp : expectedArgs) {
|
||||
std::cout << renderLabels({exp.label});
|
||||
// FIXME: handle arity > 1
|
||||
if (exp.handler.arity == ArityAny) std::cout << "...";
|
||||
if (exp.optional) std::cout << "?";
|
||||
}
|
||||
std::cout << "\n";
|
||||
|
||||
auto s = description();
|
||||
if (s != "")
|
||||
std::cout << "\n" ANSI_BOLD "Summary:" ANSI_NORMAL " " << s << ".\n";
|
||||
|
||||
if (longFlags.size()) {
|
||||
std::cout << "\n";
|
||||
std::cout << ANSI_BOLD "Flags:" ANSI_NORMAL "\n";
|
||||
printFlags(out);
|
||||
}
|
||||
}
|
||||
|
||||
void Args::printFlags(std::ostream & out)
|
||||
{
|
||||
Table2 table;
|
||||
for (auto & flag : longFlags) {
|
||||
if (hiddenCategories.count(flag.second->category)) continue;
|
||||
table.push_back(std::make_pair(
|
||||
(flag.second->shortName ? std::string("-") + flag.second->shortName + ", " : " ")
|
||||
+ "--" + flag.first + renderLabels(flag.second->labels),
|
||||
flag.second->description));
|
||||
}
|
||||
printTable(out, table);
|
||||
}
|
||||
|
||||
bool Args::processFlag(Strings::iterator & pos, Strings::iterator end)
|
||||
{
|
||||
assert(pos != end);
|
||||
|
@ -226,12 +193,12 @@ nlohmann::json Args::toJSON()
|
|||
|
||||
for (auto & [name, flag] : longFlags) {
|
||||
auto j = nlohmann::json::object();
|
||||
if (flag->aliases.count(name)) continue;
|
||||
if (flag->shortName)
|
||||
j["shortName"] = std::string(1, flag->shortName);
|
||||
if (flag->description != "")
|
||||
j["description"] = flag->description;
|
||||
if (flag->category != "")
|
||||
j["category"] = flag->category;
|
||||
j["category"] = flag->category;
|
||||
if (flag->handler.arity != ArityAny)
|
||||
j["arity"] = flag->handler.arity;
|
||||
if (!flag->labels.empty())
|
||||
|
@ -331,28 +298,6 @@ Strings argvToStrings(int argc, char * * argv)
|
|||
return args;
|
||||
}
|
||||
|
||||
std::string renderLabels(const Strings & labels)
|
||||
{
|
||||
std::string res;
|
||||
for (auto label : labels) {
|
||||
for (auto & c : label) c = std::toupper(c);
|
||||
res += " " ANSI_ITALIC + label + ANSI_NORMAL;
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
void printTable(std::ostream & out, const Table2 & table)
|
||||
{
|
||||
size_t max = 0;
|
||||
for (auto & row : table)
|
||||
max = std::max(max, filterANSIEscapes(row.first, true).size());
|
||||
for (auto & row : table) {
|
||||
out << " " << row.first
|
||||
<< std::string(max - filterANSIEscapes(row.first, true).size() + 2, ' ')
|
||||
<< row.second << "\n";
|
||||
}
|
||||
}
|
||||
|
||||
MultiCommand::MultiCommand(const Commands & commands)
|
||||
: commands(commands)
|
||||
{
|
||||
|
@ -376,38 +321,6 @@ MultiCommand::MultiCommand(const Commands & commands)
|
|||
categories[Command::catDefault] = "Available commands";
|
||||
}
|
||||
|
||||
void MultiCommand::printHelp(const string & programName, std::ostream & out)
|
||||
{
|
||||
if (command) {
|
||||
command->second->printHelp(programName + " " + command->first, out);
|
||||
return;
|
||||
}
|
||||
|
||||
out << fmt(ANSI_BOLD "Usage:" ANSI_NORMAL " %s " ANSI_ITALIC "COMMAND FLAGS... ARGS..." ANSI_NORMAL "\n", programName);
|
||||
|
||||
out << "\n" ANSI_BOLD "Common flags:" ANSI_NORMAL "\n";
|
||||
printFlags(out);
|
||||
|
||||
std::map<Command::Category, std::map<std::string, ref<Command>>> commandsByCategory;
|
||||
|
||||
for (auto & [name, commandFun] : commands) {
|
||||
auto command = commandFun();
|
||||
commandsByCategory[command->category()].insert_or_assign(name, command);
|
||||
}
|
||||
|
||||
for (auto & [category, commands] : commandsByCategory) {
|
||||
out << fmt("\n" ANSI_BOLD "%s:" ANSI_NORMAL "\n", categories[category]);
|
||||
|
||||
Table2 table;
|
||||
for (auto & [name, command] : commands) {
|
||||
auto descr = command->description();
|
||||
if (!descr.empty())
|
||||
table.push_back(std::make_pair(name, descr));
|
||||
}
|
||||
printTable(out, table);
|
||||
}
|
||||
}
|
||||
|
||||
bool MultiCommand::processFlag(Strings::iterator & pos, Strings::iterator end)
|
||||
{
|
||||
if (Args::processFlag(pos, end)) return true;
|
||||
|
@ -430,7 +343,10 @@ nlohmann::json MultiCommand::toJSON()
|
|||
for (auto & [name, commandFun] : commands) {
|
||||
auto command = commandFun();
|
||||
auto j = command->toJSON();
|
||||
j["category"] = categories[command->category()];
|
||||
auto cat = nlohmann::json::object();
|
||||
cat["id"] = command->category();
|
||||
cat["description"] = categories[command->category()];
|
||||
j["category"] = std::move(cat);
|
||||
cmds[name] = std::move(j);
|
||||
}
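
For orientation (values below are illustrative; the description string comes from the categories map initialised earlier), each command entry in the resulting JSON now carries a category object instead of a bare string:

```
// Approximate shape of one entry in the "commands" object:
//
//   "some-command": {
//     ...fields from command->toJSON()...,
//     "category": {
//       "id": 0,
//       "description": "Available commands"
//     }
//   }
```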
|
||||
|
||||
|
|
|
@ -20,8 +20,6 @@ public:
|
|||
wrong. */
|
||||
void parseCmdline(const Strings & cmdline);
|
||||
|
||||
virtual void printHelp(const string & programName, std::ostream & out);
|
||||
|
||||
/* Return a short one-line description of the command. */
|
||||
virtual std::string description() { return ""; }
|
||||
|
||||
|
@ -93,12 +91,13 @@ protected:
|
|||
{ }
|
||||
};
|
||||
|
||||
/* Flags. */
|
||||
/* Options. */
|
||||
struct Flag
|
||||
{
|
||||
typedef std::shared_ptr<Flag> ptr;
|
||||
|
||||
std::string longName;
|
||||
std::set<std::string> aliases;
|
||||
char shortName = 0;
|
||||
std::string description;
|
||||
std::string category;
|
||||
|
@ -115,8 +114,6 @@ protected:
|
|||
|
||||
virtual bool processFlag(Strings::iterator & pos, Strings::iterator end);
|
||||
|
||||
virtual void printFlags(std::ostream & out);
|
||||
|
||||
/* Positional arguments. */
|
||||
struct ExpectedArg
|
||||
{
|
||||
|
@ -139,27 +136,6 @@ public:
|
|||
|
||||
void addFlag(Flag && flag);
|
||||
|
||||
/* Helper functions for constructing flags / positional
|
||||
arguments. */
|
||||
|
||||
void mkFlag(char shortName, const std::string & name,
|
||||
const std::string & description, bool * dest)
|
||||
{
|
||||
mkFlag(shortName, name, description, dest, true);
|
||||
}
|
||||
|
||||
template<class T>
|
||||
void mkFlag(char shortName, const std::string & longName, const std::string & description,
|
||||
T * dest, const T & value)
|
||||
{
|
||||
addFlag({
|
||||
.longName = longName,
|
||||
.shortName = shortName,
|
||||
.description = description,
|
||||
.handler = {[=]() { *dest = value; }}
|
||||
});
|
||||
}
|
||||
|
||||
void expectArgs(ExpectedArg && arg)
|
||||
{
|
||||
expectedArgs.emplace_back(std::move(arg));
|
||||
|
@ -223,8 +199,6 @@ public:
|
|||
|
||||
MultiCommand(const Commands & commands);
|
||||
|
||||
void printHelp(const string & programName, std::ostream & out) override;
|
||||
|
||||
bool processFlag(Strings::iterator & pos, Strings::iterator end) override;
|
||||
|
||||
bool processArgs(const Strings & args, bool finish) override;
|
||||
|
@ -234,14 +208,6 @@ public:
|
|||
|
||||
Strings argvToStrings(int argc, char * * argv);
|
||||
|
||||
/* Helper function for rendering argument labels. */
|
||||
std::string renderLabels(const Strings & labels);
|
||||
|
||||
/* Helper function for printing 2-column tables. */
|
||||
typedef std::vector<std::pair<std::string, std::string>> Table2;
|
||||
|
||||
void printTable(std::ostream & out, const Table2 & table);
|
||||
|
||||
struct Completion {
|
||||
std::string completion;
|
||||
std::string description;
|
||||
|
|
src/libutil/comparator.hh (new file, 30 lines)

@ -0,0 +1,30 @@
#pragma once

/* Awfully hacky generation of the comparison operators by doing a lexicographic
 * comparison between the chosen fields.
 *
 * ```
 * GENERATE_CMP(ClassName, me->field1, me->field2, ...)
 * ```
 *
 * will generate comparison operators semantically equivalent to:
 *
 * ```
 * bool operator<(const ClassName& other) {
 *   return std::make_tuple(field1, field2, ...) < std::make_tuple(other.field1, other.field2, ...);
 * }
 * ```
 */
#define GENERATE_ONE_CMP(COMPARATOR, MY_TYPE, FIELDS...) \
    bool operator COMPARATOR(const MY_TYPE& other) const { \
        const MY_TYPE* me = this; \
        auto fields1 = std::make_tuple( FIELDS ); \
        me = &other; \
        auto fields2 = std::make_tuple( FIELDS ); \
        return fields1 COMPARATOR fields2; \
    }
#define GENERATE_EQUAL(args...) GENERATE_ONE_CMP(==, args)
#define GENERATE_LEQ(args...) GENERATE_ONE_CMP(<, args)
#define GENERATE_CMP(args...) \
    GENERATE_EQUAL(args) \
    GENERATE_LEQ(args)
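
A small usage illustration (the struct is invented for this example): a single macro call replaces a hand-written operator== and operator<, which is exactly how DrvOutput uses it above.

```
#include "comparator.hh"
#include <string>
#include <tuple>   // GENERATE_ONE_CMP expands to std::make_tuple

struct ExamplePair
{
    int first;
    std::string second;

    // Generates operator== and operator< comparing (first, second) lexicographically.
    GENERATE_CMP(ExamplePair, me->first, me->second);
};
```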
|
|
@ -43,9 +43,9 @@ string showErrPos(const ErrPos & errPos)
|
|||
{
|
||||
if (errPos.line > 0) {
|
||||
if (errPos.column > 0) {
|
||||
return fmt("(%1%:%2%)", errPos.line, errPos.column);
|
||||
return fmt("%d:%d", errPos.line, errPos.column);
|
||||
} else {
|
||||
return fmt("(%1%)", errPos.line);
|
||||
return fmt("%d", errPos.line);
|
||||
}
|
||||
}
|
||||
else {
|
||||
|
@ -180,24 +180,20 @@ void printCodeLines(std::ostream & out,
|
|||
}
|
||||
}
|
||||
|
||||
void printAtPos(const string & prefix, const ErrPos & pos, std::ostream & out)
|
||||
void printAtPos(const ErrPos & pos, std::ostream & out)
|
||||
{
|
||||
if (pos)
|
||||
{
|
||||
if (pos) {
|
||||
switch (pos.origin) {
|
||||
case foFile: {
|
||||
out << prefix << ANSI_BLUE << "at: " << ANSI_YELLOW << showErrPos(pos) <<
|
||||
ANSI_BLUE << " in file: " << ANSI_NORMAL << pos.file;
|
||||
out << fmt(ANSI_BLUE "at " ANSI_YELLOW "%s:%s" ANSI_NORMAL ":", pos.file, showErrPos(pos));
|
||||
break;
|
||||
}
|
||||
case foString: {
|
||||
out << prefix << ANSI_BLUE << "at: " << ANSI_YELLOW << showErrPos(pos) <<
|
||||
ANSI_BLUE << " from string" << ANSI_NORMAL;
|
||||
out << fmt(ANSI_BLUE "at " ANSI_YELLOW "«string»:%s" ANSI_NORMAL ":", showErrPos(pos));
|
||||
break;
|
||||
}
|
||||
case foStdin: {
|
||||
out << prefix << ANSI_BLUE << "at: " << ANSI_YELLOW << showErrPos(pos) <<
|
||||
ANSI_BLUE << " from stdin" << ANSI_NORMAL;
|
||||
out << fmt(ANSI_BLUE "at " ANSI_YELLOW "«stdin»:%s" ANSI_NORMAL ":", showErrPos(pos));
|
||||
break;
|
||||
}
|
||||
default:
|
||||
|
@ -206,168 +202,108 @@ void printAtPos(const string & prefix, const ErrPos & pos, std::ostream & out)
|
|||
}
|
||||
}
|
||||
|
||||
static std::string indent(std::string_view indentFirst, std::string_view indentRest, std::string_view s)
|
||||
{
|
||||
std::string res;
|
||||
bool first = true;
|
||||
|
||||
while (!s.empty()) {
|
||||
auto end = s.find('\n');
|
||||
if (!first) res += "\n";
|
||||
res += chomp(std::string(first ? indentFirst : indentRest) + std::string(s.substr(0, end)));
|
||||
first = false;
|
||||
if (end == s.npos) break;
|
||||
s = s.substr(end + 1);
|
||||
}
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool showTrace)
|
||||
{
|
||||
auto errwidth = std::max<size_t>(getWindowSize().second, 20);
|
||||
string prefix = "";
|
||||
|
||||
string levelString;
|
||||
std::string prefix;
|
||||
switch (einfo.level) {
|
||||
case Verbosity::lvlError: {
|
||||
levelString = ANSI_RED;
|
||||
levelString += "error:";
|
||||
levelString += ANSI_NORMAL;
|
||||
prefix = ANSI_RED "error";
|
||||
break;
|
||||
}
|
||||
case Verbosity::lvlNotice: {
|
||||
prefix = ANSI_RED "note";
|
||||
break;
|
||||
}
|
||||
case Verbosity::lvlWarn: {
|
||||
levelString = ANSI_YELLOW;
|
||||
levelString += "warning:";
|
||||
levelString += ANSI_NORMAL;
|
||||
prefix = ANSI_YELLOW "warning";
|
||||
break;
|
||||
}
|
||||
case Verbosity::lvlInfo: {
|
||||
levelString = ANSI_GREEN;
|
||||
levelString += "info:";
|
||||
levelString += ANSI_NORMAL;
|
||||
prefix = ANSI_GREEN "info";
|
||||
break;
|
||||
}
|
||||
case Verbosity::lvlTalkative: {
|
||||
levelString = ANSI_GREEN;
|
||||
levelString += "talk:";
|
||||
levelString += ANSI_NORMAL;
|
||||
prefix = ANSI_GREEN "talk";
|
||||
break;
|
||||
}
|
||||
case Verbosity::lvlChatty: {
|
||||
levelString = ANSI_GREEN;
|
||||
levelString += "chat:";
|
||||
levelString += ANSI_NORMAL;
|
||||
prefix = ANSI_GREEN "chat";
|
||||
break;
|
||||
}
|
||||
case Verbosity::lvlVomit: {
|
||||
levelString = ANSI_GREEN;
|
||||
levelString += "vomit:";
|
||||
levelString += ANSI_NORMAL;
|
||||
prefix = ANSI_GREEN "vomit";
|
||||
break;
|
||||
}
|
||||
case Verbosity::lvlDebug: {
|
||||
levelString = ANSI_YELLOW;
|
||||
levelString += "debug:";
|
||||
levelString += ANSI_NORMAL;
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
levelString = fmt("invalid error level: %1%", einfo.level);
|
||||
prefix = ANSI_YELLOW "debug";
|
||||
break;
|
||||
}
|
||||
default:
|
||||
assert(false);
|
||||
}
|
||||
|
||||
auto ndl = prefix.length()
|
||||
+ filterANSIEscapes(levelString, true).length()
|
||||
+ 7
|
||||
+ einfo.name.length()
|
||||
+ einfo.programName.value_or("").length();
|
||||
auto dashwidth = std::max<int>(errwidth - ndl, 3);
|
||||
|
||||
std::string dashes(dashwidth, '-');
|
||||
|
||||
// divider.
|
||||
if (einfo.name != "")
|
||||
out << fmt("%1%%2%" ANSI_BLUE " --- %3% %4% %5%" ANSI_NORMAL,
|
||||
prefix,
|
||||
levelString,
|
||||
einfo.name,
|
||||
dashes,
|
||||
einfo.programName.value_or(""));
|
||||
// FIXME: show the program name as part of the trace?
|
||||
if (einfo.programName && einfo.programName != ErrorInfo::programName)
|
||||
prefix += fmt(" [%s]:" ANSI_NORMAL " ", einfo.programName.value_or(""));
|
||||
else
|
||||
out << fmt("%1%%2%" ANSI_BLUE " -----%3% %4%" ANSI_NORMAL,
|
||||
prefix,
|
||||
levelString,
|
||||
dashes,
|
||||
einfo.programName.value_or(""));
|
||||
prefix += ":" ANSI_NORMAL " ";
|
||||
|
||||
bool nl = false; // intersperse newline between sections.
|
||||
if (einfo.errPos.has_value() && (*einfo.errPos)) {
|
||||
out << prefix << std::endl;
|
||||
printAtPos(prefix, *einfo.errPos, out);
|
||||
nl = true;
|
||||
}
|
||||
std::ostringstream oss;
|
||||
oss << einfo.msg << "\n";
|
||||
|
||||
// description
|
||||
if (einfo.description != "") {
|
||||
if (nl)
|
||||
out << std::endl << prefix;
|
||||
out << std::endl << prefix << einfo.description;
|
||||
nl = true;
|
||||
}
|
||||
if (einfo.errPos.has_value() && *einfo.errPos) {
|
||||
oss << "\n";
|
||||
printAtPos(*einfo.errPos, oss);
|
||||
|
||||
if (einfo.errPos.has_value() && (*einfo.errPos)) {
|
||||
auto loc = getCodeLines(*einfo.errPos);
|
||||
|
||||
// lines of code.
|
||||
if (loc.has_value()) {
|
||||
if (nl)
|
||||
out << std::endl << prefix;
|
||||
printCodeLines(out, prefix, *einfo.errPos, *loc);
|
||||
nl = true;
|
||||
oss << "\n";
|
||||
printCodeLines(oss, "", *einfo.errPos, *loc);
|
||||
oss << "\n";
|
||||
}
|
||||
}
|
||||
|
||||
// hint
|
||||
if (einfo.hint.has_value()) {
|
||||
if (nl)
|
||||
out << std::endl << prefix;
|
||||
out << std::endl << prefix << *einfo.hint;
|
||||
nl = true;
|
||||
}
|
||||
|
||||
// traces
|
||||
if (showTrace && !einfo.traces.empty())
|
||||
{
|
||||
const string tracetitle(" show-trace ");
|
||||
|
||||
int fill = errwidth - tracetitle.length();
|
||||
int lw = 0;
|
||||
int rw = 0;
|
||||
const int min_dashes = 3;
|
||||
if (fill > min_dashes * 2) {
|
||||
if (fill % 2 != 0) {
|
||||
lw = fill / 2;
|
||||
rw = lw + 1;
|
||||
}
|
||||
else
|
||||
{
|
||||
lw = rw = fill / 2;
|
||||
}
|
||||
}
|
||||
else
|
||||
lw = rw = min_dashes;
|
||||
|
||||
if (nl)
|
||||
out << std::endl << prefix;
|
||||
|
||||
out << ANSI_BLUE << std::string(lw, '-') << tracetitle << std::string(rw, '-') << ANSI_NORMAL;
|
||||
|
||||
for (auto iter = einfo.traces.rbegin(); iter != einfo.traces.rend(); ++iter)
|
||||
{
|
||||
out << std::endl << prefix;
|
||||
out << ANSI_BLUE << "trace: " << ANSI_NORMAL << iter->hint.str();
|
||||
if (showTrace && !einfo.traces.empty()) {
|
||||
for (auto iter = einfo.traces.rbegin(); iter != einfo.traces.rend(); ++iter) {
|
||||
oss << "\n" << "… " << iter->hint.str() << "\n";
|
||||
|
||||
if (iter->pos.has_value() && (*iter->pos)) {
|
||||
auto pos = iter->pos.value();
|
||||
out << std::endl << prefix;
|
||||
printAtPos(prefix, pos, out);
|
||||
oss << "\n";
|
||||
printAtPos(pos, oss);
|
||||
|
||||
auto loc = getCodeLines(pos);
|
||||
if (loc.has_value())
|
||||
{
|
||||
out << std::endl << prefix;
|
||||
printCodeLines(out, prefix, pos, *loc);
|
||||
out << std::endl << prefix;
|
||||
if (loc.has_value()) {
|
||||
oss << "\n";
|
||||
printCodeLines(oss, "", pos, *loc);
|
||||
oss << "\n";
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
out << indent(prefix, std::string(filterANSIEscapes(prefix, true).size(), ' '), chomp(oss.str()));
|
||||
|
||||
return out;
|
||||
}
|
||||
}
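
To make the effect of this rewrite concrete: errors now render as one prefixed, indented block instead of the old dashed-divider layout. The call below mirrors the tests later in this diff, and the rendered text is only an approximation inferred from the code above (colours omitted, file name made up):

```
// Hypothetical call site; problem_file stands for a Symbol pointing at the file.
logError({
    .name = "example name",
    .msg = hintfmt("attribute '%1%' is missing", "foo"),
    .errPos = Pos(foFile, problem_file, 2, 13),
});

// Prints roughly:
//
//   error: attribute 'foo' is missing
//
//          at /some/file.nix:2:13:
//
//          ...the offending source lines, printed by printCodeLines()...
```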
|
||||
|
|
|
@ -107,9 +107,8 @@ struct Trace {
|
|||
|
||||
struct ErrorInfo {
|
||||
Verbosity level;
|
||||
string name;
|
||||
string description; // FIXME: remove? it seems to be barely used
|
||||
std::optional<hintformat> hint;
|
||||
string name; // FIXME: rename
|
||||
hintformat msg;
|
||||
std::optional<ErrPos> errPos;
|
||||
std::list<Trace> traces;
|
||||
|
||||
|
@ -133,23 +132,17 @@ public:
|
|||
|
||||
template<typename... Args>
|
||||
BaseError(unsigned int status, const Args & ... args)
|
||||
: err {.level = lvlError,
|
||||
.hint = hintfmt(args...)
|
||||
}
|
||||
: err { .level = lvlError, .msg = hintfmt(args...) }
|
||||
, status(status)
|
||||
{ }
|
||||
|
||||
template<typename... Args>
|
||||
BaseError(const std::string & fs, const Args & ... args)
|
||||
: err {.level = lvlError,
|
||||
.hint = hintfmt(fs, args...)
|
||||
}
|
||||
: err { .level = lvlError, .msg = hintfmt(fs, args...) }
|
||||
{ }
|
||||
|
||||
BaseError(hintformat hint)
|
||||
: err {.level = lvlError,
|
||||
.hint = hint
|
||||
}
|
||||
: err { .level = lvlError, .msg = hint }
|
||||
{ }
|
||||
|
||||
BaseError(ErrorInfo && e)
|
||||
|
@ -206,7 +199,7 @@ public:
|
|||
{
|
||||
errNo = errno;
|
||||
auto hf = hintfmt(args...);
|
||||
err.hint = hintfmt("%1%: %2%", normaltxt(hf.str()), strerror(errNo));
|
||||
err.msg = hintfmt("%1%: %2%", normaltxt(hf.str()), strerror(errNo));
|
||||
}
|
||||
|
||||
virtual const char* sname() const override { return "SysError"; }
|
||||
|
|
|
@ -184,7 +184,7 @@ struct JSONLogger : Logger {
|
|||
json["action"] = "msg";
|
||||
json["level"] = ei.level;
|
||||
json["msg"] = oss.str();
|
||||
json["raw_msg"] = ei.hint->str();
|
||||
json["raw_msg"] = ei.msg.str();
|
||||
|
||||
if (ei.errPos.has_value() && (*ei.errPos)) {
|
||||
json["line"] = ei.errPos->line;
|
||||
|
@ -305,10 +305,7 @@ bool handleJSONLogMessage(const std::string & msg,
|
|||
}
|
||||
|
||||
} catch (std::exception & e) {
|
||||
logError({
|
||||
.name = "JSON log message",
|
||||
.hint = hintfmt("bad log message from builder: %s", e.what())
|
||||
});
|
||||
printError("bad JSON log message from builder: %s", e.what());
|
||||
}
|
||||
|
||||
return true;
|
||||
|
|
|
@ -52,10 +52,7 @@ size_t threshold = 256 * 1024 * 1024;
|
|||
|
||||
static void warnLargeDump()
|
||||
{
|
||||
logWarning({
|
||||
.name = "Large path",
|
||||
.description = "dumping very large path (> 256 MiB); this may run out of memory"
|
||||
});
|
||||
warn("dumping very large path (> 256 MiB); this may run out of memory");
|
||||
}
|
||||
|
||||
|
||||
|
@ -306,8 +303,7 @@ Sink & operator << (Sink & sink, const Error & ex)
|
|||
<< "Error"
|
||||
<< info.level
|
||||
<< info.name
|
||||
<< info.description
|
||||
<< (info.hint ? info.hint->str() : "")
|
||||
<< info.msg.str()
|
||||
<< 0 // FIXME: info.errPos
|
||||
<< info.traces.size();
|
||||
for (auto & trace : info.traces) {
|
||||
|
@ -374,12 +370,14 @@ Error readError(Source & source)
{
    auto type = readString(source);
    assert(type == "Error");
    ErrorInfo info;
    info.level = (Verbosity) readInt(source);
    info.name = readString(source);
    info.description = readString(source);
    auto hint = readString(source);
    if (hint != "") info.hint = hintformat(std::move(format("%s") % hint));
    auto level = (Verbosity) readInt(source);
    auto name = readString(source);
    auto msg = readString(source);
    ErrorInfo info {
        .level = level,
        .name = name,
        .msg = hintformat(std::move(format("%s") % msg)),
    };
    auto havePos = readNum<size_t>(source);
    assert(havePos == 0);
    auto nrTraces = readNum<size_t>(source);
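
Read together with the operator<<(Sink &, const Error &) hunk just above, the error record on the wire now has the following field order (a summary of the two hunks, not additional protocol behaviour):

```
// Wire layout written by operator<<(Sink &, const Error &) and read by readError():
//   string   "Error"          // type tag
//   integer  level            // Verbosity
//   string   name             // still transmitted; see the "FIXME: rename" note in error.hh
//   string   msg              // replaces the old description + hint pair
//   integer  0                // position placeholder (FIXME: info.errPos)
//   integer  traces.size()    // followed by that many trace records
```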
|
||||
|
|
|
@ -1,3 +1,5 @@
|
|||
#if 0
|
||||
|
||||
#include "logging.hh"
|
||||
#include "nixexpr.hh"
|
||||
#include "util.hh"
|
||||
|
@ -41,8 +43,7 @@ namespace nix {
|
|||
|
||||
makeJSONLogger(*logger)->logEI({
|
||||
.name = "error name",
|
||||
.description = "error without any code lines.",
|
||||
.hint = hintfmt("this hint has %1% templated %2%!!",
|
||||
.msg = hintfmt("this hint has %1% templated %2%!!",
|
||||
"yellow",
|
||||
"values"),
|
||||
.errPos = Pos(foFile, problem_file, 02, 13)
|
||||
|
@ -62,7 +63,7 @@ namespace nix {
|
|||
throw TestError(e.info());
|
||||
} catch (Error &e) {
|
||||
ErrorInfo ei = e.info();
|
||||
ei.hint = hintfmt("%s; subsequent error message.", normaltxt(e.info().hint ? e.info().hint->str() : ""));
|
||||
ei.msg = hintfmt("%s; subsequent error message.", normaltxt(e.info().msg.str()));
|
||||
|
||||
testing::internal::CaptureStderr();
|
||||
logger->logEI(ei);
|
||||
|
@ -95,7 +96,6 @@ namespace nix {
|
|||
|
||||
logger->logEI({ .level = lvlInfo,
|
||||
.name = "Info name",
|
||||
.description = "Info description",
|
||||
});
|
||||
|
||||
auto str = testing::internal::GetCapturedStderr();
|
||||
|
@ -109,7 +109,6 @@ namespace nix {
|
|||
|
||||
logger->logEI({ .level = lvlTalkative,
|
||||
.name = "Talkative name",
|
||||
.description = "Talkative description",
|
||||
});
|
||||
|
||||
auto str = testing::internal::GetCapturedStderr();
|
||||
|
@ -123,7 +122,6 @@ namespace nix {
|
|||
|
||||
logger->logEI({ .level = lvlChatty,
|
||||
.name = "Chatty name",
|
||||
.description = "Talkative description",
|
||||
});
|
||||
|
||||
auto str = testing::internal::GetCapturedStderr();
|
||||
|
@ -137,7 +135,6 @@ namespace nix {
|
|||
|
||||
logger->logEI({ .level = lvlDebug,
|
||||
.name = "Debug name",
|
||||
.description = "Debug description",
|
||||
});
|
||||
|
||||
auto str = testing::internal::GetCapturedStderr();
|
||||
|
@ -151,7 +148,6 @@ namespace nix {
|
|||
|
||||
logger->logEI({ .level = lvlVomit,
|
||||
.name = "Vomit name",
|
||||
.description = "Vomit description",
|
||||
});
|
||||
|
||||
auto str = testing::internal::GetCapturedStderr();
|
||||
|
@ -167,7 +163,6 @@ namespace nix {
|
|||
|
||||
logError({
|
||||
.name = "name",
|
||||
.description = "error description",
|
||||
});
|
||||
|
||||
auto str = testing::internal::GetCapturedStderr();
|
||||
|
@ -182,8 +177,7 @@ namespace nix {
|
|||
|
||||
logError({
|
||||
.name = "error name",
|
||||
.description = "error with code lines",
|
||||
.hint = hintfmt("this hint has %1% templated %2%!!",
|
||||
.msg = hintfmt("this hint has %1% templated %2%!!",
|
||||
"yellow",
|
||||
"values"),
|
||||
.errPos = Pos(foString, problem_file, 02, 13),
|
||||
|
@ -200,8 +194,7 @@ namespace nix {
|
|||
|
||||
logError({
|
||||
.name = "error name",
|
||||
.description = "error without any code lines.",
|
||||
.hint = hintfmt("this hint has %1% templated %2%!!",
|
||||
.msg = hintfmt("this hint has %1% templated %2%!!",
|
||||
"yellow",
|
||||
"values"),
|
||||
.errPos = Pos(foFile, problem_file, 02, 13)
|
||||
|
@ -216,7 +209,7 @@ namespace nix {
|
|||
|
||||
logError({
|
||||
.name = "error name",
|
||||
.hint = hintfmt("hint %1%", "only"),
|
||||
.msg = hintfmt("hint %1%", "only"),
|
||||
});
|
||||
|
||||
auto str = testing::internal::GetCapturedStderr();
|
||||
|
@ -233,8 +226,7 @@ namespace nix {
|
|||
|
||||
logWarning({
|
||||
.name = "name",
|
||||
.description = "warning description",
|
||||
.hint = hintfmt("there was a %1%", "warning"),
|
||||
.msg = hintfmt("there was a %1%", "warning"),
|
||||
});
|
||||
|
||||
auto str = testing::internal::GetCapturedStderr();
|
||||
|
@ -250,8 +242,7 @@ namespace nix {
|
|||
|
||||
logWarning({
|
||||
.name = "warning name",
|
||||
.description = "warning description",
|
||||
.hint = hintfmt("this hint has %1% templated %2%!!",
|
||||
.msg = hintfmt("this hint has %1% templated %2%!!",
|
||||
"yellow",
|
||||
"values"),
|
||||
.errPos = Pos(foStdin, problem_file, 2, 13),
|
||||
|
@ -274,8 +265,7 @@ namespace nix {
|
|||
|
||||
auto e = AssertionError(ErrorInfo {
|
||||
.name = "wat",
|
||||
.description = "show-traces",
|
||||
.hint = hintfmt("it has been %1% days since our last error", "zero"),
|
||||
.msg = hintfmt("it has been %1% days since our last error", "zero"),
|
||||
.errPos = Pos(foString, problem_file, 2, 13),
|
||||
});
|
||||
|
||||
|
@ -301,8 +291,7 @@ namespace nix {
|
|||
|
||||
auto e = AssertionError(ErrorInfo {
|
||||
.name = "wat",
|
||||
.description = "hide traces",
|
||||
.hint = hintfmt("it has been %1% days since our last error", "zero"),
|
||||
.msg = hintfmt("it has been %1% days since our last error", "zero"),
|
||||
.errPos = Pos(foString, problem_file, 2, 13),
|
||||
});
|
||||
|
||||
|
@ -377,3 +366,5 @@ namespace nix {
|
|||
}
|
||||
|
||||
}
|
||||
|
||||
#endif
|
||||
|
|
|
@ -1249,7 +1249,7 @@ template StringSet tokenizeString(std::string_view s, const string & separators)
template vector<string> tokenizeString(std::string_view s, const string & separators);


string chomp(const string & s)
string chomp(std::string_view s)
{
    size_t i = s.find_last_not_of(" \n\r\t");
    return i == string::npos ? "" : string(s, 0, i + 1);


@ -373,8 +373,9 @@ template<class C> Strings quoteStrings(const C & c)
}


/* Remove trailing whitespace from a string. */
string chomp(const string & s);
/* Remove trailing whitespace from a string. FIXME: return
   std::string_view. */
string chomp(std::string_view s);


/* Remove whitespace from the start and end of a string. */
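
A quick illustration of the documented behaviour (inputs chosen arbitrarily for this example):

```
#include <cassert>
#include "util.hh"

int main()
{
    assert(nix::chomp("out = \"hello\";\n\t ") == "out = \"hello\";");
    assert(nix::chomp("   \n") == "");                          // all-whitespace collapses to ""
    assert(nix::chomp("  leading kept") == "  leading kept");   // only trailing whitespace is removed
    return 0;
}
```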
|
|
|
@ -17,7 +17,7 @@
|
|||
#include "get-drvs.hh"
|
||||
#include "common-eval-args.hh"
|
||||
#include "attr-path.hh"
|
||||
#include "../nix/legacy.hh"
|
||||
#include "legacy.hh"
|
||||
|
||||
using namespace nix;
|
||||
using namespace std::string_literals;
|
||||
|
@ -369,11 +369,8 @@ static void main_nix_build(int argc, char * * argv)
|
|||
shell = drv->queryOutPath() + "/bin/bash";
|
||||
|
||||
} catch (Error & e) {
|
||||
logWarning({
|
||||
.name = "bashInteractive",
|
||||
.hint = hintfmt("%s; will use bash from your environment",
|
||||
(e.info().hint ? e.info().hint->str() : ""))
|
||||
});
|
||||
logError(e.info());
|
||||
notice("will use bash from your environment");
|
||||
shell = "bash";
|
||||
}
|
||||
}
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
#include "globals.hh"
|
||||
#include "filetransfer.hh"
|
||||
#include "store-api.hh"
|
||||
#include "../nix/legacy.hh"
|
||||
#include "legacy.hh"
|
||||
#include "fetchers.hh"
|
||||
|
||||
#include <fcntl.h>
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
#include "profiles.hh"
|
||||
#include "shared.hh"
|
||||
#include "globals.hh"
|
||||
#include "../nix/legacy.hh"
|
||||
#include "legacy.hh"
|
||||
|
||||
#include <iostream>
|
||||
#include <cerrno>
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
#include "shared.hh"
|
||||
#include "store-api.hh"
|
||||
#include "../nix/legacy.hh"
|
||||
#include "legacy.hh"
|
||||
|
||||
using namespace nix;
|
||||
|
||||
|
|
|
@ -14,7 +14,7 @@
|
|||
#include "json.hh"
|
||||
#include "value-to-json.hh"
|
||||
#include "xml-writer.hh"
|
||||
#include "../nix/legacy.hh"
|
||||
#include "legacy.hh"
|
||||
|
||||
#include <cerrno>
|
||||
#include <ctime>
|
||||
|
@ -124,10 +124,7 @@ static void getAllExprs(EvalState & state,
|
|||
if (hasSuffix(attrName, ".nix"))
|
||||
attrName = string(attrName, 0, attrName.size() - 4);
|
||||
if (!attrs.insert(attrName).second) {
|
||||
logError({
|
||||
.name = "Name collision",
|
||||
.hint = hintfmt("warning: name collision in input Nix expressions, skipping '%1%'", path2)
|
||||
});
|
||||
printError("warning: name collision in input Nix expressions, skipping '%1%'", path2);
|
||||
continue;
|
||||
}
|
||||
/* Load the expression on demand. */
|
||||
|
@ -876,11 +873,7 @@ static void queryJSON(Globals & globals, vector<DrvInfo> & elems)
|
|||
auto placeholder = metaObj.placeholder(j);
|
||||
Value * v = i.queryMeta(j);
|
||||
if (!v) {
|
||||
logError({
|
||||
.name = "Invalid meta attribute",
|
||||
.hint = hintfmt("derivation '%s' has invalid meta attribute '%s'",
|
||||
i.queryName(), j)
|
||||
});
|
||||
printError("derivation '%s' has invalid meta attribute '%s'", i.queryName(), j);
|
||||
placeholder.write(nullptr);
|
||||
} else {
|
||||
PathSet context;
|
||||
|
@ -1131,12 +1124,9 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs)
|
|||
attrs2["name"] = j;
|
||||
Value * v = i.queryMeta(j);
|
||||
if (!v)
|
||||
logError({
|
||||
.name = "Invalid meta attribute",
|
||||
.hint = hintfmt(
|
||||
"derivation '%s' has invalid meta attribute '%s'",
|
||||
i.queryName(), j)
|
||||
});
|
||||
printError(
|
||||
"derivation '%s' has invalid meta attribute '%s'",
|
||||
i.queryName(), j);
|
||||
else {
|
||||
if (v->type() == nString) {
|
||||
attrs2["type"] = "string";
|
||||
|
|
|
@ -10,7 +10,7 @@
|
|||
#include "store-api.hh"
|
||||
#include "local-fs-store.hh"
|
||||
#include "common-eval-args.hh"
|
||||
#include "../nix/legacy.hh"
|
||||
#include "legacy.hh"
|
||||
|
||||
#include <map>
|
||||
#include <iostream>
|
||||
|
|
|
@ -9,7 +9,7 @@
|
|||
#include "util.hh"
|
||||
#include "worker-protocol.hh"
|
||||
#include "graphml.hh"
|
||||
#include "../nix/legacy.hh"
|
||||
#include "legacy.hh"
|
||||
|
||||
#include <iostream>
|
||||
#include <algorithm>
|
||||
|
@ -708,10 +708,7 @@ static void opVerify(Strings opFlags, Strings opArgs)
|
|||
else throw UsageError("unknown flag '%1%'", i);
|
||||
|
||||
if (store->verifyStore(checkContents, repair)) {
|
||||
logWarning({
|
||||
.name = "Store consistency",
|
||||
.description = "not all errors were fixed"
|
||||
});
|
||||
warn("not all store errors were fixed");
|
||||
throw Exit(1);
|
||||
}
|
||||
}
|
||||
|
@ -733,14 +730,10 @@ static void opVerifyPath(Strings opFlags, Strings opArgs)
|
|||
store->narFromPath(path, sink);
|
||||
auto current = sink.finish();
|
||||
if (current.first != info->narHash) {
|
||||
logError({
|
||||
.name = "Hash mismatch",
|
||||
.hint = hintfmt(
|
||||
"path '%s' was modified! expected hash '%s', got '%s'",
|
||||
store->printStorePath(path),
|
||||
info->narHash.to_string(Base32, true),
|
||||
current.first.to_string(Base32, true))
|
||||
});
|
||||
printError("path '%s' was modified! expected hash '%s', got '%s'",
|
||||
store->printStorePath(path),
|
||||
info->narHash.to_string(Base32, true),
|
||||
current.first.to_string(Base32, true));
|
||||
status = 1;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -16,6 +16,8 @@ struct CmdCopy : StorePathsCommand
|
|||
|
||||
SubstituteFlag substitute = NoSubstitute;
|
||||
|
||||
using StorePathsCommand::run;
|
||||
|
||||
CmdCopy()
|
||||
: StorePathsCommand(true)
|
||||
{
|
||||
|
|
|
@ -8,7 +8,7 @@
|
|||
#include "globals.hh"
|
||||
#include "derivations.hh"
|
||||
#include "finally.hh"
|
||||
#include "../nix/legacy.hh"
|
||||
#include "legacy.hh"
|
||||
#include "daemon.hh"
|
||||
|
||||
#include <algorithm>
|
||||
|
@ -258,8 +258,8 @@ static void daemonLoop()
|
|||
return;
|
||||
} catch (Error & error) {
|
||||
ErrorInfo ei = error.info();
|
||||
ei.hint = std::optional(hintfmt("error processing connection: %1%",
|
||||
(error.info().hint.has_value() ? error.info().hint->str() : "")));
|
||||
// FIXME: add to trace?
|
||||
ei.msg = hintfmt("error processing connection: %1%", ei.msg.str());
|
||||
logError(ei);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -239,7 +239,7 @@ struct Common : InstallableCommand, MixProfile
|
|||
|
||||
out << buildEnvironment.bashFunctions << "\n";
|
||||
|
||||
out << "export NIX_BUILD_TOP=\"$(mktemp -d --tmpdir nix-shell.XXXXXX)\"\n";
|
||||
out << "export NIX_BUILD_TOP=\"$(mktemp -d -t nix-shell.XXXXXX)\"\n";
|
||||
for (auto & i : {"TMP", "TMPDIR", "TEMP", "TEMPDIR"})
|
||||
out << fmt("export %s=\"$NIX_BUILD_TOP\"\n", i);
|
||||
|
||||
|
|
|
@ -18,7 +18,11 @@ struct CmdEval : MixJSON, InstallableCommand
|
|||
|
||||
CmdEval()
|
||||
{
|
||||
mkFlag(0, "raw", "Print strings without quotes or escaping.", &raw);
|
||||
addFlag({
|
||||
.longName = "raw",
|
||||
.description = "Print strings without quotes or escaping.",
|
||||
.handler = {&raw, true},
|
||||
});
|
||||
|
||||
addFlag({
|
||||
.longName = "apply",
|
||||
|
|
|
@ -19,18 +19,41 @@ struct CmdHashBase : Command
|
|||
|
||||
CmdHashBase(FileIngestionMethod mode) : mode(mode)
|
||||
{
|
||||
mkFlag(0, "sri", "Print the hash in SRI format.", &base, SRI);
|
||||
mkFlag(0, "base64", "Print the hash in base-64 format.", &base, Base64);
|
||||
mkFlag(0, "base32", "Print the hash in base-32 (Nix-specific) format.", &base, Base32);
|
||||
mkFlag(0, "base16", "Print the hash in base-16 format.", &base, Base16);
|
||||
addFlag({
|
||||
.longName = "sri",
|
||||
.description = "Print the hash in SRI format.",
|
||||
.handler = {&base, SRI},
|
||||
});
|
||||
|
||||
addFlag({
|
||||
.longName = "base64",
|
||||
.description = "Print the hash in base-64 format.",
|
||||
.handler = {&base, Base64},
|
||||
});
|
||||
|
||||
addFlag({
|
||||
.longName = "base32",
|
||||
.description = "Print the hash in base-32 (Nix-specific) format.",
|
||||
.handler = {&base, Base32},
|
||||
});
|
||||
|
||||
addFlag({
|
||||
.longName = "base16",
|
||||
.description = "Print the hash in base-16 format.",
|
||||
.handler = {&base, Base16},
|
||||
});
|
||||
|
||||
addFlag(Flag::mkHashTypeFlag("type", &ht));
|
||||
|
||||
#if 0
|
||||
mkFlag()
|
||||
.longName("modulo")
|
||||
.description("Compute the hash modulo specified the string.")
|
||||
.labels({"modulus"})
|
||||
.dest(&modulus);
|
||||
#endif
|
||||
addFlag({
|
||||
.longName = "modulo",
|
||||
.description = "Compute the hash modulo the specified string.",
|
||||
.labels = {"modulus"},
|
||||
.handler = {&modulus},
|
||||
});
|
||||
#endif\
|
||||
|
||||
expectArgs({
|
||||
.label = "paths",
|
||||
.handler = {&paths},
|
||||
|
|
|
@ -14,9 +14,9 @@ nix_SOURCES := \
|
|||
$(wildcard src/nix-instantiate/*.cc) \
|
||||
$(wildcard src/nix-store/*.cc) \
|
||||
|
||||
nix_CXXFLAGS += -I src/libutil -I src/libstore -I src/libfetchers -I src/libexpr -I src/libmain
|
||||
nix_CXXFLAGS += -I src/libutil -I src/libstore -I src/libfetchers -I src/libexpr -I src/libmain -I src/libcmd
|
||||
|
||||
nix_LIBS = libexpr libmain libfetchers libstore libutil
|
||||
nix_LIBS = libexpr libmain libfetchers libstore libutil libcmd
|
||||
|
||||
nix_LDFLAGS = -pthread $(SODIUM_LIBS) $(EDITLINE_LIBS) $(BOOST_LDFLAGS) -llowdown
|
||||
|
||||
|
|
|
@ -17,9 +17,26 @@ struct MixLs : virtual Args, MixJSON
|
|||
|
||||
MixLs()
|
||||
{
|
||||
mkFlag('R', "recursive", "List subdirectories recursively.", &recursive);
|
||||
mkFlag('l', "long", "Show detailed file information.", &verbose);
|
||||
mkFlag('d', "directory", "Show directories rather than their contents.", &showDirectory);
|
||||
addFlag({
|
||||
.longName = "recursive",
|
||||
.shortName = 'R',
|
||||
.description = "List subdirectories recursively.",
|
||||
.handler = {&recursive, true},
|
||||
});
|
||||
|
||||
addFlag({
|
||||
.longName = "long",
|
||||
.shortName = 'l',
|
||||
.description = "Show detailed file information.",
|
||||
.handler = {&verbose, true},
|
||||
});
|
||||
|
||||
addFlag({
|
||||
.longName = "directory",
|
||||
.shortName = 'd',
|
||||
.description = "Show directories rather than their contents.",
|
||||
.handler = {&showDirectory, true},
|
||||
});
|
||||
}
|
||||
|
||||
void listText(ref<FSAccessor> accessor)
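
This mkFlag-to-addFlag conversion repeats across most of the command files below; as a generic before/after sketch (the flag itself is invented for illustration):

```
// Before: positional mkFlag helper, destination simply set to true.
mkFlag('x', "example", "Enable the example behaviour.", &example);

// After: a designated-initializer Flag passed to addFlag().
addFlag({
    .longName = "example",
    .shortName = 'x',
    .description = "Enable the example behaviour.",
    .handler = {&example, true},
});
```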
|
||||
|
|
|
@ -54,6 +54,8 @@ static bool haveInternet()
|
|||
std::string programPath;
|
||||
char * * savedArgv;
|
||||
|
||||
struct HelpRequested { };
|
||||
|
||||
struct NixArgs : virtual MultiCommand, virtual MixCommonArgs
|
||||
{
|
||||
bool printBuildLogs = false;
|
||||
|
@ -71,28 +73,14 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs
|
|||
addFlag({
|
||||
.longName = "help",
|
||||
.description = "Show usage information.",
|
||||
.handler = {[&]() { if (!completions) showHelpAndExit(); }},
|
||||
});
|
||||
|
||||
addFlag({
|
||||
.longName = "help-config",
|
||||
.description = "Show configuration settings.",
|
||||
.handler = {[&]() {
|
||||
std::cout << "The following configuration settings are available:\n\n";
|
||||
Table2 tbl;
|
||||
std::map<std::string, Config::SettingInfo> settings;
|
||||
globalConfig.getSettings(settings);
|
||||
for (const auto & s : settings)
|
||||
tbl.emplace_back(s.first, s.second.description);
|
||||
printTable(std::cout, tbl);
|
||||
throw Exit();
|
||||
}},
|
||||
.handler = {[&]() { throw HelpRequested(); }},
|
||||
});
|
||||
|
||||
addFlag({
|
||||
.longName = "print-build-logs",
|
||||
.shortName = 'L',
|
||||
.description = "Print full build logs on standard error.",
|
||||
.category = loggingCategory,
|
||||
.handler = {[&]() {setLogFormat(LogFormat::barWithLogs); }},
|
||||
});
|
||||
|
||||
|
@ -103,7 +91,8 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs
|
|||
});
|
||||
|
||||
addFlag({
|
||||
.longName = "no-net",
|
||||
.longName = "offline",
|
||||
.aliases = {"no-net"}, // FIXME: remove
|
||||
.description = "Disable substituters and consider all previously downloaded files up-to-date.",
|
||||
.handler = {[&]() { useNet = false; }},
|
||||
});
|
||||
|
@ -154,33 +143,6 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs
|
|||
return pos;
|
||||
}
|
||||
|
||||
void printFlags(std::ostream & out) override
|
||||
{
|
||||
Args::printFlags(out);
|
||||
std::cout <<
|
||||
"\n"
|
||||
"In addition, most configuration settings can be overriden using '--" ANSI_ITALIC "name value" ANSI_NORMAL "'.\n"
|
||||
"Boolean settings can be overriden using '--" ANSI_ITALIC "name" ANSI_NORMAL "' or '--no-" ANSI_ITALIC "name" ANSI_NORMAL "'. See 'nix\n"
|
||||
"--help-config' for a list of configuration settings.\n";
|
||||
}
|
||||
|
||||
void printHelp(const string & programName, std::ostream & out) override
|
||||
{
|
||||
MultiCommand::printHelp(programName, out);
|
||||
|
||||
#if 0
|
||||
out << "\nFor full documentation, run 'man " << programName << "' or 'man " << programName << "-" ANSI_ITALIC "COMMAND" ANSI_NORMAL "'.\n";
|
||||
#endif
|
||||
|
||||
std::cout << "\nNote: this program is " ANSI_RED "EXPERIMENTAL" ANSI_NORMAL " and subject to change.\n";
|
||||
}
|
||||
|
||||
void showHelpAndExit()
|
||||
{
|
||||
printHelp(programName, std::cout);
|
||||
throw Exit();
|
||||
}
|
||||
|
||||
std::string description() override
|
||||
{
|
||||
return "a tool for reproducible and declarative configuration management";
|
||||
|
@ -298,6 +260,18 @@ void mainWrapped(int argc, char * * argv)
|
|||
|
||||
try {
|
||||
args.parseCmdline(argvToStrings(argc, argv));
|
||||
} catch (HelpRequested &) {
|
||||
std::vector<std::string> subcommand;
|
||||
MultiCommand * command = &args;
|
||||
while (command) {
|
||||
if (command && command->command) {
|
||||
subcommand.push_back(command->command->first);
|
||||
command = dynamic_cast<MultiCommand *>(&*command->command->second);
|
||||
} else
|
||||
break;
|
||||
}
|
||||
showHelp(subcommand);
|
||||
return;
|
||||
} catch (UsageError &) {
|
||||
if (!completions) throw;
|
||||
}
|
||||
|
@ -306,7 +280,8 @@ void mainWrapped(int argc, char * * argv)
|
|||
|
||||
initPlugins();
|
||||
|
||||
if (!args.command) args.showHelpAndExit();
|
||||
if (!args.command)
|
||||
throw UsageError("no subcommand specified");
|
||||
|
||||
if (args.command->first != "repl"
|
||||
&& args.command->first != "doctor"
|
||||
|
|
|
@ -28,11 +28,6 @@ struct CmdNar : NixMultiCommand
|
|||
command->second->prepare();
|
||||
command->second->run();
|
||||
}
|
||||
|
||||
void printHelp(const string & programName, std::ostream & out) override
|
||||
{
|
||||
MultiCommand::printHelp(programName, out);
|
||||
}
|
||||
};
|
||||
|
||||
static auto rCmdNar = registerCommand<CmdNar>("nar");
|
||||
|
|
|
@ -18,10 +18,32 @@ struct CmdPathInfo : StorePathsCommand, MixJSON
|
|||
|
||||
CmdPathInfo()
|
||||
{
|
||||
mkFlag('s', "size", "Print the size of the NAR serialisation of each path.", &showSize);
|
||||
mkFlag('S', "closure-size", "Print the sum of the sizes of the NAR serialisations of the closure of each path.", &showClosureSize);
|
||||
mkFlag('h', "human-readable", "With `-s` and `-S`, print sizes in a human-friendly format such as `5.67G`.", &humanReadable);
|
||||
mkFlag(0, "sigs", "Show signatures.", &showSigs);
|
||||
addFlag({
|
||||
.longName = "size",
|
||||
.shortName = 's',
|
||||
.description = "Print the size of the NAR serialisation of each path.",
|
||||
.handler = {&showSize, true},
|
||||
});
|
||||
|
||||
addFlag({
|
||||
.longName = "closure-size",
|
||||
.shortName = 'S',
|
||||
.description = "Print the sum of the sizes of the NAR serialisations of the closure of each path.",
|
||||
.handler = {&showClosureSize, true},
|
||||
});
|
||||
|
||||
addFlag({
|
||||
.longName = "human-readable",
|
||||
.shortName = 'h',
|
||||
.description = "With `-s` and `-S`, print sizes in a human-friendly format such as `5.67G`.",
|
||||
.handler = {&humanReadable, true},
|
||||
});
|
||||
|
||||
addFlag({
|
||||
.longName = "sigs",
|
||||
.description = "Show signatures.",
|
||||
.handler = {&showSigs, true},
|
||||
});
|
||||
}
|
||||
|
||||
std::string description() override
|
||||
|
|
|
@ -249,7 +249,7 @@ struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile
|
|||
attrPath,
|
||||
};
|
||||
|
||||
pathsToBuild.push_back({drv.drvPath, StringSet{"out"}}); // FIXME
|
||||
pathsToBuild.push_back({drv.drvPath, StringSet{drv.outputName}});
|
||||
|
||||
manifest.elements.emplace_back(std::move(element));
|
||||
} else {
|
||||
|
|
|
@ -81,9 +81,9 @@ struct CmdSearch : InstallableCommand, MixJSON
|
|||
|
||||
uint64_t results = 0;
|
||||
|
||||
std::function<void(eval_cache::AttrCursor & cursor, const std::vector<Symbol> & attrPath)> visit;
|
||||
std::function<void(eval_cache::AttrCursor & cursor, const std::vector<Symbol> & attrPath, bool initialRecurse)> visit;
|
||||
|
||||
visit = [&](eval_cache::AttrCursor & cursor, const std::vector<Symbol> & attrPath)
|
||||
visit = [&](eval_cache::AttrCursor & cursor, const std::vector<Symbol> & attrPath, bool initialRecurse)
|
||||
{
|
||||
Activity act(*logger, lvlInfo, actUnknown,
|
||||
fmt("evaluating '%s'", concatStringsSep(".", attrPath)));
|
||||
|
@ -94,7 +94,7 @@ struct CmdSearch : InstallableCommand, MixJSON
|
|||
auto cursor2 = cursor.getAttr(attr);
|
||||
auto attrPath2(attrPath);
|
||||
attrPath2.push_back(attr);
|
||||
visit(*cursor2, attrPath2);
|
||||
visit(*cursor2, attrPath2, false);
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -150,6 +150,9 @@ struct CmdSearch : InstallableCommand, MixJSON
|
|||
|| (attrPath[0] == "packages" && attrPath.size() <= 2))
|
||||
recurse();
|
||||
|
||||
else if (initialRecurse)
|
||||
recurse();
|
||||
|
||||
else if (attrPath[0] == "legacyPackages" && attrPath.size() > 2) {
|
||||
auto attr = cursor.maybeGetAttr(state->sRecurseForDerivations);
|
||||
if (attr && attr->getBool())
|
||||
|
@ -163,7 +166,7 @@ struct CmdSearch : InstallableCommand, MixJSON
|
|||
};
|
||||
|
||||
for (auto & [cursor, prefix] : installable->getCursors(*state))
|
||||
visit(*cursor, parseAttrPath(*state, prefix));
|
||||
visit(*cursor, parseAttrPath(*state, prefix), true);
|
||||
|
||||
if (!json && !results)
|
||||
throw Error("no results for the given search term(s)!");
|
||||
|
|
|
@ -16,7 +16,7 @@ struct CmdCopySigs : StorePathsCommand
|
|||
addFlag({
|
||||
.longName = "substituter",
|
||||
.shortName = 's',
|
||||
.description = "Use signatures from specified store.",
|
||||
.description = "Copy signatures from the specified store.",
|
||||
.labels = {"store-uri"},
|
||||
.handler = {[&](std::string s) { substituterUris.push_back(s); }},
|
||||
});
|
||||
|
@ -24,7 +24,7 @@ struct CmdCopySigs : StorePathsCommand
|
|||
|
||||
std::string description() override
|
||||
{
|
||||
return "copy path signatures from substituters (like binary caches)";
|
||||
return "copy store path signatures from substituters";
|
||||
}
|
||||
|
||||
void run(ref<Store> store, StorePaths storePaths) override
|
||||
|
@ -110,7 +110,7 @@ struct CmdSign : StorePathsCommand
|
|||
|
||||
std::string description() override
|
||||
{
|
||||
return "sign the specified paths";
|
||||
return "sign store paths";
|
||||
}
|
||||
|
||||
void run(ref<Store> store, StorePaths storePaths) override
|
||||
|
|
|
@ -21,11 +21,6 @@ struct CmdStore : virtual NixMultiCommand
|
|||
command->second->prepare();
|
||||
command->second->run();
|
||||
}
|
||||
|
||||
void printHelp(const string & programName, std::ostream & out) override
|
||||
{
|
||||
MultiCommand::printHelp(programName, out);
|
||||
}
|
||||
};
|
||||
|
||||
static auto rCmdStore = registerCommand<CmdStore>("store");
|
||||
|
|
|
@ -61,10 +61,7 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand
|
|||
|
||||
if (dryRun) {
|
||||
stopProgressBar();
|
||||
logWarning({
|
||||
.name = "Version update",
|
||||
.hint = hintfmt("would upgrade to version %s", version)
|
||||
});
|
||||
warn("would upgrade to version %s", version);
|
||||
return;
|
||||
}
|
||||
|
||||
|
|
|
@@ -18,8 +18,17 @@ struct CmdVerify : StorePathsCommand

    CmdVerify()
    {
        mkFlag(0, "no-contents", "Do not verify the contents of each store path.", &noContents);
        mkFlag(0, "no-trust", "Do not verify whether each store path is trusted.", &noTrust);
        addFlag({
            .longName = "no-contents",
            .description = "Do not verify the contents of each store path.",
            .handler = {&noContents, true},
        });

        addFlag({
            .longName = "no-trust",
            .description = "Do not verify whether each store path is trusted.",
            .handler = {&noTrust, true},
        });

        addFlag({
            .longName = "substituter",
@@ -101,14 +110,10 @@ struct CmdVerify : StorePathsCommand
                if (hash.first != info->narHash) {
                    corrupted++;
                    act2.result(resCorruptedPath, store->printStorePath(info->path));
                    logError({
                        .name = "Hash error - path modified",
                        .hint = hintfmt(
                            "path '%s' was modified! expected hash '%s', got '%s'",
                            store->printStorePath(info->path),
                            info->narHash.to_string(Base32, true),
                            hash.first.to_string(Base32, true))
                    });
                    printError("path '%s' was modified! expected hash '%s', got '%s'",
                        store->printStorePath(info->path),
                        info->narHash.to_string(Base32, true),
                        hash.first.to_string(Base32, true));
                }
            }

@@ -156,12 +161,7 @@ struct CmdVerify : StorePathsCommand
                if (!good) {
                    untrusted++;
                    act2.result(resUntrustedPath, store->printStorePath(info->path));
                    logError({
                        .name = "Untrusted path",
                        .hint = hintfmt("path '%s' is untrusted",
                            store->printStorePath(info->path))
                    });

                    printError("path '%s' is untrusted", store->printStorePath(info->path));
                }
            }

@@ -39,18 +39,12 @@ std::set<std::string> runResolver(const Path & filename)
        throw SysError("statting '%s'", filename);

    if (!S_ISREG(st.st_mode)) {
        logError({
            .name = "Regular MACH file",
            .hint = hintfmt("file '%s' is not a regular file", filename)
        });
        printError("file '%s' is not a regular MACH binary", filename);
        return {};
    }

    if (st.st_size < sizeof(mach_header_64)) {
        logError({
            .name = "File too short",
            .hint = hintfmt("file '%s' is too short for a MACH binary", filename)
        });
        printError("file '%s' is too short for a MACH binary", filename);
        return {};
    }

@@ -72,19 +66,13 @@ std::set<std::string> runResolver(const Path & filename)
            }
        }
        if (mach64_offset == 0) {
            logError({
                .name = "No mach64 blobs",
                .hint = hintfmt("Could not find any mach64 blobs in file '%1%', continuing...", filename)
            });
            printError("could not find any mach64 blobs in file '%1%', continuing...", filename);
            return {};
        }
    } else if (magic == MH_MAGIC_64 || magic == MH_CIGAM_64) {
        mach64_offset = 0;
    } else {
        logError({
            .name = "Magic number",
            .hint = hintfmt("Object file has unknown magic number '%1%', skipping it...", magic)
        });
        printError("Object file has unknown magic number '%1%', skipping it...", magic);
        return {};
    }

16
tests/binary-cache-build-remote.sh
Normal file
@@ -0,0 +1,16 @@
source common.sh

clearStore
clearCacheCache

# Fails without remote builders
(! nix-build --store "file://$cacheDir" dependencies.nix)

# Succeeds with default store as build remote.
outPath=$(nix-build --store "file://$cacheDir" --builders 'auto - - 1 1' -j0 dependencies.nix)

# Test that the path exactly exists in the destination store.
nix path-info --store "file://$cacheDir" $outPath

# Succeeds without any build capability because no-op
nix-build --store "file://$cacheDir" -j0 dependencies.nix

@@ -1,15 +1,20 @@
source common.sh

# We can produce drvs directly into the binary cache
clearStore
clearCache
clearCacheCache
nix-instantiate --store "file://$cacheDir" dependencies.nix

# Create the binary cache.
clearStore
clearCache
outPath=$(nix-build dependencies.nix --no-out-link)

nix copy --to file://$cacheDir $outPath


basicTests() {
basicDownloadTests() {
    # No uploading tests because upload with force HTTP doesn't work.

    # By default, a binary cache doesn't support "nix-env -qas", but does
    # support installation.
@@ -44,12 +49,12 @@ basicTests() {


# Test LocalBinaryCacheStore.
basicTests
basicDownloadTests


# Test HttpBinaryCacheStore.
export _NIX_FORCE_HTTP=1
basicTests
basicDownloadTests


# Test whether Nix notices if the NAR doesn't match the hash in the NAR info.

@@ -21,6 +21,7 @@ experimental-features = nix-command flakes nix-testing
gc-reserved-space = 0
substituters =
flake-registry = $TEST_ROOT/registry.json
show-trace = true
include nix.conf.extra
EOF

@@ -9,7 +9,9 @@ nix_tests = \
  local-store.sh remote-store.sh export.sh export-graph.sh \
  timeout.sh secure-drv-outputs.sh nix-channel.sh \
  multiple-outputs.sh import-derivation.sh fetchurl.sh optimise-store.sh \
  binary-cache.sh nix-profile.sh repair.sh dump-db.sh case-hack.sh \
  binary-cache.sh \
  binary-cache-build-remote.sh \
  nix-profile.sh repair.sh dump-db.sh case-hack.sh \
  check-reqs.sh pass-as-file.sh tarball.sh restricted.sh \
  placeholders.sh nix-shell.sh \
  linux-sandbox.sh \

@@ -17,10 +17,10 @@ nix-env -q --foo 2>&1 | grep "unknown flag"

# Eval Errors.
eval_arg_res=$(nix-instantiate --eval -E 'let a = {} // a; in a.foo' 2>&1 || true)
echo $eval_arg_res | grep "at: (1:15) from string"
echo $eval_arg_res | grep "at «string»:1:15:"
echo $eval_arg_res | grep "infinite recursion encountered"

eval_stdin_res=$(echo 'let a = {} // a; in a.foo' | nix-instantiate --eval -E - 2>&1 || true)
echo $eval_stdin_res | grep "at: (1:15) from stdin"
echo $eval_stdin_res | grep "at «stdin»:1:15:"
echo $eval_stdin_res | grep "infinite recursion encountered"