diff --git a/.gitignore b/.gitignore index 86ffe9304..b6a5f6adc 100644 --- a/.gitignore +++ b/.gitignore @@ -40,6 +40,7 @@ perl/Makefile.config # /src/libstore/ *.gen.* +/src/libstore/tests/libstore-tests # /src/libutil/ /src/libutil/tests/libutil-tests diff --git a/.version b/.version index 7208c2182..68151b2e1 100644 --- a/.version +++ b/.version @@ -1 +1 @@ -2.4 \ No newline at end of file +2.5 \ No newline at end of file diff --git a/Makefile b/Makefile index c7d8967c8..5040d2884 100644 --- a/Makefile +++ b/Makefile @@ -4,6 +4,7 @@ makefiles = \ src/libutil/local.mk \ src/libutil/tests/local.mk \ src/libstore/local.mk \ + src/libstore/tests/local.mk \ src/libfetchers/local.mk \ src/libmain/local.mk \ src/libexpr/local.mk \ diff --git a/doc/manual/generate-manpage.nix b/doc/manual/generate-manpage.nix index 4fc9abea1..244cfa0c2 100644 --- a/doc/manual/generate-manpage.nix +++ b/doc/manual/generate-manpage.nix @@ -1,4 +1,4 @@ -command: +{ command, renderLinks ? false }: with builtins; with import ./utils.nix; @@ -20,7 +20,11 @@ let categories = sort (x: y: x.id < y.id) (unique (map (cmd: cmd.category) (attrValues def.commands))); listCommands = cmds: concatStrings (map (name: - "* [`${command} ${name}`](./${appendName filename name}.md) - ${cmds.${name}.description}\n") + "* " + + (if renderLinks + then "[`${command} ${name}`](./${appendName filename name}.md)" + else "`${command} ${name}`") + + " - ${cmds.${name}.description}\n") (attrNames cmds)); in "where *subcommand* is one of the following:\n\n" diff --git a/doc/manual/local.mk b/doc/manual/local.mk index a8c52f841..e43d9f2fb 100644 --- a/doc/manual/local.mk +++ b/doc/manual/local.mk @@ -44,7 +44,7 @@ $(d)/src/SUMMARY.md: $(d)/src/SUMMARY.md.in $(d)/src/command-ref/new-cli $(d)/src/command-ref/new-cli: $(d)/nix.json $(d)/generate-manpage.nix $(bindir)/nix @rm -rf $@ - $(trace-gen) $(nix-eval) --write-to $@ --expr 'import doc/manual/generate-manpage.nix (builtins.readFile $<)' + $(trace-gen) $(nix-eval) --write-to $@ --expr 'import doc/manual/generate-manpage.nix { command = builtins.readFile $<; renderLinks = true; }' $(d)/src/command-ref/conf-file.md: $(d)/conf-file.json $(d)/generate-options.nix $(d)/src/command-ref/conf-file-prefix.md $(bindir)/nix @cat doc/manual/src/command-ref/conf-file-prefix.md > $@.tmp diff --git a/doc/manual/src/installation/env-variables.md b/doc/manual/src/installation/env-variables.md index 4a49897e4..bb35c0e9f 100644 --- a/doc/manual/src/installation/env-variables.md +++ b/doc/manual/src/installation/env-variables.md @@ -40,7 +40,7 @@ export NIX_SSL_CERT_FILE=/etc/ssl/my-certificate-bundle.crt > **Note** > > You must not add the export and then do the install, as the Nix -> installer will detect the presense of Nix configuration, and abort. +> installer will detect the presence of Nix configuration, and abort. 
## `NIX_SSL_CERT_FILE` with macOS and the Nix daemon diff --git a/doc/manual/src/release-notes/rl-2.5.md b/doc/manual/src/release-notes/rl-2.5.md new file mode 100644 index 000000000..ede614ca3 --- /dev/null +++ b/doc/manual/src/release-notes/rl-2.5.md @@ -0,0 +1,2 @@ +# Release 2.5 (2021-XX-XX) + diff --git a/flake.lock b/flake.lock index 96a69345b..861af1c54 100644 --- a/flake.lock +++ b/flake.lock @@ -3,11 +3,11 @@ "lowdown-src": { "flake": false, "locked": { - "lastModified": 1632468475, - "narHash": "sha256-NNOm9CbdA8cuwbvaBHslGbPTiU6bh1Ao+MpEPx4rSGo=", + "lastModified": 1633514407, + "narHash": "sha256-Dw32tiMjdK9t3ETl5fzGrutQTzh2rufgZV4A/BbxuD4=", "owner": "kristapsdz", "repo": "lowdown", - "rev": "6bd668af3fd098bdd07a1bedd399564141e275da", + "rev": "d2c2b44ff6c27b936ec27358a2653caaef8f73b8", "type": "github" }, "original": { diff --git a/flake.nix b/flake.nix index 43123ca7e..2d0c44889 100644 --- a/flake.nix +++ b/flake.nix @@ -78,7 +78,7 @@ # Tests buildPackages.git - buildPackages.mercurial + buildPackages.mercurial # FIXME: remove? only needed for tests buildPackages.jq ] ++ lib.optionals stdenv.hostPlatform.isLinux [(buildPackages.util-linuxMinimal or buildPackages.utillinuxMinimal)]; @@ -126,8 +126,7 @@ '' mkdir -p $out/nix-support - # Converts /nix/store/50p3qk8kka9dl6wyq40vydq945k0j3kv-nix-2.4pre20201102_550e11f/bin/nix - # To 50p3qk8kka9dl6wyq40vydq945k0j3kv/bin/nix + # Converts /nix/store/50p3qk8k...-nix-2.4pre20201102_550e11f/bin/nix to 50p3qk8k.../bin/nix. tarballPath() { # Remove the store prefix local path=''${1#${builtins.storeDir}/} @@ -153,13 +152,15 @@ echo "file installer $out/install" >> $out/nix-support/hydra-build-products ''; - testNixVersions = pkgs: client: daemon: with commonDeps pkgs; pkgs.stdenv.mkDerivation { + testNixVersions = pkgs: client: daemon: with commonDeps pkgs; with pkgs.lib; pkgs.stdenv.mkDerivation { NIX_DAEMON_PACKAGE = daemon; NIX_CLIENT_PACKAGE = client; - # Must keep this name short as OSX has a rather strict limit on the - # socket path length, and this name appears in the path of the - # nix-daemon socket used in the tests - name = "nix-tests"; + name = + "nix-tests" + + optionalString + (versionAtLeast daemon.version "2.4pre20211005" && + versionAtLeast client.version "2.4pre20211005") + "-${client.version}-against-${daemon.version}"; inherit version; src = self; @@ -443,6 +444,12 @@ inherit (self) overlay; }; + tests.nssPreload = (import ./tests/nss-preload.nix rec { + system = "x86_64-linux"; + inherit nixpkgs; + inherit (self) overlay; + }); + tests.githubFlakes = (import ./tests/github-flakes.nix rec { system = "x86_64-linux"; inherit nixpkgs; @@ -490,7 +497,11 @@ let pkgs = nixpkgsFor.${system}; in pkgs.runCommand "install-tests" { againstSelf = testNixVersions pkgs pkgs.nix pkgs.pkgs.nix; - againstCurrentUnstable = testNixVersions pkgs pkgs.nix pkgs.nixUnstable; + againstCurrentUnstable = + # FIXME: temporarily disable this on macOS because of #3605. 
+ if system == "x86_64-linux" + then testNixVersions pkgs pkgs.nix pkgs.nixUnstable + else null; # Disabled because the latest stable version doesn't handle # `NIX_DAEMON_SOCKET_PATH` which is required for the tests to work # againstLatestStable = testNixVersions pkgs pkgs.nix pkgs.nixStable; diff --git a/maintainers/upload-release.pl b/maintainers/upload-release.pl index 66ccf1443..f2d6896db 100755 --- a/maintainers/upload-release.pl +++ b/maintainers/upload-release.pl @@ -12,6 +12,7 @@ use LWP::UserAgent; use Net::Amazon::S3; my $evalId = $ARGV[0] or die "Usage: $0 EVAL-ID\n"; +my $nixRev = $ARGV[1] or die; # FIXME my $releasesBucketName = "nix-releases"; my $channelsBucketName = "nix-channels"; @@ -19,6 +20,8 @@ my $nixpkgsDir = "/home/eelco/Dev/nixpkgs-pristine"; my $TMPDIR = $ENV{'TMPDIR'} // "/tmp"; +my $isLatest = ($ENV{'IS_LATEST'} // "") eq "1"; + # FIXME: cut&paste from nixos-channel-scripts. sub fetch { my ($url, $type) = @_; @@ -33,14 +36,15 @@ sub fetch { } my $evalUrl = "https://hydra.nixos.org/eval/$evalId"; -my $evalInfo = decode_json(fetch($evalUrl, 'application/json')); +#my $evalInfo = decode_json(fetch($evalUrl, 'application/json')); #print Dumper($evalInfo); -my $nixRev = $evalInfo->{jobsetevalinputs}->{nix}->{revision} or die; +#my $nixRev = $evalInfo->{jobsetevalinputs}->{nix}->{revision} or die; -my $tarballInfo = decode_json(fetch("$evalUrl/job/tarball", 'application/json')); +my $buildInfo = decode_json(fetch("$evalUrl/job/build.x86_64-linux", 'application/json')); +#print Dumper($buildInfo); -my $releaseName = $tarballInfo->{releasename}; +my $releaseName = $buildInfo->{nixname}; $releaseName =~ /nix-(.*)$/ or die; my $version = $1; @@ -104,8 +108,6 @@ sub downloadFile { return $sha256_expected; } -downloadFile("tarball", "2"); # .tar.bz2 -my $tarballHash = downloadFile("tarball", "3"); # .tar.xz downloadFile("binaryTarball.i686-linux", "1"); downloadFile("binaryTarball.x86_64-linux", "1"); downloadFile("binaryTarball.aarch64-linux", "1"); @@ -134,42 +136,43 @@ for my $fn (glob "$tmpDir/*") { } } -exit if $version =~ /pre/; - # Update nix-fallback-paths.nix. -system("cd $nixpkgsDir && git pull") == 0 or die; +if ($isLatest) { + system("cd $nixpkgsDir && git pull") == 0 or die; -sub getStorePath { - my ($jobName) = @_; - my $buildInfo = decode_json(fetch("$evalUrl/job/$jobName", 'application/json')); - for my $product (values %{$buildInfo->{buildproducts}}) { - next unless $product->{type} eq "nix-build"; - next if $product->{path} =~ /[a-z]+$/; - return $product->{path}; + sub getStorePath { + my ($jobName) = @_; + my $buildInfo = decode_json(fetch("$evalUrl/job/$jobName", 'application/json')); + for my $product (values %{$buildInfo->{buildproducts}}) { + next unless $product->{type} eq "nix-build"; + next if $product->{path} =~ /[a-z]+$/; + return $product->{path}; + } + die; } - die; + + write_file("$nixpkgsDir/nixos/modules/installer/tools/nix-fallback-paths.nix", + "{\n" . + " x86_64-linux = \"" . getStorePath("build.x86_64-linux") . "\";\n" . + " i686-linux = \"" . getStorePath("build.i686-linux") . "\";\n" . + " aarch64-linux = \"" . getStorePath("build.aarch64-linux") . "\";\n" . + " x86_64-darwin = \"" . getStorePath("build.x86_64-darwin") . "\";\n" . + " aarch64-darwin = \"" . getStorePath("build.aarch64-darwin") . "\";\n" . + "}\n"); + + system("cd $nixpkgsDir && git commit -a -m 'nix-fallback-paths.nix: Update to $version'") == 0 or die; } -write_file("$nixpkgsDir/nixos/modules/installer/tools/nix-fallback-paths.nix", - "{\n" . - " x86_64-linux = \"" . 
getStorePath("build.x86_64-linux") . "\";\n" . - " i686-linux = \"" . getStorePath("build.i686-linux") . "\";\n" . - " aarch64-linux = \"" . getStorePath("build.aarch64-linux") . "\";\n" . - " x86_64-darwin = \"" . getStorePath("build.x86_64-darwin") . "\";\n" . - " aarch64-darwin = \"" . getStorePath("build.aarch64-darwin") . "\";\n" . - "}\n"); - -system("cd $nixpkgsDir && git commit -a -m 'nix-fallback-paths.nix: Update to $version'") == 0 or die; - # Update the "latest" symlink. $channelsBucket->add_key( "nix-latest/install", "", { "x-amz-website-redirect-location" => "https://releases.nixos.org/$releaseDir/install" }) - or die $channelsBucket->err . ": " . $channelsBucket->errstr; + or die $channelsBucket->err . ": " . $channelsBucket->errstr + if $isLatest; # Tag the release in Git. chdir("/home/eelco/Dev/nix-pristine") or die; system("git remote update origin") == 0 or die; system("git tag --force --sign $version $nixRev -m 'Tagging release $version'") == 0 or die; system("git push --tags") == 0 or die; -system("git push --force-with-lease origin $nixRev:refs/heads/latest-release") == 0 or die; +system("git push --force-with-lease origin $nixRev:refs/heads/latest-release") == 0 or die if $isLatest; diff --git a/mk/libraries.mk b/mk/libraries.mk index fd4d4ee72..07bd54dab 100644 --- a/mk/libraries.mk +++ b/mk/libraries.mk @@ -91,7 +91,7 @@ define build-library $(1)_PATH := $$(_d)/$$($(1)_NAME).$(SO_EXT) $$($(1)_PATH): $$($(1)_OBJS) $$(_libs) | $$(_d)/ - $$(trace-ld) $(CXX) -o $$(abspath $$@) -shared $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE)) $$($(1)_LDFLAGS_UNINSTALLED) $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$($(1)_LDFLAGS_PROPAGATED) + $$(trace-ld) $(CXX) -o $$(abspath $$@) -shared $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$($(1)_LDFLAGS_PROPAGATED) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE)) $$($(1)_LDFLAGS_UNINSTALLED) ifndef HOST_DARWIN $(1)_LDFLAGS_USE += -Wl,-rpath,$$(abspath $$(_d)) @@ -105,7 +105,7 @@ define build-library $$(eval $$(call create-dir, $$($(1)_INSTALL_DIR))) $$($(1)_INSTALL_PATH): $$($(1)_OBJS) $$(_libs_final) | $(DESTDIR)$$($(1)_INSTALL_DIR)/ - $$(trace-ld) $(CXX) -o $$@ -shared $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE_INSTALLED)) $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$($(1)_LDFLAGS_PROPAGATED) + $$(trace-ld) $(CXX) -o $$@ -shared $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$($(1)_LDFLAGS_PROPAGATED) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE_INSTALLED)) $(1)_LDFLAGS_USE_INSTALLED += -L$$(DESTDIR)$$($(1)_INSTALL_DIR) -l$$(patsubst lib%,%,$$(strip $$($(1)_NAME))) ifndef HOST_DARWIN diff --git a/mk/programs.mk b/mk/programs.mk index 70b09f0dd..d0cf5baf0 100644 --- a/mk/programs.mk +++ b/mk/programs.mk @@ -32,7 +32,7 @@ define build-program $$(eval $$(call create-dir, $$(_d))) $$($(1)_PATH): $$($(1)_OBJS) $$(_libs) | $$(_d)/ - $$(trace-ld) $(CXX) -o $$@ $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE)) $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) + $$(trace-ld) $(CXX) -o $$@ $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE)) $(1)_INSTALL_DIR ?= $$(bindir) @@ -49,7 +49,7 @@ define build-program _libs_final := $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_INSTALL_PATH)) $(DESTDIR)$$($(1)_INSTALL_PATH): $$($(1)_OBJS) $$(_libs_final) | $(DESTDIR)$$($(1)_INSTALL_DIR)/ - $$(trace-ld) $(CXX) -o $$@ $$(foreach lib, $$($(1)_LIBS), 
$$($$(lib)_LDFLAGS_USE_INSTALLED)) $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) + $$(trace-ld) $(CXX) -o $$@ $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE_INSTALLED)) else diff --git a/mk/tests.mk b/mk/tests.mk index 21bdc5748..a2e30a378 100644 --- a/mk/tests.mk +++ b/mk/tests.mk @@ -13,3 +13,7 @@ define run-install-test endef .PHONY: check installcheck + +print-top-help += \ + echo " check: Run unit tests"; \ + echo " installcheck: Run functional tests"; diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 800839a8d..3ae05a8d8 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -445,12 +445,12 @@ EvalState::EvalState( StorePathSet closure; store->computeFSClosure(store->toStorePath(r.second).first, closure); for (auto & path : closure) - allowedPaths->insert(store->printStorePath(path)); + allowPath(path); } catch (InvalidPath &) { - allowedPaths->insert(r.second); + allowPath(r.second); } } else - allowedPaths->insert(r.second); + allowPath(r.second); } } @@ -482,6 +482,18 @@ void EvalState::requireExperimentalFeatureOnEvaluation( } } +void EvalState::allowPath(const Path & path) +{ + if (allowedPaths) + allowedPaths->insert(path); +} + +void EvalState::allowPath(const StorePath & storePath) +{ + if (allowedPaths) + allowedPaths->insert(store->toRealPath(storePath)); +} + Path EvalState::checkSourcePath(const Path & path_) { if (!allowedPaths) return path_; @@ -1316,13 +1328,13 @@ void EvalState::callFunction(Value & fun, Value & arg, Value & v, const Pos & po auto size = (lambda.arg.empty() ? 0 : 1) + - (lambda.matchAttrs ? lambda.formals->formals.size() : 0); + (lambda.hasFormals() ? lambda.formals->formals.size() : 0); Env & env2(allocEnv(size)); env2.up = fun.lambda.env; size_t displ = 0; - if (!lambda.matchAttrs) + if (!lambda.hasFormals()) env2.values[displ++] = &arg; else { @@ -1402,7 +1414,7 @@ void EvalState::autoCallFunction(Bindings & args, Value & fun, Value & res) } } - if (!fun.isLambda() || !fun.lambda.fun->matchAttrs) { + if (!fun.isLambda() || !fun.lambda.fun->hasFormals()) { res = fun; return; } @@ -1889,6 +1901,7 @@ string EvalState::copyPathToStore(PathSet & context, const Path & path) ? store->computeStorePathForPath(std::string(baseNameOf(path)), checkSourcePath(path)).first : store->addToStore(std::string(baseNameOf(path)), checkSourcePath(path), FileIngestionMethod::Recursive, htSHA256, defaultPathFilter, repair); dstPath = store->printStorePath(p); + allowPath(p); srcToStore.insert_or_assign(path, std::move(p)); printMsg(lvlChatty, "copied source '%1%' -> '%2%'", path, dstPath); } diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index 9df6150c6..7cc16ef0a 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -150,6 +150,15 @@ public: SearchPath getSearchPath() { return searchPath; } + /* Allow access to a path. */ + void allowPath(const Path & path); + + /* Allow access to a store path. Note that this gets remapped to + the real store path if `store` is a chroot store. */ + void allowPath(const StorePath & storePath); + + /* Check whether access to a path is allowed and throw an error if + not. Otherwise return the canonicalised path. 
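The hunks above in `eval.cc` and `eval.hh` replace scattered `allowedPaths->insert(...)` calls with the new `EvalState::allowPath` helpers. A minimal self-contained sketch of the same pattern (the `Evaluator` type and its members below are hypothetical stand-ins, not Nix's real classes), showing why the null guard lets callers stop caring whether restricted evaluation is enabled at all:

```cpp
#include <iostream>
#include <optional>
#include <set>
#include <string>

// Hypothetical stand-in for EvalState: the allow-list only exists when
// restricted/pure evaluation is enabled, so every insertion must be guarded.
struct Evaluator
{
    // std::nullopt means "no restrictions": every path is accessible.
    std::optional<std::set<std::string>> allowedPaths;

    // Centralised helper, analogous to EvalState::allowPath(const Path &).
    void allowPath(const std::string & path)
    {
        if (allowedPaths)
            allowedPaths->insert(path);
    }

    bool isAllowed(const std::string & path) const
    {
        return !allowedPaths || allowedPaths->count(path) > 0;
    }
};

int main()
{
    Evaluator unrestricted;                    // allowedPaths unset
    unrestricted.allowPath("/nix/store/abc");  // harmless no-op
    std::cout << unrestricted.isAllowed("/etc/passwd") << "\n";  // 1

    Evaluator restricted;
    restricted.allowedPaths = std::set<std::string>{};
    restricted.allowPath("/nix/store/abc");
    std::cout << restricted.isAllowed("/nix/store/abc") << "\n"; // 1
    std::cout << restricted.isAllowed("/etc/passwd") << "\n";    // 0
}
```

The second overload in the patch additionally maps a `StorePath` through `store->toRealPath`, which is what makes the helpers behave correctly with chroot stores.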
*/ Path checkSourcePath(const Path & path); void checkURI(const std::string & uri); diff --git a/src/libexpr/flake/flake.cc b/src/libexpr/flake/flake.cc index 1a1fa6938..43bfc3644 100644 --- a/src/libexpr/flake/flake.cc +++ b/src/libexpr/flake/flake.cc @@ -64,8 +64,7 @@ static std::tuple fetchOrSubstituteTree( debug("got tree '%s' from '%s'", state.store->printStorePath(tree.storePath), lockedRef); - if (state.allowedPaths) - state.allowedPaths->insert(tree.actualPath); + state.allowPath(tree.storePath); assert(!originalRef.input.getNarHash() || tree.storePath == originalRef.input.computeStorePath(*state.store)); @@ -231,7 +230,7 @@ static Flake getFlake( if (auto outputs = vInfo.attrs->get(sOutputs)) { expectType(state, nFunction, *outputs->value, *outputs->pos); - if (outputs->value->isLambda() && outputs->value->lambda.fun->matchAttrs) { + if (outputs->value->isLambda() && outputs->value->lambda.fun->hasFormals()) { for (auto & formal : outputs->value->lambda.fun->formals->formals) { if (formal.name != state.sSelf) flake.inputs.emplace(formal.name, FlakeInput { diff --git a/src/libexpr/nixexpr.cc b/src/libexpr/nixexpr.cc index 492b819e7..0d0f3e469 100644 --- a/src/libexpr/nixexpr.cc +++ b/src/libexpr/nixexpr.cc @@ -124,7 +124,7 @@ void ExprList::show(std::ostream & str) const void ExprLambda::show(std::ostream & str) const { str << "("; - if (matchAttrs) { + if (hasFormals()) { str << "{ "; bool first = true; for (auto & i : formals->formals) { @@ -348,7 +348,7 @@ void ExprLambda::bindVars(const StaticEnv & env) if (!arg.empty()) newEnv.vars[arg] = displ++; - if (matchAttrs) { + if (hasFormals()) { for (auto & i : formals->formals) newEnv.vars[i.name] = displ++; diff --git a/src/libexpr/nixexpr.hh b/src/libexpr/nixexpr.hh index 51a14cd59..851e875bd 100644 --- a/src/libexpr/nixexpr.hh +++ b/src/libexpr/nixexpr.hh @@ -233,11 +233,10 @@ struct ExprLambda : Expr Pos pos; Symbol name; Symbol arg; - bool matchAttrs; Formals * formals; Expr * body; - ExprLambda(const Pos & pos, const Symbol & arg, bool matchAttrs, Formals * formals, Expr * body) - : pos(pos), arg(arg), matchAttrs(matchAttrs), formals(formals), body(body) + ExprLambda(const Pos & pos, const Symbol & arg, Formals * formals, Expr * body) + : pos(pos), arg(arg), formals(formals), body(body) { if (!arg.empty() && formals && formals->argNames.find(arg) != formals->argNames.end()) throw ParseError({ @@ -247,6 +246,7 @@ struct ExprLambda : Expr }; void setName(Symbol & name); string showNamePos() const; + inline bool hasFormals() const { return formals != nullptr; } COMMON_METHODS }; diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index e3749783a..8a0a79c96 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -324,13 +324,13 @@ expr: expr_function; expr_function : ID ':' expr_function - { $$ = new ExprLambda(CUR_POS, data->symbols.create($1), false, 0, $3); } + { $$ = new ExprLambda(CUR_POS, data->symbols.create($1), 0, $3); } | '{' formals '}' ':' expr_function - { $$ = new ExprLambda(CUR_POS, data->symbols.create(""), true, $2, $5); } + { $$ = new ExprLambda(CUR_POS, data->symbols.create(""), $2, $5); } | '{' formals '}' '@' ID ':' expr_function - { $$ = new ExprLambda(CUR_POS, data->symbols.create($5), true, $2, $7); } + { $$ = new ExprLambda(CUR_POS, data->symbols.create($5), $2, $7); } | ID '@' '{' formals '}' ':' expr_function - { $$ = new ExprLambda(CUR_POS, data->symbols.create($1), true, $4, $7); } + { $$ = new ExprLambda(CUR_POS, data->symbols.create($1), $4, $7); } | ASSERT expr ';' expr_function { $$ = 
new ExprAssert(CUR_POS, $2, $4); } | WITH expr ';' expr_function diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 25b9c32b2..8f6e937ec 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -56,13 +56,9 @@ void EvalState::realiseContext(const PathSet & context) "cannot build '%1%' during evaluation because the option 'allow-import-from-derivation' is disabled", store->printStorePath(drvs.begin()->drvPath)); - /* For performance, prefetch all substitute info. */ - StorePathSet willBuild, willSubstitute, unknown; - uint64_t downloadSize, narSize; + /* Build/substitute the context. */ std::vector buildReqs; for (auto & d : drvs) buildReqs.emplace_back(DerivedPath { d }); - store->queryMissing(buildReqs, willBuild, willSubstitute, unknown, downloadSize, narSize); - store->buildPaths(buildReqs); /* Add the output of this derivations to the allowed @@ -414,7 +410,7 @@ static RegisterPrimOp primop_isNull({ Return `true` if *e* evaluates to `null`, and `false` otherwise. > **Warning** - > + > > This function is *deprecated*; just write `e == null` instead. )", .fun = prim_isNull, @@ -1851,56 +1847,87 @@ static RegisterPrimOp primop_toFile({ .fun = prim_toFile, }); -static void addPath(EvalState & state, const Pos & pos, const string & name, const Path & path_, - Value * filterFun, FileIngestionMethod method, const std::optional expectedHash, Value & v) +static void addPath( + EvalState & state, + const Pos & pos, + const string & name, + Path path, + Value * filterFun, + FileIngestionMethod method, + const std::optional expectedHash, + Value & v, + const PathSet & context) { - const auto path = evalSettings.pureEval && expectedHash ? - path_ : - state.checkSourcePath(path_); - PathFilter filter = filterFun ? ([&](const Path & path) { - auto st = lstat(path); + try { + // FIXME: handle CA derivation outputs (where path needs to + // be rewritten to the actual output). + state.realiseContext(context); - /* Call the filter function. The first argument is the path, - the second is a string indicating the type of the file. */ - Value arg1; - mkString(arg1, path); + if (state.store->isInStore(path)) { + auto [storePath, subPath] = state.store->toStorePath(path); + auto info = state.store->queryPathInfo(storePath); + if (!info->references.empty()) + throw EvalError("store path '%s' is not allowed to have references", + state.store->printStorePath(storePath)); + path = state.store->toRealPath(storePath) + subPath; + } - Value fun2; - state.callFunction(*filterFun, arg1, fun2, noPos); + path = evalSettings.pureEval && expectedHash + ? path + : state.checkSourcePath(path); - Value arg2; - mkString(arg2, - S_ISREG(st.st_mode) ? "regular" : - S_ISDIR(st.st_mode) ? "directory" : - S_ISLNK(st.st_mode) ? "symlink" : - "unknown" /* not supported, will fail! */); + PathFilter filter = filterFun ? ([&](const Path & path) { + auto st = lstat(path); - Value res; - state.callFunction(fun2, arg2, res, noPos); + /* Call the filter function. The first argument is the path, + the second is a string indicating the type of the file. 
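As a side note on the filter callback that `addPath` curries here: the predicate receives the path plus a type string derived from `lstat`. A rough standalone sketch of that classification step, using a plain `std::function` instead of Nix's `Value`-based callback (an illustration only, not the real API):

```cpp
#include <functional>
#include <iostream>
#include <string>
#include <sys/stat.h>

// The predicate receives the path and a type string ("regular", "directory",
// "symlink" or "unknown") and returns whether the entry should be kept.
using PathPredicate =
    std::function<bool(const std::string &, const std::string &)>;

static bool applyFilter(const std::string & path, const PathPredicate & keep)
{
    struct stat st;
    if (lstat(path.c_str(), &st) == -1) return false;

    std::string type =
        S_ISREG(st.st_mode) ? "regular" :
        S_ISDIR(st.st_mode) ? "directory" :
        S_ISLNK(st.st_mode) ? "symlink" :
        "unknown";  // other file types are not representable in a NAR

    return keep(path, type);
}

int main()
{
    // Example predicate: drop symlinks and anything under a .git directory.
    PathPredicate keep = [](const std::string & path, const std::string & type) {
        return type != "symlink" && path.find("/.git/") == std::string::npos;
    };

    std::cout << applyFilter("/etc/hosts", keep) << "\n";
}
```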
*/ + Value arg1; + mkString(arg1, path); - return state.forceBool(res, pos); - }) : defaultPathFilter; + Value fun2; + state.callFunction(*filterFun, arg1, fun2, noPos); - std::optional expectedStorePath; - if (expectedHash) - expectedStorePath = state.store->makeFixedOutputPath(name, FixedOutputInfo { - { - .method = method, - .hash = *expectedHash, - }, - {}, - }); - Path dstPath; - if (!expectedHash || !state.store->isValidPath(*expectedStorePath)) { - dstPath = state.store->printStorePath(settings.readOnlyMode - ? state.store->computeStorePathForPath(name, path, method, htSHA256, filter).first - : state.store->addToStore(name, path, method, htSHA256, filter, state.repair)); - if (expectedHash && expectedStorePath != state.store->parseStorePath(dstPath)) - throw Error("store path mismatch in (possibly filtered) path added from '%s'", path); - } else - dstPath = state.store->printStorePath(*expectedStorePath); + Value arg2; + mkString(arg2, + S_ISREG(st.st_mode) ? "regular" : + S_ISDIR(st.st_mode) ? "directory" : + S_ISLNK(st.st_mode) ? "symlink" : + "unknown" /* not supported, will fail! */); - mkString(v, dstPath, {dstPath}); + Value res; + state.callFunction(fun2, arg2, res, noPos); + + return state.forceBool(res, pos); + }) : defaultPathFilter; + + std::optional expectedStorePath; + if (expectedHash) + expectedStorePath = state.store->makeFixedOutputPath(name, FixedOutputInfo { + { + .method = method, + .hash = *expectedHash, + }, + {}, + }); + + Path dstPath; + if (!expectedHash || !state.store->isValidPath(*expectedStorePath)) { + dstPath = state.store->printStorePath(settings.readOnlyMode + ? state.store->computeStorePathForPath(name, path, method, htSHA256, filter).first + : state.store->addToStore(name, path, method, htSHA256, filter, state.repair)); + if (expectedHash && expectedStorePath != state.store->parseStorePath(dstPath)) + throw Error("store path mismatch in (possibly filtered) path added from '%s'", path); + } else + dstPath = state.store->printStorePath(*expectedStorePath); + + mkString(v, dstPath, {dstPath}); + + state.allowPath(dstPath); + + } catch (Error & e) { + e.addTrace(pos, "while adding path '%s'", path); + throw; + } } @@ -1908,11 +1935,6 @@ static void prim_filterSource(EvalState & state, const Pos & pos, Value * * args { PathSet context; Path path = state.coerceToPath(pos, *args[1], context); - if (!context.empty()) - throw EvalError({ - .msg = hintfmt("string '%1%' cannot refer to other paths", path), - .errPos = pos - }); state.forceValue(*args[0], pos); if (args[0]->type() != nFunction) @@ -1923,13 +1945,26 @@ static void prim_filterSource(EvalState & state, const Pos & pos, Value * * args .errPos = pos }); - addPath(state, pos, std::string(baseNameOf(path)), path, args[0], FileIngestionMethod::Recursive, std::nullopt, v); + addPath(state, pos, std::string(baseNameOf(path)), path, args[0], FileIngestionMethod::Recursive, std::nullopt, v, context); } static RegisterPrimOp primop_filterSource({ .name = "__filterSource", .args = {"e1", "e2"}, .doc = R"( + > **Warning** + > + > `filterSource` should not be used to filter store paths. Since + > `filterSource` uses the name of the input directory while naming + > the output directory, doing so will produce a directory name in + > the form of `--`, where `-` is + > the name of the input directory. Since `` depends on the + > unfiltered directory, the name of the output directory will + > indirectly depend on files that are filtered out by the + > function. 
This will trigger a rebuild even when a filtered out + > file is changed. Use `builtins.path` instead, which allows + > specifying the name of the output directory. + This function allows you to copy sources into the Nix store while filtering certain files. For instance, suppose that you want to use the directory `source-dir` as an input to a Nix expression, e.g. @@ -1976,18 +2011,13 @@ static void prim_path(EvalState & state, const Pos & pos, Value * * args, Value Value * filterFun = nullptr; auto method = FileIngestionMethod::Recursive; std::optional expectedHash; + PathSet context; for (auto & attr : *args[0]->attrs) { const string & n(attr.name); - if (n == "path") { - PathSet context; + if (n == "path") path = state.coerceToPath(*attr.pos, *attr.value, context); - if (!context.empty()) - throw EvalError({ - .msg = hintfmt("string '%1%' cannot refer to other paths", path), - .errPos = *attr.pos - }); - } else if (attr.name == state.sName) + else if (attr.name == state.sName) name = state.forceStringNoCtx(*attr.value, *attr.pos); else if (n == "filter") { state.forceValue(*attr.value, pos); @@ -2010,7 +2040,7 @@ static void prim_path(EvalState & state, const Pos & pos, Value * * args, Value if (name.empty()) name = baseNameOf(path); - addPath(state, pos, name, path, filterFun, method, expectedHash, v); + addPath(state, pos, name, path, filterFun, method, expectedHash, v, context); } static RegisterPrimOp primop_path({ @@ -2375,7 +2405,7 @@ static void prim_functionArgs(EvalState & state, const Pos & pos, Value * * args .errPos = pos }); - if (!args[0]->lambda.fun->matchAttrs) { + if (!args[0]->lambda.fun->hasFormals()) { state.mkAttrs(v, 0); return; } @@ -2530,7 +2560,7 @@ static RegisterPrimOp primop_tail({ the argument isn’t a list or is an empty list. > **Warning** - > + > > This function should generally be avoided since it's inefficient: > unlike Haskell's `tail`, it takes O(n) time, so recursing over a > list by repeatedly calling `tail` takes O(n^2) time. diff --git a/src/libexpr/primops/fetchMercurial.cc b/src/libexpr/primops/fetchMercurial.cc index 3f88ccb91..1cd481243 100644 --- a/src/libexpr/primops/fetchMercurial.cc +++ b/src/libexpr/primops/fetchMercurial.cc @@ -84,8 +84,7 @@ static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * ar mkInt(*state.allocAttr(v, state.symbols.create("revCount")), *revCount); v.attrs->sort(); - if (state.allowedPaths) - state.allowedPaths->insert(tree.actualPath); + state.allowPath(tree.storePath); } static RegisterPrimOp r_fetchMercurial("fetchMercurial", 1, prim_fetchMercurial); diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc index 06bdec003..f570f19ae 100644 --- a/src/libexpr/primops/fetchTree.cc +++ b/src/libexpr/primops/fetchTree.cc @@ -66,7 +66,7 @@ void emitTreeAttrs( v.attrs->sort(); } -std::string fixURI(std::string uri, EvalState &state, const std::string & defaultScheme = "file") +std::string fixURI(std::string uri, EvalState & state, const std::string & defaultScheme = "file") { state.checkURI(uri); return uri.find("://") != std::string::npos ? uri : defaultScheme + "://" + uri; @@ -81,23 +81,17 @@ std::string fixURIForGit(std::string uri, EvalState & state) return fixURI(uri, state); } -void addURI(EvalState &state, fetchers::Attrs &attrs, Symbol name, std::string v) -{ - string n(name); - attrs.emplace(name, n == "url" ? 
fixURI(v, state) : v); -} - struct FetchTreeParams { bool emptyRevFallback = false; bool allowNameArgument = false; }; static void fetchTree( - EvalState &state, - const Pos &pos, - Value **args, - Value &v, - const std::optional type, + EvalState & state, + const Pos & pos, + Value * * args, + Value & v, + std::optional type, const FetchTreeParams & params = FetchTreeParams{} ) { fetchers::Input input; @@ -110,17 +104,33 @@ static void fetchTree( fetchers::Attrs attrs; + if (auto aType = args[0]->attrs->get(state.sType)) { + if (type) + throw Error({ + .msg = hintfmt("unexpected attribute 'type'"), + .errPos = pos + }); + type = state.forceStringNoCtx(*aType->value, *aType->pos); + } else if (!type) + throw Error({ + .msg = hintfmt("attribute 'type' is missing in call to 'fetchTree'"), + .errPos = pos + }); + + attrs.emplace("type", type.value()); + for (auto & attr : *args[0]->attrs) { + if (attr.name == state.sType) continue; state.forceValue(*attr.value); - if (attr.value->type() == nPath || attr.value->type() == nString) - addURI( - state, - attrs, - attr.name, - state.coerceToString(*attr.pos, *attr.value, context, false, false) - ); - else if (attr.value->type() == nString) - addURI(state, attrs, attr.name, attr.value->string.s); + if (attr.value->type() == nPath || attr.value->type() == nString) { + auto s = state.coerceToString(*attr.pos, *attr.value, context, false, false); + attrs.emplace(attr.name, + attr.name == "url" + ? type == "git" + ? fixURIForGit(s, state) + : fixURI(s, state) + : s); + } else if (attr.value->type() == nBool) attrs.emplace(attr.name, Explicit{attr.value->boolean}); else if (attr.value->type() == nInt) @@ -130,15 +140,6 @@ static void fetchTree( attr.name, showType(*attr.value)); } - if (type) - attrs.emplace("type", type.value()); - - if (!attrs.count("type")) - throw Error({ - .msg = hintfmt("attribute 'type' is missing in call to 'fetchTree'"), - .errPos = pos - }); - if (!params.allowNameArgument) if (auto nameIter = attrs.find("name"); nameIter != attrs.end()) throw Error({ @@ -146,7 +147,6 @@ static void fetchTree( .errPos = pos }); - input = fetchers::Input::fromAttrs(std::move(attrs)); } else { auto url = state.coerceToString(pos, *args[0], context, false, false); @@ -169,8 +169,7 @@ static void fetchTree( auto [tree, input2] = input.fetch(state.store); - if (state.allowedPaths) - state.allowedPaths->insert(tree.actualPath); + state.allowPath(tree.storePath); emitTreeAttrs(state, tree, input2, v, params.emptyRevFallback, false); } @@ -234,19 +233,16 @@ static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v, ? fetchers::downloadTarball(state.store, *url, name, (bool) expectedHash).first.storePath : fetchers::downloadFile(state.store, *url, name, (bool) expectedHash).storePath; - auto realPath = state.store->toRealPath(storePath); - if (expectedHash) { auto hash = unpack ? 
state.store->queryPathInfo(storePath)->narHash - : hashFile(htSHA256, realPath); + : hashFile(htSHA256, state.store->toRealPath(storePath)); if (hash != *expectedHash) throw Error((unsigned int) 102, "hash mismatch in file downloaded from '%s':\n specified: %s\n got: %s", *url, expectedHash->to_string(Base32, true), hash.to_string(Base32, true)); } - if (state.allowedPaths) - state.allowedPaths->insert(realPath); + state.allowPath(storePath); auto path = state.store->printStorePath(storePath); mkString(v, path, PathSet({path})); diff --git a/src/libexpr/value-to-xml.cc b/src/libexpr/value-to-xml.cc index 2ddc5f751..b44455f5f 100644 --- a/src/libexpr/value-to-xml.cc +++ b/src/libexpr/value-to-xml.cc @@ -135,7 +135,7 @@ static void printValueAsXML(EvalState & state, bool strict, bool location, if (location) posToXML(xmlAttrs, v.lambda.fun->pos); XMLOpenElement _(doc, "function", xmlAttrs); - if (v.lambda.fun->matchAttrs) { + if (v.lambda.fun->hasFormals()) { XMLAttrs attrs; if (!v.lambda.fun->arg.empty()) attrs["name"] = v.lambda.fun->arg; if (v.lambda.fun->formals->ellipsis) attrs["ellipsis"] = "1"; diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc index f574e64cf..e739f4d9d 100644 --- a/src/libstore/build/local-derivation-goal.cc +++ b/src/libstore/build/local-derivation-goal.cc @@ -711,6 +711,7 @@ void LocalDerivationGoal::startBuilder() if (!builderOut.readSide) throw SysError("opening pseudoterminal master"); + // FIXME: not thread-safe, use ptsname_r std::string slaveName(ptsname(builderOut.readSide.get())); if (buildUser) { @@ -754,7 +755,6 @@ void LocalDerivationGoal::startBuilder() result.startTime = time(0); /* Fork a child to build the package. */ - ProcessOptions options; #if __linux__ if (useChroot) { @@ -797,8 +797,6 @@ void LocalDerivationGoal::startBuilder() userNamespaceSync.create(); - options.allowVfork = false; - Path maxUserNamespaces = "/proc/sys/user/max_user_namespaces"; static bool userNamespacesEnabled = pathExists(maxUserNamespaces) @@ -856,7 +854,7 @@ void LocalDerivationGoal::startBuilder() writeFull(builderOut.writeSide.get(), fmt("%d %d\n", usingUserNamespace, child)); _exit(0); - }, options); + }); int res = helper.wait(); if (res != 0 && settings.sandboxFallback) { @@ -920,10 +918,9 @@ void LocalDerivationGoal::startBuilder() #endif { fallback: - options.allowVfork = !buildUser && !drv->isBuiltin(); pid = startProcess([&]() { runChild(); - }, options); + }); } /* parent */ @@ -2140,8 +2137,7 @@ void LocalDerivationGoal::registerOutputs() /* Pass blank Sink as we are not ready to hash data at this stage. */ NullSink blank; - auto references = worker.store.parseStorePathSet( - scanForReferences(blank, actualPath, worker.store.printStorePathSet(referenceablePaths))); + auto references = scanForReferences(blank, actualPath, referenceablePaths); outputReferencesIfUnregistered.insert_or_assign( outputName, @@ -2208,7 +2204,7 @@ void LocalDerivationGoal::registerOutputs() auto rewriteOutput = [&]() { /* Apply hash rewriting if necessary. */ if (!outputRewrites.empty()) { - warn("rewriting hashes in '%1%'; cross fingers", actualPath); + debug("rewriting hashes in '%1%'; cross fingers", actualPath); /* FIXME: this is in-memory. 
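The FIXME added at the top of the `local-derivation-goal.cc` hunk notes that `ptsname` hands back a pointer to static storage and is therefore not thread-safe, and suggests `ptsname_r`. A small sketch of what such a replacement might look like on glibc (the helper name is assumed, it is not part of this patch):

```cpp
#include <cerrno>
#include <cstdio>
#include <cstring>
#include <fcntl.h>
#include <limits.h>
#include <stdexcept>
#include <stdlib.h>   // posix_openpt, grantpt, unlockpt, ptsname_r (glibc)
#include <string>

// Thread-safe variant of ptsname(): ptsname_r() writes the slave pty name
// into a caller-supplied buffer instead of a shared static one.
static std::string slavePtyName(int masterFd)
{
    char buf[PATH_MAX];
    if (ptsname_r(masterFd, buf, sizeof(buf)) != 0)
        throw std::runtime_error(std::string("ptsname_r: ") + std::strerror(errno));
    return std::string(buf);
}

int main()
{
    int master = posix_openpt(O_RDWR | O_NOCTTY);
    if (master == -1) { std::perror("posix_openpt"); return 1; }
    grantpt(master);
    unlockpt(master);
    std::printf("slave pty: %s\n", slavePtyName(master).c_str());
}
```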
*/ StringSink sink; diff --git a/src/libstore/build/worker.cc b/src/libstore/build/worker.cc index a7a6b92a6..55afb5cca 100644 --- a/src/libstore/build/worker.cc +++ b/src/libstore/build/worker.cc @@ -239,7 +239,7 @@ void Worker::run(const Goals & _topGoals) } } - /* Call queryMissing() efficiently query substitutes. */ + /* Call queryMissing() to efficiently query substitutes. */ StorePathSet willBuild, willSubstitute, unknown; uint64_t downloadSize, narSize; store.queryMissing(topPaths, willBuild, willSubstitute, unknown, downloadSize, narSize); diff --git a/src/libstore/references.cc b/src/libstore/references.cc index 3a07c1411..c369b14ac 100644 --- a/src/libstore/references.cc +++ b/src/libstore/references.cc @@ -11,11 +11,13 @@ namespace nix { -static unsigned int refLength = 32; /* characters */ +static size_t refLength = 32; /* characters */ -static void search(const unsigned char * s, size_t len, - StringSet & hashes, StringSet & seen) +static void search( + std::string_view s, + StringSet & hashes, + StringSet & seen) { static std::once_flag initialised; static bool isBase32[256]; @@ -25,7 +27,7 @@ static void search(const unsigned char * s, size_t len, isBase32[(unsigned char) base32Chars[i]] = true; }); - for (size_t i = 0; i + refLength <= len; ) { + for (size_t i = 0; i + refLength <= s.size(); ) { int j; bool match = true; for (j = refLength - 1; j >= 0; --j) @@ -35,7 +37,7 @@ static void search(const unsigned char * s, size_t len, break; } if (!match) continue; - string ref((const char *) s + i, refLength); + std::string ref(s.substr(i, refLength)); if (hashes.erase(ref)) { debug(format("found reference to '%1%' at offset '%2%'") % ref % i); @@ -46,69 +48,60 @@ static void search(const unsigned char * s, size_t len, } -struct RefScanSink : Sink +void RefScanSink::operator () (std::string_view data) { - StringSet hashes; - StringSet seen; + /* It's possible that a reference spans the previous and current + fragment, so search in the concatenation of the tail of the + previous fragment and the start of the current fragment. */ + auto s = tail; + s.append(data.data(), refLength); + search(s, hashes, seen); - string tail; + search(data, hashes, seen); - RefScanSink() { } - - void operator () (std::string_view data) override - { - /* It's possible that a reference spans the previous and current - fragment, so search in the concatenation of the tail of the - previous fragment and the start of the current fragment. */ - string s = tail + std::string(data, 0, refLength); - search((const unsigned char *) s.data(), s.size(), hashes, seen); - - search((const unsigned char *) data.data(), data.size(), hashes, seen); - - size_t tailLen = data.size() <= refLength ? data.size() : refLength; - tail = std::string(tail, tail.size() < refLength - tailLen ? 
0 : tail.size() - (refLength - tailLen)); - tail.append({data.data() + data.size() - tailLen, tailLen}); - } -}; + auto tailLen = std::min(data.size(), refLength); + auto rest = refLength - tailLen; + if (rest < tail.size()) + tail = tail.substr(tail.size() - rest); + tail.append(data.data() + data.size() - tailLen, tailLen); +} -std::pair scanForReferences(const string & path, - const PathSet & refs) +std::pair scanForReferences( + const string & path, + const StorePathSet & refs) { HashSink hashSink { htSHA256 }; auto found = scanForReferences(hashSink, path, refs); auto hash = hashSink.finish(); - return std::pair(found, hash); + return std::pair(found, hash); } -PathSet scanForReferences(Sink & toTee, - const string & path, const PathSet & refs) +StorePathSet scanForReferences( + Sink & toTee, + const Path & path, + const StorePathSet & refs) { - RefScanSink refsSink; - TeeSink sink { refsSink, toTee }; - std::map backMap; + StringSet hashes; + std::map backMap; for (auto & i : refs) { - auto baseName = std::string(baseNameOf(i)); - string::size_type pos = baseName.find('-'); - if (pos == string::npos) - throw Error("bad reference '%1%'", i); - string s = string(baseName, 0, pos); - assert(s.size() == refLength); - assert(backMap.find(s) == backMap.end()); - // parseHash(htSHA256, s); - refsSink.hashes.insert(s); - backMap[s] = i; + std::string hashPart(i.hashPart()); + auto inserted = backMap.emplace(hashPart, i).second; + assert(inserted); + hashes.insert(hashPart); } /* Look for the hashes in the NAR dump of the path. */ + RefScanSink refsSink(std::move(hashes)); + TeeSink sink { refsSink, toTee }; dumpPath(path, sink); /* Map the hashes found back to their store paths. */ - PathSet found; - for (auto & i : refsSink.seen) { - std::map::iterator j; - if ((j = backMap.find(i)) == backMap.end()) abort(); + StorePathSet found; + for (auto & i : refsSink.getResult()) { + auto j = backMap.find(i); + assert(j != backMap.end()); found.insert(j->second); } diff --git a/src/libstore/references.hh b/src/libstore/references.hh index 4f12e6b21..a6119c861 100644 --- a/src/libstore/references.hh +++ b/src/libstore/references.hh @@ -1,13 +1,31 @@ #pragma once -#include "types.hh" #include "hash.hh" +#include "path.hh" namespace nix { -std::pair scanForReferences(const Path & path, const PathSet & refs); +std::pair scanForReferences(const Path & path, const StorePathSet & refs); -PathSet scanForReferences(Sink & toTee, const Path & path, const PathSet & refs); +StorePathSet scanForReferences(Sink & toTee, const Path & path, const StorePathSet & refs); + +class RefScanSink : public Sink +{ + StringSet hashes; + StringSet seen; + + std::string tail; + +public: + + RefScanSink(StringSet && hashes) : hashes(hashes) + { } + + StringSet & getResult() + { return seen; } + + void operator () (std::string_view data) override; +}; struct RewritingSink : Sink { diff --git a/src/libstore/tests/local.mk b/src/libstore/tests/local.mk new file mode 100644 index 000000000..f74295d97 --- /dev/null +++ b/src/libstore/tests/local.mk @@ -0,0 +1,15 @@ +check: libstore-tests_RUN + +programs += libstore-tests + +libstore-tests_DIR := $(d) + +libstore-tests_INSTALL_DIR := + +libstore-tests_SOURCES := $(wildcard $(d)/*.cc) + +libstore-tests_CXXFLAGS += -I src/libstore -I src/libutil + +libstore-tests_LIBS = libstore libutil + +libstore-tests_LDFLAGS := $(GTEST_LIBS) diff --git a/src/libstore/tests/references.cc b/src/libstore/tests/references.cc new file mode 100644 index 000000000..d91d1cedd --- /dev/null +++ 
b/src/libstore/tests/references.cc @@ -0,0 +1,45 @@ +#include "references.hh" + +#include + +namespace nix { + +TEST(references, scan) +{ + std::string hash1 = "dc04vv14dak1c1r48qa0m23vr9jy8sm0"; + std::string hash2 = "zc842j0rz61mjsp3h3wp5ly71ak6qgdn"; + + { + RefScanSink scanner(StringSet{hash1}); + auto s = "foobar"; + scanner(s); + ASSERT_EQ(scanner.getResult(), StringSet{}); + } + + { + RefScanSink scanner(StringSet{hash1}); + auto s = "foobar" + hash1 + "xyzzy"; + scanner(s); + ASSERT_EQ(scanner.getResult(), StringSet{hash1}); + } + + { + RefScanSink scanner(StringSet{hash1, hash2}); + auto s = "foobar" + hash1 + "xyzzy" + hash2; + scanner(((std::string_view) s).substr(0, 10)); + scanner(((std::string_view) s).substr(10, 5)); + scanner(((std::string_view) s).substr(15, 5)); + scanner(((std::string_view) s).substr(20)); + ASSERT_EQ(scanner.getResult(), StringSet({hash1, hash2})); + } + + { + RefScanSink scanner(StringSet{hash1, hash2}); + auto s = "foobar" + hash1 + "xyzzy" + hash2; + for (auto & i : s) + scanner(std::string(1, i)); + ASSERT_EQ(scanner.getResult(), StringSet({hash1, hash2})); + } +} + +} diff --git a/src/libstore/uds-remote-store.cc b/src/libstore/uds-remote-store.cc index cfadccf68..02e81b022 100644 --- a/src/libstore/uds-remote-store.cc +++ b/src/libstore/uds-remote-store.cc @@ -65,16 +65,7 @@ ref UDSRemoteStore::openConnection() throw SysError("cannot create Unix domain socket"); closeOnExec(conn->fd.get()); - string socketPath = path ? *path : settings.nixDaemonSocketFile; - - struct sockaddr_un addr; - addr.sun_family = AF_UNIX; - if (socketPath.size() + 1 >= sizeof(addr.sun_path)) - throw Error("socket path '%1%' is too long", socketPath); - strcpy(addr.sun_path, socketPath.c_str()); - - if (::connect(conn->fd.get(), (struct sockaddr *) &addr, sizeof(addr)) == -1) - throw SysError("cannot connect to daemon at '%1%'", socketPath); + nix::connect(conn->fd.get(), path ? *path : settings.nixDaemonSocketFile); conn->from.fd = conn->fd.get(); conn->to.fd = conn->fd.get(); diff --git a/src/libutil/archive.cc b/src/libutil/archive.cc index ed0eb2fb5..d78ec2b93 100644 --- a/src/libutil/archive.cc +++ b/src/libutil/archive.cc @@ -42,7 +42,7 @@ static string caseHackSuffix = "~nix~case~hack~"; PathFilter defaultPathFilter = [](const Path &) { return true; }; -static void dumpContents(const Path & path, size_t size, +static void dumpContents(const Path & path, off_t size, Sink & sink) { sink << "contents" << size; @@ -76,7 +76,7 @@ static void dump(const Path & path, Sink & sink, PathFilter & filter) sink << "type" << "regular"; if (st.st_mode & S_IXUSR) sink << "executable" << ""; - dumpContents(path, (size_t) st.st_size, sink); + dumpContents(path, st.st_size, sink); } else if (S_ISDIR(st.st_mode)) { diff --git a/src/libutil/util.cc b/src/libutil/util.cc index bc841f425..563a72c12 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -903,7 +903,7 @@ int Pid::wait() return status; } if (errno != EINTR) - throw SysError("cannot get child exit status"); + throw SysError("cannot get exit status of PID %d", pid); checkInterrupt(); } } @@ -939,9 +939,6 @@ void killUser(uid_t uid) users to which the current process can send signals. So we fork a process, switch to uid, and send a mass kill. 
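Earlier in this range, `archive.cc` changes `dumpContents` to take `off_t` rather than `size_t`. The difference only shows up where `size_t` is narrower than a file offset, for example 32-bit platforms built with `_FILE_OFFSET_BITS=64`; a quick illustration of the truncation the old `(size_t) st.st_size` cast could cause there:

```cpp
#include <cstddef>
#include <cstdio>
#include <sys/types.h>   // off_t

int main()
{
    // A file larger than 4 GiB, as stat(2) would report it in st.st_size.
    off_t fileSize = 5LL * 1024 * 1024 * 1024;   // 5 GiB

    // On a 32-bit platform with 64-bit file offsets, size_t is still 32 bits,
    // so this cast silently drops the high bits (here: 5 GiB becomes 1 GiB).
    size_t truncated = (size_t) fileSize;

    std::printf("sizeof(size_t)=%zu  sizeof(off_t)=%zu\n",
                sizeof(size_t), sizeof(off_t));
    std::printf("off_t value: %lld, after cast to size_t: %zu\n",
                (long long) fileSize, truncated);
}
```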
*/ - ProcessOptions options; - options.allowVfork = false; - Pid pid = startProcess([&]() { if (setuid(uid) == -1) @@ -964,7 +961,7 @@ void killUser(uid_t uid) } _exit(0); - }, options); + }); int status = pid.wait(); if (status != 0) @@ -1085,8 +1082,7 @@ void runProgram2(const RunOptions & options) // vfork implies that the environment of the main process and the fork will // be shared (technically this is undefined, but in practice that's the // case), so we can't use it if we alter the environment - if (options.environment) - processOptions.allowVfork = false; + processOptions.allowVfork = !options.environment; /* Fork. */ Pid pid = startProcess([&]() { @@ -1686,16 +1682,7 @@ AutoCloseFD createUnixDomainSocket(const Path & path, mode_t mode) closeOnExec(fdSocket.get()); - struct sockaddr_un addr; - addr.sun_family = AF_UNIX; - if (path.size() + 1 >= sizeof(addr.sun_path)) - throw Error("socket path '%1%' is too long", path); - strcpy(addr.sun_path, path.c_str()); - - unlink(path.c_str()); - - if (bind(fdSocket.get(), (struct sockaddr *) &addr, sizeof(addr)) == -1) - throw SysError("cannot bind to socket '%1%'", path); + bind(fdSocket.get(), path); if (chmod(path.c_str(), mode) == -1) throw SysError("changing permissions on '%1%'", path); @@ -1707,6 +1694,66 @@ AutoCloseFD createUnixDomainSocket(const Path & path, mode_t mode) } +void bind(int fd, const std::string & path) +{ + unlink(path.c_str()); + + struct sockaddr_un addr; + addr.sun_family = AF_UNIX; + + if (path.size() + 1 >= sizeof(addr.sun_path)) { + Pid pid = startProcess([&]() { + auto dir = dirOf(path); + if (chdir(dir.c_str()) == -1) + throw SysError("chdir to '%s' failed", dir); + std::string base(baseNameOf(path)); + if (base.size() + 1 >= sizeof(addr.sun_path)) + throw Error("socket path '%s' is too long", base); + strcpy(addr.sun_path, base.c_str()); + if (bind(fd, (struct sockaddr *) &addr, sizeof(addr)) == -1) + throw SysError("cannot bind to socket '%s'", path); + _exit(0); + }); + int status = pid.wait(); + if (status != 0) + throw Error("cannot bind to socket '%s'", path); + } else { + strcpy(addr.sun_path, path.c_str()); + if (bind(fd, (struct sockaddr *) &addr, sizeof(addr)) == -1) + throw SysError("cannot bind to socket '%s'", path); + } +} + + +void connect(int fd, const std::string & path) +{ + struct sockaddr_un addr; + addr.sun_family = AF_UNIX; + + if (path.size() + 1 >= sizeof(addr.sun_path)) { + Pid pid = startProcess([&]() { + auto dir = dirOf(path); + if (chdir(dir.c_str()) == -1) + throw SysError("chdir to '%s' failed", dir); + std::string base(baseNameOf(path)); + if (base.size() + 1 >= sizeof(addr.sun_path)) + throw Error("socket path '%s' is too long", base); + strcpy(addr.sun_path, base.c_str()); + if (connect(fd, (struct sockaddr *) &addr, sizeof(addr)) == -1) + throw SysError("cannot connect to socket at '%s'", path); + _exit(0); + }); + int status = pid.wait(); + if (status != 0) + throw Error("cannot connect to socket at '%s'", path); + } else { + strcpy(addr.sun_path, path.c_str()); + if (connect(fd, (struct sockaddr *) &addr, sizeof(addr)) == -1) + throw SysError("cannot connect to socket at '%s'", path); + } +} + + string showBytes(uint64_t bytes) { return fmt("%.2f MiB", bytes / (1024.0 * 1024.0)); diff --git a/src/libutil/util.hh b/src/libutil/util.hh index bee77b53f..29232453f 100644 --- a/src/libutil/util.hh +++ b/src/libutil/util.hh @@ -259,10 +259,10 @@ void killUser(uid_t uid); pid to the caller. 
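The new `bind()` and `connect()` helpers above work around the roughly 100-byte `sun_path` limit by `chdir`-ing into the socket's directory and binding or connecting to the basename; the patch does the `chdir` inside a forked child so the parent's working directory never changes. A condensed sketch of the connect side, with the fallback inlined and the fork omitted for brevity (a hypothetical helper, not the exact Nix code):

```cpp
#include <cstdio>
#include <cstring>
#include <stdexcept>
#include <string>
#include <sys/socket.h>
#include <sys/un.h>
#include <unistd.h>

// Connect fd to a Unix domain socket, falling back to chdir + relative
// basename when the absolute path does not fit into sockaddr_un::sun_path.
// NOTE: unlike the patch, this version changes the process's cwd; Nix avoids
// that by doing the chdir and connect in a short-lived child process.
static void connectToUnixSocket(int fd, const std::string & path)
{
    struct sockaddr_un addr;
    std::memset(&addr, 0, sizeof(addr));
    addr.sun_family = AF_UNIX;

    std::string name = path;
    if (path.size() + 1 > sizeof(addr.sun_path)) {
        auto slash = path.rfind('/');
        std::string dir = slash == std::string::npos ? "." : path.substr(0, slash);
        name = slash == std::string::npos ? path : path.substr(slash + 1);
        if (name.size() + 1 > sizeof(addr.sun_path))
            throw std::runtime_error("socket path '" + path + "' is too long");
        if (chdir(dir.c_str()) == -1)
            throw std::runtime_error("chdir to '" + dir + "' failed");
    }

    std::strcpy(addr.sun_path, name.c_str());
    if (connect(fd, (struct sockaddr *) &addr, sizeof(addr)) == -1)
        throw std::runtime_error("cannot connect to socket at '" + path + "'");
}

int main()
{
    int fd = socket(AF_UNIX, SOCK_STREAM, 0);
    try {
        // Example path only; this fails unless something is listening there.
        connectToUnixSocket(fd, "/tmp/some/deeply/nested/dir/daemon-socket");
    } catch (std::exception & e) {
        std::printf("%s\n", e.what());
    }
    close(fd);
}
```

Doing the real `chdir` in a child, as the patch does, means concurrent code in the daemon never observes a changed working directory.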
*/ struct ProcessOptions { - string errorPrefix = "error: "; + string errorPrefix = ""; bool dieWithParent = true; bool runExitHandlers = false; - bool allowVfork = true; + bool allowVfork = false; }; pid_t startProcess(std::function fun, const ProcessOptions & options = ProcessOptions()); @@ -574,6 +574,12 @@ void commonChildInit(Pipe & logPipe); /* Create a Unix domain socket in listen mode. */ AutoCloseFD createUnixDomainSocket(const Path & path, mode_t mode); +/* Bind a Unix domain socket to a path. */ +void bind(int fd, const std::string & path); + +/* Connect to a Unix domain socket. */ +void connect(int fd, const std::string & path); + // A Rust/Python-like enumerate() iterator adapter. // Borrowed from http://reedbeta.com/blog/python-like-enumerate-in-cpp17. diff --git a/src/nix/daemon.cc b/src/nix/daemon.cc index 2cf2a04c9..6a40a0bd3 100644 --- a/src/nix/daemon.cc +++ b/src/nix/daemon.cc @@ -156,9 +156,6 @@ static void daemonLoop() if (chdir("/") == -1) throw SysError("cannot change current directory"); - // Get rid of children automatically; don't let them become zombies. - setSigChldAction(true); - AutoCloseFD fdSocket; // Handle socket-based activation by systemd. @@ -176,6 +173,9 @@ static void daemonLoop() fdSocket = createUnixDomainSocket(settings.nixDaemonSocketFile, 0666); } + // Get rid of children automatically; don't let them become zombies. + setSigChldAction(true); + // Loop accepting connections. while (1) { diff --git a/src/nix/flake.cc b/src/nix/flake.cc index 7d7ada707..7e4d23f6e 100644 --- a/src/nix/flake.cc +++ b/src/nix/flake.cc @@ -346,10 +346,10 @@ struct CmdFlakeCheck : FlakeCommand auto checkOverlay = [&](const std::string & attrPath, Value & v, const Pos & pos) { try { state->forceValue(v, pos); - if (!v.isLambda() || v.lambda.fun->matchAttrs || std::string(v.lambda.fun->arg) != "final") + if (!v.isLambda() || v.lambda.fun->hasFormals() || std::string(v.lambda.fun->arg) != "final") throw Error("overlay does not take an argument named 'final'"); auto body = dynamic_cast(v.lambda.fun->body); - if (!body || body->matchAttrs || std::string(body->arg) != "prev") + if (!body || body->hasFormals() || std::string(body->arg) != "prev") throw Error("overlay does not take an argument named 'prev'"); // FIXME: if we have a 'nixpkgs' input, use it to // evaluate the overlay. @@ -363,7 +363,7 @@ struct CmdFlakeCheck : FlakeCommand try { state->forceValue(v, pos); if (v.isLambda()) { - if (!v.lambda.fun->matchAttrs || !v.lambda.fun->formals->ellipsis) + if (!v.lambda.fun->hasFormals() || !v.lambda.fun->formals->ellipsis) throw Error("module must match an open attribute set ('{ config, ... }')"); } else if (v.type() == nAttrs) { for (auto & attr : *v.attrs) diff --git a/src/nix/flake.md b/src/nix/flake.md index 3d273100b..3b5812a0a 100644 --- a/src/nix/flake.md +++ b/src/nix/flake.md @@ -225,7 +225,7 @@ Currently the `type` attribute can be one of the following: [flake:](/(/rev)?)? ``` - These perform a lookup of `` in the flake registry. or + These perform a lookup of `` in the flake registry. For example, `nixpkgs` and `nixpkgs/release-20.09` are indirect flake references. 
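The `daemon.cc` hunk in this range moves `setSigChldAction(true)` to after the listening socket is created. A plausible reason, not spelled out in the patch itself, is that `createUnixDomainSocket` can now fork and wait for a child through the new `bind()` helper, and waiting stops working once `SIGCHLD` is ignored. A tiny demonstration of that POSIX behaviour:

```cpp
#include <cerrno>
#include <csignal>
#include <cstdio>
#include <cstring>
#include <sys/wait.h>
#include <unistd.h>

// With SIGCHLD set to SIG_IGN, terminated children are reaped automatically
// and waitpid() fails with ECHILD, so any helper that forks and waits has to
// run before the daemon starts ignoring SIGCHLD.
int main()
{
    signal(SIGCHLD, SIG_IGN);

    pid_t pid = fork();
    if (pid == 0) _exit(0);   // child exits immediately

    int status;
    if (waitpid(pid, &status, 0) == -1)
        std::printf("waitpid failed: %s\n", std::strerror(errno));  // typically ECHILD
    else
        std::printf("child exited with status %d\n", status);
}
```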
The specified `rev` and/or `ref` are merged with the entry in the registry; see [nix registry](./nix3-registry.md) for diff --git a/src/nix/main.cc b/src/nix/main.cc index 8aaf08813..2c3976689 100644 --- a/src/nix/main.cc +++ b/src/nix/main.cc @@ -187,11 +187,14 @@ static void showHelp(std::vector subcommand, MultiCommand & topleve , "/"), *vUtils); - auto vJson = state.allocValue(); + auto vArgs = state.allocValue(); + state.mkAttrs(*vArgs, 16); + auto vJson = state.allocAttr(*vArgs, state.symbols.create("command")); mkString(*vJson, toplevel.toJSON().dump()); + vArgs->attrs->sort(); auto vRes = state.allocValue(); - state.callFunction(*vGenerateManpage, *vJson, *vRes, noPos); + state.callFunction(*vGenerateManpage, *vArgs, *vRes, noPos); auto attr = vRes->attrs->get(state.symbols.create(mdName + ".md")); if (!attr) diff --git a/src/nix/profile.cc b/src/nix/profile.cc index 916966997..b49be256e 100644 --- a/src/nix/profile.cc +++ b/src/nix/profile.cc @@ -98,10 +98,8 @@ struct ProfileManifest else if (pathExists(profile + "/manifest.nix")) { // FIXME: needed because of pure mode; ugly. - if (state.allowedPaths) { - state.allowedPaths->insert(state.store->followLinksToStore(profile)); - state.allowedPaths->insert(state.store->followLinksToStore(profile + "/manifest.nix")); - } + state.allowPath(state.store->followLinksToStore(profile)); + state.allowPath(state.store->followLinksToStore(profile + "/manifest.nix")); auto drvInfos = queryInstalled(state, state.store->followLinksToStore(profile)); diff --git a/tests/flakes.sh b/tests/flakes.sh index 26cdf27b7..57d1b9aad 100644 --- a/tests/flakes.sh +++ b/tests/flakes.sh @@ -5,11 +5,6 @@ if [[ -z $(type -p git) ]]; then exit 99 fi -if [[ -z $(type -p hg) ]]; then - echo "Mercurial not installed; skipping flake tests" - exit 99 -fi - clearStore rm -rf $TEST_HOME/.cache $TEST_HOME/.config @@ -266,6 +261,8 @@ cat > $flake3Dir/flake.nix < \$out ''; @@ -579,45 +576,52 @@ nix build -o $TEST_ROOT/result git+file://$flakeGitBare # Test Mercurial flakes. rm -rf $flake5Dir -hg init $flake5Dir +mkdir $flake5Dir cat > $flake5Dir/flake.nix < "$out/index.html" + ''; + }; + }; + }; + + # client consumes a remote resolver + client = { lib, nodes, pkgs, ... }: { + networking.useDHCP = false; + networking.nameservers = [ + (lib.head nodes.http_dns.config.networking.interfaces.eth1.ipv6.addresses).address + (lib.head nodes.http_dns.config.networking.interfaces.eth1.ipv4.addresses).address + ]; + networking.interfaces.eth1.ipv6.addresses = [ + { address = "fd21::10"; prefixLength = 64; } + ]; + networking.interfaces.eth1.ipv4.addresses = [ + { address = "192.168.0.10"; prefixLength = 24; } + ]; + + nix.sandboxPaths = lib.mkForce []; + nix.binaryCaches = lib.mkForce []; + nix.useSandbox = lib.mkForce true; + }; + }; + + nix-fetch = pkgs.writeText "fetch.nix" '' + derivation { + # This derivation is an copy from what is available over at + # nix.git:corepkgs/fetchurl.nix + builder = "builtin:fetchurl"; + + # We're going to fetch data from the http_dns instance created before + # we expect the content to be the same as the content available there. 
+ # ``` + # $ nix-hash --type sha256 --to-base32 $(echo "hello world" | sha256sum | cut -d " " -f 1) + # 0ix4jahrkll5zg01wandq78jw3ab30q4nscph67rniqg5x7r0j59 + # ``` + outputHash = "0ix4jahrkll5zg01wandq78jw3ab30q4nscph67rniqg5x7r0j59"; + outputHashAlgo = "sha256"; + outputHashMode = "flat"; + + name = "example.com"; + url = "http://example.com"; + + unpack = false; + executable = false; + + system = "builtin"; + + preferLocalBuild = true; + + impureEnvVars = [ + "http_proxy" "https_proxy" "ftp_proxy" "all_proxy" "no_proxy" + ]; + + urls = [ "http://example.com" ]; + } + ''; + + testScript = { nodes, ... }: '' + http_dns.wait_for_unit("nginx") + http_dns.wait_for_open_port(80) + http_dns.wait_for_unit("unbound") + http_dns.wait_for_open_port(53) + + client.start() + client.wait_for_unit('multi-user.target') + + with subtest("can fetch data from a remote server outside sandbox"): + client.succeed("nix --version >&2") + client.succeed("curl -vvv http://example.com/index.html >&2") + + with subtest("nix-build can lookup dns and fetch data"): + client.succeed(""" + nix-build ${nix-fetch} >&2 + """) + ''; +})