Merge remote-tracking branch 'upstream/master' into fix-and-ci-static-builds

John Ericson 2020-10-09 18:26:47 +00:00
commit 39de73550d
193 changed files with 3689 additions and 2107 deletions


@ -12,8 +12,6 @@ jobs:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
with: with:
fetch-depth: 0 fetch-depth: 0
- uses: cachix/install-nix-action@v10 - uses: cachix/install-nix-action@v11
with:
skip_adding_nixpkgs_channel: true
#- run: nix flake check #- run: nix flake check
- run: nix-build -A checks.$(if [[ `uname` = Linux ]]; then echo x86_64-linux; else echo x86_64-darwin; fi) - run: nix-build -A checks.$(if [[ `uname` = Linux ]]; then echo x86_64-linux; else echo x86_64-darwin; fi)

.gitignore

@ -5,22 +5,12 @@ perl/Makefile.config
/aclocal.m4 /aclocal.m4
/autom4te.cache /autom4te.cache
/precompiled-headers.h.gch /precompiled-headers.h.gch
/precompiled-headers.h.pch
/config.* /config.*
/configure /configure
/stamp-h1 /stamp-h1
/svn-revision /svn-revision
/libtool /libtool
/corepkgs/config.nix
# /corepkgs/channels/
/corepkgs/channels/unpack.sh
# /corepkgs/nar/
/corepkgs/nar/nar.sh
/corepkgs/nar/unnar.sh
# /doc/manual/ # /doc/manual/
/doc/manual/*.1 /doc/manual/*.1
/doc/manual/*.5 /doc/manual/*.5
@ -107,7 +97,7 @@ perl/Makefile.config
/src/resolve-system-dependencies/resolve-system-dependencies /src/resolve-system-dependencies/resolve-system-dependencies
inst/ outputs/
*.a *.a
*.o *.o
@ -126,4 +116,10 @@ GRTAGS
GSYMS GSYMS
GTAGS GTAGS
# ccls
/.ccls-cache
# auto-generated compilation database
compile_commands.json
nix-rust/target nix-rust/target


@ -26,7 +26,7 @@ OPTIMIZE = 1
ifeq ($(OPTIMIZE), 1) ifeq ($(OPTIMIZE), 1)
GLOBAL_CXXFLAGS += -O3 GLOBAL_CXXFLAGS += -O3
else else
GLOBAL_CXXFLAGS += -O0 GLOBAL_CXXFLAGS += -O0 -U_FORTIFY_SOURCE
endif endif
include mk/lib.mk include mk/lib.mk


@ -9,7 +9,7 @@ for more details.
## Installation ## Installation
On Linux and macOS the easiest way to Install Nix is to run the following shell command On Linux and macOS the easiest way to install Nix is to run the following shell command
(as a user other than root): (as a user other than root):
```console ```console
@ -20,7 +20,7 @@ Information on additional installation methods is available on the [Nix download
## Building And Developing ## Building And Developing
See our [Hacking guide](https://hydra.nixos.org/job/nix/master/build.x86_64-linux/latest/download-by-type/doc/manual#chap-hacking) in our manual for instruction on how to See our [Hacking guide](https://hydra.nixos.org/job/nix/master/build.x86_64-linux/latest/download-by-type/doc/manual/hacking.html) in our manual for instruction on how to
build nix from source with nix-build or how to get a development environment. build nix from source with nix-build or how to get a development environment.
## Additional Resources ## Additional Resources


@ -1,13 +0,0 @@
# FIXME: remove this file?
let
fromEnv = var: def:
let val = builtins.getEnv var; in
if val != "" then val else def;
in rec {
nixBinDir = fromEnv "NIX_BIN_DIR" "@bindir@";
nixPrefix = "@prefix@";
nixLibexecDir = fromEnv "NIX_LIBEXEC_DIR" "@libexecdir@";
nixLocalstateDir = "@localstatedir@";
nixSysconfDir = "@sysconfdir@";
nixStoreDir = fromEnv "NIX_STORE_DIR" "@storedir@";
}
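The file deleted above is what external consumers read through the `nix` search-path entry; the configure.ac and Perl hunks later in this commit drop exactly that usage. A minimal sketch of the old access pattern (an illustration only, assuming a pre-removal Nix where `<nix/config.nix>` still resolves):

```nix
# Sketch: how the now-removed corepkgs/config.nix used to be consumed,
# e.g. by the removed configure.ac lines that ran
#   nix eval --raw -f '<nix/config.nix>' nixBinDir
let cfg = import <nix/config.nix>; in
{
  # Falls back to the configured @bindir@/@storedir@ unless the
  # NIX_BIN_DIR / NIX_STORE_DIR environment variables override them.
  inherit (cfg) nixBinDir nixStoreDir;
}
```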


@ -1,8 +1,4 @@
corepkgs_FILES = \ corepkgs_FILES = \
unpack-channel.nix \
derivation.nix \
fetchurl.nix fetchurl.nix
$(foreach file,config.nix $(corepkgs_FILES),$(eval $(call install-data-in,$(d)/$(file),$(datadir)/nix/corepkgs))) $(foreach file,$(corepkgs_FILES),$(eval $(call install-data-in,$(d)/$(file),$(datadir)/nix/corepkgs)))
template-files += $(d)/config.nix


@ -1,6 +0,0 @@
. | to_entries | sort_by(.key) | map(
" - `builtins." + .key + "` "
+ (.value.args | map("*" + . + "*") | join(" "))
+ " \n\n"
+ (.value.doc | split("\n") | map(" " + . + "\n") | join("")) + "\n\n"
) | join("")


@ -0,0 +1,14 @@
with builtins;
with import ./utils.nix;
builtins:
concatStrings (map
(name:
let builtin = builtins.${name}; in
" - `builtins.${name}` " + concatStringsSep " " (map (s: "*${s}*") builtin.args)
+ " \n\n"
+ concatStrings (map (s: " ${s}\n") (splitLines builtin.doc)) + "\n\n"
)
(attrNames builtins))
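This generator (like the generate-manpage.nix and generate-options.nix files below) replaces the deleted jq scripts; doc/manual/local.mk later in this diff drives it through `nix eval --raw --expr`. A hedged sketch of the expression the makefile evaluates, with `./builtins.json` standing in for the output of `nix __dump-builtins`:

```nix
# Roughly what doc/manual/local.mk evaluates (run from the repository root);
# the --raw eval prints the resulting Markdown string for builtins.md.
import doc/manual/generate-builtins.nix
  (builtins.fromJSON (builtins.readFile ./builtins.json))
```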


@ -1,44 +0,0 @@
def show_flags:
.flags
| map_values(select(.category != "config"))
| to_entries
| map(
" - `--" + .key + "`"
+ (if .value.shortName then " / `" + .value.shortName + "`" else "" end)
+ (if .value.labels then " " + (.value.labels | map("*" + . + "*") | join(" ")) else "" end)
+ " \n"
+ " " + .value.description + "\n\n")
| join("")
;
def show_synopsis:
"`" + .command + "` [*flags*...] " + (.args | map("*" + .label + "*" + (if has("arity") then "" else "..." end)) | join(" ")) + "\n\n"
;
def show_command:
. as $top |
.section + " Name\n\n"
+ "`" + .command + "` - " + .def.description + "\n\n"
+ .section + " Synopsis\n\n"
+ ({"command": .command, "args": .def.args} | show_synopsis)
+ (if .def | has("doc")
then .section + " Description\n\n" + .def.doc + "\n\n"
else ""
end)
+ (if (.def.flags | length) > 0 then
.section + " Flags\n\n"
+ (.def | show_flags)
else "" end)
+ (if (.def.examples | length) > 0 then
.section + " Examples\n\n"
+ (.def.examples | map(.description + "\n\n```console\n" + .command + "\n```\n" ) | join("\n"))
+ "\n"
else "" end)
+ (if .def.commands then .def.commands | to_entries | map(
"# Subcommand `" + ($top.command + " " + .key) + "`\n\n"
+ ({"command": ($top.command + " " + .key), "section": "##", "def": .value} | show_command)
) | join("") else "" end)
;
"Title: nix\n\n"
+ ({"command": "nix", "section": "#", "def": .} | show_command)


@ -0,0 +1,55 @@
with builtins;
with import ./utils.nix;
let
showCommand =
{ command, section, def }:
"${section} Name\n\n"
+ "`${command}` - ${def.description}\n\n"
+ "${section} Synopsis\n\n"
+ showSynopsis { inherit command; args = def.args; }
+ (if def ? doc
then "${section} Description\n\n" + def.doc + "\n\n"
else "")
+ (let s = showFlags def.flags; in
if s != ""
then "${section} Flags\n\n${s}"
else "")
+ (if def.examples or [] != []
then
"${section} Examples\n\n"
+ concatStrings (map ({ description, command }: "${description}\n\n```console\n${command}\n```\n\n") def.examples)
else "")
+ (if def.commands or [] != []
then concatStrings (
map (name:
"# Subcommand `${command} ${name}`\n\n"
+ showCommand { command = command + " " + name; section = "##"; def = def.commands.${name}; })
(attrNames def.commands))
else "");
showFlags = flags:
concatStrings
(map (longName:
let flag = flags.${longName}; in
if flag.category or "" != "config"
then
" - `--${longName}`"
+ (if flag ? shortName then " / `${flag.shortName}`" else "")
+ (if flag ? labels then " " + (concatStringsSep " " (map (s: "*${s}*") flag.labels)) else "")
+ " \n"
+ " " + flag.description + "\n\n"
else "")
(attrNames flags));
showSynopsis =
{ command, args }:
"`${command}` [*flags*...] ${concatStringsSep " "
(map (arg: "*${arg.label}*" + (if arg ? arity then "" else "...")) args)}\n\n";
in
command:
showCommand { command = "nix"; section = "#"; def = command; }


@ -1,16 +0,0 @@
. | to_entries | sort_by(.key) | map(
" - `" + .key + "` \n\n"
+ (.value.description | split("\n") | map(" " + . + "\n") | join("")) + "\n\n"
+ " **Default:** " + (
if .value.value == "" or .value.value == []
then "*empty*"
elif (.value.value | type) == "array"
then "`" + (.value.value | join(" ")) + "`"
else "`" + (.value.value | tostring) + "`"
end)
+ "\n\n"
+ (if (.value.aliases | length) > 0
then " **Deprecated alias:** " + (.value.aliases | map("`" + . + "`") | join(", ")) + "\n\n"
else ""
end)
) | join("")


@ -0,0 +1,26 @@
with builtins;
with import ./utils.nix;
options:
concatStrings (map
(name:
let option = options.${name}; in
" - `${name}` \n\n"
+ concatStrings (map (s: " ${s}\n") (splitLines option.description)) + "\n\n"
+ " **Default:** " + (
if option.value == "" || option.value == []
then "*empty*"
else if isBool option.value
then (if option.value then "`true`" else "`false`")
else
# n.b. a StringMap value type is specified as a string, but
# this shows the value type. The empty stringmap is "null" in
# JSON, but that converts to "{ }" here.
(if isAttrs option.value then "`\"\"`"
else "`" + toString option.value + "`")) + "\n\n"
+ (if option.aliases != []
then " **Deprecated alias:** " + (concatStringsSep ", " (map (s: "`${s}`") option.aliases)) + "\n\n"
else "")
)
(attrNames options))


@ -15,34 +15,51 @@ clean-files += $(d)/*.1 $(d)/*.5 $(d)/*.8
dist-files += $(man-pages) dist-files += $(man-pages)
nix-eval = $(bindir)/nix eval --experimental-features nix-command -I nix/corepkgs=corepkgs --store dummy:// --impure --raw --expr
$(d)/%.1: $(d)/src/command-ref/%.md $(d)/%.1: $(d)/src/command-ref/%.md
$(trace-gen) lowdown -sT man $^ -o $@ @printf "Title: %s\n\n" "$$(basename $@ .1)" > $^.tmp
@cat $^ >> $^.tmp
$(trace-gen) lowdown -sT man $^.tmp -o $@
@rm $^.tmp
$(d)/%.8: $(d)/src/command-ref/%.md $(d)/%.8: $(d)/src/command-ref/%.md
$(trace-gen) lowdown -sT man $^ -o $@ @printf "Title: %s\n\n" "$$(basename $@ .8)" > $^.tmp
@cat $^ >> $^.tmp
$(trace-gen) lowdown -sT man $^.tmp -o $@
@rm $^.tmp
$(d)/nix.conf.5: $(d)/src/command-ref/conf-file.md $(d)/nix.conf.5: $(d)/src/command-ref/conf-file.md
$(trace-gen) lowdown -sT man $^ -o $@ @printf "Title: %s\n\n" "$$(basename $@ .5)" > $^.tmp
@cat $^ >> $^.tmp
$(trace-gen) lowdown -sT man $^.tmp -o $@
@rm $^.tmp
$(d)/src/command-ref/nix.md: $(d)/nix.json $(d)/generate-manpage.jq $(d)/src/command-ref/nix.md: $(d)/nix.json $(d)/generate-manpage.nix $(bindir)/nix
jq -r -f doc/manual/generate-manpage.jq $< > $@ $(trace-gen) $(nix-eval) 'import doc/manual/generate-manpage.nix (builtins.fromJSON (builtins.readFile $<))' > $@.tmp
@mv $@.tmp $@
$(d)/src/command-ref/conf-file.md: $(d)/conf-file.json $(d)/generate-options.jq $(d)/src/command-ref/conf-file-prefix.md $(d)/src/command-ref/conf-file.md: $(d)/conf-file.json $(d)/generate-options.nix $(d)/src/command-ref/conf-file-prefix.md $(bindir)/nix
cat doc/manual/src/command-ref/conf-file-prefix.md > $@ @cat doc/manual/src/command-ref/conf-file-prefix.md > $@.tmp
jq -r -f doc/manual/generate-options.jq $< >> $@ $(trace-gen) $(nix-eval) 'import doc/manual/generate-options.nix (builtins.fromJSON (builtins.readFile $<))' >> $@.tmp
@mv $@.tmp $@
$(d)/nix.json: $(bindir)/nix $(d)/nix.json: $(bindir)/nix
$(trace-gen) $(bindir)/nix __dump-args > $@ $(trace-gen) $(bindir)/nix __dump-args > $@.tmp
@mv $@.tmp $@
$(d)/conf-file.json: $(bindir)/nix $(d)/conf-file.json: $(bindir)/nix
$(trace-gen) env -i NIX_CONF_DIR=/dummy HOME=/dummy $(bindir)/nix show-config --json --experimental-features nix-command > $@ $(trace-gen) env -i NIX_CONF_DIR=/dummy HOME=/dummy NIX_SSL_CERT_FILE=/dummy/no-ca-bundle.crt $(bindir)/nix show-config --json --experimental-features nix-command > $@.tmp
@mv $@.tmp $@
$(d)/src/expressions/builtins.md: $(d)/builtins.json $(d)/generate-builtins.jq $(d)/src/expressions/builtins-prefix.md $(d)/src/expressions/builtins.md: $(d)/builtins.json $(d)/generate-builtins.nix $(d)/src/expressions/builtins-prefix.md $(bindir)/nix
cat doc/manual/src/expressions/builtins-prefix.md > $@ @cat doc/manual/src/expressions/builtins-prefix.md > $@.tmp
jq -r -f doc/manual/generate-builtins.jq $< >> $@ $(trace-gen) $(nix-eval) 'import doc/manual/generate-builtins.nix (builtins.fromJSON (builtins.readFile $<))' >> $@.tmp
@mv $@.tmp $@
$(d)/builtins.json: $(bindir)/nix $(d)/builtins.json: $(bindir)/nix
$(trace-gen) $(bindir)/nix __dump-builtins > $@ $(trace-gen) NIX_PATH=nix/corepkgs=corepkgs $(bindir)/nix __dump-builtins > $@.tmp
mv $@.tmp $@
# Generate the HTML manual. # Generate the HTML manual.
install: $(docdir)/manual/index.html install: $(docdir)/manual/index.html


@ -1,5 +1,3 @@
Title: nix.conf
# Name # Name
`nix.conf` - Nix configuration file `nix.conf` - Nix configuration file


@ -1,5 +1,3 @@
Title: nix-build
# Name # Name
`nix-build` - build a Nix expression `nix-build` - build a Nix expression


@ -1,5 +1,3 @@
Title: nix-channel
# Name # Name
`nix-channel` - manage Nix channels `nix-channel` - manage Nix channels


@ -1,5 +1,3 @@
Title: nix-collect-garbage
# Name # Name
`nix-collect-garbage` - delete unreachable store paths `nix-collect-garbage` - delete unreachable store paths


@ -1,5 +1,3 @@
Title: nix-copy-closure
# Name # Name
`nix-copy-closure` - copy a closure to or from a remote machine via SSH `nix-copy-closure` - copy a closure to or from a remote machine via SSH


@ -1,5 +1,3 @@
Title: nix-daemon
# Name # Name
`nix-daemon` - Nix multi-user support daemon `nix-daemon` - Nix multi-user support daemon


@ -1,5 +1,3 @@
Title: nix-env
# Name # Name
`nix-env` - manipulate or query Nix user environments `nix-env` - manipulate or query Nix user environments


@ -1,5 +1,3 @@
Title: nix-hash
# Name # Name
`nix-hash` - compute the cryptographic hash of a path `nix-hash` - compute the cryptographic hash of a path


@ -1,5 +1,3 @@
Title: nix-instantiate
# Name # Name
`nix-instantiate` - instantiate store derivations from Nix expressions `nix-instantiate` - instantiate store derivations from Nix expressions


@ -1,5 +1,3 @@
Title: nix-prefetch-url
# Name # Name
`nix-prefetch-url` - copy a file from a URL into the store and print its hash `nix-prefetch-url` - copy a file from a URL into the store and print its hash
@ -51,6 +49,9 @@ Nix store is also printed.
result to the Nix store. The resulting hash can be used with result to the Nix store. The resulting hash can be used with
functions such as Nixpkgs's `fetchzip` or `fetchFromGitHub`. functions such as Nixpkgs's `fetchzip` or `fetchFromGitHub`.
- `--executable`
Set the executable bit on the downloaded file.
- `--name` *name* - `--name` *name*
Override the name of the file in the Nix store. By default, this is Override the name of the file in the Nix store. By default, this is
`hash-basename`, where *basename* is the last component of *url*. `hash-basename`, where *basename* is the last component of *url*.


@ -1,5 +1,3 @@
Title: nix-shell
# Name # Name
`nix-shell` - start an interactive shell based on a Nix expression `nix-shell` - start an interactive shell based on a Nix expression


@ -1,5 +1,3 @@
Title: nix-store
# Name # Name
`nix-store` - manipulate or query the Nix store `nix-store` - manipulate or query the Nix store


@ -39,17 +39,17 @@ To build Nix itself in this shell:
```console ```console
[nix-shell]$ ./bootstrap.sh [nix-shell]$ ./bootstrap.sh
[nix-shell]$ ./configure $configureFlags [nix-shell]$ ./configure $configureFlags --prefix=$(pwd)/outputs/out
[nix-shell]$ make -j $NIX_BUILD_CORES [nix-shell]$ make -j $NIX_BUILD_CORES
``` ```
To install it in `$(pwd)/inst` and test it: To install it in `$(pwd)/outputs` and test it:
```console ```console
[nix-shell]$ make install [nix-shell]$ make install
[nix-shell]$ make installcheck [nix-shell]$ make installcheck -j $NIX_BUILD_CORES
[nix-shell]$ ./inst/bin/nix --version [nix-shell]$ ./outputs/out/bin/nix --version
nix (Nix) 2.4 nix (Nix) 3.0
``` ```
To run a functional test: To run a functional test:
@ -58,6 +58,12 @@ To run a functional test:
make tests/test-name-should-auto-complete.sh.test make tests/test-name-should-auto-complete.sh.test
``` ```
To run the unit-tests for C++ code:
```
make check
```
If you have a flakes-enabled Nix you can replace: If you have a flakes-enabled Nix you can replace:
```console ```console


@ -117,7 +117,7 @@ features:
- The binary tarball installer has been improved. You can now install - The binary tarball installer has been improved. You can now install
Nix by running: Nix by running:
$ bash <(curl https://nixos.org/nix/install) $ bash <(curl -L https://nixos.org/nix/install)
- More evaluation errors include position information. For instance, - More evaluation errors include position information. For instance,
selecting a missing attribute will print something like selecting a missing attribute will print something like


@ -10,7 +10,7 @@ in certain situations. In addition, it has the following new features:
- The Nix installer now supports performing a Multi-User - The Nix installer now supports performing a Multi-User
installation for Linux computers which are running systemd. You installation for Linux computers which are running systemd. You
can select a Multi-User installation by passing the `--daemon` can select a Multi-User installation by passing the `--daemon`
flag to the installer: `sh <(curl https://nixos.org/nix/install) flag to the installer: `sh <(curl -L https://nixos.org/nix/install)
--daemon`. --daemon`.
The multi-user installer cannot handle systems with SELinux. If The multi-user installer cannot handle systems with SELinux. If

doc/manual/utils.nix (new file)

@ -0,0 +1,7 @@
with builtins;
{
splitLines = s: filter (x: !isList x) (split "\n" s);
concatStrings = concatStringsSep "";
}
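Both helpers above are tiny but are used throughout the new documentation generators; a quick sketch of their behaviour (the indentation width is just an illustration, not what any particular generator uses):

```nix
with import ./utils.nix;

# splitLines drops the separator lists produced by builtins.split,
# leaving only the string fragments.
concatStrings (map (line: "  ${line}\n") (splitLines "first\nsecond"))
# => "  first\n  second\n"
```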


@ -3,16 +3,15 @@
"lowdown-src": { "lowdown-src": {
"flake": false, "flake": false,
"locked": { "locked": {
"lastModified": 1598296217, "lastModified": 1598695561,
"narHash": "sha256-ha7lyNY1d8m+osmDpPc9f/bfZ3ZC1IVIXwfyklSWg8I=", "narHash": "sha256-gyH/5j+h/nWw0W8AcR2WKvNBUsiQ7QuxqSJNXAwV+8E=",
"owner": "edolstra", "owner": "kristapsdz",
"repo": "lowdown", "repo": "lowdown",
"rev": "c7a4e715af1e233080842db82d15b261cb74cb28", "rev": "1705b4a26fbf065d9574dce47a94e8c7c79e052f",
"type": "github" "type": "github"
}, },
"original": { "original": {
"owner": "edolstra", "owner": "kristapsdz",
"ref": "no-structs-in-anonymous-unions",
"repo": "lowdown", "repo": "lowdown",
"type": "github" "type": "github"
} }


@ -2,7 +2,7 @@
description = "The purely functional package manager"; description = "The purely functional package manager";
inputs.nixpkgs.url = "nixpkgs/nixos-20.03-small"; inputs.nixpkgs.url = "nixpkgs/nixos-20.03-small";
inputs.lowdown-src = { url = "github:edolstra/lowdown/no-structs-in-anonymous-unions"; flake = false; }; inputs.lowdown-src = { url = "github:kristapsdz/lowdown"; flake = false; };
outputs = { self, nixpkgs, lowdown-src }: outputs = { self, nixpkgs, lowdown-src }:
@ -59,6 +59,7 @@
configureFlags = configureFlags =
lib.optionals stdenv.isLinux [ lib.optionals stdenv.isLinux [
"--with-sandbox-shell=${sh}/bin/busybox" "--with-sandbox-shell=${sh}/bin/busybox"
"LDFLAGS=-fuse-ld=gold"
]; ];
@ -145,7 +146,7 @@
enableParallelBuilding = true; enableParallelBuilding = true;
makeFlags = "profiledir=$(out)/etc/profile.d"; makeFlags = "profiledir=$(out)/etc/profile.d PRECOMPILE_HEADERS=1";
doCheck = true; doCheck = true;
@ -349,9 +350,6 @@
# syntax-check generated dot files, it still requires some # syntax-check generated dot files, it still requires some
# fonts. So provide those. # fonts. So provide those.
FONTCONFIG_FILE = texFunctions.fontsConf; FONTCONFIG_FILE = texFunctions.fontsConf;
# To test building without precompiled headers.
makeFlagsArray = [ "PRECOMPILE_HEADERS=0" ];
}; };
# System tests. # System tests.


@ -11,6 +11,6 @@ GLOBAL_CXXFLAGS += -Wno-deprecated-declarations
$(foreach i, config.h $(wildcard src/lib*/*.hh), \ $(foreach i, config.h $(wildcard src/lib*/*.hh), \
$(eval $(call install-file-in, $(i), $(includedir)/nix, 0644))) $(eval $(call install-file-in, $(i), $(includedir)/nix, 0644)))
$(GCH) $(PCH): src/libutil/util.hh config.h $(GCH): src/libutil/util.hh config.h
GCH_CXXFLAGS = -I src/libutil GCH_CXXFLAGS = -I src/libutil


@ -4,6 +4,8 @@
<dict> <dict>
<key>EnvironmentVariables</key> <key>EnvironmentVariables</key>
<dict> <dict>
<key>NIX_SSL_CERT_FILE</key>
<string>/nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt</string>
<key>OBJC_DISABLE_INITIALIZE_FORK_SAFETY</key> <key>OBJC_DISABLE_INITIALIZE_FORK_SAFETY</key>
<string>YES</string> <string>YES</string>
</dict> </dict>


@ -1,4 +1,4 @@
PRECOMPILE_HEADERS ?= 1 PRECOMPILE_HEADERS ?= 0
print-var-help += \ print-var-help += \
echo " PRECOMPILE_HEADERS ($(PRECOMPILE_HEADERS)): Whether to use precompiled headers to speed up the build"; echo " PRECOMPILE_HEADERS ($(PRECOMPILE_HEADERS)): Whether to use precompiled headers to speed up the build";
@ -10,33 +10,12 @@ $(GCH): precompiled-headers.h
@mkdir -p "$(dir $@)" @mkdir -p "$(dir $@)"
$(trace-gen) $(CXX) -x c++-header -o $@ $< $(GLOBAL_CXXFLAGS) $(GCH_CXXFLAGS) $(trace-gen) $(CXX) -x c++-header -o $@ $< $(GLOBAL_CXXFLAGS) $(GCH_CXXFLAGS)
PCH = $(buildprefix)precompiled-headers.h.pch clean-files += $(GCH)
$(PCH): precompiled-headers.h
@rm -f $@
@mkdir -p "$(dir $@)"
$(trace-gen) $(CXX) -x c++-header -o $@ $< $(GLOBAL_CXXFLAGS) $(GCH_CXXFLAGS)
clean-files += $(GCH) $(PCH)
ifeq ($(PRECOMPILE_HEADERS), 1) ifeq ($(PRECOMPILE_HEADERS), 1)
ifeq ($(findstring g++,$(CXX)), g++) GLOBAL_CXXFLAGS_PCH += -include $(buildprefix)precompiled-headers.h -Winvalid-pch
GLOBAL_CXXFLAGS_PCH += -include $(buildprefix)precompiled-headers.h -Winvalid-pch GLOBAL_ORDER_AFTER += $(GCH)
GLOBAL_ORDER_AFTER += $(GCH)
else ifeq ($(findstring clang++,$(CXX)), clang++)
GLOBAL_CXXFLAGS_PCH += -include-pch $(PCH) -Winvalid-pch
GLOBAL_ORDER_AFTER += $(PCH)
else
$(error Don't know how to precompile headers on $(CXX))
endif
endif endif


@ -1,3 +1,4 @@
#[allow(improper_ctypes_definitions)]
#[cfg(not(test))] #[cfg(not(test))]
mod c; mod c;
mod error; mod error;


@ -19,9 +19,9 @@ impl StorePath {
} }
Self::new_from_base_name( Self::new_from_base_name(
path.file_name() path.file_name()
.ok_or(Error::BadStorePath(path.into()))? .ok_or_else(|| Error::BadStorePath(path.into()))?
.to_str() .to_str()
.ok_or(Error::BadStorePath(path.into()))?, .ok_or_else(|| Error::BadStorePath(path.into()))?,
) )
} }
@ -34,7 +34,7 @@ impl StorePath {
pub fn new_from_base_name(base_name: &str) -> Result<Self, Error> { pub fn new_from_base_name(base_name: &str) -> Result<Self, Error> {
if base_name.len() < STORE_PATH_HASH_CHARS + 1 if base_name.len() < STORE_PATH_HASH_CHARS + 1
|| base_name.as_bytes()[STORE_PATH_HASH_CHARS] != '-' as u8 || base_name.as_bytes()[STORE_PATH_HASH_CHARS] != b'-'
{ {
return Err(Error::BadStorePath(base_name.into())); return Err(Error::BadStorePath(base_name.into()));
} }
@ -65,7 +65,7 @@ impl StorePathHash {
Ok(Self(bytes)) Ok(Self(bytes))
} }
pub fn hash<'a>(&'a self) -> &'a [u8; STORE_PATH_HASH_BYTES] { pub fn hash(&self) -> &[u8; STORE_PATH_HASH_BYTES] {
&self.0 &self.0
} }
} }
@ -98,7 +98,7 @@ pub struct StorePathName(String);
impl StorePathName { impl StorePathName {
pub fn new(s: &str) -> Result<Self, Error> { pub fn new(s: &str) -> Result<Self, Error> {
if s.len() == 0 { if s.is_empty() {
return Err(Error::StorePathNameEmpty); return Err(Error::StorePathNameEmpty);
} }
@ -106,25 +106,24 @@ impl StorePathName {
return Err(Error::StorePathNameTooLong); return Err(Error::StorePathNameTooLong);
} }
if s.starts_with('.') let is_good_path_name = s.chars().all(|c| {
|| !s.chars().all(|c| { c.is_ascii_alphabetic()
c.is_ascii_alphabetic() || c.is_ascii_digit()
|| c.is_ascii_digit() || c == '+'
|| c == '+' || c == '-'
|| c == '-' || c == '.'
|| c == '.' || c == '_'
|| c == '_' || c == '?'
|| c == '?' || c == '='
|| c == '=' });
}) if s.starts_with('.') || !is_good_path_name {
{
return Err(Error::BadStorePathName); return Err(Error::BadStorePathName);
} }
Ok(Self(s.to_string())) Ok(Self(s.to_string()))
} }
pub fn name<'a>(&'a self) -> &'a str { pub fn name(&self) -> &str {
&self.0 &self.0
} }
} }


@ -13,7 +13,7 @@ pub fn decoded_len(input_len: usize) -> usize {
input_len * 5 / 8 input_len * 5 / 8
} }
static BASE32_CHARS: &'static [u8; 32] = &b"0123456789abcdfghijklmnpqrsvwxyz"; static BASE32_CHARS: &[u8; 32] = &b"0123456789abcdfghijklmnpqrsvwxyz";
lazy_static! { lazy_static! {
static ref BASE32_CHARS_REVERSE: Box<[u8; 256]> = { static ref BASE32_CHARS_REVERSE: Box<[u8; 256]> = {


@ -70,19 +70,6 @@ PKG_CHECK_MODULES([NIX], [nix-store])
NEED_PROG([NIX], [nix]) NEED_PROG([NIX], [nix])
# Get nix configure values
export NIX_REMOTE=daemon
nixbindir=$("$NIX" --experimental-features nix-command eval --raw -f '<nix/config.nix>' nixBinDir)
nixlibexecdir=$("$NIX" --experimental-features nix-command eval --raw -f '<nix/config.nix>' nixLibexecDir)
nixlocalstatedir=$("$NIX" --experimental-features nix-command eval --raw -f '<nix/config.nix>' nixLocalstateDir)
nixsysconfdir=$("$NIX" --experimental-features nix-command eval --raw -f '<nix/config.nix>' nixSysconfDir)
nixstoredir=$("$NIX" --experimental-features nix-command eval --raw -f '<nix/config.nix>' nixStoreDir)
AC_SUBST(nixbindir)
AC_SUBST(nixlibexecdir)
AC_SUBST(nixlocalstatedir)
AC_SUBST(nixsysconfdir)
AC_SUBST(nixstoredir)
# Expand all variables in config.status. # Expand all variables in config.status.
test "$prefix" = NONE && prefix=$ac_default_prefix test "$prefix" = NONE && prefix=$ac_default_prefix
test "$exec_prefix" = NONE && exec_prefix='${prefix}' test "$exec_prefix" = NONE && exec_prefix='${prefix}'


@ -4,14 +4,8 @@ use MIME::Base64;
$version = "@PACKAGE_VERSION@"; $version = "@PACKAGE_VERSION@";
$binDir = $ENV{"NIX_BIN_DIR"} || "@nixbindir@"; $binDir = Nix::Store::getBinDir;
$libexecDir = $ENV{"NIX_LIBEXEC_DIR"} || "@nixlibexecdir@"; $storeDir = Nix::Store::getStoreDir;
$stateDir = $ENV{"NIX_STATE_DIR"} || "@nixlocalstatedir@/nix";
$logDir = $ENV{"NIX_LOG_DIR"} || "@nixlocalstatedir@/log/nix";
$confDir = $ENV{"NIX_CONF_DIR"} || "@nixsysconfdir@/nix";
$storeDir = $ENV{"NIX_STORE_DIR"} || "@nixstoredir@";
$useBindings = 1;
%config = (); %config = ();


@ -2,7 +2,6 @@ package Nix::Store;
use strict; use strict;
use warnings; use warnings;
use Nix::Config;
require Exporter; require Exporter;
@ -22,6 +21,7 @@ our @EXPORT = qw(
addToStore makeFixedOutputPath addToStore makeFixedOutputPath
derivationFromPath derivationFromPath
addTempRoot addTempRoot
getBinDir getStoreDir
); );
our $VERSION = '0.15'; our $VERSION = '0.15';
@ -34,62 +34,8 @@ sub backtick {
return $res; return $res;
} }
if ($Nix::Config::useBindings) { require XSLoader;
require XSLoader; XSLoader::load('Nix::Store', $VERSION);
XSLoader::load('Nix::Store', $VERSION);
} else {
# Provide slow fallbacks of some functions on platforms that don't
# support the Perl bindings.
use File::Temp;
use Fcntl qw/F_SETFD/;
*hashFile = sub {
my ($algo, $base32, $path) = @_;
my $res = backtick("$Nix::Config::binDir/nix-hash", "--flat", $path, "--type", $algo, $base32 ? "--base32" : ());
chomp $res;
return $res;
};
*hashPath = sub {
my ($algo, $base32, $path) = @_;
my $res = backtick("$Nix::Config::binDir/nix-hash", $path, "--type", $algo, $base32 ? "--base32" : ());
chomp $res;
return $res;
};
*hashString = sub {
my ($algo, $base32, $s) = @_;
my $fh = File::Temp->new();
print $fh $s;
my $res = backtick("$Nix::Config::binDir/nix-hash", $fh->filename, "--type", $algo, $base32 ? "--base32" : ());
chomp $res;
return $res;
};
*addToStore = sub {
my ($srcPath, $recursive, $algo) = @_;
die "not implemented" if $recursive || $algo ne "sha256";
my $res = backtick("$Nix::Config::binDir/nix-store", "--add", $srcPath);
chomp $res;
return $res;
};
*isValidPath = sub {
my ($path) = @_;
my $res = backtick("$Nix::Config::binDir/nix-store", "--check-validity", "--print-invalid", $path);
chomp $res;
return $res ne $path;
};
*queryPathHash = sub {
my ($path) = @_;
my $res = backtick("$Nix::Config::binDir/nix-store", "--query", "--hash", $path);
chomp $res;
return $res;
};
}
1; 1;
__END__ __END__


@ -303,11 +303,14 @@ SV * derivationFromPath(char * drvPath)
hash = newHV(); hash = newHV();
HV * outputs = newHV(); HV * outputs = newHV();
for (auto & i : drv.outputsAndPaths(*store())) for (auto & i : drv.outputsAndOptPaths(*store())) {
hv_store( hv_store(
outputs, i.first.c_str(), i.first.size(), outputs, i.first.c_str(), i.first.size(),
newSVpv(store()->printStorePath(i.second.second).c_str(), 0), !i.second.second
? newSV(0) /* null value */
: newSVpv(store()->printStorePath(*i.second.second).c_str(), 0),
0); 0);
}
hv_stores(hash, "outputs", newRV((SV *) outputs)); hv_stores(hash, "outputs", newRV((SV *) outputs));
AV * inputDrvs = newAV(); AV * inputDrvs = newAV();
@ -348,3 +351,13 @@ void addTempRoot(char * storePath)
} catch (Error & e) { } catch (Error & e) {
croak("%s", e.what()); croak("%s", e.what());
} }
SV * getBinDir()
PPCODE:
XPUSHs(sv_2mortal(newSVpv(settings.nixBinDir.c_str(), 0)));
SV * getStoreDir()
PPCODE:
XPUSHs(sv_2mortal(newSVpv(settings.nixStore.c_str(), 0)));


@ -2,6 +2,8 @@
set -e set -e
umask 0022
dest="/nix" dest="/nix"
self="$(dirname "$0")" self="$(dirname "$0")"
nix="@nix@" nix="@nix@"


@ -10,6 +10,8 @@ oops() {
exit 1 exit 1
} }
umask 0022
tmpDir="$(mktemp -d -t nix-binary-tarball-unpack.XXXXXXXXXX || \ tmpDir="$(mktemp -d -t nix-binary-tarball-unpack.XXXXXXXXXX || \
oops "Can't create temporary directory for downloading the Nix binary tarball")" oops "Can't create temporary directory for downloading the Nix binary tarball")"
cleanup() { cleanup() {


@ -44,7 +44,7 @@ static bool allSupportedLocally(Store & store, const std::set<std::string>& requ
return true; return true;
} }
static int _main(int argc, char * * argv) static int main_build_remote(int argc, char * * argv)
{ {
{ {
logger = makeJSONLogger(*logger); logger = makeJSONLogger(*logger);
@ -297,4 +297,4 @@ connected:
} }
} }
static RegisterLegacyCommand s1("build-remote", _main); static RegisterLegacyCommand r_build_remote("build-remote", main_build_remote);


@ -11,7 +11,7 @@ namespace nix::eval_cache {
MakeError(CachedEvalError, EvalError); MakeError(CachedEvalError, EvalError);
class AttrDb; struct AttrDb;
class AttrCursor; class AttrCursor;
class EvalCache : public std::enable_shared_from_this<EvalCache> class EvalCache : public std::enable_shared_from_this<EvalCache>


@ -87,6 +87,7 @@ static void printValue(std::ostream & str, std::set<const Value *> & active, con
else if (*i == '\n') str << "\\n"; else if (*i == '\n') str << "\\n";
else if (*i == '\r') str << "\\r"; else if (*i == '\r') str << "\\r";
else if (*i == '\t') str << "\\t"; else if (*i == '\t') str << "\\t";
else if (*i == '$' && *(i+1) == '{') str << "\\" << *i;
else str << *i; else str << *i;
str << "\""; str << "\"";
break; break;
@ -355,6 +356,7 @@ EvalState::EvalState(const Strings & _searchPath, ref<Store> store)
, sEpsilon(symbols.create("")) , sEpsilon(symbols.create(""))
, repair(NoRepair) , repair(NoRepair)
, store(store) , store(store)
, regexCache(makeRegexCache())
, baseEnv(allocEnv(128)) , baseEnv(allocEnv(128))
, staticBaseEnv(false, 0) , staticBaseEnv(false, 0)
{ {
@ -369,7 +371,11 @@ EvalState::EvalState(const Strings & _searchPath, ref<Store> store)
for (auto & i : _searchPath) addToSearchPath(i); for (auto & i : _searchPath) addToSearchPath(i);
for (auto & i : evalSettings.nixPath.get()) addToSearchPath(i); for (auto & i : evalSettings.nixPath.get()) addToSearchPath(i);
} }
addToSearchPath("nix=" + canonPath(settings.nixDataDir + "/nix/corepkgs", true));
try {
addToSearchPath("nix=" + canonPath(settings.nixDataDir + "/nix/corepkgs", true));
} catch (Error &) {
}
if (evalSettings.restrictEval || evalSettings.pureEval) { if (evalSettings.restrictEval || evalSettings.pureEval) {
allowedPaths = PathSet(); allowedPaths = PathSet();
@ -1348,7 +1354,7 @@ void EvalState::autoCallFunction(Bindings & args, Value & fun, Value & res)
} }
Value * actualArgs = allocValue(); Value * actualArgs = allocValue();
mkAttrs(*actualArgs, fun.lambda.fun->formals->formals.size()); mkAttrs(*actualArgs, std::max(static_cast<uint32_t>(fun.lambda.fun->formals->formals.size()), args.size()));
if (fun.lambda.fun->formals->ellipsis) { if (fun.lambda.fun->formals->ellipsis) {
// If the formals have an ellipsis (eg the function accepts extra args) pass // If the formals have an ellipsis (eg the function accepts extra args) pass
@ -2075,7 +2081,7 @@ Strings EvalSettings::getDefaultNixPath()
EvalSettings evalSettings; EvalSettings evalSettings;
static GlobalConfig::Register r1(&evalSettings); static GlobalConfig::Register rEvalSettings(&evalSettings);
} }


@ -6,7 +6,6 @@
#include "symbol-table.hh" #include "symbol-table.hh"
#include "config.hh" #include "config.hh"
#include <regex>
#include <map> #include <map>
#include <optional> #include <optional>
#include <unordered_map> #include <unordered_map>
@ -65,6 +64,11 @@ typedef std::list<SearchPathElem> SearchPath;
void initGC(); void initGC();
struct RegexCache;
std::shared_ptr<RegexCache> makeRegexCache();
class EvalState class EvalState
{ {
public: public:
@ -120,7 +124,7 @@ private:
std::unordered_map<Path, Path> resolvedPaths; std::unordered_map<Path, Path> resolvedPaths;
/* Cache used by prim_match(). */ /* Cache used by prim_match(). */
std::unordered_map<std::string, std::regex> regexCache; std::shared_ptr<RegexCache> regexCache;
public: public:


@ -12,7 +12,7 @@ using namespace flake;
namespace flake { namespace flake {
typedef std::pair<Tree, FlakeRef> FetchedFlake; typedef std::pair<fetchers::Tree, FlakeRef> FetchedFlake;
typedef std::vector<std::pair<FlakeRef, FetchedFlake>> FlakeCache; typedef std::vector<std::pair<FlakeRef, FetchedFlake>> FlakeCache;
static std::optional<FetchedFlake> lookupInFlakeCache( static std::optional<FetchedFlake> lookupInFlakeCache(
@ -48,17 +48,17 @@ static std::tuple<fetchers::Tree, FlakeRef, FlakeRef> fetchOrSubstituteTree(
resolvedRef = originalRef.resolve(state.store); resolvedRef = originalRef.resolve(state.store);
auto fetchedResolved = lookupInFlakeCache(flakeCache, originalRef); auto fetchedResolved = lookupInFlakeCache(flakeCache, originalRef);
if (!fetchedResolved) fetchedResolved.emplace(resolvedRef.fetchTree(state.store)); if (!fetchedResolved) fetchedResolved.emplace(resolvedRef.fetchTree(state.store));
flakeCache.push_back({resolvedRef, fetchedResolved.value()}); flakeCache.push_back({resolvedRef, *fetchedResolved});
fetched.emplace(fetchedResolved.value()); fetched.emplace(*fetchedResolved);
} }
else { else {
throw Error("'%s' is an indirect flake reference, but registry lookups are not allowed", originalRef); throw Error("'%s' is an indirect flake reference, but registry lookups are not allowed", originalRef);
} }
} }
flakeCache.push_back({originalRef, fetched.value()}); flakeCache.push_back({originalRef, *fetched});
} }
auto [tree, lockedRef] = fetched.value(); auto [tree, lockedRef] = *fetched;
debug("got tree '%s' from '%s'", debug("got tree '%s' from '%s'",
state.store->printStorePath(tree.storePath), lockedRef); state.store->printStorePath(tree.storePath), lockedRef);
@ -215,10 +215,9 @@ static Flake getFlake(
if (auto outputs = vInfo.attrs->get(sOutputs)) { if (auto outputs = vInfo.attrs->get(sOutputs)) {
expectType(state, tLambda, *outputs->value, *outputs->pos); expectType(state, tLambda, *outputs->value, *outputs->pos);
flake.vOutputs = allocRootValue(outputs->value);
if ((*flake.vOutputs)->lambda.fun->matchAttrs) { if (outputs->value->lambda.fun->matchAttrs) {
for (auto & formal : (*flake.vOutputs)->lambda.fun->formals->formals) { for (auto & formal : outputs->value->lambda.fun->formals->formals) {
if (formal.name != state.sSelf) if (formal.name != state.sSelf)
flake.inputs.emplace(formal.name, FlakeInput { flake.inputs.emplace(formal.name, FlakeInput {
.ref = parseFlakeRef(formal.name) .ref = parseFlakeRef(formal.name)
@ -248,7 +247,7 @@ Flake getFlake(EvalState & state, const FlakeRef & originalRef, bool allowLookup
} }
/* Compute an in-memory lock file for the specified top-level flake, /* Compute an in-memory lock file for the specified top-level flake,
and optionally write it to file, it the flake is writable. */ and optionally write it to file, if the flake is writable. */
LockedFlake lockFlake( LockedFlake lockFlake(
EvalState & state, EvalState & state,
const FlakeRef & topRef, const FlakeRef & topRef,
@ -367,7 +366,7 @@ LockedFlake lockFlake(
/* If we have an --update-input flag for an input /* If we have an --update-input flag for an input
of this input, then we must fetch the flake to of this input, then we must fetch the flake to
to update it. */ update it. */
auto lb = lockFlags.inputUpdates.lower_bound(inputPath); auto lb = lockFlags.inputUpdates.lower_bound(inputPath);
auto hasChildUpdate = auto hasChildUpdate =


@ -34,7 +34,6 @@ struct Flake
std::optional<std::string> description; std::optional<std::string> description;
std::shared_ptr<const fetchers::Tree> sourceInfo; std::shared_ptr<const fetchers::Tree> sourceInfo;
FlakeInputs inputs; FlakeInputs inputs;
RootValue vOutputs;
~Flake(); ~Flake();
}; };


@ -1,6 +1,7 @@
#include "flakeref.hh" #include "flakeref.hh"
#include "store-api.hh" #include "store-api.hh"
#include "url.hh" #include "url.hh"
#include "url-parts.hh"
#include "fetchers.hh" #include "fetchers.hh"
#include "registry.hh" #include "registry.hh"
@ -15,10 +16,10 @@ const static std::string subDirRegex = subDirElemRegex + "(?:/" + subDirElemRege
std::string FlakeRef::to_string() const std::string FlakeRef::to_string() const
{ {
auto url = input.toURL(); std::map<std::string, std::string> extraQuery;
if (subdir != "") if (subdir != "")
url.query.insert_or_assign("dir", subdir); extraQuery.insert_or_assign("dir", subdir);
return url.to_string(); return input.toURLString(extraQuery);
} }
fetchers::Attrs FlakeRef::toAttrs() const fetchers::Attrs FlakeRef::toAttrs() const
@ -156,7 +157,8 @@ std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
} else { } else {
if (!hasPrefix(path, "/")) if (!hasPrefix(path, "/"))
throw BadURL("flake reference '%s' is not an absolute path", url); throw BadURL("flake reference '%s' is not an absolute path", url);
path = canonPath(path); auto query = decodeQuery(match[2]);
path = canonPath(path + "/" + get(query, "dir").value_or(""));
} }
fetchers::Attrs attrs; fetchers::Attrs attrs;


@ -1,5 +1,6 @@
#include "lockfile.hh" #include "lockfile.hh"
#include "store-api.hh" #include "store-api.hh"
#include "url-parts.hh"
#include <nlohmann/json.hpp> #include <nlohmann/json.hpp>
@ -12,12 +13,12 @@ FlakeRef getFlakeRef(
{ {
auto i = json.find(attr); auto i = json.find(attr);
if (i != json.end()) { if (i != json.end()) {
auto attrs = jsonToAttrs(*i); auto attrs = fetchers::jsonToAttrs(*i);
// FIXME: remove when we drop support for version 5. // FIXME: remove when we drop support for version 5.
if (info) { if (info) {
auto j = json.find(info); auto j = json.find(info);
if (j != json.end()) { if (j != json.end()) {
for (auto k : jsonToAttrs(*j)) for (auto k : fetchers::jsonToAttrs(*j))
attrs.insert_or_assign(k.first, k.second); attrs.insert_or_assign(k.first, k.second);
} }
} }


@ -6,13 +6,11 @@
namespace nix { namespace nix {
class Store; class Store;
struct StorePath; class StorePath;
} }
namespace nix::flake { namespace nix::flake {
using namespace fetchers;
typedef std::vector<FlakeId> InputPath; typedef std::vector<FlakeId> InputPath;
struct LockedNode; struct LockedNode;


@ -38,8 +38,11 @@ DrvInfo::DrvInfo(EvalState & state, ref<Store> store, const std::string & drvPat
auto i = drv.outputs.find(outputName); auto i = drv.outputs.find(outputName);
if (i == drv.outputs.end()) if (i == drv.outputs.end())
throw Error("derivation '%s' does not have output '%s'", store->printStorePath(drvPath), outputName); throw Error("derivation '%s' does not have output '%s'", store->printStorePath(drvPath), outputName);
auto & [outputName, output] = *i;
outPath = store->printStorePath(i->second.path(*store, drv.name)); auto optStorePath = output.path(*store, drv.name, outputName);
if (optStorePath)
outPath = store->printStorePath(*optStorePath);
} }
@ -77,12 +80,15 @@ string DrvInfo::queryDrvPath() const
string DrvInfo::queryOutPath() const string DrvInfo::queryOutPath() const
{ {
if (outPath == "" && attrs) { if (!outPath && attrs) {
Bindings::iterator i = attrs->find(state->sOutPath); Bindings::iterator i = attrs->find(state->sOutPath);
PathSet context; PathSet context;
outPath = i != attrs->end() ? state->coerceToPath(*i->pos, *i->value, context) : ""; if (i != attrs->end())
outPath = state->coerceToPath(*i->pos, *i->value, context);
} }
return outPath; if (!outPath)
throw UnimplementedError("CA derivations are not yet supported");
return *outPath;
} }


@ -20,7 +20,7 @@ private:
mutable string name; mutable string name;
mutable string system; mutable string system;
mutable string drvPath; mutable string drvPath;
mutable string outPath; mutable std::optional<string> outPath;
mutable string outputName; mutable string outputName;
Outputs outputs; Outputs outputs;


@ -115,6 +115,14 @@ public:
{ {
return handle_value<void(Value&, const char*)>(mkString, val.c_str()); return handle_value<void(Value&, const char*)>(mkString, val.c_str());
} }
#if NLOHMANN_JSON_VERSION_MAJOR >= 3 && NLOHMANN_JSON_VERSION_MINOR >= 8
bool binary(binary_t&)
{
// This function ought to be unreachable
assert(false);
return true;
}
#endif
bool start_object(std::size_t len) bool start_object(std::size_t len)
{ {


@ -42,6 +42,6 @@ $(eval $(call install-file-in, $(d)/nix-expr.pc, $(prefix)/lib/pkgconfig, 0644))
$(foreach i, $(wildcard src/libexpr/flake/*.hh), \ $(foreach i, $(wildcard src/libexpr/flake/*.hh), \
$(eval $(call install-file-in, $(i), $(includedir)/nix/flake, 0644))) $(eval $(call install-file-in, $(i), $(includedir)/nix/flake, 0644)))
$(d)/primops.cc: $(d)/imported-drv-to-derivation.nix.gen.hh $(d)/primops.cc: $(d)/imported-drv-to-derivation.nix.gen.hh $(d)/primops/derivation.nix.gen.hh
$(d)/flake/flake.cc: $(d)/flake/call-flake.nix.gen.hh $(d)/flake/flake.cc: $(d)/flake/call-flake.nix.gen.hh


@ -614,8 +614,7 @@ Path resolveExprPath(Path path)
// Basic cycle/depth limit to avoid infinite loops. // Basic cycle/depth limit to avoid infinite loops.
if (++followCount >= maxFollow) if (++followCount >= maxFollow)
throw Error("too many symbolic links encountered while traversing the path '%s'", path); throw Error("too many symbolic links encountered while traversing the path '%s'", path);
if (lstat(path.c_str(), &st)) st = lstat(path);
throw SysError("getting status of '%s'", path);
if (!S_ISLNK(st.st_mode)) break; if (!S_ISLNK(st.st_mode)) break;
path = absPath(readLink(path), dirOf(path)); path = absPath(readLink(path), dirOf(path));
} }


@ -44,16 +44,6 @@ void EvalState::realiseContext(const PathSet & context)
throw InvalidPathError(store->printStorePath(ctx)); throw InvalidPathError(store->printStorePath(ctx));
if (!outputName.empty() && ctx.isDerivation()) { if (!outputName.empty() && ctx.isDerivation()) {
drvs.push_back(StorePathWithOutputs{ctx, {outputName}}); drvs.push_back(StorePathWithOutputs{ctx, {outputName}});
/* Add the output of this derivation to the allowed
paths. */
if (allowedPaths) {
auto drv = store->derivationFromPath(ctx);
DerivationOutputs::iterator i = drv.outputs.find(outputName);
if (i == drv.outputs.end())
throw Error("derivation '%s' does not have an output named '%s'", ctxS, outputName);
allowedPaths->insert(store->printStorePath(i->second.path(*store, drv.name)));
}
} }
} }
@ -69,8 +59,50 @@ void EvalState::realiseContext(const PathSet & context)
store->queryMissing(drvs, willBuild, willSubstitute, unknown, downloadSize, narSize); store->queryMissing(drvs, willBuild, willSubstitute, unknown, downloadSize, narSize);
store->buildPaths(drvs); store->buildPaths(drvs);
/* Add the output of this derivations to the allowed
paths. */
if (allowedPaths) {
for (auto & [drvPath, outputs] : drvs) {
auto outputPaths = store->queryDerivationOutputMap(drvPath);
for (auto & outputName : outputs) {
if (outputPaths.count(outputName) == 0)
throw Error("derivation '%s' does not have an output named '%s'",
store->printStorePath(drvPath), outputName);
allowedPaths->insert(store->printStorePath(outputPaths.at(outputName)));
}
}
}
} }
/* Add and attribute to the given attribute map from the output name to
the output path, or a placeholder.
Where possible the path is used, but for floating CA derivations we
may not know it. For sake of determinism we always assume we don't
and instead put in a place holder. In either case, however, the
string context will contain the drv path and output name, so
downstream derivations will have the proper dependency, and in
addition, before building, the placeholder will be rewritten to be
the actual path.
The 'drv' and 'drvPath' outputs must correspond. */
static void mkOutputString(EvalState & state, Value & v,
const StorePath & drvPath, const BasicDerivation & drv,
std::pair<string, DerivationOutput> o)
{
auto optOutputPath = o.second.path(*state.store, drv.name, o.first);
mkString(
*state.allocAttr(v, state.symbols.create(o.first)),
optOutputPath
? state.store->printStorePath(*optOutputPath)
/* Downstream we would substitute this for an actual path once
we build the floating CA derivation */
/* FIXME: we need to depend on the basic derivation, not
derivation */
: downstreamPlaceholder(*state.store, drvPath, o.first),
{"!" + o.first + "!" + state.store->printStorePath(drvPath)});
}
/* Load and evaluate an expression from path specified by the /* Load and evaluate an expression from path specified by the
argument. */ argument. */
@ -114,9 +146,8 @@ static void import(EvalState & state, const Pos & pos, Value & vPath, Value * vS
state.mkList(*outputsVal, drv.outputs.size()); state.mkList(*outputsVal, drv.outputs.size());
unsigned int outputs_index = 0; unsigned int outputs_index = 0;
for (const auto & o : drv.outputsAndPaths(*state.store)) { for (const auto & o : drv.outputs) {
v2 = state.allocAttr(w, state.symbols.create(o.first)); mkOutputString(state, w, storePath, drv, o);
mkString(*v2, state.store->printStorePath(o.second.second), {"!" + o.first + "!" + path});
outputsVal->listElems()[outputs_index] = state.allocValue(); outputsVal->listElems()[outputs_index] = state.allocValue();
mkString(*(outputsVal->listElems()[outputs_index++]), o.first); mkString(*(outputsVal->listElems()[outputs_index++]), o.first);
} }
@ -1080,16 +1111,18 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
/* Optimisation, but required in read-only mode! because in that /* Optimisation, but required in read-only mode! because in that
case we don't actually write store derivations, so we can't case we don't actually write store derivations, so we can't
read them later. */ read them later.
drvHashes.insert_or_assign(drvPath,
hashDerivationModulo(*state.store, Derivation(drv), false)); However, we don't bother doing this for floating CA derivations because
their "hash modulo" is indeterminate until built. */
if (drv.type() != DerivationType::CAFloating)
drvHashes.insert_or_assign(drvPath,
hashDerivationModulo(*state.store, Derivation(drv), false));
state.mkAttrs(v, 1 + drv.outputs.size()); state.mkAttrs(v, 1 + drv.outputs.size());
mkString(*state.allocAttr(v, state.sDrvPath), drvPathS, {"=" + drvPathS}); mkString(*state.allocAttr(v, state.sDrvPath), drvPathS, {"=" + drvPathS});
for (auto & i : drv.outputsAndPaths(*state.store)) { for (auto & i : drv.outputs)
mkString(*state.allocAttr(v, state.symbols.create(i.first)), mkOutputString(state, v, drvPath, drv, i);
state.store->printStorePath(i.second.second), {"!" + i.first + "!" + drvPathS});
}
v.attrs->sort(); v.attrs->sort();
} }
@ -1671,7 +1704,7 @@ static RegisterPrimOp primop_toFile({
... ...
cp ${configFile} $out/etc/foo.conf cp ${configFile} $out/etc/foo.conf
"; ";
``` ```
Note that `${configFile}` is an Note that `${configFile}` is an
[antiquotation](language-values.md), so the result of the [antiquotation](language-values.md), so the result of the
@ -2203,6 +2236,10 @@ static RegisterPrimOp primop_catAttrs({
static void prim_functionArgs(EvalState & state, const Pos & pos, Value * * args, Value & v) static void prim_functionArgs(EvalState & state, const Pos & pos, Value * * args, Value & v)
{ {
state.forceValue(*args[0], pos); state.forceValue(*args[0], pos);
if (args[0]->type == tPrimOpApp || args[0]->type == tPrimOp) {
state.mkAttrs(v, 0);
return;
}
if (args[0]->type != tLambda) if (args[0]->type != tLambda)
throw TypeError({ throw TypeError({
.hint = hintfmt("'functionArgs' requires a function"), .hint = hintfmt("'functionArgs' requires a function"),
@ -3052,17 +3089,25 @@ static RegisterPrimOp primop_hashString({
.fun = prim_hashString, .fun = prim_hashString,
}); });
/* Match a regular expression against a string and return either struct RegexCache
null or a list containing substring matches. */ {
std::unordered_map<std::string, std::regex> cache;
};
std::shared_ptr<RegexCache> makeRegexCache()
{
return std::make_shared<RegexCache>();
}
void prim_match(EvalState & state, const Pos & pos, Value * * args, Value & v) void prim_match(EvalState & state, const Pos & pos, Value * * args, Value & v)
{ {
auto re = state.forceStringNoCtx(*args[0], pos); auto re = state.forceStringNoCtx(*args[0], pos);
try { try {
auto regex = state.regexCache.find(re); auto regex = state.regexCache->cache.find(re);
if (regex == state.regexCache.end()) if (regex == state.regexCache->cache.end())
regex = state.regexCache.emplace(re, std::regex(re, std::regex::extended)).first; regex = state.regexCache->cache.emplace(re, std::regex(re, std::regex::extended)).first;
PathSet context; PathSet context;
const std::string str = state.forceString(*args[1], context, pos); const std::string str = state.forceString(*args[1], context, pos);
@ -3532,9 +3577,10 @@ void EvalState::createBaseEnv()
/* Add a wrapper around the derivation primop that computes the /* Add a wrapper around the derivation primop that computes the
`drvPath' and `outPath' attributes lazily. */ `drvPath' and `outPath' attributes lazily. */
string path = canonPath(settings.nixDataDir + "/nix/corepkgs/derivation.nix", true); sDerivationNix = symbols.create("//builtin/derivation.nix");
sDerivationNix = symbols.create(path); eval(parse(
evalFile(path, v); #include "primops/derivation.nix.gen.hh"
, foFile, sDerivationNix, "/", staticBaseEnv), v);
addConstant("derivation", v); addConstant("derivation", v);
/* Now that we've added all primops, sort the `builtins' set, /* Now that we've added all primops, sort the `builtins' set,


@ -1,3 +1,5 @@
#pragma once
#include "eval.hh" #include "eval.hh"
#include <tuple> #include <tuple>


@ -11,7 +11,7 @@ static void prim_unsafeDiscardStringContext(EvalState & state, const Pos & pos,
mkString(v, s, PathSet()); mkString(v, s, PathSet());
} }
static RegisterPrimOp r1("__unsafeDiscardStringContext", 1, prim_unsafeDiscardStringContext); static RegisterPrimOp primop_unsafeDiscardStringContext("__unsafeDiscardStringContext", 1, prim_unsafeDiscardStringContext);
static void prim_hasContext(EvalState & state, const Pos & pos, Value * * args, Value & v) static void prim_hasContext(EvalState & state, const Pos & pos, Value * * args, Value & v)
@ -21,7 +21,7 @@ static void prim_hasContext(EvalState & state, const Pos & pos, Value * * args,
mkBool(v, !context.empty()); mkBool(v, !context.empty());
} }
static RegisterPrimOp r2("__hasContext", 1, prim_hasContext); static RegisterPrimOp primop_hasContext("__hasContext", 1, prim_hasContext);
/* Sometimes we want to pass a derivation path (i.e. pkg.drvPath) to a /* Sometimes we want to pass a derivation path (i.e. pkg.drvPath) to a
@ -42,7 +42,7 @@ static void prim_unsafeDiscardOutputDependency(EvalState & state, const Pos & po
mkString(v, s, context2); mkString(v, s, context2);
} }
static RegisterPrimOp r3("__unsafeDiscardOutputDependency", 1, prim_unsafeDiscardOutputDependency); static RegisterPrimOp primop_unsafeDiscardOutputDependency("__unsafeDiscardOutputDependency", 1, prim_unsafeDiscardOutputDependency);
/* Extract the context of a string as a structured Nix value. /* Extract the context of a string as a structured Nix value.
@ -127,7 +127,7 @@ static void prim_getContext(EvalState & state, const Pos & pos, Value * * args,
v.attrs->sort(); v.attrs->sort();
} }
static RegisterPrimOp r4("__getContext", 1, prim_getContext); static RegisterPrimOp primop_getContext("__getContext", 1, prim_getContext);
/* Append the given context to a given string. /* Append the given context to a given string.
@ -191,6 +191,6 @@ static void prim_appendContext(EvalState & state, const Pos & pos, Value * * arg
mkString(v, orig, context); mkString(v, orig, context);
} }
static RegisterPrimOp r5("__appendContext", 2, prim_appendContext); static RegisterPrimOp primop_appendContext("__appendContext", 2, prim_appendContext);
} }


@ -3,8 +3,7 @@
#include "store-api.hh" #include "store-api.hh"
#include "fetchers.hh" #include "fetchers.hh"
#include "url.hh" #include "url.hh"
#include "url-parts.hh"
#include <regex>
namespace nix { namespace nix {
@ -88,6 +87,6 @@ static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * ar
state.allowedPaths->insert(tree.actualPath); state.allowedPaths->insert(tree.actualPath);
} }
static RegisterPrimOp r("fetchMercurial", 1, prim_fetchMercurial); static RegisterPrimOp r_fetchMercurial("fetchMercurial", 1, prim_fetchMercurial);
} }


@ -152,7 +152,7 @@ static void prim_fetchTree(EvalState & state, const Pos & pos, Value * * args, V
fetchTree(state, pos, args, v, std::nullopt); fetchTree(state, pos, args, v, std::nullopt);
} }
static RegisterPrimOp r("fetchTree", 1, prim_fetchTree); static RegisterPrimOp primop_fetchTree("fetchTree", 1, prim_fetchTree);
static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v, static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
const string & who, bool unpack, std::string name) const string & who, bool unpack, std::string name)


@ -88,6 +88,6 @@ static void prim_fromTOML(EvalState & state, const Pos & pos, Value * * args, Va
} }
} }
static RegisterPrimOp r("fromTOML", 1, prim_fromTOML); static RegisterPrimOp primop_fromTOML("fromTOML", 1, prim_fromTOML);
} }


@ -69,6 +69,14 @@ ParsedURL Input::toURL() const
return scheme->toURL(*this); return scheme->toURL(*this);
} }
std::string Input::toURLString(const std::map<std::string, std::string> & extraQuery) const
{
auto url = toURL();
for (auto & attr : extraQuery)
url.query.insert(attr);
return url.to_string();
}
std::string Input::to_string() const std::string Input::to_string() const
{ {
return toURL().to_string(); return toURL().to_string();


@ -23,7 +23,7 @@ struct InputScheme;
struct Input struct Input
{ {
friend class InputScheme; friend struct InputScheme;
std::shared_ptr<InputScheme> scheme; // note: can be null std::shared_ptr<InputScheme> scheme; // note: can be null
Attrs attrs; Attrs attrs;
@ -39,6 +39,8 @@ public:
ParsedURL toURL() const; ParsedURL toURL() const;
std::string toURLString(const std::map<std::string, std::string> & extraQuery = {}) const;
std::string to_string() const; std::string to_string() const;
Attrs toAttrs() const; Attrs toAttrs() const;
@ -73,7 +75,7 @@ public:
StorePath computeStorePath(Store & store) const; StorePath computeStorePath(Store & store) const;
// Convience functions for common attributes. // Convenience functions for common attributes.
std::string getType() const; std::string getType() const;
std::optional<Hash> getNarHash() const; std::optional<Hash> getNarHash() const;
std::optional<std::string> getRef() const; std::optional<std::string> getRef() const;
@ -84,6 +86,9 @@ public:
struct InputScheme struct InputScheme
{ {
virtual ~InputScheme()
{ }
virtual std::optional<Input> inputFromURL(const ParsedURL & url) = 0; virtual std::optional<Input> inputFromURL(const ParsedURL & url) = 0;
virtual std::optional<Input> inputFromAttrs(const Attrs & attrs) = 0; virtual std::optional<Input> inputFromAttrs(const Attrs & attrs) = 0;
@ -119,12 +124,14 @@ DownloadFileResult downloadFile(
ref<Store> store, ref<Store> store,
const std::string & url, const std::string & url,
const std::string & name, const std::string & name,
bool immutable); bool immutable,
const Headers & headers = {});
std::pair<Tree, time_t> downloadTarball( std::pair<Tree, time_t> downloadTarball(
ref<Store> store, ref<Store> store,
const std::string & url, const std::string & url,
const std::string & name, const std::string & name,
bool immutable); bool immutable,
const Headers & headers = {});
} }


@ -3,6 +3,7 @@
#include "globals.hh" #include "globals.hh"
#include "tarfile.hh" #include "tarfile.hh"
#include "store-api.hh" #include "store-api.hh"
#include "url-parts.hh"
#include <sys/time.h> #include <sys/time.h>
@ -451,6 +452,6 @@ struct GitInputScheme : InputScheme
} }
}; };
static auto r1 = OnStartup([] { registerInputScheme(std::make_unique<GitInputScheme>()); }); static auto rGitInputScheme = OnStartup([] { registerInputScheme(std::make_unique<GitInputScheme>()); });
} }

View file

@ -3,19 +3,30 @@
#include "fetchers.hh" #include "fetchers.hh"
#include "globals.hh" #include "globals.hh"
#include "store-api.hh" #include "store-api.hh"
#include "types.hh"
#include "url-parts.hh"
#include <optional>
#include <nlohmann/json.hpp> #include <nlohmann/json.hpp>
namespace nix::fetchers { namespace nix::fetchers {
// A github or gitlab url struct DownloadUrl
const static std::string urlRegexS = "[a-zA-Z0-9.]*"; // FIXME: check {
std::regex urlRegex(urlRegexS, std::regex::ECMAScript); std::string url;
Headers headers;
};
// A github or gitlab host
const static std::string hostRegexS = "[a-zA-Z0-9.]*"; // FIXME: check
std::regex hostRegex(hostRegexS, std::regex::ECMAScript);
struct GitArchiveInputScheme : InputScheme struct GitArchiveInputScheme : InputScheme
{ {
virtual std::string type() = 0; virtual std::string type() = 0;
virtual std::optional<std::pair<std::string, std::string>> accessHeaderFromToken(const std::string & token) const = 0;
std::optional<Input> inputFromURL(const ParsedURL & url) override std::optional<Input> inputFromURL(const ParsedURL & url) override
{ {
if (url.scheme != type()) return {}; if (url.scheme != type()) return {};
@ -50,9 +61,9 @@ struct GitArchiveInputScheme : InputScheme
throw BadURL("URL '%s' contains multiple branch/tag names", url.url); throw BadURL("URL '%s' contains multiple branch/tag names", url.url);
ref = value; ref = value;
} }
else if (name == "url") { else if (name == "host") {
if (!std::regex_match(value, urlRegex)) if (!std::regex_match(value, hostRegex))
throw BadURL("URL '%s' contains an invalid instance url", url.url); throw BadURL("URL '%s' contains an invalid instance host", url.url);
host_url = value; host_url = value;
} }
// FIXME: barf on unsupported attributes // FIXME: barf on unsupported attributes
@ -67,7 +78,7 @@ struct GitArchiveInputScheme : InputScheme
input.attrs.insert_or_assign("repo", path[1]); input.attrs.insert_or_assign("repo", path[1]);
if (rev) input.attrs.insert_or_assign("rev", rev->gitRev()); if (rev) input.attrs.insert_or_assign("rev", rev->gitRev());
if (ref) input.attrs.insert_or_assign("ref", *ref); if (ref) input.attrs.insert_or_assign("ref", *ref);
if (host_url) input.attrs.insert_or_assign("url", *host_url); if (host_url) input.attrs.insert_or_assign("host", *host_url);
return input; return input;
} }
@ -77,7 +88,7 @@ struct GitArchiveInputScheme : InputScheme
if (maybeGetStrAttr(attrs, "type") != type()) return {}; if (maybeGetStrAttr(attrs, "type") != type()) return {};
for (auto & [name, value] : attrs) for (auto & [name, value] : attrs)
if (name != "type" && name != "owner" && name != "repo" && name != "ref" && name != "rev" && name != "narHash" && name != "lastModified") if (name != "type" && name != "owner" && name != "repo" && name != "ref" && name != "rev" && name != "narHash" && name != "lastModified" && name != "host")
throw Error("unsupported input attribute '%s'", name); throw Error("unsupported input attribute '%s'", name);
getStrAttr(attrs, "owner"); getStrAttr(attrs, "owner");
@ -129,9 +140,31 @@ struct GitArchiveInputScheme : InputScheme
return input; return input;
} }
std::optional<std::string> getAccessToken(const std::string & host) const
{
auto tokens = settings.accessTokens.get();
if (auto token = get(tokens, host))
return *token;
return {};
}
Headers makeHeadersWithAuthTokens(const std::string & host) const
{
Headers headers;
auto accessToken = getAccessToken(host);
if (accessToken) {
auto hdr = accessHeaderFromToken(*accessToken);
if (hdr)
headers.push_back(*hdr);
else
warn("Unrecognized access token for host '%s'", host);
}
return headers;
}
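
The per-host lookup above reads settings.accessTokens, so a token can be configured per forge host and then turned into an HTTP header by the scheme-specific accessHeaderFromToken implementations below (GitHub produces "Authorization: token <t>", GitLab a prefix-tagged form). A small standalone sketch of that lookup-and-header step; the map contents are hypothetical and not taken from this diff:

```cpp
#include <iostream>
#include <map>
#include <string>
#include <utility>
#include <vector>

using Headers = std::vector<std::pair<std::string, std::string>>;

// Hypothetical per-host tokens; in Nix these come from settings.accessTokens.
static const std::map<std::string, std::string> accessTokens = {
    {"github.com", "ghp_exampletoken"},
};

Headers makeHeaders(const std::string & host)
{
    Headers headers;
    if (auto it = accessTokens.find(host); it != accessTokens.end())
        // The shape the GitHub scheme's accessHeaderFromToken (below) produces.
        headers.push_back({"Authorization", "token " + it->second});
    return headers;
}

int main()
{
    for (auto & [name, value] : makeHeaders("github.com"))
        std::cout << name << ": " << value << "\n"; // Authorization: token ghp_exampletoken
}
```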
virtual Hash getRevFromRef(nix::ref<Store> store, const Input & input) const = 0; virtual Hash getRevFromRef(nix::ref<Store> store, const Input & input) const = 0;
virtual std::string getDownloadUrl(const Input & input) const = 0; virtual DownloadUrl getDownloadUrl(const Input & input) const = 0;
std::pair<Tree, Input> fetch(ref<Store> store, const Input & _input) override std::pair<Tree, Input> fetch(ref<Store> store, const Input & _input) override
{ {
@ -160,7 +193,7 @@ struct GitArchiveInputScheme : InputScheme
auto url = getDownloadUrl(input); auto url = getDownloadUrl(input);
auto [tree, lastModified] = downloadTarball(store, url, "source", true); auto [tree, lastModified] = downloadTarball(store, url.url, "source", true, url.headers);
input.attrs.insert_or_assign("lastModified", lastModified); input.attrs.insert_or_assign("lastModified", lastModified);
@ -182,49 +215,52 @@ struct GitHubInputScheme : GitArchiveInputScheme
{ {
std::string type() override { return "github"; } std::string type() override { return "github"; }
void addAccessToken(std::string & url) const std::optional<std::pair<std::string, std::string>> accessHeaderFromToken(const std::string & token) const override
{ {
std::string accessToken = settings.githubAccessToken.get(); // Github supports PAT/OAuth2 tokens and HTTP Basic
if (accessToken != "") // Authentication. The former simply specifies the token, the
url += "?access_token=" + accessToken; // latter can use the token as the password. Only the first
// is used here. See
// https://developer.github.com/v3/#authentication and
// https://docs.github.com/en/developers/apps/authorizing-oauth-apps
return std::pair<std::string, std::string>("Authorization", fmt("token %s", token));
} }

Hash getRevFromRef(nix::ref<Store> store, const Input & input) const override Hash getRevFromRef(nix::ref<Store> store, const Input & input) const override
{ {
auto host_url = maybeGetStrAttr(input.attrs, "url").value_or("github.com"); auto host = maybeGetStrAttr(input.attrs, "host").value_or("github.com");
auto url = fmt("https://api.%s/repos/%s/%s/commits/%s", // FIXME: check auto url = fmt("https://api.%s/repos/%s/%s/commits/%s", // FIXME: check
host_url, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), *input.getRef()); host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), *input.getRef());
addAccessToken(url); Headers headers = makeHeadersWithAuthTokens(host);
auto json = nlohmann::json::parse( auto json = nlohmann::json::parse(
readFile( readFile(
store->toRealPath( store->toRealPath(
downloadFile(store, url, "source", false).storePath))); downloadFile(store, url, "source", false, headers).storePath)));
auto rev = Hash::parseAny(std::string { json["sha"] }, htSHA1); auto rev = Hash::parseAny(std::string { json["sha"] }, htSHA1);
debug("HEAD revision for '%s' is %s", url, rev.gitRev()); debug("HEAD revision for '%s' is %s", url, rev.gitRev());
return rev; return rev;
} }
std::string getDownloadUrl(const Input & input) const override DownloadUrl getDownloadUrl(const Input & input) const override
{ {
// FIXME: use regular /archive URLs instead? api.github.com // FIXME: use regular /archive URLs instead? api.github.com
// might have stricter rate limits. // might have stricter rate limits.
auto host_url = maybeGetStrAttr(input.attrs, "url").value_or("github.com"); auto host = maybeGetStrAttr(input.attrs, "host").value_or("github.com");
auto url = fmt("https://api.%s/repos/%s/%s/tarball/%s", // FIXME: check if this is correct for self hosted instances auto url = fmt("https://api.%s/repos/%s/%s/tarball/%s", // FIXME: check if this is correct for self hosted instances
host_url, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"),
input.getRev()->to_string(Base16, false)); input.getRev()->to_string(Base16, false));
addAccessToken(url); Headers headers = makeHeadersWithAuthTokens(host);
return DownloadUrl { url, headers };
return url;
} }
void clone(const Input & input, const Path & destDir) override void clone(const Input & input, const Path & destDir) override
{ {
auto host_url = maybeGetStrAttr(input.attrs, "url").value_or("github.com"); auto host = maybeGetStrAttr(input.attrs, "host").value_or("github.com");
Input::fromURL(fmt("git+ssh://git@%s/%s/%s.git", Input::fromURL(fmt("git+ssh://git@%s/%s/%s.git",
host_url, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"))) host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo")))
.applyOverrides(input.getRef().value_or("HEAD"), input.getRev()) .applyOverrides(input.getRef().value_or("HEAD"), input.getRev())
.clone(destDir); .clone(destDir);
} }
@ -234,48 +270,71 @@ struct GitLabInputScheme : GitArchiveInputScheme
{ {
std::string type() override { return "gitlab"; } std::string type() override { return "gitlab"; }
std::optional<std::pair<std::string, std::string>> accessHeaderFromToken(const std::string & token) const override
{
// Gitlab supports 4 kinds of authorization, two of which are
// relevant here: OAuth2 and PAT (Private Access Token). The
// user can indicate which token is used by specifying the
// token as <TYPE>:<VALUE>, where type is "OAuth2" or "PAT".
// If the <TYPE> is unrecognized, this will fall back to
// treating this simply has <HDRNAME>:<HDRVAL>. See
// https://docs.gitlab.com/12.10/ee/api/README.html#authentication
auto fldsplit = token.find_first_of(':');
// n.b. C++20 would allow: if (token.starts_with("OAuth2:")) ...
if ("OAuth2" == token.substr(0, fldsplit))
return std::make_pair("Authorization", fmt("Bearer %s", token.substr(fldsplit+1)));
if ("PAT" == token.substr(0, fldsplit))
return std::make_pair("Private-token", token.substr(fldsplit+1));
warn("Unrecognized GitLab token type %s", token.substr(0, fldsplit));
return std::nullopt;
}
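
A worked example of the token formats the GitLab scheme accepts. The splitting below is a standalone copy of the logic above (using string concatenation instead of fmt), and the token values are made up:

```cpp
#include <iostream>
#include <optional>
#include <string>
#include <utility>

std::optional<std::pair<std::string, std::string>> headerFor(const std::string & token)
{
    auto fldsplit = token.find_first_of(':');
    if ("OAuth2" == token.substr(0, fldsplit))
        return std::make_pair(std::string("Authorization"), "Bearer " + token.substr(fldsplit + 1));
    if ("PAT" == token.substr(0, fldsplit))
        return std::make_pair(std::string("Private-token"), token.substr(fldsplit + 1));
    return std::nullopt;
}

int main()
{
    // Hypothetical token values:
    for (auto t : {"OAuth2:abc123", "PAT:glpat-xyz", "justatoken"}) {
        if (auto h = headerFor(t))
            std::cout << t << " -> " << h->first << ": " << h->second << "\n";
        else
            std::cout << t << " -> unrecognized token type\n";
    }
    // OAuth2:abc123 -> Authorization: Bearer abc123
    // PAT:glpat-xyz -> Private-token: glpat-xyz
    // justatoken    -> unrecognized token type
}
```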
Hash getRevFromRef(nix::ref<Store> store, const Input & input) const override Hash getRevFromRef(nix::ref<Store> store, const Input & input) const override
{ {
auto host_url = maybeGetStrAttr(input.attrs, "url").value_or("gitlab.com"); auto host = maybeGetStrAttr(input.attrs, "host").value_or("gitlab.com");
// See rate limiting note below
auto url = fmt("https://%s/api/v4/projects/%s%%2F%s/repository/commits?ref_name=%s", auto url = fmt("https://%s/api/v4/projects/%s%%2F%s/repository/commits?ref_name=%s",
host_url, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), *input.getRef()); host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), *input.getRef());
Headers headers = makeHeadersWithAuthTokens(host);
auto json = nlohmann::json::parse( auto json = nlohmann::json::parse(
readFile( readFile(
store->toRealPath( store->toRealPath(
downloadFile(store, url, "source", false).storePath))); downloadFile(store, url, "source", false, headers).storePath)));
auto rev = Hash::parseAny(std::string(json[0]["id"]), htSHA1); auto rev = Hash::parseAny(std::string(json[0]["id"]), htSHA1);
debug("HEAD revision for '%s' is %s", url, rev.gitRev()); debug("HEAD revision for '%s' is %s", url, rev.gitRev());
return rev; return rev;
} }
std::string getDownloadUrl(const Input & input) const override DownloadUrl getDownloadUrl(const Input & input) const override
{ {
// FIXME: This endpoint has a rate limit threshold of 5 requests per minute // This endpoint has a rate limit threshold that may be
auto host_url = maybeGetStrAttr(input.attrs, "url").value_or("gitlab.com"); // server-specific and vary based on whether the user is
// authenticated via an accessToken or not, but the usual rate
// is 10 reqs/sec/ip-addr. See
// https://docs.gitlab.com/ee/user/gitlab_com/index.html#gitlabcom-specific-rate-limits
auto host = maybeGetStrAttr(input.attrs, "host").value_or("gitlab.com");
auto url = fmt("https://%s/api/v4/projects/%s%%2F%s/repository/archive.tar.gz?sha=%s", auto url = fmt("https://%s/api/v4/projects/%s%%2F%s/repository/archive.tar.gz?sha=%s",
host_url, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"),
input.getRev()->to_string(Base16, false)); input.getRev()->to_string(Base16, false));
/* # FIXME: add privat token auth (`curl --header "PRIVATE-TOKEN: <your_access_token>"`) Headers headers = makeHeadersWithAuthTokens(host);
std::string accessToken = settings.githubAccessToken.get(); return DownloadUrl { url, headers };
if (accessToken != "")
url += "?access_token=" + accessToken;*/
return url;
} }
void clone(const Input & input, const Path & destDir) override void clone(const Input & input, const Path & destDir) override
{ {
auto host_url = maybeGetStrAttr(input.attrs, "url").value_or("gitlab.com"); auto host = maybeGetStrAttr(input.attrs, "host").value_or("gitlab.com");
// FIXME: get username somewhere // FIXME: get username somewhere
Input::fromURL(fmt("git+ssh://git@%s/%s/%s.git", Input::fromURL(fmt("git+ssh://git@%s/%s/%s.git",
host_url, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"))) host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo")))
.applyOverrides(input.getRef().value_or("HEAD"), input.getRev()) .applyOverrides(input.getRef().value_or("HEAD"), input.getRev())
.clone(destDir); .clone(destDir);
} }
}; };
static auto r1 = OnStartup([] { registerInputScheme(std::make_unique<GitHubInputScheme>()); }); static auto rGitHubInputScheme = OnStartup([] { registerInputScheme(std::make_unique<GitHubInputScheme>()); });
static auto r2 = OnStartup([] { registerInputScheme(std::make_unique<GitLabInputScheme>()); }); static auto rGitLabInputScheme = OnStartup([] { registerInputScheme(std::make_unique<GitLabInputScheme>()); });
} }

View file

@ -1,4 +1,5 @@
#include "fetchers.hh" #include "fetchers.hh"
#include "url-parts.hh"
namespace nix::fetchers { namespace nix::fetchers {
@ -99,6 +100,6 @@ struct IndirectInputScheme : InputScheme
} }
}; };
static auto r1 = OnStartup([] { registerInputScheme(std::make_unique<IndirectInputScheme>()); }); static auto rIndirectInputScheme = OnStartup([] { registerInputScheme(std::make_unique<IndirectInputScheme>()); });
} }

View file

@ -3,6 +3,7 @@
#include "globals.hh" #include "globals.hh"
#include "tarfile.hh" #include "tarfile.hh"
#include "store-api.hh" #include "store-api.hh"
#include "url-parts.hh"
#include <sys/time.h> #include <sys/time.h>
@ -292,6 +293,6 @@ struct MercurialInputScheme : InputScheme
} }
}; };
static auto r1 = OnStartup([] { registerInputScheme(std::make_unique<MercurialInputScheme>()); }); static auto rMercurialInputScheme = OnStartup([] { registerInputScheme(std::make_unique<MercurialInputScheme>()); });
} }

View file

@ -102,6 +102,6 @@ struct PathInputScheme : InputScheme
} }
}; };
static auto r1 = OnStartup([] { registerInputScheme(std::make_unique<PathInputScheme>()); }); static auto rPathInputScheme = OnStartup([] { registerInputScheme(std::make_unique<PathInputScheme>()); });
} }

View file

@ -5,6 +5,7 @@
#include "store-api.hh" #include "store-api.hh"
#include "archive.hh" #include "archive.hh"
#include "tarfile.hh" #include "tarfile.hh"
#include "types.hh"
namespace nix::fetchers { namespace nix::fetchers {
@ -12,7 +13,8 @@ DownloadFileResult downloadFile(
ref<Store> store, ref<Store> store,
const std::string & url, const std::string & url,
const std::string & name, const std::string & name,
bool immutable) bool immutable,
const Headers & headers)
{ {
// FIXME: check store // FIXME: check store
@ -37,6 +39,7 @@ DownloadFileResult downloadFile(
return useCached(); return useCached();
FileTransferRequest request(url); FileTransferRequest request(url);
request.headers = headers;
if (cached) if (cached)
request.expectedETag = getStrAttr(cached->infoAttrs, "etag"); request.expectedETag = getStrAttr(cached->infoAttrs, "etag");
FileTransferResult res; FileTransferResult res;
@ -111,7 +114,8 @@ std::pair<Tree, time_t> downloadTarball(
ref<Store> store, ref<Store> store,
const std::string & url, const std::string & url,
const std::string & name, const std::string & name,
bool immutable) bool immutable,
const Headers & headers)
{ {
Attrs inAttrs({ Attrs inAttrs({
{"type", "tarball"}, {"type", "tarball"},
@ -127,7 +131,7 @@ std::pair<Tree, time_t> downloadTarball(
getIntAttr(cached->infoAttrs, "lastModified") getIntAttr(cached->infoAttrs, "lastModified")
}; };
auto res = downloadFile(store, url, name, immutable); auto res = downloadFile(store, url, name, immutable, headers);
std::optional<StorePath> unpackedStorePath; std::optional<StorePath> unpackedStorePath;
time_t lastModified; time_t lastModified;
@ -227,6 +231,6 @@ struct TarballInputScheme : InputScheme
} }
}; };
static auto r1 = OnStartup([] { registerInputScheme(std::make_unique<TarballInputScheme>()); }); static auto rTarballInputScheme = OnStartup([] { registerInputScheme(std::make_unique<TarballInputScheme>()); });
} }

View file

@ -256,7 +256,7 @@ public:
} }
else if (type == resBuildLogLine || type == resPostBuildLogLine) { else if (type == resBuildLogLine || type == resPostBuildLogLine) {
auto lastLine = trim(getS(fields, 0)); auto lastLine = chomp(getS(fields, 0));
if (!lastLine.empty()) { if (!lastLine.empty()) {
auto i = state->its.find(act); auto i = state->its.find(act);
assert(i != state->its.end()); assert(i != state->its.end());

View file

@ -386,18 +386,12 @@ RunPager::~RunPager()
} }
string showBytes(uint64_t bytes)
{
return (format("%.2f MiB") % (bytes / (1024.0 * 1024.0))).str();
}
PrintFreed::~PrintFreed() PrintFreed::~PrintFreed()
{ {
if (show) if (show)
std::cout << format("%1% store paths deleted, %2% freed\n") std::cout << fmt("%d store paths deleted, %s freed\n",
% results.paths.size() results.paths.size(),
% showBytes(results.bytesFreed); showBytes(results.bytesFreed));
} }
Exit::~Exit() { } Exit::~Exit() { }

View file

@ -11,6 +11,7 @@
#include "nar-accessor.hh" #include "nar-accessor.hh"
#include "json.hh" #include "json.hh"
#include "thread-pool.hh" #include "thread-pool.hh"
#include "callback.hh"
#include <chrono> #include <chrono>
#include <future> #include <future>
@ -22,7 +23,8 @@
namespace nix { namespace nix {
BinaryCacheStore::BinaryCacheStore(const Params & params) BinaryCacheStore::BinaryCacheStore(const Params & params)
: Store(params) : BinaryCacheStoreConfig(params)
, Store(params)
{ {
if (secretKeyFile != "") if (secretKeyFile != "")
secretKey = std::unique_ptr<SecretKey>(new SecretKey(readFile(secretKeyFile))); secretKey = std::unique_ptr<SecretKey>(new SecretKey(readFile(secretKeyFile)));
@ -140,17 +142,10 @@ struct FileSource : FdSource
} }
}; };
void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource, ref<const ValidPathInfo> BinaryCacheStore::addToStoreCommon(
RepairFlag repair, CheckSigsFlag checkSigs) Source & narSource, RepairFlag repair, CheckSigsFlag checkSigs,
std::function<ValidPathInfo(HashResult)> mkInfo)
{ {
assert(info.narSize);
if (!repair && isValidPath(info.path)) {
// FIXME: copyNAR -> null sink
narSource.drain();
return;
}
auto [fdTemp, fnTemp] = createTempFile(); auto [fdTemp, fnTemp] = createTempFile();
AutoDelete autoDelete(fnTemp); AutoDelete autoDelete(fnTemp);
@ -160,13 +155,15 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource
/* Read the NAR simultaneously into a CompressionSink+FileSink (to /* Read the NAR simultaneously into a CompressionSink+FileSink (to
write the compressed NAR to disk), into a HashSink (to get the write the compressed NAR to disk), into a HashSink (to get the
NAR hash), and into a NarAccessor (to get the NAR listing). */ NAR hash), and into a NarAccessor (to get the NAR listing). */
HashSink fileHashSink(htSHA256); HashSink fileHashSink { htSHA256 };
std::shared_ptr<FSAccessor> narAccessor; std::shared_ptr<FSAccessor> narAccessor;
HashSink narHashSink { htSHA256 };
{ {
FdSink fileSink(fdTemp.get()); FdSink fileSink(fdTemp.get());
TeeSink teeSink(fileSink, fileHashSink); TeeSink teeSinkCompressed { fileSink, fileHashSink };
auto compressionSink = makeCompressionSink(compression, teeSink); auto compressionSink = makeCompressionSink(compression, teeSinkCompressed);
TeeSource teeSource(narSource, *compressionSink); TeeSink teeSinkUncompressed { *compressionSink, narHashSink };
TeeSource teeSource { narSource, teeSinkUncompressed };
narAccessor = makeNarAccessor(teeSource); narAccessor = makeNarAccessor(teeSource);
compressionSink->finish(); compressionSink->finish();
fileSink.flush(); fileSink.flush();
@ -174,9 +171,8 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource
auto now2 = std::chrono::steady_clock::now(); auto now2 = std::chrono::steady_clock::now();
auto info = mkInfo(narHashSink.finish());
auto narInfo = make_ref<NarInfo>(info); auto narInfo = make_ref<NarInfo>(info);
narInfo->narSize = info.narSize;
narInfo->narHash = info.narHash;
narInfo->compression = compression; narInfo->compression = compression;
auto [fileHash, fileSize] = fileHashSink.finish(); auto [fileHash, fileSize] = fileHashSink.finish();
narInfo->fileHash = fileHash; narInfo->fileHash = fileHash;
@ -298,6 +294,41 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource
writeNarInfo(narInfo); writeNarInfo(narInfo);
stats.narInfoWrite++; stats.narInfoWrite++;
return narInfo;
}
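
addToStoreCommon streams the NAR exactly once and fans it out: the uncompressed bytes go to narHashSink and the NAR accessor, while the compressed bytes go to the temp file and fileHashSink. A minimal standalone sketch of that fan-out ("tee") pattern with plain std types, not Nix's actual Sink/Source classes:

```cpp
#include <functional>
#include <iostream>
#include <string>
#include <vector>

// One pass over the data, every chunk forwarded to several consumers
// (hash sink, compression sink, file sink, ...).
using Sink = std::function<void(const std::string &)>;

Sink tee(std::vector<Sink> sinks)
{
    return [sinks](const std::string & chunk) {
        for (auto & s : sinks) s(chunk);
    };
}

int main()
{
    size_t total = 0;
    std::string copy;
    auto sink = tee({
        [&](const std::string & c) { total += c.size(); }, // stand-in for a HashSink
        [&](const std::string & c) { copy += c; },          // stand-in for a FileSink
    });
    for (auto chunk : {"nar-", "contents"}) sink(chunk);
    std::cout << total << " bytes, copy = " << copy << "\n"; // 12 bytes, copy = nar-contents
}
```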
void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource,
RepairFlag repair, CheckSigsFlag checkSigs)
{
if (!repair && isValidPath(info.path)) {
// FIXME: copyNAR -> null sink
narSource.drain();
return;
}
addToStoreCommon(narSource, repair, checkSigs, {[&](HashResult nar) {
/* FIXME reinstate these, once we can correctly do hash modulo sink as
needed. We need to throw here in case we uploaded a corrupted store path. */
// assert(info.narHash == nar.first);
// assert(info.narSize == nar.second);
return info;
}});
}
StorePath BinaryCacheStore::addToStoreFromDump(Source & dump, const string & name,
FileIngestionMethod method, HashType hashAlgo, RepairFlag repair)
{
if (method != FileIngestionMethod::Recursive || hashAlgo != htSHA256)
unsupported("addToStoreFromDump");
return addToStoreCommon(dump, repair, CheckSigs, [&](HashResult nar) {
ValidPathInfo info {
makeFixedOutputPath(method, nar.first, name),
nar.first,
};
info.narSize = nar.second;
return info;
})->path;
} }
bool BinaryCacheStore::isValidPathUncached(const StorePath & storePath) bool BinaryCacheStore::isValidPathUncached(const StorePath & storePath)
@ -365,50 +396,52 @@ void BinaryCacheStore::queryPathInfoUncached(const StorePath & storePath,
StorePath BinaryCacheStore::addToStore(const string & name, const Path & srcPath, StorePath BinaryCacheStore::addToStore(const string & name, const Path & srcPath,
FileIngestionMethod method, HashType hashAlgo, PathFilter & filter, RepairFlag repair) FileIngestionMethod method, HashType hashAlgo, PathFilter & filter, RepairFlag repair)
{ {
// FIXME: some cut&paste from LocalStore::addToStore(). /* FIXME: Make BinaryCacheStore::addToStoreCommon support
non-recursive+sha256 so we can just use the default
implementation of this method in terms of addToStoreFromDump. */
/* Read the whole path into memory. This is not a very scalable HashSink sink { hashAlgo };
method for very large paths, but `copyPath' is mainly used for
small files. */
StringSink sink;
std::optional<Hash> h;
if (method == FileIngestionMethod::Recursive) { if (method == FileIngestionMethod::Recursive) {
dumpPath(srcPath, sink, filter); dumpPath(srcPath, sink, filter);
h = hashString(hashAlgo, *sink.s);
} else { } else {
auto s = readFile(srcPath); readFile(srcPath, sink);
dumpString(s, sink);
h = hashString(hashAlgo, s);
} }
auto h = sink.finish().first;
ValidPathInfo info { auto source = sinkToSource([&](Sink & sink) {
makeFixedOutputPath(method, *h, name), dumpPath(srcPath, sink, filter);
Hash::dummy, // Will be fixed in addToStore, which recomputes nar hash });
}; return addToStoreCommon(*source, repair, CheckSigs, [&](HashResult nar) {
ValidPathInfo info {
auto source = StringSource { *sink.s }; makeFixedOutputPath(method, h, name),
addToStore(info, source, repair, CheckSigs); nar.first,
};
return std::move(info.path); info.narSize = nar.second;
info.ca = FixedOutputHash {
.method = method,
.hash = h,
};
return info;
})->path;
} }
StorePath BinaryCacheStore::addTextToStore(const string & name, const string & s, StorePath BinaryCacheStore::addTextToStore(const string & name, const string & s,
const StorePathSet & references, RepairFlag repair) const StorePathSet & references, RepairFlag repair)
{ {
ValidPathInfo info { auto textHash = hashString(htSHA256, s);
computeStorePathForText(name, s, references), auto path = makeTextPath(name, textHash, references);
Hash::dummy, // Will be fixed in addToStore, which recomputes nar hash
};
info.references = references;
if (repair || !isValidPath(info.path)) { if (!repair && isValidPath(path))
StringSink sink; return path;
dumpString(s, sink);
auto source = StringSource { *sink.s };
addToStore(info, source, repair, CheckSigs);
}
return std::move(info.path); auto source = StringSource { s };
return addToStoreCommon(source, repair, CheckSigs, [&](HashResult nar) {
ValidPathInfo info { path, nar.first };
info.narSize = nar.second;
info.ca = TextHash { textHash };
info.references = references;
return info;
})->path;
} }
ref<FSAccessor> BinaryCacheStore::getFSAccessor() ref<FSAccessor> BinaryCacheStore::getFSAccessor()

View file

@ -11,17 +11,21 @@ namespace nix {
struct NarInfo; struct NarInfo;
class BinaryCacheStore : public Store struct BinaryCacheStoreConfig : virtual StoreConfig
{ {
public: using StoreConfig::StoreConfig;
const Setting<std::string> compression{this, "xz", "compression", "NAR compression method ('xz', 'bzip2', or 'none')"}; const Setting<std::string> compression{(StoreConfig*) this, "xz", "compression", "NAR compression method ('xz', 'bzip2', or 'none')"};
const Setting<bool> writeNARListing{this, false, "write-nar-listing", "whether to write a JSON file listing the files in each NAR"}; const Setting<bool> writeNARListing{(StoreConfig*) this, false, "write-nar-listing", "whether to write a JSON file listing the files in each NAR"};
const Setting<bool> writeDebugInfo{this, false, "index-debug-info", "whether to index DWARF debug info files by build ID"}; const Setting<bool> writeDebugInfo{(StoreConfig*) this, false, "index-debug-info", "whether to index DWARF debug info files by build ID"};
const Setting<Path> secretKeyFile{this, "", "secret-key", "path to secret key used to sign the binary cache"}; const Setting<Path> secretKeyFile{(StoreConfig*) this, "", "secret-key", "path to secret key used to sign the binary cache"};
const Setting<Path> localNarCache{this, "", "local-nar-cache", "path to a local cache of NARs"}; const Setting<Path> localNarCache{(StoreConfig*) this, "", "local-nar-cache", "path to a local cache of NARs"};
const Setting<bool> parallelCompression{this, false, "parallel-compression", const Setting<bool> parallelCompression{(StoreConfig*) this, false, "parallel-compression",
"enable multi-threading compression, available for xz only currently"}; "enable multi-threading compression, available for xz only currently"};
};
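
The settings now live in BinaryCacheStoreConfig, which the store inherits virtually so a concrete store can combine several such config mixins over a single StoreConfig base; the `(StoreConfig*) this` casts presumably exist because each Setting registers itself with the StoreConfig part of the object under construction. A bare-bones sketch of the inheritance shape only, with stand-in types rather than Nix's Config/Setting machinery:

```cpp
#include <iostream>
#include <string>

// Stand-ins: just the shape of the split, no real Setting registration.
struct StoreConfig { virtual ~StoreConfig() = default; };

struct BinaryCacheStoreConfig : virtual StoreConfig {
    std::string compression = "xz";
};

struct Store : virtual StoreConfig { };

struct BinaryCacheStore : Store, virtual BinaryCacheStoreConfig {
    void describe() { std::cout << "compression = " << compression << "\n"; }
};

int main()
{
    BinaryCacheStore s;
    s.describe(); // compression = xz
}
```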
class BinaryCacheStore : public Store, public virtual BinaryCacheStoreConfig
{
private: private:
@ -58,7 +62,7 @@ public:
public: public:
virtual void init(); virtual void init() override;
private: private:
@ -68,6 +72,10 @@ private:
void writeNarInfo(ref<NarInfo> narInfo); void writeNarInfo(ref<NarInfo> narInfo);
ref<const ValidPathInfo> addToStoreCommon(
Source & narSource, RepairFlag repair, CheckSigsFlag checkSigs,
std::function<ValidPathInfo(HashResult)> mkInfo);
public: public:
bool isValidPathUncached(const StorePath & path) override; bool isValidPathUncached(const StorePath & path) override;
@ -81,6 +89,9 @@ public:
void addToStore(const ValidPathInfo & info, Source & narSource, void addToStore(const ValidPathInfo & info, Source & narSource,
RepairFlag repair, CheckSigsFlag checkSigs) override; RepairFlag repair, CheckSigsFlag checkSigs) override;
StorePath addToStoreFromDump(Source & dump, const string & name,
FileIngestionMethod method, HashType hashAlgo, RepairFlag repair) override;
StorePath addToStore(const string & name, const Path & srcPath, StorePath addToStore(const string & name, const Path & srcPath,
FileIngestionMethod method, HashType hashAlgo, FileIngestionMethod method, HashType hashAlgo,
PathFilter & filter, RepairFlag repair) override; PathFilter & filter, RepairFlag repair) override;

File diff suppressed because it is too large

View file

@ -4,11 +4,13 @@
namespace nix { namespace nix {
std::string FixedOutputHash::printMethodAlgo() const { std::string FixedOutputHash::printMethodAlgo() const
{
return makeFileIngestionPrefix(method) + printHashType(hash.type); return makeFileIngestionPrefix(method) + printHashType(hash.type);
} }
std::string makeFileIngestionPrefix(const FileIngestionMethod m) { std::string makeFileIngestionPrefix(const FileIngestionMethod m)
{
switch (m) { switch (m) {
case FileIngestionMethod::Flat: case FileIngestionMethod::Flat:
return ""; return "";
@ -26,7 +28,8 @@ std::string makeFixedOutputCA(FileIngestionMethod method, const Hash & hash)
+ hash.to_string(Base32, true); + hash.to_string(Base32, true);
} }
std::string renderContentAddress(ContentAddress ca) { std::string renderContentAddress(ContentAddress ca)
{
return std::visit(overloaded { return std::visit(overloaded {
[](TextHash th) { [](TextHash th) {
return "text:" + th.hash.to_string(Base32, true); return "text:" + th.hash.to_string(Base32, true);
@ -37,54 +40,97 @@ std::string renderContentAddress(ContentAddress ca) {
}, ca); }, ca);
} }
ContentAddress parseContentAddress(std::string_view rawCa) { std::string renderContentAddressMethod(ContentAddressMethod cam)
auto rest = rawCa; {
return std::visit(overloaded {
[](TextHashMethod &th) {
return std::string{"text:"} + printHashType(htSHA256);
},
[](FixedOutputHashMethod &fshm) {
return "fixed:" + makeFileIngestionPrefix(fshm.fileIngestionMethod) + printHashType(fshm.hashType);
}
}, cam);
}
/*
Parses content address strings up to the hash.
*/
static ContentAddressMethod parseContentAddressMethodPrefix(std::string_view & rest)
{
std::string_view wholeInput { rest };
std::string_view prefix; std::string_view prefix;
{ {
auto optPrefix = splitPrefixTo(rest, ':'); auto optPrefix = splitPrefixTo(rest, ':');
if (!optPrefix) if (!optPrefix)
throw UsageError("not a content address because it is not in the form '<prefix>:<rest>': %s", rawCa); throw UsageError("not a content address because it is not in the form '<prefix>:<rest>': %s", wholeInput);
prefix = *optPrefix; prefix = *optPrefix;
} }
auto parseHashType_ = [&](){ auto parseHashType_ = [&](){
auto hashTypeRaw = splitPrefixTo(rest, ':'); auto hashTypeRaw = splitPrefixTo(rest, ':');
if (!hashTypeRaw) if (!hashTypeRaw)
throw UsageError("content address hash must be in form '<algo>:<hash>', but found: %s", rawCa); throw UsageError("content address hash must be in form '<algo>:<hash>', but found: %s", wholeInput);
HashType hashType = parseHashType(*hashTypeRaw); HashType hashType = parseHashType(*hashTypeRaw);
return std::move(hashType); return std::move(hashType);
}; };
// Switch on prefix // Switch on prefix
if (prefix == "text") { if (prefix == "text") {
// No parsing of the method, "text" only support flat. // No parsing of the ingestion method, "text" only supports flat.
HashType hashType = parseHashType_(); HashType hashType = parseHashType_();
if (hashType != htSHA256) if (hashType != htSHA256)
throw Error("text content address hash should use %s, but instead uses %s", throw Error("text content address hash should use %s, but instead uses %s",
printHashType(htSHA256), printHashType(hashType)); printHashType(htSHA256), printHashType(hashType));
return TextHash { return TextHashMethod {};
.hash = Hash::parseNonSRIUnprefixed(rest, std::move(hashType)),
};
} else if (prefix == "fixed") { } else if (prefix == "fixed") {
// Parse method // Parse method
auto method = FileIngestionMethod::Flat; auto method = FileIngestionMethod::Flat;
if (splitPrefix(rest, "r:")) if (splitPrefix(rest, "r:"))
method = FileIngestionMethod::Recursive; method = FileIngestionMethod::Recursive;
HashType hashType = parseHashType_(); HashType hashType = parseHashType_();
return FixedOutputHash { return FixedOutputHashMethod {
.method = method, .fileIngestionMethod = method,
.hash = Hash::parseNonSRIUnprefixed(rest, std::move(hashType)), .hashType = std::move(hashType),
}; };
} else } else
throw UsageError("content address prefix '%s' is unrecognized. Recogonized prefixes are 'text' or 'fixed'", prefix); throw UsageError("content address prefix '%s' is unrecognized. Recogonized prefixes are 'text' or 'fixed'", prefix);
}
ContentAddress parseContentAddress(std::string_view rawCa) {
auto rest = rawCa;
ContentAddressMethod caMethod = parseContentAddressMethodPrefix(rest);
return std::visit(
overloaded {
[&](TextHashMethod thm) {
return ContentAddress(TextHash {
.hash = Hash::parseNonSRIUnprefixed(rest, htSHA256)
});
},
[&](FixedOutputHashMethod fohMethod) {
return ContentAddress(FixedOutputHash {
.method = fohMethod.fileIngestionMethod,
.hash = Hash::parseNonSRIUnprefixed(rest, std::move(fohMethod.hashType)),
});
},
}, caMethod);
}
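
For reference, the string forms accepted above and what they parse to. This is a usage sketch against the declarations in content-address.hh; the 52-character base32 hash is fabricated for illustration:

```cpp
// Assumes the parseContentAddress / parseContentAddressMethod declarations above.
void examples()
{
    auto ca1 = parseContentAddress(
        "text:sha256:1b4sb93wp679q4zx9k1ignby1yna3z7c4c2ri3wphylbc2dwsys0");
    // -> TextHash { .hash = <that sha256> }

    auto ca2 = parseContentAddress(
        "fixed:r:sha256:1b4sb93wp679q4zx9k1ignby1yna3z7c4c2ri3wphylbc2dwsys0");
    // -> FixedOutputHash { .method = FileIngestionMethod::Recursive, .hash = <that sha256> }
    //    (without the "r:" prefix the method is Flat)

    auto m = parseContentAddressMethod("fixed:r:sha256");
    // -> FixedOutputHashMethod { .fileIngestionMethod = Recursive, .hashType = htSHA256 }
}
```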
ContentAddressMethod parseContentAddressMethod(std::string_view caMethod)
{
std::string asPrefix = std::string { caMethod } + ":"; // keep the string alive; a string_view of the temporary would dangle
std::string_view asPrefixView = asPrefix;
return parseContentAddressMethodPrefix(asPrefixView);
}
std::optional<ContentAddress> parseContentAddressOpt(std::string_view rawCaOpt)
{
return rawCaOpt == "" ? std::optional<ContentAddress>() : parseContentAddress(rawCaOpt);
}; };
std::optional<ContentAddress> parseContentAddressOpt(std::string_view rawCaOpt) { std::string renderContentAddress(std::optional<ContentAddress> ca)
return rawCaOpt == "" ? std::optional<ContentAddress> {} : parseContentAddress(rawCaOpt); {
};
std::string renderContentAddress(std::optional<ContentAddress> ca) {
return ca ? renderContentAddress(*ca) : ""; return ca ? renderContentAddress(*ca) : "";
} }

View file

@ -55,4 +55,23 @@ std::optional<ContentAddress> parseContentAddressOpt(std::string_view rawCaOpt);
Hash getContentAddressHash(const ContentAddress & ca); Hash getContentAddressHash(const ContentAddress & ca);
/*
We only have one way to hash text with references, so this single-value
type is only useful in std::variant.
*/
struct TextHashMethod { };
struct FixedOutputHashMethod {
FileIngestionMethod fileIngestionMethod;
HashType hashType;
};
typedef std::variant<
TextHashMethod,
FixedOutputHashMethod
> ContentAddressMethod;
ContentAddressMethod parseContentAddressMethod(std::string_view rawCaMethod);
std::string renderContentAddressMethod(ContentAddressMethod caMethod);
} }

View file

@ -2,7 +2,6 @@
#include "monitor-fd.hh" #include "monitor-fd.hh"
#include "worker-protocol.hh" #include "worker-protocol.hh"
#include "store-api.hh" #include "store-api.hh"
#include "local-store.hh"
#include "finally.hh" #include "finally.hh"
#include "affinity.hh" #include "affinity.hh"
#include "archive.hh" #include "archive.hh"
@ -102,17 +101,20 @@ struct TunnelLogger : public Logger
/* stopWork() means that we're done; stop sending stderr to the /* stopWork() means that we're done; stop sending stderr to the
client. */ client. */
void stopWork(bool success = true, const string & msg = "", unsigned int status = 0) void stopWork(const Error * ex = nullptr)
{ {
auto state(state_.lock()); auto state(state_.lock());
state->canSendStderr = false; state->canSendStderr = false;
if (success) if (!ex)
to << STDERR_LAST; to << STDERR_LAST;
else { else {
to << STDERR_ERROR << msg; if (GET_PROTOCOL_MINOR(clientVersion) >= 26) {
if (status != 0) to << status; to << STDERR_ERROR << *ex;
} else {
to << STDERR_ERROR << ex->what() << ex->status;
}
} }
} }
@ -232,7 +234,7 @@ struct ClientSettings
else if (setSubstituters(settings.extraSubstituters)) else if (setSubstituters(settings.extraSubstituters))
; ;
else else
warn("ignoring the user-specified setting '%s', because it is a restricted setting and you are not a trusted user", name); debug("ignoring the client-specified setting '%s', because it is a restricted setting and you are not a trusted user", name);
} catch (UsageError & e) { } catch (UsageError & e) {
warn(e.what()); warn(e.what());
} }
@ -240,6 +242,23 @@ struct ClientSettings
} }
}; };
static void writeValidPathInfo(
ref<Store> store,
unsigned int clientVersion,
Sink & to,
std::shared_ptr<const ValidPathInfo> info)
{
to << (info->deriver ? store->printStorePath(*info->deriver) : "")
<< info->narHash.to_string(Base16, false);
worker_proto::write(*store, to, info->references);
to << info->registrationTime << info->narSize;
if (GET_PROTOCOL_MINOR(clientVersion) >= 16) {
to << info->ultimate
<< info->sigs
<< renderContentAddress(info->ca);
}
}
static void performOp(TunnelLogger * logger, ref<Store> store, static void performOp(TunnelLogger * logger, ref<Store> store,
TrustedFlag trusted, RecursiveFlag recursive, unsigned int clientVersion, TrustedFlag trusted, RecursiveFlag recursive, unsigned int clientVersion,
Source & from, BufferedSink & to, unsigned int op) Source & from, BufferedSink & to, unsigned int op)
@ -256,11 +275,11 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
} }
case wopQueryValidPaths: { case wopQueryValidPaths: {
auto paths = readStorePaths<StorePathSet>(*store, from); auto paths = worker_proto::read(*store, from, Phantom<StorePathSet> {});
logger->startWork(); logger->startWork();
auto res = store->queryValidPaths(paths); auto res = store->queryValidPaths(paths);
logger->stopWork(); logger->stopWork();
writeStorePaths(*store, to, res); worker_proto::write(*store, to, res);
break; break;
} }
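
The readStorePaths/writeStorePaths helpers are replaced by worker_proto::read/write, where the Phantom argument carries only a type so the right overload can be selected per result type (C++ cannot overload on return type alone). A self-contained sketch of that phantom-tag dispatch idea, simplified: no Store argument and a text format instead of the binary wire protocol:

```cpp
#include <iostream>
#include <set>
#include <sstream>
#include <string>

// The tag type carries no data, only the requested result type.
template<typename T> struct Phantom {};

std::string read(std::istream & in, Phantom<std::string>)
{
    std::string s;
    in >> s;
    return s;
}

std::set<std::string> read(std::istream & in, Phantom<std::set<std::string>>)
{
    unsigned n;
    in >> n;
    std::set<std::string> res;
    while (n--) res.insert(read(in, Phantom<std::string>{}));
    return res;
}

int main()
{
    std::istringstream in("2 foo bar");
    for (auto & p : read(in, Phantom<std::set<std::string>>{}))
        std::cout << p << "\n"; // bar, foo
}
```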
@ -276,11 +295,11 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
} }
case wopQuerySubstitutablePaths: { case wopQuerySubstitutablePaths: {
auto paths = readStorePaths<StorePathSet>(*store, from); auto paths = worker_proto::read(*store, from, Phantom<StorePathSet> {});
logger->startWork(); logger->startWork();
auto res = store->querySubstitutablePaths(paths); auto res = store->querySubstitutablePaths(paths);
logger->stopWork(); logger->stopWork();
writeStorePaths(*store, to, res); worker_proto::write(*store, to, res);
break; break;
} }
@ -309,7 +328,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
paths = store->queryValidDerivers(path); paths = store->queryValidDerivers(path);
else paths = store->queryDerivationOutputs(path); else paths = store->queryDerivationOutputs(path);
logger->stopWork(); logger->stopWork();
writeStorePaths(*store, to, paths); worker_proto::write(*store, to, paths);
break; break;
} }
@ -350,54 +369,90 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
} }
case wopAddToStore: { case wopAddToStore: {
HashType hashAlgo; if (GET_PROTOCOL_MINOR(clientVersion) >= 25) {
std::string baseName; auto name = readString(from);
FileIngestionMethod method; auto camStr = readString(from);
{ auto refs = worker_proto::read(*store, from, Phantom<StorePathSet> {});
bool fixed; bool repairBool;
uint8_t recursive; from >> repairBool;
std::string hashAlgoRaw; auto repair = RepairFlag{repairBool};
from >> baseName >> fixed /* obsolete */ >> recursive >> hashAlgoRaw;
if (recursive > (uint8_t) FileIngestionMethod::Recursive) logger->startWork();
throw Error("unsupported FileIngestionMethod with value of %i; you may need to upgrade nix-daemon", recursive); auto pathInfo = [&]() {
method = FileIngestionMethod { recursive }; // NB: FramedSource must be out of scope before logger->stopWork();
/* Compatibility hack. */ ContentAddressMethod contentAddressMethod = parseContentAddressMethod(camStr);
if (!fixed) { FramedSource source(from);
hashAlgoRaw = "sha256"; // TODO this is essentially RemoteStore::addCAToStore. Move it up to Store.
method = FileIngestionMethod::Recursive; return std::visit(overloaded {
[&](TextHashMethod &_) {
// We could stream this by changing Store
std::string contents = source.drain();
auto path = store->addTextToStore(name, contents, refs, repair);
return store->queryPathInfo(path);
},
[&](FixedOutputHashMethod &fohm) {
if (!refs.empty())
throw UnimplementedError("cannot yet have refs with flat or nar-hashed data");
auto path = store->addToStoreFromDump(source, name, fohm.fileIngestionMethod, fohm.hashType, repair);
return store->queryPathInfo(path);
},
}, contentAddressMethod);
}();
logger->stopWork();
to << store->printStorePath(pathInfo->path);
writeValidPathInfo(store, clientVersion, to, pathInfo);
} else {
HashType hashAlgo;
std::string baseName;
FileIngestionMethod method;
{
bool fixed;
uint8_t recursive;
std::string hashAlgoRaw;
from >> baseName >> fixed /* obsolete */ >> recursive >> hashAlgoRaw;
if (recursive > (uint8_t) FileIngestionMethod::Recursive)
throw Error("unsupported FileIngestionMethod with value of %i; you may need to upgrade nix-daemon", recursive);
method = FileIngestionMethod { recursive };
/* Compatibility hack. */
if (!fixed) {
hashAlgoRaw = "sha256";
method = FileIngestionMethod::Recursive;
}
hashAlgo = parseHashType(hashAlgoRaw);
} }
hashAlgo = parseHashType(hashAlgoRaw);
StringSink saved;
TeeSource savedNARSource(from, saved);
RetrieveRegularNARSink savedRegular { saved };
if (method == FileIngestionMethod::Recursive) {
/* Get the entire NAR dump from the client and save it to
a string so that we can pass it to
addToStoreFromDump(). */
ParseSink sink; /* null sink; just parse the NAR */
parseDump(sink, savedNARSource);
} else
parseDump(savedRegular, from);
logger->startWork();
if (!savedRegular.regular) throw Error("regular file expected");
// FIXME: try to stream directly from `from`.
StringSource dumpSource { *saved.s };
auto path = store->addToStoreFromDump(dumpSource, baseName, method, hashAlgo);
logger->stopWork();
to << store->printStorePath(path);
} }
StringSink saved;
TeeSource savedNARSource(from, saved);
RetrieveRegularNARSink savedRegular { saved };
if (method == FileIngestionMethod::Recursive) {
/* Get the entire NAR dump from the client and save it to
a string so that we can pass it to
addToStoreFromDump(). */
ParseSink sink; /* null sink; just parse the NAR */
parseDump(sink, savedNARSource);
} else
parseDump(savedRegular, from);
logger->startWork();
if (!savedRegular.regular) throw Error("regular file expected");
// FIXME: try to stream directly from `from`.
StringSource dumpSource { *saved.s };
auto path = store->addToStoreFromDump(dumpSource, baseName, method, hashAlgo);
logger->stopWork();
to << store->printStorePath(path);
break; break;
} }
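
For clients with protocol minor version 25 or later, the payload is wrapped in a FramedSource; its wire format is visible in the old in-daemon implementation removed further down: a sequence of (length, data) chunks terminated by a zero-length chunk. A standalone sketch of that framing; real Nix writes the lengths with the protocol's readInt/writeInt integer encoding rather than the text form used here:

```cpp
#include <cstddef>
#include <iostream>
#include <sstream>
#include <string>

void writeFramed(std::ostream & out, const std::string & payload, size_t chunkSize = 4)
{
    for (size_t i = 0; i < payload.size(); i += chunkSize) {
        auto chunk = payload.substr(i, chunkSize);
        out << chunk.size() << ' ' << chunk;   // (length, data) chunk
    }
    out << 0 << ' ';                           // zero-length terminator
}

std::string readFramed(std::istream & in)
{
    std::string res;
    while (true) {
        size_t len;
        in >> len;
        in.get();                              // consume the separator
        if (len == 0) break;                   // end of framed stream
        std::string chunk(len, '\0');
        in.read(chunk.data(), len);
        res += chunk;
    }
    return res;
}

int main()
{
    std::stringstream wire;
    writeFramed(wire, "nar contents here");
    std::cout << readFramed(wire) << "\n";     // nar contents here
}
```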
case wopAddTextToStore: { case wopAddTextToStore: {
string suffix = readString(from); string suffix = readString(from);
string s = readString(from); string s = readString(from);
auto refs = readStorePaths<StorePathSet>(*store, from); auto refs = worker_proto::read(*store, from, Phantom<StorePathSet> {});
logger->startWork(); logger->startWork();
auto path = store->addTextToStore(suffix, s, refs, NoRepair); auto path = store->addTextToStore(suffix, s, refs, NoRepair);
logger->stopWork(); logger->stopWork();
@ -494,6 +549,20 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
are in fact content-addressed if we don't trust them. */ are in fact content-addressed if we don't trust them. */
assert(derivationIsCA(drv.type()) || trusted); assert(derivationIsCA(drv.type()) || trusted);
/* Recompute the derivation path when we cannot trust the original. */
if (!trusted) {
/* Recomputing the derivation path for input-addressed derivations
makes it harder to audit them after the fact, since we need the
original not-necessarily-resolved derivation to verify the drv
derivation as an adequate claim to the input-addressed output
paths. */
assert(derivationIsCA(drv.type()));
Derivation drv2;
static_cast<BasicDerivation &>(drv2) = drv;
drvPath = writeDerivation(*store, Derivation { drv2 });
}
auto res = store->buildDerivation(drvPath, drv, buildMode); auto res = store->buildDerivation(drvPath, drv, buildMode);
logger->stopWork(); logger->stopWork();
to << res.status << res.errorMsg; to << res.status << res.errorMsg;
@ -556,7 +625,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
case wopCollectGarbage: { case wopCollectGarbage: {
GCOptions options; GCOptions options;
options.action = (GCOptions::GCAction) readInt(from); options.action = (GCOptions::GCAction) readInt(from);
options.pathsToDelete = readStorePaths<StorePathSet>(*store, from); options.pathsToDelete = worker_proto::read(*store, from, Phantom<StorePathSet> {});
from >> options.ignoreLiveness >> options.maxFreed; from >> options.ignoreLiveness >> options.maxFreed;
// obsolete fields // obsolete fields
readInt(from); readInt(from);
@ -625,7 +694,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
else { else {
to << 1 to << 1
<< (i->second.deriver ? store->printStorePath(*i->second.deriver) : ""); << (i->second.deriver ? store->printStorePath(*i->second.deriver) : "");
writeStorePaths(*store, to, i->second.references); worker_proto::write(*store, to, i->second.references);
to << i->second.downloadSize to << i->second.downloadSize
<< i->second.narSize; << i->second.narSize;
} }
@ -636,11 +705,11 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
SubstitutablePathInfos infos; SubstitutablePathInfos infos;
StorePathCAMap pathsMap = {}; StorePathCAMap pathsMap = {};
if (GET_PROTOCOL_MINOR(clientVersion) < 22) { if (GET_PROTOCOL_MINOR(clientVersion) < 22) {
auto paths = readStorePaths<StorePathSet>(*store, from); auto paths = worker_proto::read(*store, from, Phantom<StorePathSet> {});
for (auto & path : paths) for (auto & path : paths)
pathsMap.emplace(path, std::nullopt); pathsMap.emplace(path, std::nullopt);
} else } else
pathsMap = readStorePathCAMap(*store, from); pathsMap = worker_proto::read(*store, from, Phantom<StorePathCAMap> {});
logger->startWork(); logger->startWork();
store->querySubstitutablePathInfos(pathsMap, infos); store->querySubstitutablePathInfos(pathsMap, infos);
logger->stopWork(); logger->stopWork();
@ -648,7 +717,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
for (auto & i : infos) { for (auto & i : infos) {
to << store->printStorePath(i.first) to << store->printStorePath(i.first)
<< (i.second.deriver ? store->printStorePath(*i.second.deriver) : ""); << (i.second.deriver ? store->printStorePath(*i.second.deriver) : "");
writeStorePaths(*store, to, i.second.references); worker_proto::write(*store, to, i.second.references);
to << i.second.downloadSize << i.second.narSize; to << i.second.downloadSize << i.second.narSize;
} }
break; break;
@ -658,7 +727,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
logger->startWork(); logger->startWork();
auto paths = store->queryAllValidPaths(); auto paths = store->queryAllValidPaths();
logger->stopWork(); logger->stopWork();
writeStorePaths(*store, to, paths); worker_proto::write(*store, to, paths);
break; break;
} }
@ -675,15 +744,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
if (info) { if (info) {
if (GET_PROTOCOL_MINOR(clientVersion) >= 17) if (GET_PROTOCOL_MINOR(clientVersion) >= 17)
to << 1; to << 1;
to << (info->deriver ? store->printStorePath(*info->deriver) : "") writeValidPathInfo(store, clientVersion, to, info);
<< info->narHash.to_string(Base16, false);
writeStorePaths(*store, to, info->references);
to << info->registrationTime << info->narSize;
if (GET_PROTOCOL_MINOR(clientVersion) >= 16) {
to << info->ultimate
<< info->sigs
<< renderContentAddress(info->ca);
}
} else { } else {
assert(GET_PROTOCOL_MINOR(clientVersion) >= 17); assert(GET_PROTOCOL_MINOR(clientVersion) >= 17);
to << 0; to << 0;
@ -738,7 +799,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
ValidPathInfo info { path, narHash }; ValidPathInfo info { path, narHash };
if (deriver != "") if (deriver != "")
info.deriver = store->parseStorePath(deriver); info.deriver = store->parseStorePath(deriver);
info.references = readStorePaths<StorePathSet>(*store, from); info.references = worker_proto::read(*store, from, Phantom<StorePathSet> {});
from >> info.registrationTime >> info.narSize >> info.ultimate; from >> info.registrationTime >> info.narSize >> info.ultimate;
info.sigs = readStrings<StringSet>(from); info.sigs = readStrings<StringSet>(from);
info.ca = parseContentAddressOpt(readString(from)); info.ca = parseContentAddressOpt(readString(from));
@ -749,59 +810,12 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
info.ultimate = false; info.ultimate = false;
if (GET_PROTOCOL_MINOR(clientVersion) >= 23) { if (GET_PROTOCOL_MINOR(clientVersion) >= 23) {
struct FramedSource : Source
{
Source & from;
bool eof = false;
std::vector<unsigned char> pending;
size_t pos = 0;
FramedSource(Source & from) : from(from)
{ }
~FramedSource()
{
if (!eof) {
while (true) {
auto n = readInt(from);
if (!n) break;
std::vector<unsigned char> data(n);
from(data.data(), n);
}
}
}
size_t read(unsigned char * data, size_t len) override
{
if (eof) throw EndOfFile("reached end of FramedSource");
if (pos >= pending.size()) {
size_t len = readInt(from);
if (!len) {
eof = true;
return 0;
}
pending = std::vector<unsigned char>(len);
pos = 0;
from(pending.data(), len);
}
auto n = std::min(len, pending.size() - pos);
memcpy(data, pending.data() + pos, n);
pos += n;
return n;
}
};
logger->startWork(); logger->startWork();
{ {
FramedSource source(from); FramedSource source(from);
store->addToStore(info, source, (RepairFlag) repair, store->addToStore(info, source, (RepairFlag) repair,
dontCheckSigs ? NoCheckSigs : CheckSigs); dontCheckSigs ? NoCheckSigs : CheckSigs);
} }
logger->stopWork(); logger->stopWork();
} }
@ -838,9 +852,9 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
uint64_t downloadSize, narSize; uint64_t downloadSize, narSize;
store->queryMissing(targets, willBuild, willSubstitute, unknown, downloadSize, narSize); store->queryMissing(targets, willBuild, willSubstitute, unknown, downloadSize, narSize);
logger->stopWork(); logger->stopWork();
writeStorePaths(*store, to, willBuild); worker_proto::write(*store, to, willBuild);
writeStorePaths(*store, to, willSubstitute); worker_proto::write(*store, to, willSubstitute);
writeStorePaths(*store, to, unknown); worker_proto::write(*store, to, unknown);
to << downloadSize << narSize; to << downloadSize << narSize;
break; break;
} }
@ -924,10 +938,11 @@ void processConnection(
during addTextToStore() / importPath(). If that during addTextToStore() / importPath(). If that
happens, just send the error message and exit. */ happens, just send the error message and exit. */
bool errorAllowed = tunnelLogger->state_.lock()->canSendStderr; bool errorAllowed = tunnelLogger->state_.lock()->canSendStderr;
tunnelLogger->stopWork(false, e.msg(), e.status); tunnelLogger->stopWork(&e);
if (!errorAllowed) throw; if (!errorAllowed) throw;
} catch (std::bad_alloc & e) { } catch (std::bad_alloc & e) {
tunnelLogger->stopWork(false, "Nix daemon out of memory", 1); auto ex = Error("Nix daemon out of memory");
tunnelLogger->stopWork(&ex);
throw; throw;
} }
@ -936,8 +951,13 @@ void processConnection(
assert(!tunnelLogger->state_.lock()->canSendStderr); assert(!tunnelLogger->state_.lock()->canSendStderr);
}; };
} catch (Error & e) {
tunnelLogger->stopWork(&e);
to.flush();
return;
} catch (std::exception & e) { } catch (std::exception & e) {
tunnelLogger->stopWork(false, e.what(), 1); auto ex = Error(e.what());
tunnelLogger->stopWork(&ex);
to.flush(); to.flush();
return; return;
} }

View file

@ -1,3 +1,5 @@
#pragma once
#include "serialise.hh" #include "serialise.hh"
#include "store-api.hh" #include "store-api.hh"

View file

@ -7,7 +7,7 @@
namespace nix { namespace nix {
std::optional<StorePath> DerivationOutput::pathOpt(const Store & store, std::string_view drvName) const std::optional<StorePath> DerivationOutput::path(const Store & store, std::string_view drvName, std::string_view outputName) const
{ {
return std::visit(overloaded { return std::visit(overloaded {
[](DerivationOutputInputAddressed doi) -> std::optional<StorePath> { [](DerivationOutputInputAddressed doi) -> std::optional<StorePath> {
@ -15,7 +15,7 @@ std::optional<StorePath> DerivationOutput::pathOpt(const Store & store, std::str
}, },
[&](DerivationOutputCAFixed dof) -> std::optional<StorePath> { [&](DerivationOutputCAFixed dof) -> std::optional<StorePath> {
return { return {
store.makeFixedOutputPath(dof.hash.method, dof.hash.hash, drvName) dof.path(store, drvName, outputName)
}; };
}, },
[](DerivationOutputCAFloating dof) -> std::optional<StorePath> { [](DerivationOutputCAFloating dof) -> std::optional<StorePath> {
@ -25,6 +25,13 @@ std::optional<StorePath> DerivationOutput::pathOpt(const Store & store, std::str
} }
StorePath DerivationOutputCAFixed::path(const Store & store, std::string_view drvName, std::string_view outputName) const {
return store.makeFixedOutputPath(
hash.method, hash.hash,
outputPathName(drvName, outputName));
}
bool derivationIsCA(DerivationType dt) { bool derivationIsCA(DerivationType dt) {
switch (dt) { switch (dt) {
case DerivationType::InputAddressed: return false; case DerivationType::InputAddressed: return false;
@ -62,7 +69,7 @@ bool BasicDerivation::isBuiltin() const
StorePath writeDerivation(Store & store, StorePath writeDerivation(Store & store,
const Derivation & drv, RepairFlag repair) const Derivation & drv, RepairFlag repair, bool readOnly)
{ {
auto references = drv.inputSrcs; auto references = drv.inputSrcs;
for (auto & i : drv.inputDrvs) for (auto & i : drv.inputDrvs)
@ -72,7 +79,7 @@ StorePath writeDerivation(Store & store,
held during a garbage collection). */ held during a garbage collection). */
auto suffix = std::string(drv.name) + drvExtension; auto suffix = std::string(drv.name) + drvExtension;
auto contents = drv.unparse(store, false); auto contents = drv.unparse(store, false);
return settings.readOnlyMode return readOnly || settings.readOnlyMode
? store.computeStorePathForText(suffix, contents, references) ? store.computeStorePathForText(suffix, contents, references)
: store.addTextToStore(suffix, contents, references, repair); : store.addTextToStore(suffix, contents, references, repair);
} }
@ -106,12 +113,15 @@ static string parseString(std::istream & str)
return res; return res;
} }
static void validatePath(std::string_view s) {
if (s.size() == 0 || s[0] != '/')
throw FormatError("bad path '%1%' in derivation", s);
}
static Path parsePath(std::istream & str) static Path parsePath(std::istream & str)
{ {
string s = parseString(str); auto s = parseString(str);
if (s.size() == 0 || s[0] != '/') validatePath(s);
throw FormatError("bad path '%1%' in derivation", s);
return s; return s;
} }
@ -140,7 +150,7 @@ static StringSet parseStrings(std::istream & str, bool arePaths)
static DerivationOutput parseDerivationOutput(const Store & store, static DerivationOutput parseDerivationOutput(const Store & store,
StorePath path, std::string_view hashAlgo, std::string_view hash) std::string_view pathS, std::string_view hashAlgo, std::string_view hash)
{ {
if (hashAlgo != "") { if (hashAlgo != "") {
auto method = FileIngestionMethod::Flat; auto method = FileIngestionMethod::Flat;
@ -148,40 +158,45 @@ static DerivationOutput parseDerivationOutput(const Store & store,
method = FileIngestionMethod::Recursive; method = FileIngestionMethod::Recursive;
hashAlgo = hashAlgo.substr(2); hashAlgo = hashAlgo.substr(2);
} }
const HashType hashType = parseHashType(hashAlgo); const auto hashType = parseHashType(hashAlgo);
if (hash != "") {
return hash != "" validatePath(pathS);
? DerivationOutput { return DerivationOutput {
.output = DerivationOutputCAFixed { .output = DerivationOutputCAFixed {
.hash = FixedOutputHash { .hash = FixedOutputHash {
.method = std::move(method), .method = std::move(method),
.hash = Hash::parseNonSRIUnprefixed(hash, hashType), .hash = Hash::parseNonSRIUnprefixed(hash, hashType),
}, },
} },
} };
: (settings.requireExperimentalFeature("ca-derivations"), } else {
DerivationOutput { settings.requireExperimentalFeature("ca-derivations");
.output = DerivationOutputCAFloating { assert(pathS == "");
.method = std::move(method), return DerivationOutput {
.hashType = std::move(hashType), .output = DerivationOutputCAFloating {
}, .method = std::move(method),
}); .hashType = std::move(hashType),
} else },
};
}
} else {
validatePath(pathS);
return DerivationOutput { return DerivationOutput {
.output = DerivationOutputInputAddressed { .output = DerivationOutputInputAddressed {
.path = std::move(path), .path = store.parseStorePath(pathS),
} }
}; };
}
} }
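
The branches above key off which fields of the output tuple are filled in; roughly, the three shapes look like this in a .drv file (the path and hash values are abbreviated and purely illustrative):

```cpp
// ("out","/nix/store/<hash>-hello","","")               -> DerivationOutputInputAddressed
// ("out","/nix/store/<hash>-src","r:sha256","<hash>")   -> DerivationOutputCAFixed
// ("out","","r:sha256","")                              -> DerivationOutputCAFloating
//                                                          (requires the experimental
//                                                           ca-derivations feature)
```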
static DerivationOutput parseDerivationOutput(const Store & store, std::istringstream & str) static DerivationOutput parseDerivationOutput(const Store & store, std::istringstream & str)
{ {
expect(str, ","); auto path = store.parseStorePath(parsePath(str)); expect(str, ","); const auto pathS = parseString(str);
expect(str, ","); const auto hashAlgo = parseString(str); expect(str, ","); const auto hashAlgo = parseString(str);
expect(str, ","); const auto hash = parseString(str); expect(str, ","); const auto hash = parseString(str);
expect(str, ")"); expect(str, ")");
return parseDerivationOutput(store, std::move(path), hashAlgo, hash); return parseDerivationOutput(store, pathS, hashAlgo, hash);
} }
@ -294,17 +309,19 @@ string Derivation::unparse(const Store & store, bool maskOutputs,
for (auto & i : outputs) { for (auto & i : outputs) {
if (first) first = false; else s += ','; if (first) first = false; else s += ',';
s += '('; printUnquotedString(s, i.first); s += '('; printUnquotedString(s, i.first);
s += ','; printUnquotedString(s, maskOutputs ? "" : store.printStorePath(i.second.path(store, name)));
std::visit(overloaded { std::visit(overloaded {
[&](DerivationOutputInputAddressed doi) { [&](DerivationOutputInputAddressed doi) {
s += ','; printUnquotedString(s, maskOutputs ? "" : store.printStorePath(doi.path));
s += ','; printUnquotedString(s, ""); s += ','; printUnquotedString(s, "");
s += ','; printUnquotedString(s, ""); s += ','; printUnquotedString(s, "");
}, },
[&](DerivationOutputCAFixed dof) { [&](DerivationOutputCAFixed dof) {
s += ','; printUnquotedString(s, maskOutputs ? "" : store.printStorePath(dof.path(store, name, i.first)));
s += ','; printUnquotedString(s, dof.hash.printMethodAlgo()); s += ','; printUnquotedString(s, dof.hash.printMethodAlgo());
s += ','; printUnquotedString(s, dof.hash.hash.to_string(Base16, false)); s += ','; printUnquotedString(s, dof.hash.hash.to_string(Base16, false));
}, },
[&](DerivationOutputCAFloating dof) { [&](DerivationOutputCAFloating dof) {
s += ','; printUnquotedString(s, "");
s += ','; printUnquotedString(s, makeFileIngestionPrefix(dof.method) + printHashType(dof.hashType)); s += ','; printUnquotedString(s, makeFileIngestionPrefix(dof.method) + printHashType(dof.hashType));
s += ','; printUnquotedString(s, ""); s += ','; printUnquotedString(s, "");
}, },
@ -360,6 +377,16 @@ bool isDerivation(const string & fileName)
} }
std::string outputPathName(std::string_view drvName, std::string_view outputName) {
std::string res { drvName };
if (outputName != "out") {
res += "-";
res += outputName;
}
return res;
}
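For reference, here is a minimal stand-alone sketch of the naming rule that `outputPathName` above implements; the derivation name `hello-2.10` and the `dev` output are hypothetical values chosen only for illustration.

```cpp
#include <cassert>
#include <string>
#include <string_view>

// Stand-alone copy of the rule implemented by outputPathName() above,
// for illustration only.
static std::string outputPathName(std::string_view drvName, std::string_view outputName)
{
    std::string res { drvName };
    if (outputName != "out") {
        res += "-";
        res += outputName;
    }
    return res;
}

int main()
{
    assert(outputPathName("hello-2.10", "out") == "hello-2.10");     // default output: no suffix
    assert(outputPathName("hello-2.10", "dev") == "hello-2.10-dev"); // named output: "-<name>" suffix
}
```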
DerivationType BasicDerivation::type() const DerivationType BasicDerivation::type() const
{ {
std::set<std::string_view> inputAddressedOutputs, fixedCAOutputs, floatingCAOutputs; std::set<std::string_view> inputAddressedOutputs, fixedCAOutputs, floatingCAOutputs;
@ -452,12 +479,12 @@ DrvHashModulo hashDerivationModulo(Store & store, const Derivation & drv, bool m
throw Error("Regular input-addressed derivations are not yet allowed to depend on CA derivations"); throw Error("Regular input-addressed derivations are not yet allowed to depend on CA derivations");
case DerivationType::CAFixed: { case DerivationType::CAFixed: {
std::map<std::string, Hash> outputHashes; std::map<std::string, Hash> outputHashes;
for (const auto & i : drv.outputsAndPaths(store)) { for (const auto & i : drv.outputs) {
auto & dof = std::get<DerivationOutputCAFixed>(i.second.first.output); auto & dof = std::get<DerivationOutputCAFixed>(i.second.output);
auto hash = hashString(htSHA256, "fixed:out:" auto hash = hashString(htSHA256, "fixed:out:"
+ dof.hash.printMethodAlgo() + ":" + dof.hash.printMethodAlgo() + ":"
+ dof.hash.hash.to_string(Base16, false) + ":" + dof.hash.hash.to_string(Base16, false) + ":"
+ store.printStorePath(i.second.second)); + store.printStorePath(dof.path(store, drv.name, i.first)));
outputHashes.insert_or_assign(i.first, std::move(hash)); outputHashes.insert_or_assign(i.first, std::move(hash));
} }
return outputHashes; return outputHashes;
@ -508,21 +535,13 @@ bool wantOutput(const string & output, const std::set<string> & wanted)
} }
StorePathSet BasicDerivation::outputPaths(const Store & store) const
{
StorePathSet paths;
for (auto & i : outputsAndPaths(store))
paths.insert(i.second.second);
return paths;
}
static DerivationOutput readDerivationOutput(Source & in, const Store & store) static DerivationOutput readDerivationOutput(Source & in, const Store & store)
{ {
auto path = store.parseStorePath(readString(in)); const auto pathS = readString(in);
const auto hashAlgo = readString(in); const auto hashAlgo = readString(in);
const auto hash = readString(in); const auto hash = readString(in);
return parseDerivationOutput(store, std::move(path), hashAlgo, hash); return parseDerivationOutput(store, pathS, hashAlgo, hash);
} }
StringSet BasicDerivation::outputNames() const StringSet BasicDerivation::outputNames() const
@ -533,23 +552,12 @@ StringSet BasicDerivation::outputNames() const
return names; return names;
} }
DerivationOutputsAndPaths BasicDerivation::outputsAndPaths(const Store & store) const {
DerivationOutputsAndPaths outsAndPaths;
for (auto output : outputs)
outsAndPaths.insert(std::make_pair(
output.first,
std::make_pair(output.second, output.second.path(store, name))
)
);
return outsAndPaths;
}
DerivationOutputsAndOptPaths BasicDerivation::outputsAndOptPaths(const Store & store) const { DerivationOutputsAndOptPaths BasicDerivation::outputsAndOptPaths(const Store & store) const {
DerivationOutputsAndOptPaths outsAndOptPaths; DerivationOutputsAndOptPaths outsAndOptPaths;
for (auto output : outputs) for (auto output : outputs)
outsAndOptPaths.insert(std::make_pair( outsAndOptPaths.insert(std::make_pair(
output.first, output.first,
std::make_pair(output.second, output.second.pathOpt(store, output.first)) std::make_pair(output.second, output.second.path(store, name, output.first))
) )
); );
return outsAndOptPaths; return outsAndOptPaths;
@ -576,7 +584,7 @@ Source & readDerivation(Source & in, const Store & store, BasicDerivation & drv,
drv.outputs.emplace(std::move(name), std::move(output)); drv.outputs.emplace(std::move(name), std::move(output));
} }
drv.inputSrcs = readStorePaths<StorePathSet>(store, in); drv.inputSrcs = worker_proto::read(store, in, Phantom<StorePathSet> {});
in >> drv.platform >> drv.builder; in >> drv.platform >> drv.builder;
drv.args = readStrings<Strings>(in); drv.args = readStrings<Strings>(in);
@ -594,24 +602,27 @@ Source & readDerivation(Source & in, const Store & store, BasicDerivation & drv,
void writeDerivation(Sink & out, const Store & store, const BasicDerivation & drv) void writeDerivation(Sink & out, const Store & store, const BasicDerivation & drv)
{ {
out << drv.outputs.size(); out << drv.outputs.size();
for (auto & i : drv.outputsAndPaths(store)) { for (auto & i : drv.outputs) {
out << i.first out << i.first;
<< store.printStorePath(i.second.second);
std::visit(overloaded { std::visit(overloaded {
[&](DerivationOutputInputAddressed doi) { [&](DerivationOutputInputAddressed doi) {
out << "" << ""; out << store.printStorePath(doi.path)
<< ""
<< "";
}, },
[&](DerivationOutputCAFixed dof) { [&](DerivationOutputCAFixed dof) {
out << dof.hash.printMethodAlgo() out << store.printStorePath(dof.path(store, drv.name, i.first))
<< dof.hash.printMethodAlgo()
<< dof.hash.hash.to_string(Base16, false); << dof.hash.hash.to_string(Base16, false);
}, },
[&](DerivationOutputCAFloating dof) { [&](DerivationOutputCAFloating dof) {
out << (makeFileIngestionPrefix(dof.method) + printHashType(dof.hashType)) out << ""
<< (makeFileIngestionPrefix(dof.method) + printHashType(dof.hashType))
<< ""; << "";
}, },
}, i.second.first.output); }, i.second.output);
} }
writeStorePaths(store, out, drv.inputSrcs); worker_proto::write(store, out, drv.inputSrcs);
out << drv.platform << drv.builder << drv.args; out << drv.platform << drv.builder << drv.args;
out << drv.env.size(); out << drv.env.size();
for (auto & i : drv.env) for (auto & i : drv.env)
@ -625,5 +636,65 @@ std::string hashPlaceholder(const std::string & outputName)
return "/" + hashString(htSHA256, "nix-output:" + outputName).to_string(Base32, false); return "/" + hashString(htSHA256, "nix-output:" + outputName).to_string(Base32, false);
} }
std::string downstreamPlaceholder(const Store & store, const StorePath & drvPath, std::string_view outputName)
{
auto drvNameWithExtension = drvPath.name();
auto drvName = drvNameWithExtension.substr(0, drvNameWithExtension.size() - 4);
auto clearText = "nix-upstream-output:" + std::string { drvPath.hashPart() } + ":" + outputPathName(drvName, outputName);
return "/" + hashString(htSHA256, clearText).to_string(Base32, false);
}
// N.B. Outputs are left unchanged
static void rewriteDerivation(Store & store, BasicDerivation & drv, const StringMap & rewrites) {
debug("Rewriting the derivation");
for (auto &rewrite: rewrites) {
debug("rewriting %s as %s", rewrite.first, rewrite.second);
}
drv.builder = rewriteStrings(drv.builder, rewrites);
for (auto & arg: drv.args) {
arg = rewriteStrings(arg, rewrites);
}
StringPairs newEnv;
for (auto & envVar: drv.env) {
auto envName = rewriteStrings(envVar.first, rewrites);
auto envValue = rewriteStrings(envVar.second, rewrites);
newEnv.emplace(envName, envValue);
}
drv.env = newEnv;
}
Sync<DrvPathResolutions> drvPathResolutions;
std::optional<BasicDerivation> Derivation::tryResolve(Store & store) {
BasicDerivation resolved { *this };
// Input paths that we'll want to rewrite in the derivation
StringMap inputRewrites;
for (auto & input : inputDrvs) {
auto inputDrvOutputs = store.queryPartialDerivationOutputMap(input.first);
StringSet newOutputNames;
for (auto & outputName : input.second) {
auto actualPathOpt = inputDrvOutputs.at(outputName);
if (!actualPathOpt)
return std::nullopt;
auto actualPath = *actualPathOpt;
inputRewrites.emplace(
downstreamPlaceholder(store, input.first, outputName),
store.printStorePath(actualPath));
resolved.inputSrcs.insert(std::move(actualPath));
}
}
rewriteDerivation(store, resolved, inputRewrites);
return resolved;
}
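A hedged sketch of how a caller might combine `tryResolve` with the `drvPathResolutions` memo table, mirroring the logic added to `LocalStore::queryPartialDerivationOutputMap` later in this commit; the identifiers `store`, `drv`, and `drvPath` are assumed to exist in the caller's scope.

```cpp
// Sketch only: `store` (a Store &), `drv` (a Derivation) and `drvPath`
// (its StorePath) are assumed to be in scope.
if (auto resolved = drv.tryResolve(store)) {
    // All input placeholders now refer to concrete store paths, so the
    // resolved derivation's store path can be computed without writing it.
    auto resolvedPath = writeDerivation(store, *resolved, NoRepair, true /* readOnly */);
    // Memoise the resolution for later queries.
    drvPathResolutions.lock()->insert_or_assign(drvPath, resolvedPath);
} else {
    // Some input is an unbuilt CA derivation, so its output paths are not yet known.
}
```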
} }
View file
@ -4,6 +4,7 @@
#include "types.hh" #include "types.hh"
#include "hash.hh" #include "hash.hh"
#include "content-address.hh" #include "content-address.hh"
#include "sync.hh"
#include <map> #include <map>
#include <variant> #include <variant>
@ -27,6 +28,7 @@ struct DerivationOutputInputAddressed
struct DerivationOutputCAFixed struct DerivationOutputCAFixed
{ {
FixedOutputHash hash; /* hash used for expected hash computation */ FixedOutputHash hash; /* hash used for expected hash computation */
StorePath path(const Store & store, std::string_view drvName, std::string_view outputName) const;
}; };
/* Floating-output derivations, whose output paths are content addressed, but /* Floating-output derivations, whose output paths are content addressed, but
@ -49,14 +51,8 @@ struct DerivationOutput
std::optional<HashType> hashAlgoOpt(const Store & store) const; std::optional<HashType> hashAlgoOpt(const Store & store) const;
/* Note, when you use this function you should make sure that you're passing /* Note, when you use this function you should make sure that you're passing
the right derivation name. When in doubt, you should use the safer the right derivation name. When in doubt, you should use the safer
interface provided by BasicDerivation::outputsAndPaths */ interface provided by BasicDerivation::outputsAndOptPaths */
std::optional<StorePath> pathOpt(const Store & store, std::string_view drvName) const; std::optional<StorePath> path(const Store & store, std::string_view drvName, std::string_view outputName) const;
/* DEPRECATED: Remove after CA drvs are fully implemented */
StorePath path(const Store & store, std::string_view drvName) const {
auto p = pathOpt(store, drvName);
if (!p) throw UnimplementedError("floating content-addressed derivations are not yet implemented");
return *p;
}
}; };
typedef std::map<string, DerivationOutput> DerivationOutputs; typedef std::map<string, DerivationOutput> DerivationOutputs;
@ -65,8 +61,6 @@ typedef std::map<string, DerivationOutput> DerivationOutputs;
also contains, for each output, the (optional) store path in which it would also contains, for each output, the (optional) store path in which it would
be written. To calculate values of these types, see the corresponding be written. To calculate values of these types, see the corresponding
functions in BasicDerivation */ functions in BasicDerivation */
typedef std::map<string, std::pair<DerivationOutput, StorePath>>
DerivationOutputsAndPaths;
typedef std::map<string, std::pair<DerivationOutput, std::optional<StorePath>>> typedef std::map<string, std::pair<DerivationOutput, std::optional<StorePath>>>
DerivationOutputsAndOptPaths; DerivationOutputsAndOptPaths;
@ -105,7 +99,7 @@ struct BasicDerivation
StringPairs env; StringPairs env;
std::string name; std::string name;
BasicDerivation() { } BasicDerivation() = default;
virtual ~BasicDerivation() { }; virtual ~BasicDerivation() { };
bool isBuiltin() const; bool isBuiltin() const;
@ -113,17 +107,12 @@ struct BasicDerivation
/* Return true iff this is a fixed-output derivation. */ /* Return true iff this is a fixed-output derivation. */
DerivationType type() const; DerivationType type() const;
/* Return the output paths of a derivation. */
StorePathSet outputPaths(const Store & store) const;
/* Return the output names of a derivation. */ /* Return the output names of a derivation. */
StringSet outputNames() const; StringSet outputNames() const;
/* Calculates the maps that contains all the DerivationOutputs, but /* Calculates the maps that contains all the DerivationOutputs, but
augmented with knowledge of the Store paths they would be written into. augmented with knowledge of the Store paths they would be written
The first one of these functions will be removed when the CA work is into. */
completed */
DerivationOutputsAndPaths outputsAndPaths(const Store & store) const;
DerivationOutputsAndOptPaths outputsAndOptPaths(const Store & store) const; DerivationOutputsAndOptPaths outputsAndOptPaths(const Store & store) const;
static std::string_view nameFromPath(const StorePath & storePath); static std::string_view nameFromPath(const StorePath & storePath);
@ -137,7 +126,17 @@ struct Derivation : BasicDerivation
std::string unparse(const Store & store, bool maskOutputs, std::string unparse(const Store & store, bool maskOutputs,
std::map<std::string, StringSet> * actualInputs = nullptr) const; std::map<std::string, StringSet> * actualInputs = nullptr) const;
Derivation() { } /* Return the underlying basic derivation but with these changes:
1. Input drvs are emptied, but the outputs of them that were used are
added directly to input sources.
2. Input placeholders are replaced with realized input store paths. */
std::optional<BasicDerivation> tryResolve(Store & store);
Derivation() = default;
Derivation(const BasicDerivation & bd) : BasicDerivation(bd) { }
Derivation(BasicDerivation && bd) : BasicDerivation(std::move(bd)) { }
}; };
@ -147,7 +146,9 @@ enum RepairFlag : bool { NoRepair = false, Repair = true };
/* Write a derivation to the Nix store, and return its path. */ /* Write a derivation to the Nix store, and return its path. */
StorePath writeDerivation(Store & store, StorePath writeDerivation(Store & store,
const Derivation & drv, RepairFlag repair = NoRepair); const Derivation & drv,
RepairFlag repair = NoRepair,
bool readOnly = false);
/* Read a derivation from a file. */ /* Read a derivation from a file. */
Derivation parseDerivation(const Store & store, std::string && s, std::string_view name); Derivation parseDerivation(const Store & store, std::string && s, std::string_view name);
@ -155,6 +156,13 @@ Derivation parseDerivation(const Store & store, std::string && s, std::string_vi
// FIXME: remove // FIXME: remove
bool isDerivation(const string & fileName); bool isDerivation(const string & fileName);
/* Calculate the name that will be used for the store path for this
output.
This is usually <drv-name>-<output-name>, but is just <drv-name> when
the output name is "out". */
std::string outputPathName(std::string_view drvName, std::string_view outputName);
// known CA drv's output hashes, currently just for fixed-output derivations // known CA drv's output hashes, currently just for fixed-output derivations
// whose output hashes are always known since they are fixed up-front. // whose output hashes are always known since they are fixed up-front.
typedef std::map<std::string, Hash> CaOutputHashes; typedef std::map<std::string, Hash> CaOutputHashes;
@ -194,6 +202,16 @@ typedef std::map<StorePath, DrvHashModulo> DrvHashes;
extern DrvHashes drvHashes; // FIXME: global, not thread-safe extern DrvHashes drvHashes; // FIXME: global, not thread-safe
/* Memoisation of `readDerivation(..).tryResolve()`. */
typedef std::map<
StorePath,
std::optional<StorePath>
> DrvPathResolutions;
// FIXME: global, though at least thread-safe.
// FIXME: arguably overlaps with hashDerivationModulo memo table.
extern Sync<DrvPathResolutions> drvPathResolutions;
bool wantOutput(const string & output, const std::set<string> & wanted); bool wantOutput(const string & output, const std::set<string> & wanted);
struct Source; struct Source;
@ -202,6 +220,21 @@ struct Sink;
Source & readDerivation(Source & in, const Store & store, BasicDerivation & drv, std::string_view name); Source & readDerivation(Source & in, const Store & store, BasicDerivation & drv, std::string_view name);
void writeDerivation(Sink & out, const Store & store, const BasicDerivation & drv); void writeDerivation(Sink & out, const Store & store, const BasicDerivation & drv);
/* This creates an opaque and almost certainly unique string
deterministically from the output name.
It is used as a placeholder to allow derivations to refer to their
own outputs without embedding the derivation's own hash in itself,
which would make that hash near-impossible to calculate. */
std::string hashPlaceholder(const std::string & outputName); std::string hashPlaceholder(const std::string & outputName);
/* This creates an opaque and almost certainly unique string
deterministically from a derivation path and output name.
It is used as a placeholder to allow derivations to refer to
content-addressed paths whose content --- and thus the paths
themselves --- isn't yet known. This occurs when a derivation has a
dependency which is a CA derivation. */
std::string downstreamPlaceholder(const Store & store, const StorePath & drvPath, std::string_view outputName);
} }
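A hedged sketch of how the two placeholder functions declared above fit together, based on their comments and on `rewriteDerivation`/`Derivation::tryResolve` in derivations.cc; `store`, `depDrvPath`, and `actualOutPath` are hypothetical values introduced only for illustration.

```cpp
// Sketch only (not part of this commit).
// A derivation that depends on an unbuilt CA derivation refers to that
// dependency's "out" output through an opaque placeholder string:
std::string placeholder = downstreamPlaceholder(store, depDrvPath, "out");

// Once the dependency is built and its real output path is known, the
// placeholder is rewritten to the concrete store path (cf. rewriteDerivation
// and Derivation::tryResolve in derivations.cc):
StringMap rewrites;
rewrites.emplace(placeholder, store.printStorePath(actualOutPath));
std::string concrete = rewriteStrings(placeholder, rewrites); // now the real path
```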
View file
@ -1,18 +1,28 @@
#include "store-api.hh" #include "store-api.hh"
#include "callback.hh"
namespace nix { namespace nix {
static std::string uriScheme = "dummy://"; struct DummyStoreConfig : virtual StoreConfig {
using StoreConfig::StoreConfig;
struct DummyStore : public Store const std::string name() override { return "Dummy Store"; }
};
struct DummyStore : public Store, public virtual DummyStoreConfig
{ {
DummyStore(const std::string scheme, const std::string uri, const Params & params)
: DummyStore(params)
{ }
DummyStore(const Params & params) DummyStore(const Params & params)
: Store(params) : StoreConfig(params)
, Store(params)
{ } { }
string getUri() override string getUri() override
{ {
return uriScheme; return *uriSchemes().begin();
} }
void queryPathInfoUncached(const StorePath & path, void queryPathInfoUncached(const StorePath & path,
@ -21,6 +31,10 @@ struct DummyStore : public Store
callback(nullptr); callback(nullptr);
} }
static std::set<std::string> uriSchemes() {
return {"dummy"};
}
std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override
{ unsupported("queryPathFromHashPart"); } { unsupported("queryPathFromHashPart"); }
@ -48,12 +62,6 @@ struct DummyStore : public Store
{ unsupported("buildDerivation"); } { unsupported("buildDerivation"); }
}; };
static RegisterStoreImplementation regStore([]( static RegisterStoreImplementation<DummyStore, DummyStoreConfig> regDummyStore;
const std::string & uri, const Store::Params & params)
-> std::shared_ptr<Store>
{
if (uri != uriScheme) return nullptr;
return std::make_shared<DummyStore>(params);
});
} }
View file
@ -45,7 +45,7 @@ void Store::exportPath(const StorePath & path, Sink & sink)
teeSink teeSink
<< exportMagic << exportMagic
<< printStorePath(path); << printStorePath(path);
writeStorePaths(*this, teeSink, info->references); worker_proto::write(*this, teeSink, info->references);
teeSink teeSink
<< (info->deriver ? printStorePath(*info->deriver) : "") << (info->deriver ? printStorePath(*info->deriver) : "")
<< 0; << 0;
@ -73,7 +73,7 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs)
//Activity act(*logger, lvlInfo, format("importing path '%s'") % info.path); //Activity act(*logger, lvlInfo, format("importing path '%s'") % info.path);
auto references = readStorePaths<StorePathSet>(*this, source); auto references = worker_proto::read(*this, source, Phantom<StorePathSet> {});
auto deriver = readString(source); auto deriver = readString(source);
auto narHash = hashString(htSHA256, *saved.s); auto narHash = hashString(htSHA256, *saved.s);
View file
@ -5,6 +5,7 @@
#include "s3.hh" #include "s3.hh"
#include "compression.hh" #include "compression.hh"
#include "finally.hh" #include "finally.hh"
#include "callback.hh"
#ifdef ENABLE_S3 #ifdef ENABLE_S3
#include <aws/core/client/ClientConfiguration.h> #include <aws/core/client/ClientConfiguration.h>
@ -30,7 +31,7 @@ namespace nix {
FileTransferSettings fileTransferSettings; FileTransferSettings fileTransferSettings;
static GlobalConfig::Register r1(&fileTransferSettings); static GlobalConfig::Register rFileTransferSettings(&fileTransferSettings);
std::string resolveUri(const std::string & uri) std::string resolveUri(const std::string & uri)
{ {
@ -112,6 +113,9 @@ struct curlFileTransfer : public FileTransfer
requestHeaders = curl_slist_append(requestHeaders, ("If-None-Match: " + request.expectedETag).c_str()); requestHeaders = curl_slist_append(requestHeaders, ("If-None-Match: " + request.expectedETag).c_str());
if (!request.mimeType.empty()) if (!request.mimeType.empty())
requestHeaders = curl_slist_append(requestHeaders, ("Content-Type: " + request.mimeType).c_str()); requestHeaders = curl_slist_append(requestHeaders, ("Content-Type: " + request.mimeType).c_str());
for (auto it = request.headers.begin(); it != request.headers.end(); ++it){
requestHeaders = curl_slist_append(requestHeaders, fmt("%s: %s", it->first, it->second).c_str());
}
} }
~TransferItem() ~TransferItem()
View file
@ -51,6 +51,7 @@ extern FileTransferSettings fileTransferSettings;
struct FileTransferRequest struct FileTransferRequest
{ {
std::string uri; std::string uri;
Headers headers;
std::string expectedETag; std::string expectedETag;
bool verifyTLS = true; bool verifyTLS = true;
bool head = false; bool head = false;
View file
@ -574,9 +574,12 @@ bool LocalStore::canReachRoot(GCState & state, StorePathSet & visited, const Sto
/* If keep-derivations is set and this is a derivation, then /* If keep-derivations is set and this is a derivation, then
don't delete the derivation if any of the outputs are alive. */ don't delete the derivation if any of the outputs are alive. */
if (state.gcKeepDerivations && path.isDerivation()) { if (state.gcKeepDerivations && path.isDerivation()) {
for (auto & i : queryDerivationOutputs(path)) for (auto & [name, maybeOutPath] : queryPartialDerivationOutputMap(path))
if (isValidPath(i) && queryPathInfo(i)->deriver == path) if (maybeOutPath &&
incoming.insert(i); isValidPath(*maybeOutPath) &&
queryPathInfo(*maybeOutPath)->deriver == path
)
incoming.insert(*maybeOutPath);
} }
/* If keep-outputs is set, then don't delete this path if there /* If keep-outputs is set, then don't delete this path if there
@ -660,9 +663,7 @@ void LocalStore::removeUnusedLinks(const GCState & state)
if (name == "." || name == "..") continue; if (name == "." || name == "..") continue;
Path path = linksDir + "/" + name; Path path = linksDir + "/" + name;
struct stat st; auto st = lstat(path);
if (lstat(path.c_str(), &st) == -1)
throw SysError("statting '%1%'", path);
if (st.st_nlink != 1) { if (st.st_nlink != 1) {
actualSize += st.st_size; actualSize += st.st_size;
View file
@ -2,6 +2,7 @@
#include "util.hh" #include "util.hh"
#include "archive.hh" #include "archive.hh"
#include "args.hh" #include "args.hh"
#include "abstract-setting-to-json.hh"
#include <algorithm> #include <algorithm>
#include <map> #include <map>
@ -25,7 +26,7 @@ namespace nix {
Settings settings; Settings settings;
static GlobalConfig::Register r1(&settings); static GlobalConfig::Register rSettings(&settings);
Settings::Settings() Settings::Settings()
: nixPrefix(NIX_PREFIX) : nixPrefix(NIX_PREFIX)
@ -42,6 +43,7 @@ Settings::Settings()
{ {
buildUsersGroup = getuid() == 0 ? "nixbld" : ""; buildUsersGroup = getuid() == 0 ? "nixbld" : "";
lockCPU = getEnv("NIX_AFFINITY_HACK") == "1"; lockCPU = getEnv("NIX_AFFINITY_HACK") == "1";
allowSymlinkedStore = getEnv("NIX_IGNORE_SYMLINK_STORE") == "1";
caFile = getEnv("NIX_SSL_CERT_FILE").value_or(getEnv("SSL_CERT_FILE").value_or("")); caFile = getEnv("NIX_SSL_CERT_FILE").value_or(getEnv("SSL_CERT_FILE").value_or(""));
if (caFile == "") { if (caFile == "") {
@ -147,6 +149,12 @@ bool Settings::isWSL1()
const string nixVersion = PACKAGE_VERSION; const string nixVersion = PACKAGE_VERSION;
NLOHMANN_JSON_SERIALIZE_ENUM(SandboxMode, {
{SandboxMode::smEnabled, true},
{SandboxMode::smRelaxed, "relaxed"},
{SandboxMode::smDisabled, false},
});
template<> void BaseSetting<SandboxMode>::set(const std::string & str) template<> void BaseSetting<SandboxMode>::set(const std::string & str)
{ {
if (str == "true") value = smEnabled; if (str == "true") value = smEnabled;
@ -163,11 +171,6 @@ template<> std::string BaseSetting<SandboxMode>::to_string() const
else abort(); else abort();
} }
template<> nlohmann::json BaseSetting<SandboxMode>::toJSON()
{
return AbstractSetting::toJSON();
}
template<> void BaseSetting<SandboxMode>::convertToArg(Args & args, const std::string & category) template<> void BaseSetting<SandboxMode>::convertToArg(Args & args, const std::string & category)
{ {
args.addFlag({ args.addFlag({
View file
@ -859,8 +859,54 @@ public:
are loaded as plugins (non-recursively). are loaded as plugins (non-recursively).
)"}; )"};
Setting<std::string> githubAccessToken{this, "", "github-access-token", Setting<StringMap> accessTokens{this, {}, "access-tokens",
"GitHub access token to get access to GitHub data through the GitHub API for `github:<..>` flakes."}; R"(
Access tokens used to access protected GitHub, GitLab, or
other locations requiring token-based authentication.
Access tokens are specified as a string made up of
space-separated `host=token` values. The specific token
used is selected by matching the `host` portion against the
"host" specification of the input. The actual use of the
`token` value is determined by the type of resource being
accessed:
* Github: the token value is the OAUTH-TOKEN string obtained
as the Personal Access Token from the Github server (see
https://docs.github.com/en/developers/apps/authorizing-oath-apps).
* Gitlab: the token value is either the OAuth2 token or the
Personal Access Token (these are different types of tokens
for gitlab, see
https://docs.gitlab.com/12.10/ee/api/README.html#authentication).
The `token` value should be `type:tokenstring` where
`type` is either `OAuth2` or `PAT` to indicate which type
of token is being specified.
Example `~/.config/nix/nix.conf`:
```
access-tokens = "github.com=23ac...b289 gitlab.mycompany.com=PAT:A123Bp_Cd..EfG gitlab.com=OAuth2:1jklw3jk"
```
Example `~/code/flake.nix`:
```nix
input.foo = {
type = "gitlab";
host = "gitlab.mycompany.com";
owner = "mycompany";
repo = "pro";
};
```
This example specifies three tokens, one each for accessing
github.com, gitlab.mycompany.com, and gitlab.com.
The `input.foo` uses the "gitlab" fetcher, which might
require specifying the token type along with the token
value.
)"};
Setting<Strings> experimentalFeatures{this, {}, "experimental-features", Setting<Strings> experimentalFeatures{this, {}, "experimental-features",
"Experimental Nix features to enable."}; "Experimental Nix features to enable."};
@ -880,6 +926,19 @@ public:
Setting<std::string> flakeRegistry{this, "https://github.com/NixOS/flake-registry/raw/master/flake-registry.json", "flake-registry", Setting<std::string> flakeRegistry{this, "https://github.com/NixOS/flake-registry/raw/master/flake-registry.json", "flake-registry",
"Path or URI of the global flake registry."}; "Path or URI of the global flake registry."};
Setting<bool> allowSymlinkedStore{
this, false, "allow-symlinked-store",
R"(
If set to `true`, Nix will stop complaining if the store directory
(typically /nix/store) contains symlink components.
This risks making some builds "impure" because builders sometimes
"canonicalise" paths by resolving all symlink components. Problems
occur if those builds are then deployed to machines where /nix/store
resolves to a different location from that of the build machine. You
can enable this setting if you are sure you're not going to do that.
)"};
}; };
View file
@ -2,12 +2,20 @@
#include "filetransfer.hh" #include "filetransfer.hh"
#include "globals.hh" #include "globals.hh"
#include "nar-info-disk-cache.hh" #include "nar-info-disk-cache.hh"
#include "callback.hh"
namespace nix { namespace nix {
MakeError(UploadToHTTP, Error); MakeError(UploadToHTTP, Error);
class HttpBinaryCacheStore : public BinaryCacheStore struct HttpBinaryCacheStoreConfig : virtual BinaryCacheStoreConfig
{
using BinaryCacheStoreConfig::BinaryCacheStoreConfig;
const std::string name() override { return "Http Binary Cache Store"; }
};
class HttpBinaryCacheStore : public BinaryCacheStore, public HttpBinaryCacheStoreConfig
{ {
private: private:
@ -24,9 +32,12 @@ private:
public: public:
HttpBinaryCacheStore( HttpBinaryCacheStore(
const Params & params, const Path & _cacheUri) const std::string & scheme,
: BinaryCacheStore(params) const Path & _cacheUri,
, cacheUri(_cacheUri) const Params & params)
: StoreConfig(params)
, BinaryCacheStore(params)
, cacheUri(scheme + "://" + _cacheUri)
{ {
if (cacheUri.back() == '/') if (cacheUri.back() == '/')
cacheUri.pop_back(); cacheUri.pop_back();
@ -55,6 +66,14 @@ public:
} }
} }
static std::set<std::string> uriSchemes()
{
static bool forceHttp = getEnv("_NIX_FORCE_HTTP") == "1";
auto ret = std::set<std::string>({"http", "https"});
if (forceHttp) ret.insert("file");
return ret;
}
protected: protected:
void maybeDisable() void maybeDisable()
@ -162,18 +181,6 @@ protected:
}; };
static RegisterStoreImplementation regStore([]( static RegisterStoreImplementation<HttpBinaryCacheStore, HttpBinaryCacheStoreConfig> regHttpBinaryCacheStore;
const std::string & uri, const Store::Params & params)
-> std::shared_ptr<Store>
{
static bool forceHttp = getEnv("_NIX_FORCE_HTTP") == "1";
if (std::string(uri, 0, 7) != "http://" &&
std::string(uri, 0, 8) != "https://" &&
(!forceHttp || std::string(uri, 0, 7) != "file://"))
return 0;
auto store = std::make_shared<HttpBinaryCacheStore>(params, uri);
store->init();
return store;
});
} }
View file
@ -6,21 +6,28 @@
#include "worker-protocol.hh" #include "worker-protocol.hh"
#include "ssh.hh" #include "ssh.hh"
#include "derivations.hh" #include "derivations.hh"
#include "callback.hh"
namespace nix { namespace nix {
static std::string uriScheme = "ssh://"; struct LegacySSHStoreConfig : virtual StoreConfig
struct LegacySSHStore : public Store
{ {
const Setting<int> maxConnections{this, 1, "max-connections", "maximum number of concurrent SSH connections"}; using StoreConfig::StoreConfig;
const Setting<Path> sshKey{this, "", "ssh-key", "path to an SSH private key"}; const Setting<int> maxConnections{(StoreConfig*) this, 1, "max-connections", "maximum number of concurrent SSH connections"};
const Setting<bool> compress{this, false, "compress", "whether to compress the connection"}; const Setting<Path> sshKey{(StoreConfig*) this, "", "ssh-key", "path to an SSH private key"};
const Setting<Path> remoteProgram{this, "nix-store", "remote-program", "path to the nix-store executable on the remote system"}; const Setting<bool> compress{(StoreConfig*) this, false, "compress", "whether to compress the connection"};
const Setting<std::string> remoteStore{this, "", "remote-store", "URI of the store on the remote system"}; const Setting<Path> remoteProgram{(StoreConfig*) this, "nix-store", "remote-program", "path to the nix-store executable on the remote system"};
const Setting<std::string> remoteStore{(StoreConfig*) this, "", "remote-store", "URI of the store on the remote system"};
const std::string name() override { return "Legacy SSH Store"; }
};
struct LegacySSHStore : public Store, public virtual LegacySSHStoreConfig
{
// Hack for getting remote build log output. // Hack for getting remote build log output.
const Setting<int> logFD{this, -1, "log-fd", "file descriptor to which SSH's stderr is connected"}; // Intentionally not in `LegacySSHStoreConfig` so that it doesn't appear in
// the documentation
const Setting<int> logFD{(StoreConfig*) this, -1, "log-fd", "file descriptor to which SSH's stderr is connected"};
struct Connection struct Connection
{ {
@ -37,8 +44,11 @@ struct LegacySSHStore : public Store
SSHMaster master; SSHMaster master;
LegacySSHStore(const string & host, const Params & params) static std::set<std::string> uriSchemes() { return {"ssh"}; }
: Store(params)
LegacySSHStore(const string & scheme, const string & host, const Params & params)
: StoreConfig(params)
, Store(params)
, host(host) , host(host)
, connections(make_ref<Pool<Connection>>( , connections(make_ref<Pool<Connection>>(
std::max(1, (int) maxConnections), std::max(1, (int) maxConnections),
@ -84,7 +94,7 @@ struct LegacySSHStore : public Store
string getUri() override string getUri() override
{ {
return uriScheme + host; return *uriSchemes().begin() + "://" + host;
} }
void queryPathInfoUncached(const StorePath & path, void queryPathInfoUncached(const StorePath & path,
@ -112,7 +122,7 @@ struct LegacySSHStore : public Store
auto deriver = readString(conn->from); auto deriver = readString(conn->from);
if (deriver != "") if (deriver != "")
info->deriver = parseStorePath(deriver); info->deriver = parseStorePath(deriver);
info->references = readStorePaths<StorePathSet>(*this, conn->from); info->references = worker_proto::read(*this, conn->from, Phantom<StorePathSet> {});
readLongLong(conn->from); // download size readLongLong(conn->from); // download size
info->narSize = readLongLong(conn->from); info->narSize = readLongLong(conn->from);
@ -146,7 +156,7 @@ struct LegacySSHStore : public Store
<< printStorePath(info.path) << printStorePath(info.path)
<< (info.deriver ? printStorePath(*info.deriver) : "") << (info.deriver ? printStorePath(*info.deriver) : "")
<< info.narHash.to_string(Base16, false); << info.narHash.to_string(Base16, false);
writeStorePaths(*this, conn->to, info.references); worker_proto::write(*this, conn->to, info.references);
conn->to conn->to
<< info.registrationTime << info.registrationTime
<< info.narSize << info.narSize
@ -175,7 +185,7 @@ struct LegacySSHStore : public Store
conn->to conn->to
<< exportMagic << exportMagic
<< printStorePath(info.path); << printStorePath(info.path);
writeStorePaths(*this, conn->to, info.references); worker_proto::write(*this, conn->to, info.references);
conn->to conn->to
<< (info.deriver ? printStorePath(*info.deriver) : "") << (info.deriver ? printStorePath(*info.deriver) : "")
<< 0 << 0
@ -291,10 +301,10 @@ public:
conn->to conn->to
<< cmdQueryClosure << cmdQueryClosure
<< includeOutputs; << includeOutputs;
writeStorePaths(*this, conn->to, paths); worker_proto::write(*this, conn->to, paths);
conn->to.flush(); conn->to.flush();
for (auto & i : readStorePaths<StorePathSet>(*this, conn->from)) for (auto & i : worker_proto::read(*this, conn->from, Phantom<StorePathSet> {}))
out.insert(i); out.insert(i);
} }
@ -307,10 +317,10 @@ public:
<< cmdQueryValidPaths << cmdQueryValidPaths
<< false // lock << false // lock
<< maybeSubstitute; << maybeSubstitute;
writeStorePaths(*this, conn->to, paths); worker_proto::write(*this, conn->to, paths);
conn->to.flush(); conn->to.flush();
return readStorePaths<StorePathSet>(*this, conn->from); return worker_proto::read(*this, conn->from, Phantom<StorePathSet> {});
} }
void connect() override void connect() override
@ -325,12 +335,6 @@ public:
} }
}; };
static RegisterStoreImplementation regStore([]( static RegisterStoreImplementation<LegacySSHStore, LegacySSHStoreConfig> regLegacySSHStore;
const std::string & uri, const Store::Params & params)
-> std::shared_ptr<Store>
{
if (std::string(uri, 0, uriScheme.size()) != uriScheme) return 0;
return std::make_shared<LegacySSHStore>(std::string(uri, uriScheme.size()), params);
});
} }
View file
@ -4,7 +4,14 @@
namespace nix { namespace nix {
class LocalBinaryCacheStore : public BinaryCacheStore struct LocalBinaryCacheStoreConfig : virtual BinaryCacheStoreConfig
{
using BinaryCacheStoreConfig::BinaryCacheStoreConfig;
const std::string name() override { return "Local Binary Cache Store"; }
};
class LocalBinaryCacheStore : public BinaryCacheStore, public virtual LocalBinaryCacheStoreConfig
{ {
private: private:
@ -13,8 +20,11 @@ private:
public: public:
LocalBinaryCacheStore( LocalBinaryCacheStore(
const Params & params, const Path & binaryCacheDir) const std::string scheme,
: BinaryCacheStore(params) const Path & binaryCacheDir,
const Params & params)
: StoreConfig(params)
, BinaryCacheStore(params)
, binaryCacheDir(binaryCacheDir) , binaryCacheDir(binaryCacheDir)
{ {
} }
@ -26,6 +36,8 @@ public:
return "file://" + binaryCacheDir; return "file://" + binaryCacheDir;
} }
static std::set<std::string> uriSchemes();
protected: protected:
bool fileExists(const std::string & path) override; bool fileExists(const std::string & path) override;
@ -85,16 +97,14 @@ bool LocalBinaryCacheStore::fileExists(const std::string & path)
return pathExists(binaryCacheDir + "/" + path); return pathExists(binaryCacheDir + "/" + path);
} }
static RegisterStoreImplementation regStore([]( std::set<std::string> LocalBinaryCacheStore::uriSchemes()
const std::string & uri, const Store::Params & params)
-> std::shared_ptr<Store>
{ {
if (getEnv("_NIX_FORCE_HTTP_BINARY_CACHE_STORE") == "1" || if (getEnv("_NIX_FORCE_HTTP_BINARY_CACHE_STORE") == "1")
std::string(uri, 0, 7) != "file://") return {};
return 0; else
auto store = std::make_shared<LocalBinaryCacheStore>(params, std::string(uri, 7)); return {"file"};
store->init(); }
return store;
}); static RegisterStoreImplementation<LocalBinaryCacheStore, LocalBinaryCacheStoreConfig> regLocalBinaryCacheStore;
} }
View file
@ -6,6 +6,7 @@
#include "derivations.hh" #include "derivations.hh"
#include "nar-info.hh" #include "nar-info.hh"
#include "references.hh" #include "references.hh"
#include "callback.hh"
#include <iostream> #include <iostream>
#include <algorithm> #include <algorithm>
@ -42,7 +43,8 @@ namespace nix {
LocalStore::LocalStore(const Params & params) LocalStore::LocalStore(const Params & params)
: Store(params) : StoreConfig(params)
, Store(params)
, LocalFSStore(params) , LocalFSStore(params)
, realStoreDir_{this, false, rootDir != "" ? rootDir + "/nix/store" : storeDir, "real", , realStoreDir_{this, false, rootDir != "" ? rootDir + "/nix/store" : storeDir, "real",
"physical path to the Nix store"} "physical path to the Nix store"}
@ -108,12 +110,11 @@ LocalStore::LocalStore(const Params & params)
} }
/* Ensure that the store and its parents are not symlinks. */ /* Ensure that the store and its parents are not symlinks. */
if (getEnv("NIX_IGNORE_SYMLINK_STORE") != "1") { if (!settings.allowSymlinkedStore) {
Path path = realStoreDir; Path path = realStoreDir;
struct stat st; struct stat st;
while (path != "/") { while (path != "/") {
if (lstat(path.c_str(), &st)) st = lstat(path);
throw SysError("getting status of '%1%'", path);
if (S_ISLNK(st.st_mode)) if (S_ISLNK(st.st_mode))
throw Error( throw Error(
"the path '%1%' is a symlink; " "the path '%1%' is a symlink; "
@ -417,10 +418,7 @@ static void canonicaliseTimestampAndPermissions(const Path & path, const struct
void canonicaliseTimestampAndPermissions(const Path & path) void canonicaliseTimestampAndPermissions(const Path & path)
{ {
struct stat st; canonicaliseTimestampAndPermissions(path, lstat(path));
if (lstat(path.c_str(), &st))
throw SysError("getting attributes of path '%1%'", path);
canonicaliseTimestampAndPermissions(path, st);
} }
@ -438,9 +436,7 @@ static void canonicalisePathMetaData_(const Path & path, uid_t fromUid, InodesSe
} }
#endif #endif
struct stat st; auto st = lstat(path);
if (lstat(path.c_str(), &st))
throw SysError("getting attributes of path '%1%'", path);
/* Really make sure that the path is of a supported type. */ /* Really make sure that the path is of a supported type. */
if (!(S_ISREG(st.st_mode) || S_ISDIR(st.st_mode) || S_ISLNK(st.st_mode))) if (!(S_ISREG(st.st_mode) || S_ISDIR(st.st_mode) || S_ISLNK(st.st_mode)))
@ -476,8 +472,7 @@ static void canonicalisePathMetaData_(const Path & path, uid_t fromUid, InodesSe
ensure that we don't fail on hard links within the same build ensure that we don't fail on hard links within the same build
(i.e. "touch $out/foo; ln $out/foo $out/bar"). */ (i.e. "touch $out/foo; ln $out/foo $out/bar"). */
if (fromUid != (uid_t) -1 && st.st_uid != fromUid) { if (fromUid != (uid_t) -1 && st.st_uid != fromUid) {
assert(!S_ISDIR(st.st_mode)); if (S_ISDIR(st.st_mode) || !inodesSeen.count(Inode(st.st_dev, st.st_ino)))
if (inodesSeen.find(Inode(st.st_dev, st.st_ino)) == inodesSeen.end())
throw BuildError("invalid ownership on file '%1%'", path); throw BuildError("invalid ownership on file '%1%'", path);
mode_t mode = st.st_mode & ~S_IFMT; mode_t mode = st.st_mode & ~S_IFMT;
assert(S_ISLNK(st.st_mode) || (st.st_uid == geteuid() && (mode == 0444 || mode == 0555) && st.st_mtime == mtimeStore)); assert(S_ISLNK(st.st_mode) || (st.st_uid == geteuid() && (mode == 0444 || mode == 0555) && st.st_mtime == mtimeStore));
@ -520,9 +515,7 @@ void canonicalisePathMetaData(const Path & path, uid_t fromUid, InodesSeen & ino
/* On platforms that don't have lchown(), the top-level path can't /* On platforms that don't have lchown(), the top-level path can't
be a symlink, since we can't change its ownership. */ be a symlink, since we can't change its ownership. */
struct stat st; auto st = lstat(path);
if (lstat(path.c_str(), &st))
throw SysError("getting attributes of path '%1%'", path);
if (st.st_uid != geteuid()) { if (st.st_uid != geteuid()) {
assert(S_ISLNK(st.st_mode)); assert(S_ISLNK(st.st_mode));
@ -578,13 +571,32 @@ void LocalStore::checkDerivationOutputs(const StorePath & drvPath, const Derivat
envHasRightPath(path, i.first); envHasRightPath(path, i.first);
}, },
[&](DerivationOutputCAFloating _) { [&](DerivationOutputCAFloating _) {
throw UnimplementedError("floating CA output derivations are not yet implemented"); /* Nothing to check */
}, },
}, i.second.output); }, i.second.output);
} }
} }
void LocalStore::linkDeriverToPath(const StorePath & deriver, const string & outputName, const StorePath & output)
{
auto state(_state.lock());
return linkDeriverToPath(*state, queryValidPathId(*state, deriver), outputName, output);
}
void LocalStore::linkDeriverToPath(State & state, uint64_t deriver, const string & outputName, const StorePath & output)
{
retrySQLite<void>([&]() {
state.stmtAddDerivationOutput.use()
(deriver)
(outputName)
(printStorePath(output))
.exec();
});
}
uint64_t LocalStore::addValidPath(State & state, uint64_t LocalStore::addValidPath(State & state,
const ValidPathInfo & info, bool checkOutputs) const ValidPathInfo & info, bool checkOutputs)
{ {
@ -618,12 +630,11 @@ uint64_t LocalStore::addValidPath(State & state,
registration above is undone. */ registration above is undone. */
if (checkOutputs) checkDerivationOutputs(info.path, drv); if (checkOutputs) checkDerivationOutputs(info.path, drv);
for (auto & i : drv.outputsAndPaths(*this)) { for (auto & i : drv.outputsAndOptPaths(*this)) {
state.stmtAddDerivationOutput.use() /* Floating CA derivations have indeterminate output paths until
(id) they are built, so don't register anything in that case */
(i.first) if (i.second.second)
(printStorePath(i.second.second)) linkDeriverToPath(state, id, i.first, *i.second.second);
.exec();
} }
} }
@ -710,7 +721,7 @@ uint64_t LocalStore::queryValidPathId(State & state, const StorePath & path)
{ {
auto use(state.stmtQueryPathInfo.use()(printStorePath(path))); auto use(state.stmtQueryPathInfo.use()(printStorePath(path)));
if (!use.next()) if (!use.next())
throw Error("path '%s' is not valid", printStorePath(path)); throw InvalidPath("path '%s' is not valid", printStorePath(path));
return use.getInt(0); return use.getInt(0);
} }
@ -785,18 +796,58 @@ StorePathSet LocalStore::queryValidDerivers(const StorePath & path)
} }
std::map<std::string, std::optional<StorePath>> LocalStore::queryPartialDerivationOutputMap(const StorePath & path) std::map<std::string, std::optional<StorePath>> LocalStore::queryPartialDerivationOutputMap(const StorePath & path_)
{ {
auto path = path_;
std::map<std::string, std::optional<StorePath>> outputs; std::map<std::string, std::optional<StorePath>> outputs;
BasicDerivation drv = readDerivation(path); Derivation drv = readDerivation(path);
for (auto & [outName, _] : drv.outputs) { for (auto & [outName, _] : drv.outputs) {
outputs.insert_or_assign(outName, std::nullopt); outputs.insert_or_assign(outName, std::nullopt);
} }
bool haveCached = false;
{
auto resolutions = drvPathResolutions.lock();
auto resolvedPathOptIter = resolutions->find(path);
if (resolvedPathOptIter != resolutions->end()) {
auto & [_, resolvedPathOpt] = *resolvedPathOptIter;
if (resolvedPathOpt)
path = *resolvedPathOpt;
haveCached = true;
}
}
/* can't just use else-if instead of `!haveCached` because we need to unlock
`drvPathResolutions` before it is locked in `Derivation::tryResolve`. */
if (!haveCached && drv.type() == DerivationType::CAFloating) {
/* Try resolve drv and use that path instead. */
auto attempt = drv.tryResolve(*this);
if (!attempt)
/* If we cannot resolve the derivation, we cannot have any path
assigned so we return the map of all std::nullopts. */
return outputs;
/* Just compute store path */
auto pathResolved = writeDerivation(*this, *std::move(attempt), NoRepair, true);
/* Store in memo table. */
/* FIXME: memo logic should not be local-store specific, should have
wrapper-method instead. */
drvPathResolutions.lock()->insert_or_assign(path, pathResolved);
path = std::move(pathResolved);
}
return retrySQLite<std::map<std::string, std::optional<StorePath>>>([&]() { return retrySQLite<std::map<std::string, std::optional<StorePath>>>([&]() {
auto state(_state.lock()); auto state(_state.lock());
auto useQueryDerivationOutputs(state->stmtQueryDerivationOutputs.use() uint64_t drvId;
(queryValidPathId(*state, path))); try {
drvId = queryValidPathId(*state, path);
} catch (InvalidPath &) {
/* FIXME? if the derivation doesn't exist, we cannot have a mapping
for it. */
return outputs;
}
auto useQueryDerivationOutputs {
state->stmtQueryDerivationOutputs.use()
(drvId)
};
while (useQueryDerivationOutputs.next()) while (useQueryDerivationOutputs.next())
outputs.insert_or_assign( outputs.insert_or_assign(
@ -1435,7 +1486,7 @@ static void makeMutable(const Path & path)
{ {
checkInterrupt(); checkInterrupt();
struct stat st = lstat(path); auto st = lstat(path);
if (!S_ISDIR(st.st_mode) && !S_ISREG(st.st_mode)) return; if (!S_ISDIR(st.st_mode) && !S_ISREG(st.st_mode)) return;
View file
@ -30,8 +30,19 @@ struct OptimiseStats
uint64_t blocksFreed = 0; uint64_t blocksFreed = 0;
}; };
struct LocalStoreConfig : virtual LocalFSStoreConfig
{
using LocalFSStoreConfig::LocalFSStoreConfig;
class LocalStore : public LocalFSStore Setting<bool> requireSigs{(StoreConfig*) this,
settings.requireSigs,
"require-sigs", "whether store paths should have a trusted signature on import"};
const std::string name() override { return "Local Store"; }
};
class LocalStore : public LocalFSStore, public virtual LocalStoreConfig
{ {
private: private:
@ -95,10 +106,6 @@ public:
private: private:
Setting<bool> requireSigs{(Store*) this,
settings.requireSigs,
"require-sigs", "whether store paths should have a trusted signature on import"};
const PublicKeys & getPublicKeys(); const PublicKeys & getPublicKeys();
public: public:
@ -279,6 +286,11 @@ private:
specified by the secret-key-files option. */ specified by the secret-key-files option. */
void signPathInfo(ValidPathInfo & info); void signPathInfo(ValidPathInfo & info);
/* Register the store path 'output' as the output named 'outputName' of
derivation 'deriver'. */
void linkDeriverToPath(const StorePath & deriver, const string & outputName, const StorePath & output);
void linkDeriverToPath(State & state, uint64_t deriver, const string & outputName, const StorePath & output);
Path getRealStoreDir() override { return realStoreDir; } Path getRealStoreDir() override { return realStoreDir; }
void createUser(const std::string & userName, uid_t userId) override; void createUser(const std::string & userName, uid_t userId) override;

View file
#include "store-api.hh" #include "store-api.hh"
#include "thread-pool.hh" #include "thread-pool.hh"
#include "topo-sort.hh" #include "topo-sort.hh"
#include "callback.hh"
namespace nix { namespace nix {
@ -203,17 +203,24 @@ void Store::queryMissing(const std::vector<StorePathWithOutputs> & targets,
return; return;
} }
PathSet invalid;
/* true for regular derivations, and CA derivations for which we
have a trust mapping for all wanted outputs. */
auto knownOutputPaths = true;
for (auto & [outputName, pathOpt] : queryPartialDerivationOutputMap(path.path)) {
if (!pathOpt) {
knownOutputPaths = false;
break;
}
if (wantOutput(outputName, path.outputs) && !isValidPath(*pathOpt))
invalid.insert(printStorePath(*pathOpt));
}
if (knownOutputPaths && invalid.empty()) return;
auto drv = make_ref<Derivation>(derivationFromPath(path.path)); auto drv = make_ref<Derivation>(derivationFromPath(path.path));
ParsedDerivation parsedDrv(StorePath(path.path), *drv); ParsedDerivation parsedDrv(StorePath(path.path), *drv);
PathSet invalid; if (knownOutputPaths && settings.useSubstitutes && parsedDrv.substitutesAllowed()) {
for (auto & j : drv->outputsAndPaths(*this))
if (wantOutput(j.first, path.outputs)
&& !isValidPath(j.second.second))
invalid.insert(printStorePath(j.second.second));
if (invalid.empty()) return;
if (settings.useSubstitutes && parsedDrv.substitutesAllowed()) {
auto drvState = make_ref<Sync<DrvState>>(DrvState(invalid.size())); auto drvState = make_ref<Sync<DrvState>>(DrvState(invalid.size()));
for (auto & output : invalid) for (auto & output : invalid)
pool.enqueue(std::bind(checkOutput, printStorePath(path.path), drv, output, drvState)); pool.enqueue(std::bind(checkOutput, printStorePath(path.path), drv, output, drvState));

View file
#include "names.hh" #include "names.hh"
#include "util.hh" #include "util.hh"
#include <regex>
namespace nix { namespace nix {
struct Regex
{
std::regex regex;
};
DrvName::DrvName() DrvName::DrvName()
{ {
name = ""; name = "";
@ -30,11 +38,18 @@ DrvName::DrvName(std::string_view s) : hits(0)
} }
DrvName::~DrvName()
{ }
bool DrvName::matches(DrvName & n) bool DrvName::matches(DrvName & n)
{ {
if (name != "*") { if (name != "*") {
if (!regex) regex = std::unique_ptr<std::regex>(new std::regex(name, std::regex::extended)); if (!regex) {
if (!std::regex_match(n.name, *regex)) return false; regex = std::make_unique<Regex>();
regex->regex = std::regex(name, std::regex::extended);
}
if (!std::regex_match(n.name, regex->regex)) return false;
} }
if (version != "" && version != n.version) return false; if (version != "" && version != n.version) return false;
return true; return true;
@ -99,7 +114,7 @@ DrvNames drvNamesFromArgs(const Strings & opArgs)
{ {
DrvNames result; DrvNames result;
for (auto & i : opArgs) for (auto & i : opArgs)
result.push_back(DrvName(i)); result.emplace_back(i);
return result; return result;
} }
View file
@ -3,10 +3,11 @@
#include <memory> #include <memory>
#include "types.hh" #include "types.hh"
#include <regex>
namespace nix { namespace nix {
struct Regex;
struct DrvName struct DrvName
{ {
string fullName; string fullName;
@ -16,10 +17,12 @@ struct DrvName
DrvName(); DrvName();
DrvName(std::string_view s); DrvName(std::string_view s);
~DrvName();
bool matches(DrvName & n); bool matches(DrvName & n);
private: private:
std::unique_ptr<std::regex> regex; std::unique_ptr<Regex> regex;
}; };
typedef list<DrvName> DrvNames; typedef list<DrvName> DrvNames;

View file
static void makeWritable(const Path & path) static void makeWritable(const Path & path)
{ {
struct stat st; auto st = lstat(path);
if (lstat(path.c_str(), &st))
throw SysError("getting attributes of path '%1%'", path);
if (chmod(path.c_str(), st.st_mode | S_IWUSR) == -1) if (chmod(path.c_str(), st.st_mode | S_IWUSR) == -1)
throw SysError("changing writability of '%1%'", path); throw SysError("changing writability of '%1%'", path);
} }
@ -94,9 +92,7 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
{ {
checkInterrupt(); checkInterrupt();
struct stat st; auto st = lstat(path);
if (lstat(path.c_str(), &st))
throw SysError("getting attributes of path '%1%'", path);
#if __APPLE__ #if __APPLE__
/* HFS/macOS has some undocumented security feature disabling hardlinking for /* HFS/macOS has some undocumented security feature disabling hardlinking for
@ -187,9 +183,7 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
/* Yes! We've seen a file with the same contents. Replace the /* Yes! We've seen a file with the same contents. Replace the
current file with a hard link to that file. */ current file with a hard link to that file. */
struct stat stLink; auto stLink = lstat(linkPath);
if (lstat(linkPath.c_str(), &stLink))
throw SysError("getting attributes of path '%1%'", linkPath);
if (st.st_ino == stLink.st_ino) { if (st.st_ino == stLink.st_ino) {
debug(format("'%1%' is already linked to '%2%'") % path % linkPath); debug(format("'%1%' is already linked to '%2%'") % path % linkPath);
@ -282,21 +276,15 @@ void LocalStore::optimiseStore(OptimiseStats & stats)
} }
} }
static string showBytes(uint64_t bytes)
{
return (format("%.2f MiB") % (bytes / (1024.0 * 1024.0))).str();
}
void LocalStore::optimiseStore() void LocalStore::optimiseStore()
{ {
OptimiseStats stats; OptimiseStats stats;
optimiseStore(stats); optimiseStore(stats);
printInfo( printInfo("%s freed by hard-linking %d files",
format("%1% freed by hard-linking %2% files") showBytes(stats.bytesFreed),
% showBytes(stats.bytesFreed) stats.filesLinked);
% stats.filesLinked);
} }
void LocalStore::optimisePath(const Path & path) void LocalStore::optimisePath(const Path & path)

View file
#pragma once
#include "store-api.hh" #include "store-api.hh"
#include <nlohmann/json_fwd.hpp> #include <nlohmann/json_fwd.hpp>
Some files were not shown because too many files have changed in this diff.