Merge branch 'master' into debug-step
This commit is contained in:
commit
dbe3fd3735
|
@ -1,4 +1,4 @@
|
||||||
name: "Test"
|
name: "CI"
|
||||||
|
|
||||||
on:
|
on:
|
||||||
pull_request:
|
pull_request:
|
||||||
|
@ -25,7 +25,7 @@ jobs:
|
||||||
name: '${{ env.CACHIX_NAME }}'
|
name: '${{ env.CACHIX_NAME }}'
|
||||||
signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}'
|
signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}'
|
||||||
authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'
|
authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'
|
||||||
- run: nix-build -A checks.$(nix-instantiate --eval -E '(builtins.currentSystem)')
|
- run: nix --experimental-features 'nix-command flakes' flake check -L
|
||||||
|
|
||||||
check_cachix:
|
check_cachix:
|
||||||
name: Cachix secret present for installer tests
|
name: Cachix secret present for installer tests
|
||||||
|
@ -74,3 +74,35 @@ jobs:
|
||||||
install_url: '${{needs.installer.outputs.installerURL}}'
|
install_url: '${{needs.installer.outputs.installerURL}}'
|
||||||
install_options: "--tarball-url-prefix https://${{ env.CACHIX_NAME }}.cachix.org/serve"
|
install_options: "--tarball-url-prefix https://${{ env.CACHIX_NAME }}.cachix.org/serve"
|
||||||
- run: nix-instantiate -E 'builtins.currentTime' --eval
|
- run: nix-instantiate -E 'builtins.currentTime' --eval
|
||||||
|
|
||||||
|
docker_push_image:
|
||||||
|
needs: [check_cachix, tests]
|
||||||
|
if: >-
|
||||||
|
github.event_name == 'push' &&
|
||||||
|
github.ref_name == 'master' &&
|
||||||
|
needs.check_cachix.outputs.secret == 'true'
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2.4.0
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
- uses: cachix/install-nix-action@v16
|
||||||
|
- run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
|
||||||
|
- run: echo NIX_VERSION="$(nix-instantiate --eval -E '(import ./default.nix).defaultPackage.${builtins.currentSystem}.version' | tr -d \")" >> $GITHUB_ENV
|
||||||
|
- uses: cachix/cachix-action@v10
|
||||||
|
if: needs.check_cachix.outputs.secret == 'true'
|
||||||
|
with:
|
||||||
|
name: '${{ env.CACHIX_NAME }}'
|
||||||
|
signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}'
|
||||||
|
authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'
|
||||||
|
- run: nix --experimental-features 'nix-command flakes' build .#dockerImage -L
|
||||||
|
- run: docker load -i ./result/image.tar.gz
|
||||||
|
- run: docker tag nix:$NIX_VERSION nixos/nix:$NIX_VERSION
|
||||||
|
- run: docker tag nix:$NIX_VERSION nixos/nix:master
|
||||||
|
- name: Login to Docker Hub
|
||||||
|
uses: docker/login-action@v1
|
||||||
|
with:
|
||||||
|
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||||
|
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||||
|
- run: docker push nixos/nix:$NIX_VERSION
|
||||||
|
- run: docker push nixos/nix:master
|
4
.gitignore
vendored
4
.gitignore
vendored
|
@ -120,3 +120,7 @@ GTAGS
|
||||||
compile_commands.json
|
compile_commands.json
|
||||||
|
|
||||||
nix-rust/target
|
nix-rust/target
|
||||||
|
|
||||||
|
result
|
||||||
|
|
||||||
|
.vscode/
|
||||||
|
|
|
@ -16,6 +16,7 @@ LDFLAGS = @LDFLAGS@
|
||||||
LIBARCHIVE_LIBS = @LIBARCHIVE_LIBS@
|
LIBARCHIVE_LIBS = @LIBARCHIVE_LIBS@
|
||||||
LIBBROTLI_LIBS = @LIBBROTLI_LIBS@
|
LIBBROTLI_LIBS = @LIBBROTLI_LIBS@
|
||||||
LIBCURL_LIBS = @LIBCURL_LIBS@
|
LIBCURL_LIBS = @LIBCURL_LIBS@
|
||||||
|
LOWDOWN_LIBS = @LOWDOWN_LIBS@
|
||||||
OPENSSL_LIBS = @OPENSSL_LIBS@
|
OPENSSL_LIBS = @OPENSSL_LIBS@
|
||||||
LIBSECCOMP_LIBS = @LIBSECCOMP_LIBS@
|
LIBSECCOMP_LIBS = @LIBSECCOMP_LIBS@
|
||||||
PACKAGE_NAME = @PACKAGE_NAME@
|
PACKAGE_NAME = @PACKAGE_NAME@
|
||||||
|
|
|
@ -262,13 +262,17 @@ fi
|
||||||
PKG_CHECK_MODULES([GTEST], [gtest_main])
|
PKG_CHECK_MODULES([GTEST], [gtest_main])
|
||||||
|
|
||||||
|
|
||||||
|
# Look for nlohmann/json.
|
||||||
|
PKG_CHECK_MODULES([NLOHMANN_JSON], [nlohmann_json >= 3.9])
|
||||||
|
|
||||||
|
|
||||||
# documentation generation switch
|
# documentation generation switch
|
||||||
AC_ARG_ENABLE(doc-gen, AS_HELP_STRING([--disable-doc-gen],[disable documentation generation]),
|
AC_ARG_ENABLE(doc-gen, AS_HELP_STRING([--disable-doc-gen],[disable documentation generation]),
|
||||||
doc_generate=$enableval, doc_generate=yes)
|
doc_generate=$enableval, doc_generate=yes)
|
||||||
AC_SUBST(doc_generate)
|
AC_SUBST(doc_generate)
|
||||||
|
|
||||||
# Look for lowdown library.
|
# Look for lowdown library.
|
||||||
PKG_CHECK_MODULES([LOWDOWN], [lowdown >= 0.8.0], [CXXFLAGS="$LOWDOWN_CFLAGS $CXXFLAGS"])
|
PKG_CHECK_MODULES([LOWDOWN], [lowdown >= 0.9.0], [CXXFLAGS="$LOWDOWN_CFLAGS $CXXFLAGS"])
|
||||||
|
|
||||||
# Setuid installations.
|
# Setuid installations.
|
||||||
AC_CHECK_FUNCS([setresuid setreuid lchown])
|
AC_CHECK_FUNCS([setresuid setreuid lchown])
|
||||||
|
|
|
@ -1,3 +1,3 @@
|
||||||
(import (fetchTarball https://github.com/edolstra/flake-compat/archive/master.tar.gz) {
|
(import (fetchTarball "https://github.com/edolstra/flake-compat/archive/master.tar.gz") {
|
||||||
src = ./.;
|
src = ./.;
|
||||||
}).defaultNix
|
}).defaultNix
|
||||||
|
|
|
@ -72,6 +72,7 @@
|
||||||
- [CLI guideline](contributing/cli-guideline.md)
|
- [CLI guideline](contributing/cli-guideline.md)
|
||||||
- [Release Notes](release-notes/release-notes.md)
|
- [Release Notes](release-notes/release-notes.md)
|
||||||
- [Release X.Y (202?-??-??)](release-notes/rl-next.md)
|
- [Release X.Y (202?-??-??)](release-notes/rl-next.md)
|
||||||
|
- [Release 2.6 (2022-01-24)](release-notes/rl-2.6.md)
|
||||||
- [Release 2.5 (2021-12-13)](release-notes/rl-2.5.md)
|
- [Release 2.5 (2021-12-13)](release-notes/rl-2.5.md)
|
||||||
- [Release 2.4 (2021-11-01)](release-notes/rl-2.4.md)
|
- [Release 2.4 (2021-11-01)](release-notes/rl-2.4.md)
|
||||||
- [Release 2.3 (2019-09-04)](release-notes/rl-2.3.md)
|
- [Release 2.3 (2019-09-04)](release-notes/rl-2.3.md)
|
||||||
|
|
|
@ -101,7 +101,8 @@ The following common options are supported:
|
||||||
|
|
||||||
- `NIX_BUILD_SHELL`\
|
- `NIX_BUILD_SHELL`\
|
||||||
Shell used to start the interactive environment. Defaults to the
|
Shell used to start the interactive environment. Defaults to the
|
||||||
`bash` found in `PATH`.
|
`bash` found in `<nixpkgs>`, falling back to the `bash` found in
|
||||||
|
`PATH` if not found.
|
||||||
|
|
||||||
# Examples
|
# Examples
|
||||||
|
|
||||||
|
|
|
@ -284,6 +284,10 @@ The points of interest are:
|
||||||
function is called with the `localServer` argument set to `true` but
|
function is called with the `localServer` argument set to `true` but
|
||||||
the `db4` argument set to `null`, then the evaluation fails.
|
the `db4` argument set to `null`, then the evaluation fails.
|
||||||
|
|
||||||
|
Note that `->` is the [logical
|
||||||
|
implication](https://en.wikipedia.org/wiki/Truth_table#Logical_implication)
|
||||||
|
Boolean operation.
|
||||||
|
|
||||||
2. This is a more subtle condition: if Subversion is built with Apache
|
2. This is a more subtle condition: if Subversion is built with Apache
|
||||||
(`httpServer`) support, then the Expat library (an XML library) used
|
(`httpServer`) support, then the Expat library (an XML library) used
|
||||||
by Subversion should be same as the one used by Apache. This is
|
by Subversion should be same as the one used by Apache. This is
|
||||||
|
|
|
@ -276,6 +276,9 @@ more than 2800 commits from 195 contributors since release 2.3.
|
||||||
|
|
||||||
* Plugins can now register `nix` subcommands.
|
* Plugins can now register `nix` subcommands.
|
||||||
|
|
||||||
|
* The `--indirect` flag to `nix-store --add-root` has become a no-op.
|
||||||
|
`--add-root` will always generate indirect GC roots from now on.
|
||||||
|
|
||||||
## Incompatible changes
|
## Incompatible changes
|
||||||
|
|
||||||
* The `nix` command is now marked as an experimental feature. This
|
* The `nix` command is now marked as an experimental feature. This
|
||||||
|
|
21
doc/manual/src/release-notes/rl-2.6.md
Normal file
21
doc/manual/src/release-notes/rl-2.6.md
Normal file
|
@ -0,0 +1,21 @@
|
||||||
|
# Release 2.6 (2022-01-24)
|
||||||
|
|
||||||
|
* The Nix CLI now searches for a `flake.nix` up until the root of the current
|
||||||
|
Git repository or a filesystem boundary rather than just in the current
|
||||||
|
directory.
|
||||||
|
* The TOML parser used by `builtins.fromTOML` has been replaced by [a
|
||||||
|
more compliant one](https://github.com/ToruNiina/toml11).
|
||||||
|
* Added `:st`/`:show-trace` commands to `nix repl`, which are used to
|
||||||
|
set or toggle display of error traces.
|
||||||
|
* New builtin function `builtins.zipAttrsWith` with the same
|
||||||
|
functionality as `lib.zipAttrsWith` from Nixpkgs, but much more
|
||||||
|
efficient.
|
||||||
|
* New command `nix store copy-log` to copy build logs from one store
|
||||||
|
to another.
|
||||||
|
* The `commit-lockfile-summary` option can be set to a non-empty
|
||||||
|
string to override the commit summary used when commiting an updated
|
||||||
|
lockfile. This may be used in conjunction with the `nixConfig`
|
||||||
|
attribute in `flake.nix` to better conform to repository
|
||||||
|
conventions.
|
||||||
|
* `docker run -ti nixos/nix:master` will place you in the Docker
|
||||||
|
container with the latest version of Nix from the `master` branch.
|
|
@ -1,6 +1,9 @@
|
||||||
# Release X.Y (202?-??-??)
|
# Release X.Y (202?-??-??)
|
||||||
|
|
||||||
* The TOML parser used by `builtins.fromTOML` has been replaced by [a
|
* `nix bundle` breaking API change now supports bundlers of the form
|
||||||
more compliant one](https://github.com/ToruNiina/toml11).
|
`bundler.<system>.<name>= derivation: another-derivation;`. This supports
|
||||||
* Added `:st`/`:show-trace` commands to nix repl, which are used to
|
additional functionality to inspect evaluation information during bundling. A
|
||||||
set or toggle display of error traces.
|
new [repository](https://github.com/NixOS/bundlers) has various bundlers
|
||||||
|
implemented.
|
||||||
|
|
||||||
|
* `nix store ping` now reports the version of the remote Nix daemon.
|
||||||
|
|
|
@ -20,6 +20,8 @@ let
|
||||||
man
|
man
|
||||||
cacert.out
|
cacert.out
|
||||||
findutils
|
findutils
|
||||||
|
iana-etc
|
||||||
|
git
|
||||||
];
|
];
|
||||||
|
|
||||||
users = {
|
users = {
|
||||||
|
@ -199,6 +201,8 @@ let
|
||||||
|
|
||||||
mkdir $out/tmp
|
mkdir $out/tmp
|
||||||
|
|
||||||
|
mkdir -p $out/var/tmp
|
||||||
|
|
||||||
mkdir -p $out/etc/nix
|
mkdir -p $out/etc/nix
|
||||||
cat $nixConfContentsPath > $out/etc/nix/nix.conf
|
cat $nixConfContentsPath > $out/etc/nix/nix.conf
|
||||||
|
|
||||||
|
@ -234,6 +238,7 @@ pkgs.dockerTools.buildLayeredImageWithNixDb {
|
||||||
'';
|
'';
|
||||||
fakeRootCommands = ''
|
fakeRootCommands = ''
|
||||||
chmod 1777 tmp
|
chmod 1777 tmp
|
||||||
|
chmod 1777 var/tmp
|
||||||
'';
|
'';
|
||||||
|
|
||||||
config = {
|
config = {
|
||||||
|
|
18
flake.lock
18
flake.lock
|
@ -31,10 +31,26 @@
|
||||||
"type": "indirect"
|
"type": "indirect"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"nixpkgs-regression": {
|
||||||
|
"locked": {
|
||||||
|
"lastModified": 1643052045,
|
||||||
|
"narHash": "sha256-uGJ0VXIhWKGXxkeNnq4TvV3CIOkUJ3PAoLZ3HMzNVMw=",
|
||||||
|
"owner": "NixOS",
|
||||||
|
"repo": "nixpkgs",
|
||||||
|
"rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2",
|
||||||
|
"type": "github"
|
||||||
|
},
|
||||||
|
"original": {
|
||||||
|
"id": "nixpkgs",
|
||||||
|
"rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2",
|
||||||
|
"type": "indirect"
|
||||||
|
}
|
||||||
|
},
|
||||||
"root": {
|
"root": {
|
||||||
"inputs": {
|
"inputs": {
|
||||||
"lowdown-src": "lowdown-src",
|
"lowdown-src": "lowdown-src",
|
||||||
"nixpkgs": "nixpkgs"
|
"nixpkgs": "nixpkgs",
|
||||||
|
"nixpkgs-regression": "nixpkgs-regression"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
|
408
flake.nix
408
flake.nix
|
@ -2,9 +2,10 @@
|
||||||
description = "The purely functional package manager";
|
description = "The purely functional package manager";
|
||||||
|
|
||||||
inputs.nixpkgs.url = "nixpkgs/nixos-21.05-small";
|
inputs.nixpkgs.url = "nixpkgs/nixos-21.05-small";
|
||||||
|
inputs.nixpkgs-regression.url = "nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2";
|
||||||
inputs.lowdown-src = { url = "github:kristapsdz/lowdown"; flake = false; };
|
inputs.lowdown-src = { url = "github:kristapsdz/lowdown"; flake = false; };
|
||||||
|
|
||||||
outputs = { self, nixpkgs, lowdown-src }:
|
outputs = { self, nixpkgs, nixpkgs-regression, lowdown-src }:
|
||||||
|
|
||||||
let
|
let
|
||||||
|
|
||||||
|
@ -132,6 +133,7 @@
|
||||||
./boehmgc-coroutine-sp-fallback.diff
|
./boehmgc-coroutine-sp-fallback.diff
|
||||||
];
|
];
|
||||||
}))
|
}))
|
||||||
|
nlohmann_json
|
||||||
];
|
];
|
||||||
|
|
||||||
perlDeps =
|
perlDeps =
|
||||||
|
@ -140,8 +142,8 @@
|
||||||
];
|
];
|
||||||
};
|
};
|
||||||
|
|
||||||
installScriptFor = systems:
|
installScriptFor = systems:
|
||||||
with nixpkgsFor.x86_64-linux;
|
with nixpkgsFor.x86_64-linux;
|
||||||
runCommand "installer-script"
|
runCommand "installer-script"
|
||||||
{ buildInputs = [ nix ];
|
{ buildInputs = [ nix ];
|
||||||
}
|
}
|
||||||
|
@ -205,188 +207,204 @@
|
||||||
installCheckPhase = "make installcheck -j$NIX_BUILD_CORES -l$NIX_BUILD_CORES";
|
installCheckPhase = "make installcheck -j$NIX_BUILD_CORES -l$NIX_BUILD_CORES";
|
||||||
};
|
};
|
||||||
|
|
||||||
binaryTarball = buildPackages: nix: pkgs: let
|
binaryTarball = buildPackages: nix: pkgs:
|
||||||
inherit (pkgs) cacert;
|
let
|
||||||
installerClosureInfo = buildPackages.closureInfo { rootPaths = [ nix cacert ]; };
|
inherit (pkgs) cacert;
|
||||||
in
|
installerClosureInfo = buildPackages.closureInfo { rootPaths = [ nix cacert ]; };
|
||||||
|
in
|
||||||
|
|
||||||
buildPackages.runCommand "nix-binary-tarball-${version}"
|
buildPackages.runCommand "nix-binary-tarball-${version}"
|
||||||
{ #nativeBuildInputs = lib.optional (system != "aarch64-linux") shellcheck;
|
{ #nativeBuildInputs = lib.optional (system != "aarch64-linux") shellcheck;
|
||||||
meta.description = "Distribution-independent Nix bootstrap binaries for ${pkgs.system}";
|
meta.description = "Distribution-independent Nix bootstrap binaries for ${pkgs.system}";
|
||||||
}
|
}
|
||||||
''
|
''
|
||||||
cp ${installerClosureInfo}/registration $TMPDIR/reginfo
|
cp ${installerClosureInfo}/registration $TMPDIR/reginfo
|
||||||
cp ${./scripts/create-darwin-volume.sh} $TMPDIR/create-darwin-volume.sh
|
cp ${./scripts/create-darwin-volume.sh} $TMPDIR/create-darwin-volume.sh
|
||||||
substitute ${./scripts/install-nix-from-closure.sh} $TMPDIR/install \
|
substitute ${./scripts/install-nix-from-closure.sh} $TMPDIR/install \
|
||||||
--subst-var-by nix ${nix} \
|
--subst-var-by nix ${nix} \
|
||||||
--subst-var-by cacert ${cacert}
|
--subst-var-by cacert ${cacert}
|
||||||
|
|
||||||
substitute ${./scripts/install-darwin-multi-user.sh} $TMPDIR/install-darwin-multi-user.sh \
|
substitute ${./scripts/install-darwin-multi-user.sh} $TMPDIR/install-darwin-multi-user.sh \
|
||||||
--subst-var-by nix ${nix} \
|
--subst-var-by nix ${nix} \
|
||||||
--subst-var-by cacert ${cacert}
|
--subst-var-by cacert ${cacert}
|
||||||
substitute ${./scripts/install-systemd-multi-user.sh} $TMPDIR/install-systemd-multi-user.sh \
|
substitute ${./scripts/install-systemd-multi-user.sh} $TMPDIR/install-systemd-multi-user.sh \
|
||||||
--subst-var-by nix ${nix} \
|
--subst-var-by nix ${nix} \
|
||||||
--subst-var-by cacert ${cacert}
|
--subst-var-by cacert ${cacert}
|
||||||
substitute ${./scripts/install-multi-user.sh} $TMPDIR/install-multi-user \
|
substitute ${./scripts/install-multi-user.sh} $TMPDIR/install-multi-user \
|
||||||
--subst-var-by nix ${nix} \
|
--subst-var-by nix ${nix} \
|
||||||
--subst-var-by cacert ${cacert}
|
--subst-var-by cacert ${cacert}
|
||||||
|
|
||||||
if type -p shellcheck; then
|
if type -p shellcheck; then
|
||||||
# SC1090: Don't worry about not being able to find
|
# SC1090: Don't worry about not being able to find
|
||||||
# $nix/etc/profile.d/nix.sh
|
# $nix/etc/profile.d/nix.sh
|
||||||
shellcheck --exclude SC1090 $TMPDIR/install
|
shellcheck --exclude SC1090 $TMPDIR/install
|
||||||
shellcheck $TMPDIR/create-darwin-volume.sh
|
shellcheck $TMPDIR/create-darwin-volume.sh
|
||||||
shellcheck $TMPDIR/install-darwin-multi-user.sh
|
shellcheck $TMPDIR/install-darwin-multi-user.sh
|
||||||
shellcheck $TMPDIR/install-systemd-multi-user.sh
|
shellcheck $TMPDIR/install-systemd-multi-user.sh
|
||||||
|
|
||||||
# SC1091: Don't panic about not being able to source
|
# SC1091: Don't panic about not being able to source
|
||||||
# /etc/profile
|
# /etc/profile
|
||||||
# SC2002: Ignore "useless cat" "error", when loading
|
# SC2002: Ignore "useless cat" "error", when loading
|
||||||
# .reginfo, as the cat is a much cleaner
|
# .reginfo, as the cat is a much cleaner
|
||||||
# implementation, even though it is "useless"
|
# implementation, even though it is "useless"
|
||||||
# SC2116: Allow ROOT_HOME=$(echo ~root) for resolving
|
# SC2116: Allow ROOT_HOME=$(echo ~root) for resolving
|
||||||
# root's home directory
|
# root's home directory
|
||||||
shellcheck --external-sources \
|
shellcheck --external-sources \
|
||||||
--exclude SC1091,SC2002,SC2116 $TMPDIR/install-multi-user
|
--exclude SC1091,SC2002,SC2116 $TMPDIR/install-multi-user
|
||||||
fi
|
fi
|
||||||
|
|
||||||
chmod +x $TMPDIR/install
|
chmod +x $TMPDIR/install
|
||||||
chmod +x $TMPDIR/create-darwin-volume.sh
|
chmod +x $TMPDIR/create-darwin-volume.sh
|
||||||
chmod +x $TMPDIR/install-darwin-multi-user.sh
|
chmod +x $TMPDIR/install-darwin-multi-user.sh
|
||||||
chmod +x $TMPDIR/install-systemd-multi-user.sh
|
chmod +x $TMPDIR/install-systemd-multi-user.sh
|
||||||
chmod +x $TMPDIR/install-multi-user
|
chmod +x $TMPDIR/install-multi-user
|
||||||
dir=nix-${version}-${pkgs.system}
|
dir=nix-${version}-${pkgs.system}
|
||||||
fn=$out/$dir.tar.xz
|
fn=$out/$dir.tar.xz
|
||||||
mkdir -p $out/nix-support
|
mkdir -p $out/nix-support
|
||||||
echo "file binary-dist $fn" >> $out/nix-support/hydra-build-products
|
echo "file binary-dist $fn" >> $out/nix-support/hydra-build-products
|
||||||
tar cvfJ $fn \
|
tar cvfJ $fn \
|
||||||
--owner=0 --group=0 --mode=u+rw,uga+r \
|
--owner=0 --group=0 --mode=u+rw,uga+r \
|
||||||
--absolute-names \
|
--absolute-names \
|
||||||
--hard-dereference \
|
--hard-dereference \
|
||||||
--transform "s,$TMPDIR/install,$dir/install," \
|
--transform "s,$TMPDIR/install,$dir/install," \
|
||||||
--transform "s,$TMPDIR/create-darwin-volume.sh,$dir/create-darwin-volume.sh," \
|
--transform "s,$TMPDIR/create-darwin-volume.sh,$dir/create-darwin-volume.sh," \
|
||||||
--transform "s,$TMPDIR/reginfo,$dir/.reginfo," \
|
--transform "s,$TMPDIR/reginfo,$dir/.reginfo," \
|
||||||
--transform "s,$NIX_STORE,$dir/store,S" \
|
--transform "s,$NIX_STORE,$dir/store,S" \
|
||||||
$TMPDIR/install \
|
$TMPDIR/install \
|
||||||
$TMPDIR/create-darwin-volume.sh \
|
$TMPDIR/create-darwin-volume.sh \
|
||||||
$TMPDIR/install-darwin-multi-user.sh \
|
$TMPDIR/install-darwin-multi-user.sh \
|
||||||
$TMPDIR/install-systemd-multi-user.sh \
|
$TMPDIR/install-systemd-multi-user.sh \
|
||||||
$TMPDIR/install-multi-user \
|
$TMPDIR/install-multi-user \
|
||||||
$TMPDIR/reginfo \
|
$TMPDIR/reginfo \
|
||||||
$(cat ${installerClosureInfo}/store-paths)
|
$(cat ${installerClosureInfo}/store-paths)
|
||||||
'';
|
|
||||||
|
|
||||||
overlayFor = getStdenv: final: prev:
|
|
||||||
let currentStdenv = getStdenv final; in
|
|
||||||
{
|
|
||||||
nixStable = prev.nix;
|
|
||||||
|
|
||||||
# Forward from the previous stage as we don’t want it to pick the lowdown override
|
|
||||||
nixUnstable = prev.nixUnstable;
|
|
||||||
|
|
||||||
nix = with final; with commonDeps pkgs; currentStdenv.mkDerivation {
|
|
||||||
name = "nix-${version}";
|
|
||||||
inherit version;
|
|
||||||
|
|
||||||
src = self;
|
|
||||||
|
|
||||||
VERSION_SUFFIX = versionSuffix;
|
|
||||||
|
|
||||||
outputs = [ "out" "dev" "doc" ];
|
|
||||||
|
|
||||||
nativeBuildInputs = nativeBuildDeps;
|
|
||||||
buildInputs = buildDeps ++ awsDeps;
|
|
||||||
|
|
||||||
propagatedBuildInputs = propagatedDeps;
|
|
||||||
|
|
||||||
preConfigure =
|
|
||||||
''
|
|
||||||
# Copy libboost_context so we don't get all of Boost in our closure.
|
|
||||||
# https://github.com/NixOS/nixpkgs/issues/45462
|
|
||||||
mkdir -p $out/lib
|
|
||||||
cp -pd ${boost}/lib/{libboost_context*,libboost_thread*,libboost_system*} $out/lib
|
|
||||||
rm -f $out/lib/*.a
|
|
||||||
${lib.optionalString currentStdenv.isLinux ''
|
|
||||||
chmod u+w $out/lib/*.so.*
|
|
||||||
patchelf --set-rpath $out/lib:${currentStdenv.cc.cc.lib}/lib $out/lib/libboost_thread.so.*
|
|
||||||
''}
|
|
||||||
'';
|
|
||||||
|
|
||||||
configureFlags = configureFlags ++
|
|
||||||
[ "--sysconfdir=/etc" ];
|
|
||||||
|
|
||||||
enableParallelBuilding = true;
|
|
||||||
|
|
||||||
makeFlags = "profiledir=$(out)/etc/profile.d PRECOMPILE_HEADERS=1";
|
|
||||||
|
|
||||||
doCheck = true;
|
|
||||||
|
|
||||||
installFlags = "sysconfdir=$(out)/etc";
|
|
||||||
|
|
||||||
postInstall = ''
|
|
||||||
mkdir -p $doc/nix-support
|
|
||||||
echo "doc manual $doc/share/doc/nix/manual" >> $doc/nix-support/hydra-build-products
|
|
||||||
'';
|
'';
|
||||||
|
|
||||||
doInstallCheck = true;
|
overlayFor = getStdenv: final: prev:
|
||||||
installCheckFlags = "sysconfdir=$(out)/etc";
|
let currentStdenv = getStdenv final; in
|
||||||
|
{
|
||||||
|
nixStable = prev.nix;
|
||||||
|
|
||||||
separateDebugInfo = true;
|
# Forward from the previous stage as we don’t want it to pick the lowdown override
|
||||||
|
nixUnstable = prev.nixUnstable;
|
||||||
|
|
||||||
strictDeps = true;
|
nix = with final; with commonDeps pkgs; currentStdenv.mkDerivation {
|
||||||
|
name = "nix-${version}";
|
||||||
passthru.perl-bindings = with final; currentStdenv.mkDerivation {
|
inherit version;
|
||||||
name = "nix-perl-${version}";
|
|
||||||
|
|
||||||
src = self;
|
src = self;
|
||||||
|
|
||||||
nativeBuildInputs =
|
VERSION_SUFFIX = versionSuffix;
|
||||||
[ buildPackages.autoconf-archive
|
|
||||||
buildPackages.autoreconfHook
|
|
||||||
buildPackages.pkg-config
|
|
||||||
];
|
|
||||||
|
|
||||||
buildInputs =
|
outputs = [ "out" "dev" "doc" ];
|
||||||
[ nix
|
|
||||||
curl
|
|
||||||
bzip2
|
|
||||||
xz
|
|
||||||
pkgs.perl
|
|
||||||
boost
|
|
||||||
]
|
|
||||||
++ lib.optional (currentStdenv.isLinux || currentStdenv.isDarwin) libsodium
|
|
||||||
++ lib.optional currentStdenv.isDarwin darwin.apple_sdk.frameworks.Security;
|
|
||||||
|
|
||||||
configureFlags = ''
|
nativeBuildInputs = nativeBuildDeps;
|
||||||
--with-dbi=${perlPackages.DBI}/${pkgs.perl.libPrefix}
|
buildInputs = buildDeps ++ awsDeps;
|
||||||
--with-dbd-sqlite=${perlPackages.DBDSQLite}/${pkgs.perl.libPrefix}
|
|
||||||
'';
|
propagatedBuildInputs = propagatedDeps;
|
||||||
|
|
||||||
|
disallowedReferences = [ boost ];
|
||||||
|
|
||||||
|
preConfigure =
|
||||||
|
''
|
||||||
|
# Copy libboost_context so we don't get all of Boost in our closure.
|
||||||
|
# https://github.com/NixOS/nixpkgs/issues/45462
|
||||||
|
mkdir -p $out/lib
|
||||||
|
cp -pd ${boost}/lib/{libboost_context*,libboost_thread*,libboost_system*} $out/lib
|
||||||
|
rm -f $out/lib/*.a
|
||||||
|
${lib.optionalString currentStdenv.isLinux ''
|
||||||
|
chmod u+w $out/lib/*.so.*
|
||||||
|
patchelf --set-rpath $out/lib:${currentStdenv.cc.cc.lib}/lib $out/lib/libboost_thread.so.*
|
||||||
|
''}
|
||||||
|
${lib.optionalString currentStdenv.isDarwin ''
|
||||||
|
for LIB in $out/lib/*.dylib; do
|
||||||
|
chmod u+w $LIB
|
||||||
|
install_name_tool -id $LIB $LIB
|
||||||
|
done
|
||||||
|
install_name_tool -change ${boost}/lib/libboost_system.dylib $out/lib/libboost_system.dylib $out/lib/libboost_thread.dylib
|
||||||
|
''}
|
||||||
|
'';
|
||||||
|
|
||||||
|
configureFlags = configureFlags ++
|
||||||
|
[ "--sysconfdir=/etc" ];
|
||||||
|
|
||||||
enableParallelBuilding = true;
|
enableParallelBuilding = true;
|
||||||
|
|
||||||
postUnpack = "sourceRoot=$sourceRoot/perl";
|
makeFlags = "profiledir=$(out)/etc/profile.d PRECOMPILE_HEADERS=1";
|
||||||
|
|
||||||
|
doCheck = true;
|
||||||
|
|
||||||
|
installFlags = "sysconfdir=$(out)/etc";
|
||||||
|
|
||||||
|
postInstall = ''
|
||||||
|
mkdir -p $doc/nix-support
|
||||||
|
echo "doc manual $doc/share/doc/nix/manual" >> $doc/nix-support/hydra-build-products
|
||||||
|
${lib.optionalString currentStdenv.isDarwin ''
|
||||||
|
install_name_tool \
|
||||||
|
-change ${boost}/lib/libboost_context.dylib \
|
||||||
|
$out/lib/libboost_context.dylib \
|
||||||
|
$out/lib/libnixutil.dylib
|
||||||
|
''}
|
||||||
|
'';
|
||||||
|
|
||||||
|
doInstallCheck = true;
|
||||||
|
installCheckFlags = "sysconfdir=$(out)/etc";
|
||||||
|
|
||||||
|
separateDebugInfo = true;
|
||||||
|
|
||||||
|
strictDeps = true;
|
||||||
|
|
||||||
|
passthru.perl-bindings = with final; currentStdenv.mkDerivation {
|
||||||
|
name = "nix-perl-${version}";
|
||||||
|
|
||||||
|
src = self;
|
||||||
|
|
||||||
|
nativeBuildInputs =
|
||||||
|
[ buildPackages.autoconf-archive
|
||||||
|
buildPackages.autoreconfHook
|
||||||
|
buildPackages.pkg-config
|
||||||
|
];
|
||||||
|
|
||||||
|
buildInputs =
|
||||||
|
[ nix
|
||||||
|
curl
|
||||||
|
bzip2
|
||||||
|
xz
|
||||||
|
pkgs.perl
|
||||||
|
boost
|
||||||
|
]
|
||||||
|
++ lib.optional (currentStdenv.isLinux || currentStdenv.isDarwin) libsodium
|
||||||
|
++ lib.optional currentStdenv.isDarwin darwin.apple_sdk.frameworks.Security;
|
||||||
|
|
||||||
|
configureFlags = ''
|
||||||
|
--with-dbi=${perlPackages.DBI}/${pkgs.perl.libPrefix}
|
||||||
|
--with-dbd-sqlite=${perlPackages.DBDSQLite}/${pkgs.perl.libPrefix}
|
||||||
|
'';
|
||||||
|
|
||||||
|
enableParallelBuilding = true;
|
||||||
|
|
||||||
|
postUnpack = "sourceRoot=$sourceRoot/perl";
|
||||||
|
};
|
||||||
|
|
||||||
};
|
};
|
||||||
|
|
||||||
|
lowdown-nix = with final; currentStdenv.mkDerivation rec {
|
||||||
|
name = "lowdown-0.9.0";
|
||||||
|
|
||||||
|
src = lowdown-src;
|
||||||
|
|
||||||
|
outputs = [ "out" "bin" "dev" ];
|
||||||
|
|
||||||
|
nativeBuildInputs = [ buildPackages.which ];
|
||||||
|
|
||||||
|
configurePhase = ''
|
||||||
|
${if (currentStdenv.isDarwin && currentStdenv.isAarch64) then "echo \"HAVE_SANDBOX_INIT=false\" > configure.local" else ""}
|
||||||
|
./configure \
|
||||||
|
PREFIX=${placeholder "dev"} \
|
||||||
|
BINDIR=${placeholder "bin"}/bin
|
||||||
|
'';
|
||||||
|
};
|
||||||
};
|
};
|
||||||
|
|
||||||
lowdown-nix = with final; currentStdenv.mkDerivation rec {
|
|
||||||
name = "lowdown-0.9.0";
|
|
||||||
|
|
||||||
src = lowdown-src;
|
|
||||||
|
|
||||||
outputs = [ "out" "bin" "dev" ];
|
|
||||||
|
|
||||||
nativeBuildInputs = [ buildPackages.which ];
|
|
||||||
|
|
||||||
configurePhase = ''
|
|
||||||
${if (currentStdenv.isDarwin && currentStdenv.isAarch64) then "echo \"HAVE_SANDBOX_INIT=false\" > configure.local" else ""}
|
|
||||||
./configure \
|
|
||||||
PREFIX=${placeholder "dev"} \
|
|
||||||
BINDIR=${placeholder "bin"}/bin
|
|
||||||
'';
|
|
||||||
};
|
|
||||||
};
|
|
||||||
|
|
||||||
in {
|
in {
|
||||||
|
|
||||||
# A Nixpkgs overlay that overrides the 'nix' and
|
# A Nixpkgs overlay that overrides the 'nix' and
|
||||||
|
@ -429,19 +447,7 @@
|
||||||
installerScriptForGHA = installScriptFor [ "x86_64-linux" "x86_64-darwin" "armv6l-linux" "armv7l-linux"];
|
installerScriptForGHA = installScriptFor [ "x86_64-linux" "x86_64-darwin" "armv6l-linux" "armv7l-linux"];
|
||||||
|
|
||||||
# docker image with Nix inside
|
# docker image with Nix inside
|
||||||
dockerImage = nixpkgs.lib.genAttrs linux64BitSystems (system:
|
dockerImage = nixpkgs.lib.genAttrs linux64BitSystems (system: self.packages.${system}.dockerImage);
|
||||||
let
|
|
||||||
pkgs = nixpkgsFor.${system};
|
|
||||||
image = import ./docker.nix { inherit pkgs; tag = version; };
|
|
||||||
in pkgs.runCommand "docker-image-tarball-${version}"
|
|
||||||
{ meta.description = "Docker image with Nix for ${system}";
|
|
||||||
}
|
|
||||||
''
|
|
||||||
mkdir -p $out/nix-support
|
|
||||||
image=$out/image.tar.gz
|
|
||||||
ln -s ${image} $image
|
|
||||||
echo "file binary-dist $image" >> $out/nix-support/hydra-build-products
|
|
||||||
'');
|
|
||||||
|
|
||||||
# Line coverage analysis.
|
# Line coverage analysis.
|
||||||
coverage =
|
coverage =
|
||||||
|
@ -503,29 +509,23 @@
|
||||||
inherit (self) overlay;
|
inherit (self) overlay;
|
||||||
});
|
});
|
||||||
|
|
||||||
/*
|
# Make sure that nix-env still produces the exact same result
|
||||||
# Check whether we can still evaluate all of Nixpkgs.
|
# on a particular version of Nixpkgs.
|
||||||
tests.evalNixpkgs =
|
tests.evalNixpkgs =
|
||||||
import (nixpkgs + "/pkgs/top-level/make-tarball.nix") {
|
|
||||||
# FIXME: fix pkgs/top-level/make-tarball.nix in NixOS to not require a revCount.
|
|
||||||
inherit nixpkgs;
|
|
||||||
pkgs = nixpkgsFor.x86_64-linux;
|
|
||||||
officialRelease = false;
|
|
||||||
};
|
|
||||||
|
|
||||||
# Check whether we can still evaluate NixOS.
|
|
||||||
tests.evalNixOS =
|
|
||||||
with nixpkgsFor.x86_64-linux;
|
with nixpkgsFor.x86_64-linux;
|
||||||
runCommand "eval-nixos" { buildInputs = [ nix ]; }
|
runCommand "eval-nixos" { buildInputs = [ nix ]; }
|
||||||
''
|
''
|
||||||
export NIX_STATE_DIR=$TMPDIR
|
type -p nix-env
|
||||||
|
# Note: we're filtering out nixos-install-tools because https://github.com/NixOS/nixpkgs/pull/153594#issuecomment-1020530593.
|
||||||
nix-instantiate ${nixpkgs}/nixos/release-combined.nix -A tested --dry-run \
|
time nix-env --store dummy:// -f ${nixpkgs-regression} -qaP --drv-path | sort | grep -v nixos-install-tools > packages
|
||||||
--arg nixpkgs '{ outPath = ${nixpkgs}; revCount = 123; shortRev = "abcdefgh"; }'
|
[[ $(sha1sum < packages | cut -c1-40) = ff451c521e61e4fe72bdbe2d0ca5d1809affa733 ]]
|
||||||
|
mkdir $out
|
||||||
touch $out
|
|
||||||
'';
|
'';
|
||||||
*/
|
|
||||||
|
metrics.nixpkgs = import "${nixpkgs-regression}/pkgs/top-level/metrics.nix" {
|
||||||
|
pkgs = nixpkgsFor.x86_64-linux;
|
||||||
|
nixpkgs = nixpkgs-regression;
|
||||||
|
};
|
||||||
|
|
||||||
installTests = forAllSystems (system:
|
installTests = forAllSystems (system:
|
||||||
let pkgs = nixpkgsFor.${system}; in
|
let pkgs = nixpkgsFor.${system}; in
|
||||||
|
@ -547,9 +547,9 @@
|
||||||
binaryTarball = self.hydraJobs.binaryTarball.${system};
|
binaryTarball = self.hydraJobs.binaryTarball.${system};
|
||||||
perlBindings = self.hydraJobs.perlBindings.${system};
|
perlBindings = self.hydraJobs.perlBindings.${system};
|
||||||
installTests = self.hydraJobs.installTests.${system};
|
installTests = self.hydraJobs.installTests.${system};
|
||||||
} // (if system == "x86_64-linux" then {
|
} // (nixpkgs.lib.optionalAttrs (builtins.elem system linux64BitSystems)) {
|
||||||
dockerImage = self.hydraJobs.dockerImage.${system};
|
dockerImage = self.hydraJobs.dockerImage.${system};
|
||||||
} else {}));
|
});
|
||||||
|
|
||||||
packages = forAllSystems (system: {
|
packages = forAllSystems (system: {
|
||||||
inherit (nixpkgsFor.${system}) nix;
|
inherit (nixpkgsFor.${system}) nix;
|
||||||
|
@ -594,6 +594,20 @@
|
||||||
|
|
||||||
hardeningDisable = [ "pie" ];
|
hardeningDisable = [ "pie" ];
|
||||||
};
|
};
|
||||||
|
dockerImage =
|
||||||
|
let
|
||||||
|
pkgs = nixpkgsFor.${system};
|
||||||
|
image = import ./docker.nix { inherit pkgs; tag = version; };
|
||||||
|
in
|
||||||
|
pkgs.runCommand
|
||||||
|
"docker-image-tarball-${version}"
|
||||||
|
{ meta.description = "Docker image with Nix for ${system}"; }
|
||||||
|
''
|
||||||
|
mkdir -p $out/nix-support
|
||||||
|
image=$out/image.tar.gz
|
||||||
|
ln -s ${image} $image
|
||||||
|
echo "file binary-dist $image" >> $out/nix-support/hydra-build-products
|
||||||
|
'';
|
||||||
} // builtins.listToAttrs (map (crossSystem: {
|
} // builtins.listToAttrs (map (crossSystem: {
|
||||||
name = "nix-${crossSystem}";
|
name = "nix-${crossSystem}";
|
||||||
value = let
|
value = let
|
||||||
|
|
399
nix-rust/Cargo.lock
generated
399
nix-rust/Cargo.lock
generated
|
@ -1,399 +0,0 @@
|
||||||
# This file is automatically @generated by Cargo.
|
|
||||||
# It is not intended for manual editing.
|
|
||||||
[[package]]
|
|
||||||
name = "assert_matches"
|
|
||||||
version = "1.3.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "autocfg"
|
|
||||||
version = "0.1.7"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "bit-set"
|
|
||||||
version = "0.5.1"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
dependencies = [
|
|
||||||
"bit-vec 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "bit-vec"
|
|
||||||
version = "0.5.1"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "bitflags"
|
|
||||||
version = "1.2.1"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "byteorder"
|
|
||||||
version = "1.3.2"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "c2-chacha"
|
|
||||||
version = "0.2.3"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
dependencies = [
|
|
||||||
"ppv-lite86 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "cfg-if"
|
|
||||||
version = "0.1.10"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "cloudabi"
|
|
||||||
version = "0.0.3"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
dependencies = [
|
|
||||||
"bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "fnv"
|
|
||||||
version = "1.0.6"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "fuchsia-cprng"
|
|
||||||
version = "0.1.1"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "getrandom"
|
|
||||||
version = "0.1.13"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
dependencies = [
|
|
||||||
"cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
"libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
"wasi 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "hex"
|
|
||||||
version = "0.3.2"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "lazy_static"
|
|
||||||
version = "1.4.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "libc"
|
|
||||||
version = "0.2.66"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "nix-rust"
|
|
||||||
version = "0.1.0"
|
|
||||||
dependencies = [
|
|
||||||
"assert_matches 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
"hex 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
"lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
"libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
"proptest 0.9.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "num-traits"
|
|
||||||
version = "0.2.10"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
dependencies = [
|
|
||||||
"autocfg 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "ppv-lite86"
|
|
||||||
version = "0.2.6"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "proptest"
|
|
||||||
version = "0.9.4"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
dependencies = [
|
|
||||||
"bit-set 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
"bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
"byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
"lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
"num-traits 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
"quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
"rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
"rand_chacha 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
"rand_xorshift 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
"regex-syntax 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
"rusty-fork 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
"tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "quick-error"
|
|
||||||
version = "1.2.2"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "rand"
|
|
||||||
version = "0.6.5"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
dependencies = [
|
|
||||||
"autocfg 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
"libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
"rand_chacha 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
"rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
"rand_hc 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
"rand_isaac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
"rand_jitter 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
"rand_os 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
"rand_pcg 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
"rand_xorshift 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
"winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "rand"
|
|
||||||
version = "0.7.2"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
dependencies = [
|
|
||||||
"getrandom 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
"libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
"rand_chacha 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
"rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
"rand_hc 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "rand_chacha"
|
|
||||||
version = "0.1.1"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
dependencies = [
|
|
||||||
"autocfg 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
"rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "rand_chacha"
|
|
||||||
version = "0.2.1"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
dependencies = [
|
|
||||||
"c2-chacha 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
"rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "rand_core"
|
|
||||||
version = "0.3.1"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
dependencies = [
|
|
||||||
"rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "rand_core"
|
|
||||||
version = "0.4.2"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "rand_core"
|
|
||||||
version = "0.5.1"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
dependencies = [
|
|
||||||
"getrandom 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "rand_hc"
|
|
||||||
version = "0.1.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
dependencies = [
|
|
||||||
"rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "rand_hc"
|
|
||||||
version = "0.2.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
dependencies = [
|
|
||||||
"rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "rand_isaac"
|
|
||||||
version = "0.1.1"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
dependencies = [
|
|
||||||
"rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "rand_jitter"
|
|
||||||
version = "0.1.4"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
dependencies = [
|
|
||||||
"libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
"rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
"winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "rand_os"
|
|
||||||
version = "0.1.3"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
dependencies = [
|
|
||||||
"cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
"fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
"libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
"rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
"rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
"winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "rand_pcg"
|
|
||||||
version = "0.1.2"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
dependencies = [
|
|
||||||
"autocfg 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
"rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "rand_xorshift"
|
|
||||||
version = "0.1.1"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
dependencies = [
|
|
||||||
"rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "rdrand"
|
|
||||||
version = "0.4.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
dependencies = [
|
|
||||||
"rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "redox_syscall"
|
|
||||||
version = "0.1.56"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "regex-syntax"
|
|
||||||
version = "0.6.12"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "remove_dir_all"
|
|
||||||
version = "0.5.2"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
dependencies = [
|
|
||||||
"winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "rusty-fork"
|
|
||||||
version = "0.2.2"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
dependencies = [
|
|
||||||
"fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
"quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
"tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
"wait-timeout 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "tempfile"
|
|
||||||
version = "3.1.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
dependencies = [
|
|
||||||
"cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
"libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
"rand 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
"redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
"remove_dir_all 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
"winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "wait-timeout"
|
|
||||||
version = "0.2.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
dependencies = [
|
|
||||||
"libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "wasi"
|
|
||||||
version = "0.7.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "winapi"
|
|
||||||
version = "0.3.8"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
dependencies = [
|
|
||||||
"winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
"winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "winapi-i686-pc-windows-gnu"
|
|
||||||
version = "0.4.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "winapi-x86_64-pc-windows-gnu"
|
|
||||||
version = "0.4.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
|
|
||||||
[metadata]
|
|
||||||
"checksum assert_matches 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7deb0a829ca7bcfaf5da70b073a8d128619259a7be8216a355e23f00763059e5"
|
|
||||||
"checksum autocfg 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "1d49d90015b3c36167a20fe2810c5cd875ad504b39cff3d4eae7977e6b7c1cb2"
|
|
||||||
"checksum bit-set 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e84c238982c4b1e1ee668d136c510c67a13465279c0cb367ea6baf6310620a80"
|
|
||||||
"checksum bit-vec 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "f59bbe95d4e52a6398ec21238d31577f2b28a9d86807f06ca59d191d8440d0bb"
|
|
||||||
"checksum bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693"
|
|
||||||
"checksum byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "a7c3dd8985a7111efc5c80b44e23ecdd8c007de8ade3b96595387e812b957cf5"
|
|
||||||
"checksum c2-chacha 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "214238caa1bf3a496ec3392968969cab8549f96ff30652c9e56885329315f6bb"
|
|
||||||
"checksum cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822"
|
|
||||||
"checksum cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f"
|
|
||||||
"checksum fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "2fad85553e09a6f881f739c29f0b00b0f01357c743266d478b68951ce23285f3"
|
|
||||||
"checksum fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba"
|
|
||||||
"checksum getrandom 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)" = "e7db7ca94ed4cd01190ceee0d8a8052f08a247aa1b469a7f68c6a3b71afcf407"
|
|
||||||
"checksum hex 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "805026a5d0141ffc30abb3be3173848ad46a1b1664fe632428479619a3644d77"
|
|
||||||
"checksum lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
|
|
||||||
"checksum libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)" = "d515b1f41455adea1313a4a2ac8a8a477634fbae63cc6100e3aebb207ce61558"
|
|
||||||
"checksum num-traits 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)" = "d4c81ffc11c212fa327657cb19dd85eb7419e163b5b076bede2bdb5c974c07e4"
|
|
||||||
"checksum ppv-lite86 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "74490b50b9fbe561ac330df47c08f3f33073d2d00c150f719147d7c54522fa1b"
|
|
||||||
"checksum proptest 0.9.4 (registry+https://github.com/rust-lang/crates.io-index)" = "cf147e022eacf0c8a054ab864914a7602618adba841d800a9a9868a5237a529f"
|
|
||||||
"checksum quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9274b940887ce9addde99c4eee6b5c44cc494b182b97e73dc8ffdcb3397fd3f0"
|
|
||||||
"checksum rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)" = "6d71dacdc3c88c1fde3885a3be3fbab9f35724e6ce99467f7d9c5026132184ca"
|
|
||||||
"checksum rand 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)" = "3ae1b169243eaf61759b8475a998f0a385e42042370f3a7dbaf35246eacc8412"
|
|
||||||
"checksum rand_chacha 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "556d3a1ca6600bfcbab7c7c91ccb085ac7fbbcd70e008a98742e7847f4f7bcef"
|
|
||||||
"checksum rand_chacha 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "03a2a90da8c7523f554344f921aa97283eadf6ac484a6d2a7d0212fa7f8d6853"
|
|
||||||
"checksum rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b"
|
|
||||||
"checksum rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc"
|
|
||||||
"checksum rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19"
|
|
||||||
"checksum rand_hc 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7b40677c7be09ae76218dc623efbf7b18e34bced3f38883af07bb75630a21bc4"
|
|
||||||
"checksum rand_hc 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c"
|
|
||||||
"checksum rand_isaac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ded997c9d5f13925be2a6fd7e66bf1872597f759fd9dd93513dd7e92e5a5ee08"
|
|
||||||
"checksum rand_jitter 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "1166d5c91dc97b88d1decc3285bb0a99ed84b05cfd0bc2341bdf2d43fc41e39b"
|
|
||||||
"checksum rand_os 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "7b75f676a1e053fc562eafbb47838d67c84801e38fc1ba459e8f180deabd5071"
|
|
||||||
"checksum rand_pcg 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "abf9b09b01790cfe0364f52bf32995ea3c39f4d2dd011eac241d2914146d0b44"
|
|
||||||
"checksum rand_xorshift 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cbf7e9e623549b0e21f6e97cf8ecf247c1a8fd2e8a992ae265314300b2455d5c"
|
|
||||||
"checksum rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2"
|
|
||||||
"checksum redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)" = "2439c63f3f6139d1b57529d16bc3b8bb855230c8efcc5d3a896c8bea7c3b1e84"
|
|
||||||
"checksum regex-syntax 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)" = "11a7e20d1cce64ef2fed88b66d347f88bd9babb82845b2b858f3edbf59a4f716"
|
|
||||||
"checksum remove_dir_all 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "4a83fa3702a688b9359eccba92d153ac33fd2e8462f9e0e3fdf155239ea7792e"
|
|
||||||
"checksum rusty-fork 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "3dd93264e10c577503e926bd1430193eeb5d21b059148910082245309b424fae"
|
|
||||||
"checksum tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6e24d9338a0a5be79593e2fa15a648add6138caa803e2d5bc782c371732ca9"
|
|
||||||
"checksum wait-timeout 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9f200f5b12eb75f8c1ed65abd4b2db8a6e1b138a20de009dacee265a2498f3f6"
|
|
||||||
"checksum wasi 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b89c3ce4ce14bdc6fb6beaf9ec7928ca331de5df7e5ea278375642a2f478570d"
|
|
||||||
"checksum winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)" = "8093091eeb260906a183e6ae1abdba2ef5ef2257a21801128899c3fc699229c6"
|
|
||||||
"checksum winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
|
|
||||||
"checksum winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
|
|
|
@ -1,23 +0,0 @@
|
||||||
[package]
|
|
||||||
name = "nix-rust"
|
|
||||||
version = "0.1.0"
|
|
||||||
authors = ["Eelco Dolstra <edolstra@gmail.com>"]
|
|
||||||
edition = "2018"
|
|
||||||
|
|
||||||
[lib]
|
|
||||||
name = "nixrust"
|
|
||||||
crate-type = ["cdylib"]
|
|
||||||
|
|
||||||
[dependencies]
|
|
||||||
libc = "0.2"
|
|
||||||
#futures-preview = { version = "=0.3.0-alpha.19" }
|
|
||||||
#hyper = "0.13.0-alpha.4"
|
|
||||||
#http = "0.1"
|
|
||||||
#tokio = { version = "0.2.0-alpha.6", default-features = false, features = ["rt-full"] }
|
|
||||||
lazy_static = "1.4"
|
|
||||||
#byteorder = "1.3"
|
|
||||||
|
|
||||||
[dev-dependencies]
|
|
||||||
hex = "0.3"
|
|
||||||
assert_matches = "1.3"
|
|
||||||
proptest = "0.9"
|
|
|
@ -1,48 +0,0 @@
|
||||||
ifeq ($(OPTIMIZE), 1)
|
|
||||||
RUST_MODE = --release
|
|
||||||
RUST_DIR = release
|
|
||||||
else
|
|
||||||
RUST_MODE =
|
|
||||||
RUST_DIR = debug
|
|
||||||
endif
|
|
||||||
|
|
||||||
libnixrust_PATH := $(d)/target/$(RUST_DIR)/libnixrust.$(SO_EXT)
|
|
||||||
libnixrust_INSTALL_PATH := $(libdir)/libnixrust.$(SO_EXT)
|
|
||||||
libnixrust_LDFLAGS_USE := -L$(d)/target/$(RUST_DIR) -lnixrust
|
|
||||||
libnixrust_LDFLAGS_USE_INSTALLED := -L$(libdir) -lnixrust
|
|
||||||
|
|
||||||
ifdef HOST_LINUX
|
|
||||||
libnixrust_LDFLAGS_USE += -ldl
|
|
||||||
libnixrust_LDFLAGS_USE_INSTALLED += -ldl
|
|
||||||
endif
|
|
||||||
|
|
||||||
ifdef HOST_DARWIN
|
|
||||||
libnixrust_BUILD_FLAGS = NIX_LDFLAGS="-undefined dynamic_lookup"
|
|
||||||
else
|
|
||||||
libnixrust_LDFLAGS_USE += -Wl,-rpath,$(abspath $(d)/target/$(RUST_DIR))
|
|
||||||
libnixrust_LDFLAGS_USE_INSTALLED += -Wl,-rpath,$(libdir)
|
|
||||||
endif
|
|
||||||
|
|
||||||
$(libnixrust_PATH): $(call rwildcard, $(d)/src, *.rs) $(d)/Cargo.toml
|
|
||||||
$(trace-gen) cd nix-rust && CARGO_HOME=$$(if [[ -d vendor ]]; then echo vendor; fi) \
|
|
||||||
$(libnixrust_BUILD_FLAGS) \
|
|
||||||
cargo build $(RUST_MODE) $$(if [[ -d vendor ]]; then echo --offline; fi) \
|
|
||||||
&& touch target/$(RUST_DIR)/libnixrust.$(SO_EXT)
|
|
||||||
|
|
||||||
$(libnixrust_INSTALL_PATH): $(libnixrust_PATH)
|
|
||||||
$(target-gen) cp $^ $@
|
|
||||||
ifdef HOST_DARWIN
|
|
||||||
install_name_tool -id $@ $@
|
|
||||||
endif
|
|
||||||
|
|
||||||
clean: clean-rust
|
|
||||||
|
|
||||||
clean-rust:
|
|
||||||
$(suppress) rm -rfv nix-rust/target
|
|
||||||
|
|
||||||
ifndef HOST_DARWIN
|
|
||||||
check: rust-tests
|
|
||||||
|
|
||||||
rust-tests:
|
|
||||||
$(trace-test) cd nix-rust && CARGO_HOME=$$(if [[ -d vendor ]]; then echo vendor; fi) cargo test --release $$(if [[ -d vendor ]]; then echo --offline; fi)
|
|
||||||
endif
|
|
|
@ -1,77 +0,0 @@
|
||||||
use super::{error, store::path, store::StorePath, util};
|
|
||||||
|
|
||||||
#[no_mangle]
|
|
||||||
pub unsafe extern "C" fn ffi_String_new(s: &str, out: *mut String) {
|
|
||||||
// FIXME: check whether 's' is valid UTF-8?
|
|
||||||
out.write(s.to_string())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[no_mangle]
|
|
||||||
pub unsafe extern "C" fn ffi_String_drop(self_: *mut String) {
|
|
||||||
std::ptr::drop_in_place(self_);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[no_mangle]
|
|
||||||
pub extern "C" fn ffi_StorePath_new(
|
|
||||||
path: &str,
|
|
||||||
store_dir: &str,
|
|
||||||
) -> Result<StorePath, error::CppException> {
|
|
||||||
StorePath::new(std::path::Path::new(path), std::path::Path::new(store_dir))
|
|
||||||
.map_err(|err| err.into())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[no_mangle]
|
|
||||||
pub extern "C" fn ffi_StorePath_new2(
|
|
||||||
hash: &[u8; crate::store::path::STORE_PATH_HASH_BYTES],
|
|
||||||
name: &str,
|
|
||||||
) -> Result<StorePath, error::CppException> {
|
|
||||||
StorePath::from_parts(*hash, name).map_err(|err| err.into())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[no_mangle]
|
|
||||||
pub extern "C" fn ffi_StorePath_fromBaseName(
|
|
||||||
base_name: &str,
|
|
||||||
) -> Result<StorePath, error::CppException> {
|
|
||||||
StorePath::new_from_base_name(base_name).map_err(|err| err.into())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[no_mangle]
|
|
||||||
pub unsafe extern "C" fn ffi_StorePath_drop(self_: *mut StorePath) {
|
|
||||||
std::ptr::drop_in_place(self_);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[no_mangle]
|
|
||||||
pub extern "C" fn ffi_StorePath_to_string(self_: &StorePath) -> Vec<u8> {
|
|
||||||
let mut buf = vec![0; path::STORE_PATH_HASH_CHARS + 1 + self_.name.name().len()];
|
|
||||||
util::base32::encode_into(self_.hash.hash(), &mut buf[0..path::STORE_PATH_HASH_CHARS]);
|
|
||||||
buf[path::STORE_PATH_HASH_CHARS] = b'-';
|
|
||||||
buf[path::STORE_PATH_HASH_CHARS + 1..].clone_from_slice(self_.name.name().as_bytes());
|
|
||||||
buf
|
|
||||||
}
|
|
||||||
|
|
||||||
#[no_mangle]
|
|
||||||
pub extern "C" fn ffi_StorePath_less_than(a: &StorePath, b: &StorePath) -> bool {
|
|
||||||
a < b
|
|
||||||
}
|
|
||||||
|
|
||||||
#[no_mangle]
|
|
||||||
pub extern "C" fn ffi_StorePath_eq(a: &StorePath, b: &StorePath) -> bool {
|
|
||||||
a == b
|
|
||||||
}
|
|
||||||
|
|
||||||
#[no_mangle]
|
|
||||||
pub extern "C" fn ffi_StorePath_clone(self_: &StorePath) -> StorePath {
|
|
||||||
self_.clone()
|
|
||||||
}
|
|
||||||
|
|
||||||
#[no_mangle]
|
|
||||||
pub extern "C" fn ffi_StorePath_name(self_: &StorePath) -> &str {
|
|
||||||
self_.name.name()
|
|
||||||
}
|
|
||||||
|
|
||||||
#[no_mangle]
|
|
||||||
pub extern "C" fn ffi_StorePath_hash_data(
|
|
||||||
self_: &StorePath,
|
|
||||||
) -> &[u8; crate::store::path::STORE_PATH_HASH_BYTES] {
|
|
||||||
self_.hash.hash()
|
|
||||||
}
|
|
|
@ -1,118 +0,0 @@
|
||||||
use std::fmt;
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub enum Error {
|
|
||||||
InvalidPath(crate::store::StorePath),
|
|
||||||
BadStorePath(std::path::PathBuf),
|
|
||||||
NotInStore(std::path::PathBuf),
|
|
||||||
BadNarInfo,
|
|
||||||
BadBase32,
|
|
||||||
StorePathNameEmpty,
|
|
||||||
StorePathNameTooLong,
|
|
||||||
BadStorePathName,
|
|
||||||
NarSizeFieldTooBig,
|
|
||||||
BadNarString,
|
|
||||||
BadNarPadding,
|
|
||||||
BadNarVersionMagic,
|
|
||||||
MissingNarOpenTag,
|
|
||||||
MissingNarCloseTag,
|
|
||||||
MissingNarField,
|
|
||||||
BadNarField(String),
|
|
||||||
BadExecutableField,
|
|
||||||
IOError(std::io::Error),
|
|
||||||
#[cfg(unused)]
|
|
||||||
HttpError(hyper::error::Error),
|
|
||||||
Misc(String),
|
|
||||||
#[cfg(not(test))]
|
|
||||||
Foreign(CppException),
|
|
||||||
BadTarFileMemberName(String),
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<std::io::Error> for Error {
|
|
||||||
fn from(err: std::io::Error) -> Self {
|
|
||||||
Error::IOError(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(unused)]
|
|
||||||
impl From<hyper::error::Error> for Error {
|
|
||||||
fn from(err: hyper::error::Error) -> Self {
|
|
||||||
Error::HttpError(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl fmt::Display for Error {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
|
||||||
match self {
|
|
||||||
Error::InvalidPath(_) => write!(f, "invalid path"),
|
|
||||||
Error::BadNarInfo => write!(f, ".narinfo file is corrupt"),
|
|
||||||
Error::BadStorePath(path) => write!(f, "path '{}' is not a store path", path.display()),
|
|
||||||
Error::NotInStore(path) => {
|
|
||||||
write!(f, "path '{}' is not in the Nix store", path.display())
|
|
||||||
}
|
|
||||||
Error::BadBase32 => write!(f, "invalid base32 string"),
|
|
||||||
Error::StorePathNameEmpty => write!(f, "store path name is empty"),
|
|
||||||
Error::StorePathNameTooLong => {
|
|
||||||
write!(f, "store path name is longer than 211 characters")
|
|
||||||
}
|
|
||||||
Error::BadStorePathName => write!(f, "store path name contains forbidden character"),
|
|
||||||
Error::NarSizeFieldTooBig => write!(f, "size field in NAR is too big"),
|
|
||||||
Error::BadNarString => write!(f, "NAR string is not valid UTF-8"),
|
|
||||||
Error::BadNarPadding => write!(f, "NAR padding is not zero"),
|
|
||||||
Error::BadNarVersionMagic => write!(f, "unsupported NAR version"),
|
|
||||||
Error::MissingNarOpenTag => write!(f, "NAR open tag is missing"),
|
|
||||||
Error::MissingNarCloseTag => write!(f, "NAR close tag is missing"),
|
|
||||||
Error::MissingNarField => write!(f, "expected NAR field is missing"),
|
|
||||||
Error::BadNarField(s) => write!(f, "unrecognized NAR field '{}'", s),
|
|
||||||
Error::BadExecutableField => write!(f, "bad 'executable' field in NAR"),
|
|
||||||
Error::IOError(err) => write!(f, "I/O error: {}", err),
|
|
||||||
#[cfg(unused)]
|
|
||||||
Error::HttpError(err) => write!(f, "HTTP error: {}", err),
|
|
||||||
#[cfg(not(test))]
|
|
||||||
Error::Foreign(_) => write!(f, "<C++ exception>"), // FIXME
|
|
||||||
Error::Misc(s) => write!(f, "{}", s),
|
|
||||||
Error::BadTarFileMemberName(s) => {
|
|
||||||
write!(f, "tar archive contains illegal file name '{}'", s)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(not(test))]
|
|
||||||
impl From<Error> for CppException {
|
|
||||||
fn from(err: Error) -> Self {
|
|
||||||
match err {
|
|
||||||
Error::Foreign(ex) => ex,
|
|
||||||
_ => CppException::new(&err.to_string()),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(not(test))]
|
|
||||||
#[repr(C)]
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub struct CppException(*const libc::c_void); // == std::exception_ptr*
|
|
||||||
|
|
||||||
#[cfg(not(test))]
|
|
||||||
impl CppException {
|
|
||||||
fn new(s: &str) -> Self {
|
|
||||||
Self(unsafe { make_error(s) })
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(not(test))]
|
|
||||||
impl Drop for CppException {
|
|
||||||
fn drop(&mut self) {
|
|
||||||
unsafe {
|
|
||||||
destroy_error(self.0);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(not(test))]
|
|
||||||
extern "C" {
|
|
||||||
#[allow(improper_ctypes)] // YOLO
|
|
||||||
fn make_error(s: &str) -> *const libc::c_void;
|
|
||||||
|
|
||||||
fn destroy_error(exc: *const libc::c_void);
|
|
||||||
}
|
|
|
@ -1,10 +0,0 @@
|
||||||
#[allow(improper_ctypes_definitions)]
|
|
||||||
#[cfg(not(test))]
|
|
||||||
mod c;
|
|
||||||
mod error;
|
|
||||||
#[cfg(unused)]
|
|
||||||
mod nar;
|
|
||||||
mod store;
|
|
||||||
mod util;
|
|
||||||
|
|
||||||
pub use error::Error;
|
|
|
@ -1,126 +0,0 @@
|
||||||
use crate::Error;
|
|
||||||
use byteorder::{LittleEndian, ReadBytesExt};
|
|
||||||
use std::convert::TryFrom;
|
|
||||||
use std::io::Read;
|
|
||||||
|
|
||||||
pub fn parse<R: Read>(input: &mut R) -> Result<(), Error> {
|
|
||||||
if String::read(input)? != NAR_VERSION_MAGIC {
|
|
||||||
return Err(Error::BadNarVersionMagic);
|
|
||||||
}
|
|
||||||
|
|
||||||
parse_file(input)
|
|
||||||
}
|
|
||||||
|
|
||||||
const NAR_VERSION_MAGIC: &str = "nix-archive-1";
|
|
||||||
|
|
||||||
fn parse_file<R: Read>(input: &mut R) -> Result<(), Error> {
|
|
||||||
if String::read(input)? != "(" {
|
|
||||||
return Err(Error::MissingNarOpenTag);
|
|
||||||
}
|
|
||||||
|
|
||||||
if String::read(input)? != "type" {
|
|
||||||
return Err(Error::MissingNarField);
|
|
||||||
}
|
|
||||||
|
|
||||||
match String::read(input)?.as_ref() {
|
|
||||||
"regular" => {
|
|
||||||
let mut _executable = false;
|
|
||||||
let mut tag = String::read(input)?;
|
|
||||||
if tag == "executable" {
|
|
||||||
_executable = true;
|
|
||||||
if String::read(input)? != "" {
|
|
||||||
return Err(Error::BadExecutableField);
|
|
||||||
}
|
|
||||||
tag = String::read(input)?;
|
|
||||||
}
|
|
||||||
if tag != "contents" {
|
|
||||||
return Err(Error::MissingNarField);
|
|
||||||
}
|
|
||||||
let _contents = Vec::<u8>::read(input)?;
|
|
||||||
if String::read(input)? != ")" {
|
|
||||||
return Err(Error::MissingNarCloseTag);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
"directory" => loop {
|
|
||||||
match String::read(input)?.as_ref() {
|
|
||||||
"entry" => {
|
|
||||||
if String::read(input)? != "(" {
|
|
||||||
return Err(Error::MissingNarOpenTag);
|
|
||||||
}
|
|
||||||
if String::read(input)? != "name" {
|
|
||||||
return Err(Error::MissingNarField);
|
|
||||||
}
|
|
||||||
let _name = String::read(input)?;
|
|
||||||
if String::read(input)? != "node" {
|
|
||||||
return Err(Error::MissingNarField);
|
|
||||||
}
|
|
||||||
parse_file(input)?;
|
|
||||||
let tag = String::read(input)?;
|
|
||||||
if tag != ")" {
|
|
||||||
return Err(Error::MissingNarCloseTag);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
")" => break,
|
|
||||||
s => return Err(Error::BadNarField(s.into())),
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"symlink" => {
|
|
||||||
if String::read(input)? != "target" {
|
|
||||||
return Err(Error::MissingNarField);
|
|
||||||
}
|
|
||||||
let _target = String::read(input)?;
|
|
||||||
if String::read(input)? != ")" {
|
|
||||||
return Err(Error::MissingNarCloseTag);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
s => return Err(Error::BadNarField(s.into())),
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
trait Deserialize: Sized {
|
|
||||||
fn read<R: Read>(input: &mut R) -> Result<Self, Error>;
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Deserialize for String {
|
|
||||||
fn read<R: Read>(input: &mut R) -> Result<Self, Error> {
|
|
||||||
let buf = Deserialize::read(input)?;
|
|
||||||
Ok(String::from_utf8(buf).map_err(|_| Error::BadNarString)?)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Deserialize for Vec<u8> {
|
|
||||||
fn read<R: Read>(input: &mut R) -> Result<Self, Error> {
|
|
||||||
let n: usize = Deserialize::read(input)?;
|
|
||||||
let mut buf = vec![0; n];
|
|
||||||
input.read_exact(&mut buf)?;
|
|
||||||
skip_padding(input, n)?;
|
|
||||||
Ok(buf)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn skip_padding<R: Read>(input: &mut R, len: usize) -> Result<(), Error> {
|
|
||||||
if len % 8 != 0 {
|
|
||||||
let mut buf = [0; 8];
|
|
||||||
let buf = &mut buf[0..8 - (len % 8)];
|
|
||||||
input.read_exact(buf)?;
|
|
||||||
if !buf.iter().all(|b| *b == 0) {
|
|
||||||
return Err(Error::BadNarPadding);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Deserialize for u64 {
|
|
||||||
fn read<R: Read>(input: &mut R) -> Result<Self, Error> {
|
|
||||||
Ok(input.read_u64::<LittleEndian>()?)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Deserialize for usize {
|
|
||||||
fn read<R: Read>(input: &mut R) -> Result<Self, Error> {
|
|
||||||
let n: u64 = Deserialize::read(input)?;
|
|
||||||
Ok(usize::try_from(n).map_err(|_| Error::NarSizeFieldTooBig)?)
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,48 +0,0 @@
|
||||||
use super::{PathInfo, Store, StorePath};
|
|
||||||
use crate::Error;
|
|
||||||
use hyper::client::Client;
|
|
||||||
|
|
||||||
pub struct BinaryCacheStore {
|
|
||||||
base_uri: String,
|
|
||||||
client: Client<hyper::client::HttpConnector, hyper::Body>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl BinaryCacheStore {
|
|
||||||
pub fn new(base_uri: String) -> Self {
|
|
||||||
Self {
|
|
||||||
base_uri,
|
|
||||||
client: Client::new(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Store for BinaryCacheStore {
|
|
||||||
fn query_path_info(
|
|
||||||
&self,
|
|
||||||
path: &StorePath,
|
|
||||||
) -> std::pin::Pin<Box<dyn std::future::Future<Output = Result<PathInfo, Error>> + Send>> {
|
|
||||||
let uri = format!("{}/{}.narinfo", self.base_uri.clone(), path.hash);
|
|
||||||
let path = path.clone();
|
|
||||||
let client = self.client.clone();
|
|
||||||
let store_dir = self.store_dir().to_string();
|
|
||||||
|
|
||||||
Box::pin(async move {
|
|
||||||
let response = client.get(uri.parse::<hyper::Uri>().unwrap()).await?;
|
|
||||||
|
|
||||||
if response.status() == hyper::StatusCode::NOT_FOUND
|
|
||||||
|| response.status() == hyper::StatusCode::FORBIDDEN
|
|
||||||
{
|
|
||||||
return Err(Error::InvalidPath(path));
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut body = response.into_body();
|
|
||||||
|
|
||||||
let mut bytes = Vec::new();
|
|
||||||
while let Some(next) = body.next().await {
|
|
||||||
bytes.extend(next?);
|
|
||||||
}
|
|
||||||
|
|
||||||
PathInfo::parse_nar_info(std::str::from_utf8(&bytes).unwrap(), &store_dir)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,17 +0,0 @@
|
||||||
pub mod path;
|
|
||||||
|
|
||||||
#[cfg(unused)]
|
|
||||||
mod binary_cache_store;
|
|
||||||
#[cfg(unused)]
|
|
||||||
mod path_info;
|
|
||||||
#[cfg(unused)]
|
|
||||||
mod store;
|
|
||||||
|
|
||||||
pub use path::{StorePath, StorePathHash, StorePathName};
|
|
||||||
|
|
||||||
#[cfg(unused)]
|
|
||||||
pub use binary_cache_store::BinaryCacheStore;
|
|
||||||
#[cfg(unused)]
|
|
||||||
pub use path_info::PathInfo;
|
|
||||||
#[cfg(unused)]
|
|
||||||
pub use store::Store;
|
|
|
@ -1,224 +0,0 @@
|
||||||
use crate::error::Error;
|
|
||||||
use crate::util::base32;
|
|
||||||
use std::fmt;
|
|
||||||
use std::path::Path;
|
|
||||||
|
|
||||||
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Debug)]
|
|
||||||
pub struct StorePath {
|
|
||||||
pub hash: StorePathHash,
|
|
||||||
pub name: StorePathName,
|
|
||||||
}
|
|
||||||
|
|
||||||
pub const STORE_PATH_HASH_BYTES: usize = 20;
|
|
||||||
pub const STORE_PATH_HASH_CHARS: usize = 32;
|
|
||||||
|
|
||||||
impl StorePath {
|
|
||||||
pub fn new(path: &Path, store_dir: &Path) -> Result<Self, Error> {
|
|
||||||
if path.parent() != Some(store_dir) {
|
|
||||||
return Err(Error::NotInStore(path.into()));
|
|
||||||
}
|
|
||||||
Self::new_from_base_name(
|
|
||||||
path.file_name()
|
|
||||||
.ok_or_else(|| Error::BadStorePath(path.into()))?
|
|
||||||
.to_str()
|
|
||||||
.ok_or_else(|| Error::BadStorePath(path.into()))?,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn from_parts(hash: [u8; STORE_PATH_HASH_BYTES], name: &str) -> Result<Self, Error> {
|
|
||||||
Ok(StorePath {
|
|
||||||
hash: StorePathHash(hash),
|
|
||||||
name: StorePathName::new(name)?,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn new_from_base_name(base_name: &str) -> Result<Self, Error> {
|
|
||||||
if base_name.len() < STORE_PATH_HASH_CHARS + 1
|
|
||||||
|| base_name.as_bytes()[STORE_PATH_HASH_CHARS] != b'-'
|
|
||||||
{
|
|
||||||
return Err(Error::BadStorePath(base_name.into()));
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(StorePath {
|
|
||||||
hash: StorePathHash::new(&base_name[0..STORE_PATH_HASH_CHARS])?,
|
|
||||||
name: StorePathName::new(&base_name[STORE_PATH_HASH_CHARS + 1..])?,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl fmt::Display for StorePath {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
|
||||||
write!(f, "{}-{}", self.hash, self.name)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, PartialEq, Eq, Debug)]
|
|
||||||
pub struct StorePathHash([u8; STORE_PATH_HASH_BYTES]);
|
|
||||||
|
|
||||||
impl StorePathHash {
|
|
||||||
pub fn new(s: &str) -> Result<Self, Error> {
|
|
||||||
assert_eq!(s.len(), STORE_PATH_HASH_CHARS);
|
|
||||||
let v = base32::decode(s)?;
|
|
||||||
assert_eq!(v.len(), STORE_PATH_HASH_BYTES);
|
|
||||||
let mut bytes: [u8; 20] = Default::default();
|
|
||||||
bytes.copy_from_slice(&v[0..STORE_PATH_HASH_BYTES]);
|
|
||||||
Ok(Self(bytes))
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn hash(&self) -> &[u8; STORE_PATH_HASH_BYTES] {
|
|
||||||
&self.0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl fmt::Display for StorePathHash {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
|
||||||
let mut buf = vec![0; STORE_PATH_HASH_CHARS];
|
|
||||||
base32::encode_into(&self.0, &mut buf);
|
|
||||||
f.write_str(std::str::from_utf8(&buf).unwrap())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Ord for StorePathHash {
|
|
||||||
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
|
|
||||||
// Historically we've sorted store paths by their base32
|
|
||||||
// serialization, but our base32 encodes bytes in reverse
|
|
||||||
// order. So compare them in reverse order as well.
|
|
||||||
self.0.iter().rev().cmp(other.0.iter().rev())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PartialOrd for StorePathHash {
|
|
||||||
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
|
|
||||||
Some(self.cmp(other))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Debug)]
|
|
||||||
pub struct StorePathName(String);
|
|
||||||
|
|
||||||
impl StorePathName {
|
|
||||||
pub fn new(s: &str) -> Result<Self, Error> {
|
|
||||||
if s.is_empty() {
|
|
||||||
return Err(Error::StorePathNameEmpty);
|
|
||||||
}
|
|
||||||
|
|
||||||
if s.len() > 211 {
|
|
||||||
return Err(Error::StorePathNameTooLong);
|
|
||||||
}
|
|
||||||
|
|
||||||
let is_good_path_name = s.chars().all(|c| {
|
|
||||||
c.is_ascii_alphabetic()
|
|
||||||
|| c.is_ascii_digit()
|
|
||||||
|| c == '+'
|
|
||||||
|| c == '-'
|
|
||||||
|| c == '.'
|
|
||||||
|| c == '_'
|
|
||||||
|| c == '?'
|
|
||||||
|| c == '='
|
|
||||||
});
|
|
||||||
if s.starts_with('.') || !is_good_path_name {
|
|
||||||
return Err(Error::BadStorePathName);
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(Self(s.to_string()))
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn name(&self) -> &str {
|
|
||||||
&self.0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl fmt::Display for StorePathName {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
|
||||||
f.write_str(&self.0)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod tests {
|
|
||||||
use super::*;
|
|
||||||
use assert_matches::assert_matches;
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_parse() {
|
|
||||||
let s = "7h7qgvs4kgzsn8a6rb273saxyqh4jxlz-konsole-18.12.3";
|
|
||||||
let p = StorePath::new_from_base_name(&s).unwrap();
|
|
||||||
assert_eq!(p.name.0, "konsole-18.12.3");
|
|
||||||
assert_eq!(
|
|
||||||
p.hash.0,
|
|
||||||
[
|
|
||||||
0x9f, 0x76, 0x49, 0x20, 0xf6, 0x5d, 0xe9, 0x71, 0xc4, 0xca, 0x46, 0x21, 0xab, 0xff,
|
|
||||||
0x9b, 0x44, 0xef, 0x87, 0x0f, 0x3c
|
|
||||||
]
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_no_name() {
|
|
||||||
let s = "7h7qgvs4kgzsn8a6rb273saxyqh4jxlz-";
|
|
||||||
assert_matches!(
|
|
||||||
StorePath::new_from_base_name(&s),
|
|
||||||
Err(Error::StorePathNameEmpty)
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_no_dash() {
|
|
||||||
let s = "7h7qgvs4kgzsn8a6rb273saxyqh4jxlz";
|
|
||||||
assert_matches!(
|
|
||||||
StorePath::new_from_base_name(&s),
|
|
||||||
Err(Error::BadStorePath(_))
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_short_hash() {
|
|
||||||
let s = "7h7qgvs4kgzsn8a6rb273saxyqh4jxl-konsole-18.12.3";
|
|
||||||
assert_matches!(
|
|
||||||
StorePath::new_from_base_name(&s),
|
|
||||||
Err(Error::BadStorePath(_))
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_invalid_hash() {
|
|
||||||
let s = "7h7qgvs4kgzsn8e6rb273saxyqh4jxlz-konsole-18.12.3";
|
|
||||||
assert_matches!(StorePath::new_from_base_name(&s), Err(Error::BadBase32));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_long_name() {
|
|
||||||
let s = "7h7qgvs4kgzsn8a6rb273saxyqh4jxlz-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx";
|
|
||||||
assert_matches!(StorePath::new_from_base_name(&s), Ok(_));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_too_long_name() {
|
|
||||||
let s = "7h7qgvs4kgzsn8a6rb273saxyqh4jxlz-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx";
|
|
||||||
assert_matches!(
|
|
||||||
StorePath::new_from_base_name(&s),
|
|
||||||
Err(Error::StorePathNameTooLong)
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_bad_name() {
|
|
||||||
let s = "7h7qgvs4kgzsn8a6rb273saxyqh4jxlz-foo bar";
|
|
||||||
assert_matches!(
|
|
||||||
StorePath::new_from_base_name(&s),
|
|
||||||
Err(Error::BadStorePathName)
|
|
||||||
);
|
|
||||||
|
|
||||||
let s = "7h7qgvs4kgzsn8a6rb273saxyqh4jxlz-kónsole";
|
|
||||||
assert_matches!(
|
|
||||||
StorePath::new_from_base_name(&s),
|
|
||||||
Err(Error::BadStorePathName)
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_roundtrip() {
|
|
||||||
let s = "7h7qgvs4kgzsn8a6rb273saxyqh4jxlz-konsole-18.12.3";
|
|
||||||
assert_eq!(StorePath::new_from_base_name(&s).unwrap().to_string(), s);
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,70 +0,0 @@
|
||||||
use crate::store::StorePath;
|
|
||||||
use crate::Error;
|
|
||||||
use std::collections::BTreeSet;
|
|
||||||
|
|
||||||
#[derive(Clone, Debug)]
|
|
||||||
pub struct PathInfo {
|
|
||||||
pub path: StorePath,
|
|
||||||
pub references: BTreeSet<StorePath>,
|
|
||||||
pub nar_size: u64,
|
|
||||||
pub deriver: Option<StorePath>,
|
|
||||||
|
|
||||||
// Additional binary cache info.
|
|
||||||
pub url: Option<String>,
|
|
||||||
pub compression: Option<String>,
|
|
||||||
pub file_size: Option<u64>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PathInfo {
|
|
||||||
pub fn parse_nar_info(nar_info: &str, store_dir: &str) -> Result<Self, Error> {
|
|
||||||
let mut path = None;
|
|
||||||
let mut references = BTreeSet::new();
|
|
||||||
let mut nar_size = None;
|
|
||||||
let mut deriver = None;
|
|
||||||
let mut url = None;
|
|
||||||
let mut compression = None;
|
|
||||||
let mut file_size = None;
|
|
||||||
|
|
||||||
for line in nar_info.lines() {
|
|
||||||
let colon = line.find(':').ok_or(Error::BadNarInfo)?;
|
|
||||||
|
|
||||||
let (name, value) = line.split_at(colon);
|
|
||||||
|
|
||||||
if !value.starts_with(": ") {
|
|
||||||
return Err(Error::BadNarInfo);
|
|
||||||
}
|
|
||||||
|
|
||||||
let value = &value[2..];
|
|
||||||
|
|
||||||
if name == "StorePath" {
|
|
||||||
path = Some(StorePath::new(std::path::Path::new(value), store_dir)?);
|
|
||||||
} else if name == "NarSize" {
|
|
||||||
nar_size = Some(u64::from_str_radix(value, 10).map_err(|_| Error::BadNarInfo)?);
|
|
||||||
} else if name == "References" {
|
|
||||||
if !value.is_empty() {
|
|
||||||
for r in value.split(' ') {
|
|
||||||
references.insert(StorePath::new_from_base_name(r)?);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else if name == "Deriver" {
|
|
||||||
deriver = Some(StorePath::new_from_base_name(value)?);
|
|
||||||
} else if name == "URL" {
|
|
||||||
url = Some(value.into());
|
|
||||||
} else if name == "Compression" {
|
|
||||||
compression = Some(value.into());
|
|
||||||
} else if name == "FileSize" {
|
|
||||||
file_size = Some(u64::from_str_radix(value, 10).map_err(|_| Error::BadNarInfo)?);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(PathInfo {
|
|
||||||
path: path.ok_or(Error::BadNarInfo)?,
|
|
||||||
references,
|
|
||||||
nar_size: nar_size.ok_or(Error::BadNarInfo)?,
|
|
||||||
deriver,
|
|
||||||
url: Some(url.ok_or(Error::BadNarInfo)?),
|
|
||||||
compression,
|
|
||||||
file_size,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,53 +0,0 @@
|
||||||
use super::{PathInfo, StorePath};
|
|
||||||
use crate::Error;
|
|
||||||
use std::collections::{BTreeMap, BTreeSet};
|
|
||||||
use std::path::Path;
|
|
||||||
|
|
||||||
pub trait Store: Send + Sync {
|
|
||||||
fn store_dir(&self) -> &str {
|
|
||||||
"/nix/store"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn query_path_info(
|
|
||||||
&self,
|
|
||||||
store_path: &StorePath,
|
|
||||||
) -> std::pin::Pin<Box<dyn std::future::Future<Output = Result<PathInfo, Error>> + Send>>;
|
|
||||||
}
|
|
||||||
|
|
||||||
impl dyn Store {
|
|
||||||
pub fn parse_store_path(&self, path: &Path) -> Result<StorePath, Error> {
|
|
||||||
StorePath::new(path, self.store_dir())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn compute_path_closure(
|
|
||||||
&self,
|
|
||||||
roots: BTreeSet<StorePath>,
|
|
||||||
) -> Result<BTreeMap<StorePath, PathInfo>, Error> {
|
|
||||||
let mut done = BTreeSet::new();
|
|
||||||
let mut result = BTreeMap::new();
|
|
||||||
let mut pending = vec![];
|
|
||||||
|
|
||||||
for root in roots {
|
|
||||||
pending.push(self.query_path_info(&root));
|
|
||||||
done.insert(root);
|
|
||||||
}
|
|
||||||
|
|
||||||
while !pending.is_empty() {
|
|
||||||
let (info, _, remaining) = futures::future::select_all(pending).await;
|
|
||||||
pending = remaining;
|
|
||||||
|
|
||||||
let info = info?;
|
|
||||||
|
|
||||||
for path in &info.references {
|
|
||||||
if !done.contains(path) {
|
|
||||||
pending.push(self.query_path_info(&path));
|
|
||||||
done.insert(path.clone());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
result.insert(info.path.clone(), info);
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(result)
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,160 +0,0 @@
|
||||||
use crate::error::Error;
|
|
||||||
use lazy_static::lazy_static;
|
|
||||||
|
|
||||||
pub fn encoded_len(input_len: usize) -> usize {
|
|
||||||
if input_len == 0 {
|
|
||||||
0
|
|
||||||
} else {
|
|
||||||
(input_len * 8 - 1) / 5 + 1
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn decoded_len(input_len: usize) -> usize {
|
|
||||||
input_len * 5 / 8
|
|
||||||
}
|
|
||||||
|
|
||||||
static BASE32_CHARS: &[u8; 32] = &b"0123456789abcdfghijklmnpqrsvwxyz";
|
|
||||||
|
|
||||||
lazy_static! {
|
|
||||||
static ref BASE32_CHARS_REVERSE: Box<[u8; 256]> = {
|
|
||||||
let mut xs = [0xffu8; 256];
|
|
||||||
for (n, c) in BASE32_CHARS.iter().enumerate() {
|
|
||||||
xs[*c as usize] = n as u8;
|
|
||||||
}
|
|
||||||
Box::new(xs)
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn encode(input: &[u8]) -> String {
|
|
||||||
let mut buf = vec![0; encoded_len(input.len())];
|
|
||||||
encode_into(input, &mut buf);
|
|
||||||
std::str::from_utf8(&buf).unwrap().to_string()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn encode_into(input: &[u8], output: &mut [u8]) {
|
|
||||||
let len = encoded_len(input.len());
|
|
||||||
assert_eq!(len, output.len());
|
|
||||||
|
|
||||||
let mut nr_bits_left: usize = 0;
|
|
||||||
let mut bits_left: u16 = 0;
|
|
||||||
let mut pos = len;
|
|
||||||
|
|
||||||
for b in input {
|
|
||||||
bits_left |= (*b as u16) << nr_bits_left;
|
|
||||||
nr_bits_left += 8;
|
|
||||||
while nr_bits_left > 5 {
|
|
||||||
output[pos - 1] = BASE32_CHARS[(bits_left & 0x1f) as usize];
|
|
||||||
pos -= 1;
|
|
||||||
bits_left >>= 5;
|
|
||||||
nr_bits_left -= 5;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if nr_bits_left > 0 {
|
|
||||||
output[pos - 1] = BASE32_CHARS[(bits_left & 0x1f) as usize];
|
|
||||||
pos -= 1;
|
|
||||||
}
|
|
||||||
|
|
||||||
assert_eq!(pos, 0);
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn decode(input: &str) -> Result<Vec<u8>, crate::Error> {
|
|
||||||
let mut res = Vec::with_capacity(decoded_len(input.len()));
|
|
||||||
|
|
||||||
let mut nr_bits_left: usize = 0;
|
|
||||||
let mut bits_left: u16 = 0;
|
|
||||||
|
|
||||||
for c in input.chars().rev() {
|
|
||||||
let b = BASE32_CHARS_REVERSE[c as usize];
|
|
||||||
if b == 0xff {
|
|
||||||
return Err(Error::BadBase32);
|
|
||||||
}
|
|
||||||
bits_left |= (b as u16) << nr_bits_left;
|
|
||||||
nr_bits_left += 5;
|
|
||||||
if nr_bits_left >= 8 {
|
|
||||||
res.push((bits_left & 0xff) as u8);
|
|
||||||
bits_left >>= 8;
|
|
||||||
nr_bits_left -= 8;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if nr_bits_left > 0 && bits_left != 0 {
|
|
||||||
return Err(Error::BadBase32);
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(res)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod tests {
|
|
||||||
use super::*;
|
|
||||||
use assert_matches::assert_matches;
|
|
||||||
use hex;
|
|
||||||
use proptest::proptest;
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_encode() {
|
|
||||||
assert_eq!(encode(&[]), "");
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
encode(&hex::decode("0839703786356bca59b0f4a32987eb2e6de43ae8").unwrap()),
|
|
||||||
"x0xf8v9fxf3jk8zln1cwlsrmhqvp0f88"
|
|
||||||
);
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
encode(
|
|
||||||
&hex::decode("ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad")
|
|
||||||
.unwrap()
|
|
||||||
),
|
|
||||||
"1b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s"
|
|
||||||
);
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
encode(
|
|
||||||
&hex::decode("ddaf35a193617abacc417349ae20413112e6fa4e89a97ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd454d4423643ce80e2a9ac94fa54ca49f")
|
|
||||||
.unwrap()
|
|
||||||
),
|
|
||||||
"2gs8k559z4rlahfx0y688s49m2vvszylcikrfinm30ly9rak69236nkam5ydvly1ai7xac99vxfc4ii84hawjbk876blyk1jfhkbbyx"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_decode() {
|
|
||||||
assert_eq!(hex::encode(decode("").unwrap()), "");
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
hex::encode(decode("x0xf8v9fxf3jk8zln1cwlsrmhqvp0f88").unwrap()),
|
|
||||||
"0839703786356bca59b0f4a32987eb2e6de43ae8"
|
|
||||||
);
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
hex::encode(decode("1b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s").unwrap()),
|
|
||||||
"ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad"
|
|
||||||
);
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
hex::encode(decode("2gs8k559z4rlahfx0y688s49m2vvszylcikrfinm30ly9rak69236nkam5ydvly1ai7xac99vxfc4ii84hawjbk876blyk1jfhkbbyx").unwrap()),
|
|
||||||
"ddaf35a193617abacc417349ae20413112e6fa4e89a97ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd454d4423643ce80e2a9ac94fa54ca49f"
|
|
||||||
);
|
|
||||||
|
|
||||||
assert_matches!(
|
|
||||||
decode("xoxf8v9fxf3jk8zln1cwlsrmhqvp0f88"),
|
|
||||||
Err(Error::BadBase32)
|
|
||||||
);
|
|
||||||
assert_matches!(
|
|
||||||
decode("2b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s"),
|
|
||||||
Err(Error::BadBase32)
|
|
||||||
);
|
|
||||||
assert_matches!(decode("2"), Err(Error::BadBase32));
|
|
||||||
assert_matches!(decode("2gs"), Err(Error::BadBase32));
|
|
||||||
assert_matches!(decode("2gs8"), Err(Error::BadBase32));
|
|
||||||
}
|
|
||||||
|
|
||||||
proptest! {
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn roundtrip(s: Vec<u8>) {
|
|
||||||
assert_eq!(s, decode(&encode(&s)).unwrap());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1 +0,0 @@
|
||||||
pub mod base32;
|
|
|
@@ -576,18 +576,37 @@ create_directories() {
         # since this bit is cross-platform:
         # - first try with `command -vp` to try and find
         #   chown in the usual places
+        #   * to work around some sort of deficiency in
+        #     `command -p` in macOS bash 3.2, we also add
+        #     PATH="$(getconf PATH 2>/dev/null)". As long as
+        #     getconf is found, this should set a sane PATH
+        #     which `command -p` in bash 3.2 appears to use.
+        #     A bash with a properly-working `command -p`
+        #     should ignore this hard-set PATH in favor of
+        #     whatever it obtains internally. See
+        #     github.com/NixOS/nix/issues/5768
         # - fall back on `command -v` which would find
         #   any chown on path
         # if we don't find one, the command is already
         # hiding behind || true, and the general state
         # should be one the user can repair once they
         # figure out where chown is...
-        local get_chr_own="$(command -vp chown)"
+        local get_chr_own="$(PATH="$(getconf PATH 2>/dev/null)" command -vp chown)"
         if [[ -z "$get_chr_own" ]]; then
             get_chr_own="$(command -v chown)"
         fi
-        _sudo "to take root ownership of existing Nix store files" \
-              "$get_chr_own" -R "root:$NIX_BUILD_GROUP_NAME" "$NIX_ROOT" || true
+        if [[ -z "$get_chr_own" ]]; then
+            reminder <<EOF
+I wanted to take root ownership of existing Nix store files,
+but I couldn't locate 'chown'. (You may need to fix your PATH.)
+To manually change file ownership, you can run:
+    sudo chown -R 'root:$NIX_BUILD_GROUP_NAME' '$NIX_ROOT'
+EOF
+        else
+            _sudo "to take root ownership of existing Nix store files" \
+                  "$get_chr_own" -R "root:$NIX_BUILD_GROUP_NAME" "$NIX_ROOT" || true
+        fi
     fi
     _sudo "to make the basic directory structure of Nix (part 1)" \
         install -dv -m 0755 /nix /nix/var /nix/var/log /nix/var/log/nix /nix/var/log/nix/drvs /nix/var/nix{,/db,/gcroots,/profiles,/temproots,/userpool} /nix/var/nix/{gcroots,profiles}/per-user
@@ -1,3 +1,3 @@
-(import (fetchTarball https://github.com/edolstra/flake-compat/archive/master.tar.gz) {
+(import (fetchTarball "https://github.com/edolstra/flake-compat/archive/master.tar.gz") {
   src = ./.;
 }).shellNix
@@ -54,6 +54,36 @@ void StoreCommand::run()
     run(getStore());
 }

+CopyCommand::CopyCommand()
+{
+    addFlag({
+        .longName = "from",
+        .description = "URL of the source Nix store.",
+        .labels = {"store-uri"},
+        .handler = {&srcUri},
+    });
+
+    addFlag({
+        .longName = "to",
+        .description = "URL of the destination Nix store.",
+        .labels = {"store-uri"},
+        .handler = {&dstUri},
+    });
+}
+
+ref<Store> CopyCommand::createStore()
+{
+    return srcUri.empty() ? StoreCommand::createStore() : openStore(srcUri);
+}
+
+ref<Store> CopyCommand::getDstStore()
+{
+    if (srcUri.empty() && dstUri.empty())
+        throw UsageError("you must pass '--from' and/or '--to'");
+
+    return dstUri.empty() ? openStore() : openStore(dstUri);
+}
+
 EvalCommand::EvalCommand()
 {
     addFlag({

@@ -65,16 +95,33 @@ EvalCommand::EvalCommand()

 extern std::function<void(const Error * error, const Env & env, const Expr & expr)> debuggerHook;

+EvalCommand::~EvalCommand()
+{
+    if (evalState)
+        evalState->printStats();
+}
+
+ref<Store> EvalCommand::getEvalStore()
+{
+    if (!evalStore)
+        evalStore = evalStoreUrl ? openStore(*evalStoreUrl) : getStore();
+    return ref<Store>(evalStore);
+}
+
 ref<EvalState> EvalCommand::getEvalState()
 {
     if (!evalState) {
         evalState =
 #if HAVE_BOEHMGC
             std::allocate_shared<EvalState>(traceable_allocator<EvalState>(),
-#else
+                searchPath, getEvalStore(), getStore())
+#else
             std::make_shared<EvalState>(
-#endif
-            searchPath, getEvalStore(), getStore());
+                searchPath, getEvalStore(), getStore())
+#endif
+            ;
         if (startReplOnEvalErrors)
             debuggerHook = [evalState{ref<EvalState>(evalState)}](const Error * error, const Env & env, const Expr & expr) {
                 // clear the screen.

@@ -113,19 +160,6 @@ ref<EvalState> EvalCommand::getEvalState()
     return ref<EvalState>(evalState);
 }

-EvalCommand::~EvalCommand()
-{
-    if (evalState)
-        evalState->printStats();
-}
-
-ref<Store> EvalCommand::getEvalStore()
-{
-    if (!evalStore)
-        evalStore = evalStoreUrl ? openStore(*evalStoreUrl) : getStore();
-    return ref<Store>(evalStore);
-}
-
 BuiltPathsCommand::BuiltPathsCommand(bool recursive)
     : recursive(recursive)
 {

@@ -43,6 +43,19 @@ private:
     std::shared_ptr<Store> _store;
 };

+/* A command that copies something between `--from` and `--to`
+   stores. */
+struct CopyCommand : virtual StoreCommand
+{
+    std::string srcUri, dstUri;
+
+    CopyCommand();
+
+    ref<Store> createStore() override;
+
+    ref<Store> getDstStore();
+};
+
 struct EvalCommand : virtual StoreCommand, MixEvalArgs
 {
     bool startReplOnEvalErrors = false;
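For illustration only, a minimal sketch (not part of this commit) of how a subcommand might build on the new CopyCommand; the name CmdCopyExample and its body are hypothetical, only createStore()/getDstStore() come from the hunks above:

    // Hypothetical example, assuming the usual StoreCommand plumbing.
    struct CmdCopyExample : CopyCommand
    {
        std::string description() override
        {
            return "illustrative only: copy something from the '--from' store to the '--to' store";
        }

        void run(ref<Store> srcStore) override
        {
            // createStore() resolved '--from' (or fell back to the default store);
            // getDstStore() resolves '--to' and insists that at least one of the
            // two flags was given.
            auto dstStore = getDstStore();
            // ... copy the relevant paths from srcStore to dstStore here ...
        }
    };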
@@ -198,8 +198,9 @@ void SourceExprCommand::completeInstallable(std::string_view prefix)
         prefix_ = "";
     }

-    Value &v1(*findAlongAttrPath(*state, prefix_, *autoArgs, root).first);
-    state->forceValue(v1);
+    auto [v, pos] = findAlongAttrPath(*state, prefix_, *autoArgs, root);
+    Value &v1(*v);
+    state->forceValue(v1, pos);
     Value v2;
     state->autoCallFunction(*autoArgs, v1, v2);

@@ -345,6 +346,18 @@ Installable::getCursor(EvalState & state)
     return cursors[0];
 }

+static StorePath getDeriver(
+    ref<Store> store,
+    const Installable & i,
+    const StorePath & drvPath)
+{
+    auto derivers = store->queryValidDerivers(drvPath);
+    if (derivers.empty())
+        throw Error("'%s' does not have a known deriver", i.what());
+    // FIXME: use all derivers?
+    return *derivers.begin();
+}
+
 struct InstallableStorePath : Installable
 {
     ref<Store> store;

@@ -353,7 +366,7 @@ struct InstallableStorePath : Installable
     InstallableStorePath(ref<Store> store, StorePath && storePath)
         : store(store), storePath(std::move(storePath)) { }

-    std::string what() override { return store->printStorePath(storePath); }
+    std::string what() const override { return store->printStorePath(storePath); }

     DerivedPaths toDerivedPaths() override
     {

@@ -374,6 +387,15 @@ struct InstallableStorePath : Installable
         }
     }

+    StorePathSet toDrvPaths(ref<Store> store) override
+    {
+        if (storePath.isDerivation()) {
+            return {storePath};
+        } else {
+            return {getDeriver(store, *this, storePath)};
+        }
+    }
+
     std::optional<StorePath> getStorePath() override
     {
         return storePath;

@@ -402,6 +424,14 @@ DerivedPaths InstallableValue::toDerivedPaths()
     return res;
 }

+StorePathSet InstallableValue::toDrvPaths(ref<Store> store)
+{
+    StorePathSet res;
+    for (auto & drv : toDerivations())
+        res.insert(drv.drvPath);
+    return res;
+}
+
 struct InstallableAttrPath : InstallableValue
 {
     SourceExprCommand & cmd;

@@ -412,12 +442,12 @@ struct InstallableAttrPath : InstallableValue
         : InstallableValue(state), cmd(cmd), v(allocRootValue(v)), attrPath(attrPath)
     { }

-    std::string what() override { return attrPath; }
+    std::string what() const override { return attrPath; }

     std::pair<Value *, Pos> toValue(EvalState & state) override
     {
         auto [vRes, pos] = findAlongAttrPath(state, attrPath, *cmd.getAutoArgs(state), **v);
-        state.forceValue(*vRes);
+        state.forceValue(*vRes, pos);
         return {vRes, pos};
     }

@@ -467,7 +497,7 @@ Value * InstallableFlake::getFlakeOutputs(EvalState & state, const flake::Locked
     auto aOutputs = vFlake->attrs->get(state.symbols.create("outputs"));
     assert(aOutputs);

-    state.forceValue(*aOutputs->value);
+    state.forceValue(*aOutputs->value, [&]() { return aOutputs->value->determinePos(noPos); });

     return aOutputs->value;
 }

@@ -492,7 +522,7 @@ ref<eval_cache::EvalCache> openEvalCache(
     auto vFlake = state.allocValue();
     flake::callFlake(state, *lockedFlake, *vFlake);

-    state.forceAttrs(*vFlake);
+    state.forceAttrs(*vFlake, noPos);

     auto aOutputs = vFlake->attrs->get(state.symbols.create("outputs"));
     assert(aOutputs);

@@ -579,7 +609,7 @@ std::pair<Value *, Pos> InstallableFlake::toValue(EvalState & state)
     for (auto & attrPath : getActualAttrPaths()) {
         try {
             auto [v, pos] = findAlongAttrPath(state, attrPath, *emptyArgs, *vOutputs);
-            state.forceValue(*v);
+            state.forceValue(*v, pos);
             return {v, pos};
         } catch (AttrPathNotFound & e) {
         }

@@ -836,11 +866,7 @@ StorePathSet toDerivations(
             [&](const DerivedPath::Opaque & bo) {
                 if (!useDeriver)
                     throw Error("argument '%s' did not evaluate to a derivation", i->what());
-                auto derivers = store->queryValidDerivers(bo.path);
-                if (derivers.empty())
-                    throw Error("'%s' does not have a known deriver", i->what());
-                // FIXME: use all derivers?
-                drvPaths.insert(*derivers.begin());
+                drvPaths.insert(getDeriver(store, *i, bo.path));
             },
             [&](const DerivedPath::Built & bfd) {
                 drvPaths.insert(bfd.drvPath);

@@ -33,10 +33,15 @@ struct Installable
 {
     virtual ~Installable() { }

-    virtual std::string what() = 0;
+    virtual std::string what() const = 0;

     virtual DerivedPaths toDerivedPaths() = 0;

+    virtual StorePathSet toDrvPaths(ref<Store> store)
+    {
+        throw Error("'%s' cannot be converted to a derivation path", what());
+    }
+
     DerivedPath toDerivedPath();

     UnresolvedApp toApp(EvalState & state);

@@ -81,6 +86,8 @@ struct InstallableValue : Installable
     virtual std::vector<DerivationInfo> toDerivations() = 0;

     DerivedPaths toDerivedPaths() override;

+    StorePathSet toDrvPaths(ref<Store> store) override;
+
 };

 struct InstallableFlake : InstallableValue

@@ -99,7 +106,7 @@ struct InstallableFlake : InstallableValue
         Strings && prefixes,
         const flake::LockFlags & lockFlags);

-    std::string what() override { return flakeRef.to_string() + "#" + *attrPaths.begin(); }
+    std::string what() const override { return flakeRef.to_string() + "#" + *attrPaths.begin(); }

     std::vector<std::string> getActualAttrPaths();

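As a hedged usage sketch (not in the commit itself), the new toDrvPaths() hook lets a caller map installables to derivation paths without caring whether each one is already a .drv; 'installables' and 'store' are assumed to be the usual members available in an installables-based command:

    // Illustrative fragment only.
    StorePathSet allDrvPaths;
    for (auto & i : installables)
        for (auto & drvPath : i->toDrvPaths(store))
            // For an InstallableStorePath this is the path itself (if it is a
            // derivation) or its deriver; for an InstallableValue it is the
            // drvPath of each derivation the value evaluates to.
            allDrvPaths.insert(drvPath);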
@@ -8,8 +8,9 @@ libcmd_SOURCES := $(wildcard $(d)/*.cc)

 libcmd_CXXFLAGS += -I src/libutil -I src/libstore -I src/libexpr -I src/libmain -I src/libfetchers -I src/nix

-libcmd_LDFLAGS = $(EDITLINE_LIBS) -llowdown -pthread
 # libcmd_LDFLAGS += -llowdown -pthread
+# libcmd_LDFLAGS = $(EDITLINE_LIBS) -llowdown -pthread
+libcmd_LDFLAGS += $(LOWDOWN_LIBS) -pthread

 libcmd_LIBS = libstore libutil libexpr libmain libfetchers libnix

@@ -347,7 +347,7 @@ StringSet NixRepl::completePrefix(string prefix)
             Expr * e = parseString(expr);
             Value v;
             e->eval(*state, *env, v);
-            state->forceAttrs(v);
+            state->forceAttrs(v, noPos);

             for (auto & i : *v.attrs) {
                 string name = i.name;

@@ -521,7 +521,7 @@ bool NixRepl::processLine(string line)
         if (v.type() == nPath || v.type() == nString) {
             PathSet context;
             auto filename = state->coerceToString(noPos, v, context);
-            pos.file = state->symbols.create(filename);
+            pos.file = state->symbols.create(*filename);
         } else if (v.isLambda()) {
             pos = v.lambda.fun->pos;
         } else {

@@ -737,7 +737,7 @@ void NixRepl::loadFiles()

 void NixRepl::addAttrsToScope(Value & attrs)
 {
-    state->forceAttrs(attrs);
+    state->forceAttrs(attrs, [&]() { return attrs.determinePos(noPos); });
     if (displ + attrs.attrs->size() >= envSize)
         throw Error("environment full; cannot add more variables");

@@ -766,7 +766,7 @@ void NixRepl::addVarToScope(const Symbol & name, Value & v)

 Expr * NixRepl::parseString(string s)
 {
-    Expr * e = state->parseExprFromString(s, curDir, staticEnv);
+    Expr * e = state->parseExprFromString(std::move(s), curDir, staticEnv);
     return e;
 }

@@ -775,7 +775,7 @@ void NixRepl::evalString(string s, Value & v)
 {
     Expr * e = parseString(s);
     e->eval(*state, *env, v);
-    state->forceValue(v);
+    state->forceValue(v, [&]() { return v.determinePos(noPos); });
 }

@@ -805,7 +805,7 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int m
     str.flush();
     checkInterrupt();

-    state->forceValue(v);
+    state->forceValue(v, [&]() { return v.determinePos(noPos); });

     switch (v.type()) {

@@ -58,7 +58,7 @@ std::pair<Value *, Pos> findAlongAttrPath(EvalState & state, const string & attr
         Value * vNew = state.allocValue();
         state.autoCallFunction(autoArgs, *v, *vNew);
         v = vNew;
-        state.forceValue(*v);
+        state.forceValue(*v, noPos);

         /* It should evaluate to either a set or an expression,
            according to what is specified in the attrPath. */

@@ -121,7 +121,7 @@ Pos findPackageFilename(EvalState & state, Value & v, std::string what)
     std::string filename(pos, 0, colon);
     unsigned int lineno;
     try {
-        lineno = std::stoi(std::string(pos, colon + 1));
+        lineno = std::stoi(std::string(pos, colon + 1, string::npos));
     } catch (std::invalid_argument & e) {
         throw ParseError("cannot parse line number '%s'", pos);
     }
@@ -7,26 +7,19 @@
 namespace nix {


 /* Allocate a new array of attributes for an attribute set with a specific
    capacity. The space is implicitly reserved after the Bindings
    structure. */
 Bindings * EvalState::allocBindings(size_t capacity)
 {
+    if (capacity == 0)
+        return &emptyBindings;
     if (capacity > std::numeric_limits<Bindings::size_t>::max())
         throw Error("attribute set of size %d is too big", capacity);
-    return new (allocBytes(sizeof(Bindings) + sizeof(Attr) * capacity)) Bindings((Bindings::size_t) capacity);
-}
-
-
-void EvalState::mkAttrs(Value & v, size_t capacity)
-{
-    if (capacity == 0) {
-        v = vEmptySet;
-        return;
-    }
-    v.mkAttrs(allocBindings(capacity));
     nrAttrsets++;
     nrAttrsInAttrsets += capacity;
+    return new (allocBytes(sizeof(Bindings) + sizeof(Attr) * capacity)) Bindings((Bindings::size_t) capacity);
 }


@@ -41,15 +34,36 @@ Value * EvalState::allocAttr(Value & vAttrs, const Symbol & name)
 }


-Value * EvalState::allocAttr(Value & vAttrs, const std::string & name)
+Value * EvalState::allocAttr(Value & vAttrs, std::string_view name)
 {
     return allocAttr(vAttrs, symbols.create(name));
 }


+Value & BindingsBuilder::alloc(const Symbol & name, ptr<Pos> pos)
+{
+    auto value = state.allocValue();
+    bindings->push_back(Attr(name, value, pos));
+    return *value;
+}
+
+
+Value & BindingsBuilder::alloc(std::string_view name, ptr<Pos> pos)
+{
+    return alloc(state.symbols.create(name), pos);
+}
+
+
 void Bindings::sort()
 {
-    std::sort(begin(), end());
+    if (size_) std::sort(begin(), end());
+}
+
+
+Value & Value::mkAttrs(BindingsBuilder & bindings)
+{
+    mkAttrs(bindings.finish());
+    return *this;
 }


@@ -113,5 +113,52 @@ public:
     friend class EvalState;
 };

+/* A wrapper around Bindings that ensures that its always in sorted
+   order at the end. The only way to consume a BindingsBuilder is to
+   call finish(), which sorts the bindings. */
+class BindingsBuilder
+{
+    Bindings * bindings;
+
+public:
+    // needed by std::back_inserter
+    using value_type = Attr;
+
+    EvalState & state;
+
+    BindingsBuilder(EvalState & state, Bindings * bindings)
+        : bindings(bindings), state(state)
+    { }
+
+    void insert(Symbol name, Value * value, ptr<Pos> pos = ptr(&noPos))
+    {
+        insert(Attr(name, value, pos));
+    }
+
+    void insert(const Attr & attr)
+    {
+        push_back(attr);
+    }
+
+    void push_back(const Attr & attr)
+    {
+        bindings->push_back(attr);
+    }
+
+    Value & alloc(const Symbol & name, ptr<Pos> pos = ptr(&noPos));
+
+    Value & alloc(std::string_view name, ptr<Pos> pos = ptr(&noPos));
+
+    Bindings * finish()
+    {
+        bindings->sort();
+        return bindings;
+    }
+
+    Bindings * alreadySorted()
+    {
+        return bindings;
+    }
+};
+
 }
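A hedged sketch of the intended BindingsBuilder usage pattern, mirroring the getAutoArgs() rewrite below; buildBindings() is assumed to be the EvalState helper that wraps allocBindings() in a BindingsBuilder, and the attribute names are made up for illustration:

    // Illustrative fragment only; 'state' is an EvalState, 'v' a Value.
    auto attrs = state.buildBindings(2);              // reserve room for two attrs
    attrs.alloc("x").mkString("hello");               // alloc() hands back a fresh Value &
    attrs.insert(state.symbols.create("y"), state.allocValue());
    v.mkAttrs(attrs);                                 // finish() sorts the bindings exactly once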
@@ -73,17 +73,16 @@ MixEvalArgs::MixEvalArgs()

 Bindings * MixEvalArgs::getAutoArgs(EvalState & state)
 {
-    Bindings * res = state.allocBindings(autoArgs.size());
+    auto res = state.buildBindings(autoArgs.size());
     for (auto & i : autoArgs) {
-        Value * v = state.allocValue();
+        auto v = state.allocValue();
         if (i.second[0] == 'E')
             state.mkThunk_(*v, state.parseExprFromString(string(i.second, 1), absPath(".")));
         else
-            mkString(*v, string(i.second, 1));
-        res->push_back(Attr(state.symbols.create(i.first), v));
+            v->mkString(((std::string_view) i.second).substr(1));
+        res.insert(state.symbols.create(i.first), v);
     }
-    res->sort();
-    return res;
+    return res.finish();
 }

 Path lookupFileArg(EvalState & state, string s)

@@ -336,7 +336,7 @@ Value & AttrCursor::getValue()
     if (!_value) {
         if (parent) {
             auto & vParent = parent->first->getValue();
-            root->state.forceAttrs(vParent);
+            root->state.forceAttrs(vParent, noPos);
             auto attr = vParent.attrs->get(parent->second);
             if (!attr)
                 throw Error("attribute '%s' is unexpectedly missing", getAttrPathStr());

@@ -381,7 +381,7 @@ Value & AttrCursor::forceValue()
     auto & v = getValue();

     try {
-        root->state.forceValue(v);
+        root->state.forceValue(v, noPos);
     } catch (EvalError &) {
         debug("setting '%s' to failed", getAttrPathStr());
         if (root->db)

@@ -15,12 +15,6 @@ LocalNoInlineNoReturn(void throwEvalError(const Pos & pos, const char * s))
     });
 }

-LocalNoInlineNoReturn(void throwTypeError(const char * s, const Value & v))
-{
-    throw TypeError(s, showType(v));
-}
-
-
 LocalNoInlineNoReturn(void throwTypeError(const Pos & pos, const char * s, const Value & v))
 {
     throw TypeError({

@@ -31,6 +25,13 @@ LocalNoInlineNoReturn(void throwTypeError(const Pos & pos, const char * s, const


 void EvalState::forceValue(Value & v, const Pos & pos)
+{
+    forceValue(v, [&]() { return pos; });
+}
+
+
+template<typename Callable>
+void EvalState::forceValue(Value & v, Callable getPos)
 {
     if (v.isThunk()) {
         Env * env = v.thunk.env;

@@ -47,31 +48,22 @@ void EvalState::forceValue(Value & v, const Pos & pos)
     else if (v.isApp())
         callFunction(*v.app.left, *v.app.right, v, noPos);
     else if (v.isBlackhole())
-        throwEvalError(pos, "infinite recursion encountered");
-}
-
-
-inline void EvalState::forceAttrs(Value & v)
-{
-    forceValue(v);
-    if (v.type() != nAttrs)
-        throwTypeError("value is %1% while a set was expected", v);
+        throwEvalError(getPos(), "infinite recursion encountered");
 }


 inline void EvalState::forceAttrs(Value & v, const Pos & pos)
 {
-    forceValue(v, pos);
-    if (v.type() != nAttrs)
-        throwTypeError(pos, "value is %1% while a set was expected", v);
+    forceAttrs(v, [&]() { return pos; });
 }


-inline void EvalState::forceList(Value & v)
+template <typename Callable>
+inline void EvalState::forceAttrs(Value & v, Callable getPos)
 {
-    forceValue(v);
-    if (!v.isList())
-        throwTypeError("value is %1% while a list was expected", v);
+    forceValue(v, getPos);
+    if (v.type() != nAttrs)
+        throwTypeError(getPos(), "value is %1% while a set was expected", v);
 }

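The Callable overloads above let call sites defer constructing a Pos until an error is actually reported. Both calling styles appear later in this patch; for reference:

    state.forceValue(v, pos);                                      // eager: a Pos is already at hand
    state.forceAttrs(v, [&]() { return v.determinePos(noPos); });  // lazy: only computed if an error is thrown
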
@@ -1,5 +1,6 @@
 #include "eval.hh"
 #include "hash.hh"
+#include "types.hh"
 #include "util.hh"
 #include "store-api.hh"
 #include "derivations.hh"

@@ -38,6 +39,19 @@ namespace nix {

 std::function<void(const Error * error, const Env & env, const Expr & expr)> debuggerHook;

+static char * allocString(size_t size)
+{
+    char * t;
+#if HAVE_BOEHMGC
+    t = (char *) GC_MALLOC_ATOMIC(size);
+#else
+    t = malloc(size);
+#endif
+    if (!t) throw std::bad_alloc();
+    return t;
+}
+
+
 static char * dupString(const char * s)
 {
     char * t;

@@ -147,7 +161,7 @@ void printValue(std::ostream & str, std::set<const Value *> & active, const Valu
         str << v.fpoint;
         break;
     default:
-        throw Error("invalid value");
+        abort();
     }

     active.erase(&v);

@@ -207,7 +221,7 @@ string showType(const Value & v)
     }
 }

-Pos Value::determinePos(const Pos &pos) const
+Pos Value::determinePos(const Pos & pos) const
 {
     switch (internalType) {
         case tAttrs: return *attrs->pos;

@@ -414,11 +428,22 @@ EvalState::EvalState(
     , sDescription(symbols.create("description"))
     , sSelf(symbols.create("self"))
     , sEpsilon(symbols.create(""))
+    , sStartSet(symbols.create("startSet"))
+    , sOperator(symbols.create("operator"))
+    , sKey(symbols.create("key"))
+    , sPath(symbols.create("path"))
+    , sPrefix(symbols.create("prefix"))
     , repair(NoRepair)
+    , emptyBindings(0)
     , store(store)
     , buildStore(buildStore ? buildStore : store)
     , debugStop(true)
     , regexCache(makeRegexCache())
+#if HAVE_BOEHMGC
+    , valueAllocCache(std::allocate_shared<void *>(traceable_allocator<void *>(), nullptr))
+#else
+    , valueAllocCache(std::make_shared<void *>(nullptr))
+#endif
     , baseEnv(allocEnv(128))
     , staticBaseEnv(new StaticEnv(false, 0))
 {

@@ -457,8 +482,6 @@ EvalState::EvalState(
         }
     }

-    vEmptySet.mkAttrs(allocBindings(0));
-
     createBaseEnv();
 }

@@ -616,7 +639,7 @@ Value * EvalState::addPrimOp(const string & name,
     auto vPrimOp = allocValue();
     vPrimOp->mkPrimOp(new PrimOp { .fun = primOp, .arity = 1, .name = sym });
     Value v;
-    mkApp(v, *vPrimOp, *vPrimOp);
+    v.mkApp(vPrimOp, vPrimOp);
     return addConstant(name, v);
 }

@@ -638,7 +661,7 @@ Value * EvalState::addPrimOp(PrimOp && primOp)
     auto vPrimOp = allocValue();
     vPrimOp->mkPrimOp(new PrimOp(std::move(primOp)));
     Value v;
-    mkApp(v, *vPrimOp, *vPrimOp);
+    v.mkApp(vPrimOp, vPrimOp);
     return addConstant(primOp.name, v);
 }

@@ -863,6 +886,13 @@ LocalNoInlineNoReturn(void throwTypeError(const Pos & pos, const char * s, const
     throw error;
 }

+// LocalNoInlineNoReturn(void throwTypeError(const char * s, const Value & v, Env & env, Expr *expr))
+// {
+//     auto error = TypeError({
+//         .msg = hintfmt(s, showType(v))
+//         .errPos = e ;
+//     }
+
 LocalNoInlineNoReturn(void throwAssertionError(const Pos & pos, const char * s, const string & s1, Env & env, Expr *expr))
 {
     auto error = AssertionError({

@@ -935,14 +965,14 @@ DebugTraceStacker::DebugTraceStacker(EvalState &evalState, DebugTrace t)
         debuggerHook(0, t.env, t.expr);
 }

-void mkString(Value & v, const char * s)
+void Value::mkString(std::string_view s)
 {
-    v.mkString(dupString(s));
+    mkString(dupStringWithLen(s.data(), s.size()));
 }

-Value & mkString(Value & v, std::string_view s, const PathSet & context)
+
+static void copyContextToValue(Value & v, const PathSet & context)
 {
-    v.mkString(dupStringWithLen(s.data(), s.size()));
     if (!context.empty()) {
         size_t n = 0;
         v.string.context = (const char * *)

@@ -951,13 +981,24 @@ Value & mkString(Value & v, std::string_view s, const PathSet & context)
             v.string.context[n++] = dupString(i.c_str());
         v.string.context[n] = 0;
     }
-    return v;
+}
+
+void Value::mkString(std::string_view s, const PathSet & context)
+{
+    mkString(s);
+    copyContextToValue(*this, context);
+}
+
+void Value::mkStringMove(const char * s, const PathSet & context)
+{
+    mkString(s);
+    copyContextToValue(*this, context);
 }


-void mkPath(Value & v, const char * s)
+void Value::mkPath(std::string_view s)
 {
-    v.mkPath(dupString(s));
+    mkPath(dupStringWithLen(s.data(), s.size()));
 }

@@ -994,15 +1035,15 @@ Value * EvalState::allocValue()
        GC_malloc_many returns a linked list of objects of the given size, where the first word
        of each object is also the pointer to the next object in the list. This also means that we
        have to explicitly clear the first word of every object we take. */
-    if (!valueAllocCache) {
-        valueAllocCache = GC_malloc_many(sizeof(Value));
-        if (!valueAllocCache) throw std::bad_alloc();
+    if (!*valueAllocCache) {
+        *valueAllocCache = GC_malloc_many(sizeof(Value));
+        if (!*valueAllocCache) throw std::bad_alloc();
     }

     /* GC_NEXT is a convenience macro for accessing the first word of an object.
        Take the first list item, advance the list to the next item, and clear the next pointer. */
-    void * p = valueAllocCache;
-    GC_PTR_STORE_AND_DIRTY(&valueAllocCache, GC_NEXT(p));
+    void * p = *valueAllocCache;
+    GC_PTR_STORE_AND_DIRTY(&*valueAllocCache, GC_NEXT(p));
     GC_NEXT(p) = nullptr;

     nrValues++;

@@ -1061,13 +1102,13 @@ void EvalState::mkThunk_(Value & v, Expr * expr)
 void EvalState::mkPos(Value & v, ptr<Pos> pos)
 {
     if (pos->file.set()) {
-        mkAttrs(v, 3);
-        mkString(*allocAttr(v, sFile), pos->file);
-        mkInt(*allocAttr(v, sLine), pos->line);
-        mkInt(*allocAttr(v, sColumn), pos->column);
-        v.attrs->sort();
+        auto attrs = buildBindings(3);
+        attrs.alloc(sFile).mkString(pos->file);
+        attrs.alloc(sLine).mkInt(pos->line);
+        attrs.alloc(sColumn).mkInt(pos->column);
+        v.mkAttrs(attrs);
     } else
-        mkNull(v);
+        v.mkNull();
 }

@@ -1256,8 +1297,8 @@ void ExprPath::eval(EvalState & state, Env & env, Value & v)

 void ExprAttrs::eval(EvalState & state, Env & env, Value & v)
 {
-    state.mkAttrs(v, attrs.size() + dynamicAttrs.size());
-    Env *dynamicEnv = &env;
+    v.mkAttrs(state.buildBindings(attrs.size() + dynamicAttrs.size()).finish());
+    auto dynamicEnv = &env;

     if (recursive) {
         /* Create a new environment that contains the attributes in

@@ -1294,7 +1335,7 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v)
            Hence we need __overrides.) */
         if (hasOverrides) {
             Value * vOverrides = (*v.attrs)[overrides->second.displ].value;
-            state.forceAttrs(*vOverrides);
+            state.forceAttrs(*vOverrides, [&]() { return vOverrides->determinePos(noPos); });
             Bindings * newBnds = state.allocBindings(v.attrs->capacity() + vOverrides->attrs->size());
             for (auto & i : *v.attrs)
                 newBnds->push_back(i);

@@ -1453,20 +1494,20 @@ void ExprOpHasAttr::eval(EvalState & state, Env & env, Value & v)
     e->eval(state, env, vTmp);

     for (auto & i : attrPath) {
-        state.forceValue(*vAttrs);
+        state.forceValue(*vAttrs, noPos);
         Bindings::iterator j;
         Symbol name = getName(i, state, env);
         if (vAttrs->type() != nAttrs ||
             (j = vAttrs->attrs->find(name)) == vAttrs->attrs->end())
         {
-            mkBool(v, false);
+            v.mkBool(false);
             return;
         } else {
             vAttrs = j->value;
         }
     }

-    mkBool(v, true);
+    v.mkBool(true);
 }

@@ -1540,7 +1581,7 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value &
                 /* Nope, so show the first unexpected argument to the
                    user. */
                 for (auto & i : *args[0]->attrs)
-                    if (lambda.formals->argNames.find(i.name) == lambda.formals->argNames.end())
+                    if (!lambda.formals->has(i.name))
                         throwTypeError(pos, "%1% called with unexpected argument '%2%'",
                             lambda, i.name, *fun.lambda.env, &lambda);
                 abort(); // can't happen

@@ -1676,14 +1717,16 @@ void EvalState::incrFunctionCall(ExprLambda * fun)

 void EvalState::autoCallFunction(Bindings & args, Value & fun, Value & res)
 {
-    forceValue(fun);
+    auto pos = fun.determinePos(noPos);
+
+    forceValue(fun, pos);

     if (fun.type() == nAttrs) {
         auto found = fun.attrs->find(sFunctor);
         if (found != fun.attrs->end()) {
             Value * v = allocValue();
-            callFunction(*found->value, fun, *v, noPos);
-            forceValue(*v);
+            callFunction(*found->value, fun, *v, pos);
+            forceValue(*v, pos);
             return autoCallFunction(args, *v, res);
         }
     }

@@ -1693,22 +1736,20 @@ void EvalState::autoCallFunction(Bindings & args, Value & fun, Value & res)
         return;
     }

-    Value * actualArgs = allocValue();
-    mkAttrs(*actualArgs, std::max(static_cast<uint32_t>(fun.lambda.fun->formals->formals.size()), args.size()));
+    auto attrs = buildBindings(std::max(static_cast<uint32_t>(fun.lambda.fun->formals->formals.size()), args.size()));

     if (fun.lambda.fun->formals->ellipsis) {
         // If the formals have an ellipsis (eg the function accepts extra args) pass
         // all available automatic arguments (which includes arguments specified on
         // the command line via --arg/--argstr)
-        for (auto& v : args) {
-            actualArgs->attrs->push_back(v);
-        }
+        for (auto & v : args)
+            attrs.insert(v);
     } else {
         // Otherwise, only pass the arguments that the function accepts
         for (auto & i : fun.lambda.fun->formals->formals) {
             Bindings::iterator j = args.find(i.name);
             if (j != args.end()) {
-                actualArgs->attrs->push_back(*j);
+                attrs.insert(*j);
             } else if (!i.def) {
                 throwMissingArgumentError(i.pos, R"(cannot evaluate a function that has an argument without a value ('%1%')

@@ -1722,9 +1763,7 @@ https://nixos.org/manual/nix/stable/#ss-functions.)",
         }
     }

-    actualArgs->attrs->sort();
-
-    callFunction(fun, *actualArgs, res, noPos);
+    callFunction(fun, allocValue()->mkAttrs(attrs), res, noPos);
 }

@@ -1759,7 +1798,7 @@ void ExprAssert::eval(EvalState & state, Env & env, Value & v)

 void ExprOpNot::eval(EvalState & state, Env & env, Value & v)
 {
-    mkBool(v, !state.evalBool(env, e));
+    v.mkBool(!state.evalBool(env, e));
 }

@@ -1767,7 +1806,7 @@ void ExprOpEq::eval(EvalState & state, Env & env, Value & v)
 {
     Value v1; e1->eval(state, env, v1);
     Value v2; e2->eval(state, env, v2);
-    mkBool(v, state.eqValues(v1, v2));
+    v.mkBool(state.eqValues(v1, v2));
 }

@@ -1775,25 +1814,25 @@ void ExprOpNEq::eval(EvalState & state, Env & env, Value & v)
 {
     Value v1; e1->eval(state, env, v1);
     Value v2; e2->eval(state, env, v2);
-    mkBool(v, !state.eqValues(v1, v2));
+    v.mkBool(!state.eqValues(v1, v2));
 }


 void ExprOpAnd::eval(EvalState & state, Env & env, Value & v)
 {
-    mkBool(v, state.evalBool(env, e1, pos) && state.evalBool(env, e2, pos));
+    v.mkBool(state.evalBool(env, e1, pos) && state.evalBool(env, e2, pos));
 }


 void ExprOpOr::eval(EvalState & state, Env & env, Value & v)
 {
-    mkBool(v, state.evalBool(env, e1, pos) || state.evalBool(env, e2, pos));
+    v.mkBool(state.evalBool(env, e1, pos) || state.evalBool(env, e2, pos));
 }


 void ExprOpImpl::eval(EvalState & state, Env & env, Value & v)
 {
-    mkBool(v, !state.evalBool(env, e1, pos) || state.evalBool(env, e2, pos));
+    v.mkBool(!state.evalBool(env, e1, pos) || state.evalBool(env, e2, pos));
 }

@@ -1808,7 +1847,7 @@ void ExprOpUpdate::eval(EvalState & state, Env & env, Value & v)
     if (v1.attrs->size() == 0) { v = v2; return; }
     if (v2.attrs->size() == 0) { v = v1; return; }

-    state.mkAttrs(v, v1.attrs->size() + v2.attrs->size());
+    auto attrs = state.buildBindings(v1.attrs->size() + v2.attrs->size());

     /* Merge the sets, preferring values from the second set. Make
        sure to keep the resulting vector in sorted order. */

@@ -1817,17 +1856,19 @@ void ExprOpUpdate::eval(EvalState & state, Env & env, Value & v)

     while (i != v1.attrs->end() && j != v2.attrs->end()) {
         if (i->name == j->name) {
-            v.attrs->push_back(*j);
+            attrs.insert(*j);
             ++i; ++j;
         }
         else if (i->name < j->name)
-            v.attrs->push_back(*i++);
+            attrs.insert(*i++);
         else
-            v.attrs->push_back(*j++);
+            attrs.insert(*j++);
     }

-    while (i != v1.attrs->end()) v.attrs->push_back(*i++);
-    while (j != v2.attrs->end()) v.attrs->push_back(*j++);
+    while (i != v1.attrs->end()) attrs.insert(*i++);
+    while (j != v2.attrs->end()) attrs.insert(*j++);
+
+    v.mkAttrs(attrs.alreadySorted());

     state.nrOpUpdateValuesCopied += v.attrs->size();
 }

@@ -1874,15 +1915,39 @@ void EvalState::concatLists(Value & v, size_t nrLists, Value * * lists, const Po
 void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v)
 {
     PathSet context;
-    std::ostringstream s;
+    std::vector<BackedStringView> s;
+    size_t sSize = 0;
     NixInt n = 0;
     NixFloat nf = 0;

     bool first = !forceString;
     ValueType firstType = nString;

+    const auto str = [&] {
+        std::string result;
+        result.reserve(sSize);
+        for (const auto & part : s) result += *part;
+        return result;
+    };
+    /* c_str() is not str().c_str() because we want to create a string
+       Value. allocating a GC'd string directly and moving it into a
+       Value lets us avoid an allocation and copy. */
+    const auto c_str = [&] {
+        char * result = allocString(sSize + 1);
+        char * tmp = result;
+        for (const auto & part : s) {
+            memcpy(tmp, part->data(), part->size());
+            tmp += part->size();
+        }
+        *tmp = 0;
+        return result;
+    };
+
+    Value values[es->size()];
+    Value * vTmpP = values;
+
     for (auto & [i_pos, i] : *es) {
-        Value vTmp;
+        Value & vTmp = *vTmpP++;
         i->eval(state, env, vTmp);

         /* If the first element is a path, then the result will also

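For orientation, a sketch of how the two accumulators above are consumed at the end of the function (these two calls are the ones appearing in the next hunk; the surrounding control flow is elided):

    // str() builds an ordinary std::string (fine for the path case, which is
    // canonicalised and copied anyway); c_str() writes the parts straight into
    // a GC-allocated buffer so the resulting string Value avoids an extra copy.
    v.mkPath(canonPath(str()));
    v.mkStringMove(c_str(), context);
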
@@ -1911,26 +1976,29 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v)
                 nf += vTmp.fpoint;
             } else
                 throwEvalError(i_pos, "cannot add %1% to a float", showType(vTmp), env, this);
-        } else
+        } else {
+            if (s.empty()) s.reserve(es->size());
             /* skip canonization of first path, which would only be not
                canonized in the first place if it's coming from a ./${foo} type
                path */
-            s << state.coerceToString(i_pos, vTmp, context, false, firstType == nString, !first);
+            auto part = state.coerceToString(i_pos, vTmp, context, false, firstType == nString, !first);
+            sSize += part->size();
+            s.emplace_back(std::move(part));
+        }

         first = false;
     }

     if (firstType == nInt)
-        mkInt(v, n);
+        v.mkInt(n);
     else if (firstType == nFloat)
-        mkFloat(v, nf);
+        v.mkFloat(nf);
     else if (firstType == nPath) {
         if (!context.empty())
             throwEvalError(pos, "a string that refers to a store path cannot be appended to a path");
-        auto path = canonPath(s.str());
-        mkPath(v, path.c_str());
+        v.mkPath(canonPath(str()));
     } else
-        mkString(v, s.str(), context);
+        v.mkStringMove(c_str(), context);
 }

@@ -1949,7 +2017,7 @@ void EvalState::forceValueDeep(Value & v)
     recurse = [&](Value & v) {
         if (!seen.insert(&v).second) return;

-        forceValue(v);
+        forceValue(v, [&]() { return v.determinePos(noPos); });

         if (v.type() == nAttrs) {
             for (auto & i : *v.attrs)

@@ -2028,14 +2096,14 @@ void EvalState::forceFunction(Value & v, const Pos & pos)
 }


-string EvalState::forceString(Value & v, const Pos & pos)
+std::string_view EvalState::forceString(Value & v, const Pos & pos)
 {
     forceValue(v, pos);
     if (v.type() != nString) {
         throwTypeError(pos, "value is %1% while a string was expected", v,
             fakeEnv(1), 0);
     }
-    return string(v.string.s);
+    return v.string.s;
 }

@@ -2070,17 +2138,17 @@ std::vector<std::pair<Path, std::string>> Value::getContext()
 }


-string EvalState::forceString(Value & v, PathSet & context, const Pos & pos)
+std::string_view EvalState::forceString(Value & v, PathSet & context, const Pos & pos)
 {
-    string s = forceString(v, pos);
+    auto s = forceString(v, pos);
     copyContext(v, context);
     return s;
 }


-string EvalState::forceStringNoCtx(Value & v, const Pos & pos)
+std::string_view EvalState::forceStringNoCtx(Value & v, const Pos & pos)
 {
-    string s = forceString(v, pos);
+    auto s = forceString(v, pos);
     if (v.string.context) {
         if (pos)
             throwEvalError(pos, "the string '%1%' is not allowed to refer to a store path (such as '%2%')",

@@ -2098,7 +2166,7 @@ bool EvalState::isDerivation(Value & v)
     if (v.type() != nAttrs) return false;
     Bindings::iterator i = v.attrs->find(sType);
     if (i == v.attrs->end()) return false;
-    forceValue(*i->value);
+    forceValue(*i->value, *i->pos);
     if (i->value->type() != nString) return false;
     return strcmp(i->value->string.s, "derivation") == 0;
 }

|
@ -2111,34 +2179,35 @@ std::optional<string> EvalState::tryAttrsToString(const Pos & pos, Value & v,
|
||||||
if (i != v.attrs->end()) {
|
if (i != v.attrs->end()) {
|
||||||
Value v1;
|
Value v1;
|
||||||
callFunction(*i->value, v, v1, pos);
|
callFunction(*i->value, v, v1, pos);
|
||||||
return coerceToString(pos, v1, context, coerceMore, copyToStore);
|
return coerceToString(pos, v1, context, coerceMore, copyToStore).toOwned();
|
||||||
}
|
}
|
||||||
|
|
||||||
return {};
|
return {};
|
||||||
}
|
}
|
||||||
|
|
||||||
string EvalState::coerceToString(const Pos & pos, Value & v, PathSet & context,
|
BackedStringView EvalState::coerceToString(const Pos & pos, Value & v, PathSet & context,
|
||||||
bool coerceMore, bool copyToStore, bool canonicalizePath)
|
bool coerceMore, bool copyToStore, bool canonicalizePath)
|
||||||
{
|
{
|
||||||
forceValue(v, pos);
|
forceValue(v, pos);
|
||||||
|
|
||||||
string s;
|
|
||||||
|
|
||||||
if (v.type() == nString) {
|
if (v.type() == nString) {
|
||||||
copyContext(v, context);
|
copyContext(v, context);
|
||||||
return v.string.s;
|
return std::string_view(v.string.s);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (v.type() == nPath) {
|
if (v.type() == nPath) {
|
||||||
Path path(canonicalizePath ? canonPath(v.path) : v.path);
|
BackedStringView path(PathView(v.path));
|
||||||
return copyToStore ? copyPathToStore(context, path) : path;
|
if (canonicalizePath)
|
||||||
|
path = canonPath(*path);
|
||||||
|
if (copyToStore)
|
||||||
|
path = copyPathToStore(context, std::move(path).toOwned());
|
||||||
|
return path;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (v.type() == nAttrs) {
|
if (v.type() == nAttrs) {
|
||||||
auto maybeString = tryAttrsToString(pos, v, context, coerceMore, copyToStore);
|
auto maybeString = tryAttrsToString(pos, v, context, coerceMore, copyToStore);
|
||||||
if (maybeString) {
|
if (maybeString)
|
||||||
return *maybeString;
|
return std::move(*maybeString);
|
||||||
}
|
|
||||||
auto i = v.attrs->find(sOutPath);
|
auto i = v.attrs->find(sOutPath);
|
||||||
if (i == v.attrs->end())
|
if (i == v.attrs->end())
|
||||||
throwTypeError(pos, "cannot coerce a set to a string",
|
throwTypeError(pos, "cannot coerce a set to a string",
|
||||||
|
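coerceToString now hands back a BackedStringView rather than an owned string. The two ways call sites in this patch consume it, shown together for reference:

    result += *coerceToString(pos, *v2, context, coerceMore, copyToStore);   // borrow the view
    string path = coerceToString(pos, v, context, false, false).toOwned();   // force an owned copy
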
@@ -2161,14 +2230,13 @@
     if (v.isList()) {
         string result;
         for (auto [n, v2] : enumerate(v.listItems())) {
-            result += coerceToString(pos, *v2,
-                context, coerceMore, copyToStore);
+            result += *coerceToString(pos, *v2, context, coerceMore, copyToStore);
             if (n < v.listSize() - 1
                 /* !!! not quite correct */
                 && (!v2->isList() || v2->listSize() != 0))
                 result += " ";
         }
-        return result;
+        return std::move(result);
     }
     }

|
@ -2205,7 +2273,7 @@ string EvalState::copyPathToStore(PathSet & context, const Path & path)
|
||||||
|
|
||||||
Path EvalState::coerceToPath(const Pos & pos, Value & v, PathSet & context)
|
Path EvalState::coerceToPath(const Pos & pos, Value & v, PathSet & context)
|
||||||
{
|
{
|
||||||
string path = coerceToString(pos, v, context, false, false);
|
string path = coerceToString(pos, v, context, false, false).toOwned();
|
||||||
if (path == "" || path[0] != '/')
|
if (path == "" || path[0] != '/')
|
||||||
throwEvalError(pos, "string '%1%' doesn't represent an absolute path", path,
|
throwEvalError(pos, "string '%1%' doesn't represent an absolute path", path,
|
||||||
fakeEnv(1), 0);
|
fakeEnv(1), 0);
|
||||||
|
@ -2215,8 +2283,8 @@ Path EvalState::coerceToPath(const Pos & pos, Value & v, PathSet & context)
|
||||||
|
|
||||||
bool EvalState::eqValues(Value & v1, Value & v2)
|
bool EvalState::eqValues(Value & v1, Value & v2)
|
||||||
{
|
{
|
||||||
forceValue(v1);
|
forceValue(v1, noPos);
|
||||||
forceValue(v2);
|
forceValue(v2, noPos);
|
||||||
|
|
||||||
/* !!! Hack to support some old broken code that relies on pointer
|
/* !!! Hack to support some old broken code that relies on pointer
|
||||||
equality tests between sets. (Specifically, builderDefs calls
|
equality tests between sets. (Specifically, builderDefs calls
|
||||||
|
|
|
@ -1,6 +1,7 @@
|
||||||
#pragma once
|
#pragma once
|
||||||
|
|
||||||
#include "attr-set.hh"
|
#include "attr-set.hh"
|
||||||
|
#include "types.hh"
|
||||||
#include "value.hh"
|
#include "value.hh"
|
||||||
#include "nixexpr.hh"
|
#include "nixexpr.hh"
|
||||||
#include "symbol-table.hh"
|
#include "symbol-table.hh"
|
||||||
|
@ -49,8 +50,6 @@ struct Env
|
||||||
|
|
||||||
valmap * mapStaticEnvBindings(const StaticEnv &se, const Env &env);
|
valmap * mapStaticEnvBindings(const StaticEnv &se, const Env &env);
|
||||||
|
|
||||||
Value & mkString(Value & v, std::string_view s, const PathSet & context = PathSet());
|
|
||||||
|
|
||||||
void copyContext(const Value & v, PathSet & context);
|
void copyContext(const Value & v, PathSet & context);
|
||||||
|
|
||||||
|
|
||||||
|
@ -93,7 +92,8 @@ public:
|
||||||
sContentAddressed,
|
sContentAddressed,
|
||||||
sOutputHash, sOutputHashAlgo, sOutputHashMode,
|
sOutputHash, sOutputHashAlgo, sOutputHashMode,
|
||||||
sRecurseForDerivations,
|
sRecurseForDerivations,
|
||||||
sDescription, sSelf, sEpsilon;
|
sDescription, sSelf, sEpsilon, sStartSet, sOperator, sKey, sPath,
|
||||||
|
sPrefix;
|
||||||
Symbol sDerivationNix;
|
Symbol sDerivationNix;
|
||||||
|
|
||||||
/* If set, force copying files to the Nix store even if they
|
/* If set, force copying files to the Nix store even if they
|
||||||
|
@ -104,7 +104,7 @@ public:
|
||||||
mode. */
|
mode. */
|
||||||
std::optional<PathSet> allowedPaths;
|
std::optional<PathSet> allowedPaths;
|
||||||
|
|
||||||
Value vEmptySet;
|
Bindings emptyBindings;
|
||||||
|
|
||||||
/* Store used to materialise .drv files. */
|
/* Store used to materialise .drv files. */
|
||||||
const ref<Store> store;
|
const ref<Store> store;
|
||||||
|
@ -148,7 +148,7 @@ private:
|
||||||
std::shared_ptr<RegexCache> regexCache;
|
std::shared_ptr<RegexCache> regexCache;
|
||||||
|
|
||||||
/* Allocation cache for GC'd Value objects. */
|
/* Allocation cache for GC'd Value objects. */
|
||||||
void * valueAllocCache = nullptr;
|
std::shared_ptr<void *> valueAllocCache;
|
||||||
|
|
||||||
public:
|
public:
|
||||||
|
|
||||||
|
@ -195,8 +195,8 @@ public:
|
||||||
Expr * parseExprFromFile(const Path & path, std::shared_ptr<StaticEnv> & staticEnv);
|
Expr * parseExprFromFile(const Path & path, std::shared_ptr<StaticEnv> & staticEnv);
|
||||||
|
|
||||||
/* Parse a Nix expression from the specified string. */
|
/* Parse a Nix expression from the specified string. */
|
||||||
Expr * parseExprFromString(std::string_view s, const Path & basePath, std::shared_ptr<StaticEnv> & staticEnv);
|
Expr * parseExprFromString(std::string s, const Path & basePath, , std::shared_ptr<StaticEnv> & staticEnv);
|
||||||
Expr * parseExprFromString(std::string_view s, const Path & basePath);
|
Expr * parseExprFromString(std::string s, const Path & basePath);
|
||||||
|
|
||||||
Expr * parseStdin();
|
Expr * parseStdin();
|
||||||
|
|
||||||
|
@ -216,8 +216,8 @@ public:
|
||||||
void resetFileCache();
|
void resetFileCache();
|
||||||
|
|
||||||
/* Look up a file in the search path. */
|
/* Look up a file in the search path. */
|
||||||
Path findFile(const string & path);
|
Path findFile(const std::string_view path);
|
||||||
Path findFile(SearchPath & searchPath, const string & path, const Pos & pos = noPos);
|
Path findFile(SearchPath & searchPath, const std::string_view path, const Pos & pos = noPos);
|
||||||
|
|
||||||
/* If the specified search path element is a URI, download it. */
|
/* If the specified search path element is a URI, download it. */
|
||||||
std::pair<bool, std::string> resolveSearchPathElem(const SearchPathElem & elem);
|
std::pair<bool, std::string> resolveSearchPathElem(const SearchPathElem & elem);
|
||||||
|
@ -236,7 +236,10 @@ public:
|
||||||
of the evaluation of the thunk. If `v' is a delayed function
|
of the evaluation of the thunk. If `v' is a delayed function
|
||||||
application, call the function and overwrite `v' with the
|
application, call the function and overwrite `v' with the
|
||||||
result. Otherwise, this is a no-op. */
|
result. Otherwise, this is a no-op. */
|
||||||
inline void forceValue(Value & v, const Pos & pos = noPos);
|
inline void forceValue(Value & v, const Pos & pos);
|
||||||
|
|
||||||
|
template <typename Callable>
|
||||||
|
inline void forceValue(Value & v, Callable getPos);
|
||||||
|
|
||||||
/* Force a value, then recursively force list elements and
|
/* Force a value, then recursively force list elements and
|
||||||
attributes. */
|
attributes. */
|
||||||
|
@ -246,14 +249,17 @@ public:
|
||||||
NixInt forceInt(Value & v, const Pos & pos);
|
NixInt forceInt(Value & v, const Pos & pos);
|
||||||
NixFloat forceFloat(Value & v, const Pos & pos);
|
NixFloat forceFloat(Value & v, const Pos & pos);
|
||||||
bool forceBool(Value & v, const Pos & pos);
|
bool forceBool(Value & v, const Pos & pos);
|
||||||
inline void forceAttrs(Value & v);
|
|
||||||
inline void forceAttrs(Value & v, const Pos & pos);
|
void forceAttrs(Value & v, const Pos & pos);
|
||||||
inline void forceList(Value & v);
|
|
||||||
|
template <typename Callable>
|
||||||
|
inline void forceAttrs(Value & v, Callable getPos);
|
||||||
|
|
||||||
inline void forceList(Value & v, const Pos & pos);
|
inline void forceList(Value & v, const Pos & pos);
|
||||||
void forceFunction(Value & v, const Pos & pos); // either lambda or primop
|
void forceFunction(Value & v, const Pos & pos); // either lambda or primop
|
||||||
string forceString(Value & v, const Pos & pos = noPos);
|
std::string_view forceString(Value & v, const Pos & pos = noPos);
|
||||||
string forceString(Value & v, PathSet & context, const Pos & pos = noPos);
|
std::string_view forceString(Value & v, PathSet & context, const Pos & pos = noPos);
|
||||||
string forceStringNoCtx(Value & v, const Pos & pos = noPos);
|
std::string_view forceStringNoCtx(Value & v, const Pos & pos = noPos);
|
||||||
|
|
||||||
/* Return true iff the value `v' denotes a derivation (i.e. a
|
/* Return true iff the value `v' denotes a derivation (i.e. a
|
||||||
set with attribute `type = "derivation"'). */
|
set with attribute `type = "derivation"'). */
|
||||||
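Note that the forceString family now returns std::string_view, so a caller that needs to keep the result must copy it out itself; the flake code further down in this patch does exactly that:

    string flakeRefS(state.forceStringNoCtx(*args[0], pos));   // owned copy made from the string_view
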
|
@ -266,7 +272,7 @@ public:
|
||||||
string. If `coerceMore' is set, also converts nulls, integers,
|
string. If `coerceMore' is set, also converts nulls, integers,
|
||||||
booleans and lists to a string. If `copyToStore' is set,
|
booleans and lists to a string. If `copyToStore' is set,
|
||||||
referenced paths are copied to the Nix store as a side effect. */
|
referenced paths are copied to the Nix store as a side effect. */
|
||||||
string coerceToString(const Pos & pos, Value & v, PathSet & context,
|
BackedStringView coerceToString(const Pos & pos, Value & v, PathSet & context,
|
||||||
bool coerceMore = false, bool copyToStore = true,
|
bool coerceMore = false, bool copyToStore = true,
|
||||||
bool canonicalizePath = true);
|
bool canonicalizePath = true);
|
||||||
|
|
||||||
|
@ -324,7 +330,7 @@ private:
|
||||||
friend struct ExprAttrs;
|
friend struct ExprAttrs;
|
||||||
friend struct ExprLet;
|
friend struct ExprLet;
|
||||||
|
|
||||||
Expr * parse(const char * text, FileOrigin origin, const Path & path,
|
Expr * parse(char * text, size_t length, FileOrigin origin, const PathView path,
|
||||||
const Path & basePath, std::shared_ptr<StaticEnv> & staticEnv);
|
const Path & basePath, std::shared_ptr<StaticEnv> & staticEnv);
|
||||||
|
|
||||||
public:
|
public:
|
||||||
|
@ -353,12 +359,16 @@ public:
|
||||||
Env & allocEnv(size_t size);
|
Env & allocEnv(size_t size);
|
||||||
|
|
||||||
Value * allocAttr(Value & vAttrs, const Symbol & name);
|
Value * allocAttr(Value & vAttrs, const Symbol & name);
|
||||||
Value * allocAttr(Value & vAttrs, const std::string & name);
|
Value * allocAttr(Value & vAttrs, std::string_view name);
|
||||||
|
|
||||||
Bindings * allocBindings(size_t capacity);
|
Bindings * allocBindings(size_t capacity);
|
||||||
|
|
||||||
|
BindingsBuilder buildBindings(size_t capacity)
|
||||||
|
{
|
||||||
|
return BindingsBuilder(*this, allocBindings(capacity));
|
||||||
|
}
|
||||||
|
|
||||||
void mkList(Value & v, size_t length);
|
void mkList(Value & v, size_t length);
|
||||||
void mkAttrs(Value & v, size_t capacity);
|
|
||||||
void mkThunk_(Value & v, Expr * expr);
|
void mkThunk_(Value & v, Expr * expr);
|
||||||
void mkPos(Value & v, ptr<Pos> pos);
|
void mkPos(Value & v, ptr<Pos> pos);
|
||||||
|
|
||||||
|
@ -411,6 +421,9 @@ private:
|
||||||
friend struct ExprSelect;
|
friend struct ExprSelect;
|
||||||
friend void prim_getAttr(EvalState & state, const Pos & pos, Value * * args, Value & v);
|
friend void prim_getAttr(EvalState & state, const Pos & pos, Value * * args, Value & v);
|
||||||
friend void prim_match(EvalState & state, const Pos & pos, Value * * args, Value & v);
|
friend void prim_match(EvalState & state, const Pos & pos, Value * * args, Value & v);
|
||||||
|
friend void prim_split(EvalState & state, const Pos & pos, Value * * args, Value & v);
|
||||||
|
|
||||||
|
friend struct Value;
|
||||||
};
|
};
|
||||||
|
|
||||||
class DebugTraceStacker {
|
class DebugTraceStacker {
|
||||||
|
|
|
@ -89,11 +89,11 @@ static void expectType(EvalState & state, ValueType type,
|
||||||
|
|
||||||
static std::map<FlakeId, FlakeInput> parseFlakeInputs(
|
static std::map<FlakeId, FlakeInput> parseFlakeInputs(
|
||||||
EvalState & state, Value * value, const Pos & pos,
|
EvalState & state, Value * value, const Pos & pos,
|
||||||
const std::optional<Path> & baseDir);
|
const std::optional<Path> & baseDir, InputPath lockRootPath);
|
||||||
|
|
||||||
static FlakeInput parseFlakeInput(EvalState & state,
|
static FlakeInput parseFlakeInput(EvalState & state,
|
||||||
const std::string & inputName, Value * value, const Pos & pos,
|
const std::string & inputName, Value * value, const Pos & pos,
|
||||||
const std::optional<Path> & baseDir)
|
const std::optional<Path> & baseDir, InputPath lockRootPath)
|
||||||
{
|
{
|
||||||
expectType(state, nAttrs, *value, pos);
|
expectType(state, nAttrs, *value, pos);
|
||||||
|
|
||||||
|
@ -117,10 +117,12 @@ static FlakeInput parseFlakeInput(EvalState & state,
|
||||||
expectType(state, nBool, *attr.value, *attr.pos);
|
expectType(state, nBool, *attr.value, *attr.pos);
|
||||||
input.isFlake = attr.value->boolean;
|
input.isFlake = attr.value->boolean;
|
||||||
} else if (attr.name == sInputs) {
|
} else if (attr.name == sInputs) {
|
||||||
input.overrides = parseFlakeInputs(state, attr.value, *attr.pos, baseDir);
|
input.overrides = parseFlakeInputs(state, attr.value, *attr.pos, baseDir, lockRootPath);
|
||||||
} else if (attr.name == sFollows) {
|
} else if (attr.name == sFollows) {
|
||||||
expectType(state, nString, *attr.value, *attr.pos);
|
expectType(state, nString, *attr.value, *attr.pos);
|
||||||
input.follows = parseInputPath(attr.value->string.s);
|
auto follows(parseInputPath(attr.value->string.s));
|
||||||
|
follows.insert(follows.begin(), lockRootPath.begin(), lockRootPath.end());
|
||||||
|
input.follows = follows;
|
||||||
} else {
|
} else {
|
||||||
switch (attr.value->type()) {
|
switch (attr.value->type()) {
|
||||||
case nString:
|
case nString:
|
||||||
|
@ -166,7 +168,7 @@ static FlakeInput parseFlakeInput(EvalState & state,
|
||||||
|
|
||||||
static std::map<FlakeId, FlakeInput> parseFlakeInputs(
|
static std::map<FlakeId, FlakeInput> parseFlakeInputs(
|
||||||
EvalState & state, Value * value, const Pos & pos,
|
EvalState & state, Value * value, const Pos & pos,
|
||||||
const std::optional<Path> & baseDir)
|
const std::optional<Path> & baseDir, InputPath lockRootPath)
|
||||||
{
|
{
|
||||||
std::map<FlakeId, FlakeInput> inputs;
|
std::map<FlakeId, FlakeInput> inputs;
|
||||||
|
|
||||||
|
@ -178,7 +180,8 @@ static std::map<FlakeId, FlakeInput> parseFlakeInputs(
|
||||||
inputAttr.name,
|
inputAttr.name,
|
||||||
inputAttr.value,
|
inputAttr.value,
|
||||||
*inputAttr.pos,
|
*inputAttr.pos,
|
||||||
baseDir));
|
baseDir,
|
||||||
|
lockRootPath));
|
||||||
}
|
}
|
||||||
|
|
||||||
return inputs;
|
return inputs;
|
||||||
|
@ -188,7 +191,8 @@ static Flake getFlake(
|
||||||
EvalState & state,
|
EvalState & state,
|
||||||
const FlakeRef & originalRef,
|
const FlakeRef & originalRef,
|
||||||
bool allowLookup,
|
bool allowLookup,
|
||||||
FlakeCache & flakeCache)
|
FlakeCache & flakeCache,
|
||||||
|
InputPath lockRootPath)
|
||||||
{
|
{
|
||||||
auto [sourceInfo, resolvedRef, lockedRef] = fetchOrSubstituteTree(
|
auto [sourceInfo, resolvedRef, lockedRef] = fetchOrSubstituteTree(
|
||||||
state, originalRef, allowLookup, flakeCache);
|
state, originalRef, allowLookup, flakeCache);
|
||||||
|
@ -223,7 +227,7 @@ static Flake getFlake(
|
||||||
auto sInputs = state.symbols.create("inputs");
|
auto sInputs = state.symbols.create("inputs");
|
||||||
|
|
||||||
if (auto inputs = vInfo.attrs->get(sInputs))
|
if (auto inputs = vInfo.attrs->get(sInputs))
|
||||||
flake.inputs = parseFlakeInputs(state, inputs->value, *inputs->pos, flakeDir);
|
flake.inputs = parseFlakeInputs(state, inputs->value, *inputs->pos, flakeDir, lockRootPath);
|
||||||
|
|
||||||
auto sOutputs = state.symbols.create("outputs");
|
auto sOutputs = state.symbols.create("outputs");
|
||||||
|
|
||||||
|
@ -250,7 +254,13 @@ static Flake getFlake(
|
||||||
for (auto & setting : *nixConfig->value->attrs) {
|
for (auto & setting : *nixConfig->value->attrs) {
|
||||||
forceTrivialValue(state, *setting.value, *setting.pos);
|
forceTrivialValue(state, *setting.value, *setting.pos);
|
||||||
if (setting.value->type() == nString)
|
if (setting.value->type() == nString)
|
||||||
flake.config.settings.insert({setting.name, state.forceStringNoCtx(*setting.value, *setting.pos)});
|
flake.config.settings.insert({setting.name, string(state.forceStringNoCtx(*setting.value, *setting.pos))});
|
||||||
|
else if (setting.value->type() == nPath) {
|
||||||
|
PathSet emptyContext = {};
|
||||||
|
flake.config.settings.emplace(
|
||||||
|
setting.name,
|
||||||
|
state.coerceToString(*setting.pos, *setting.value, emptyContext, false, true, true) .toOwned());
|
||||||
|
}
|
||||||
else if (setting.value->type() == nInt)
|
else if (setting.value->type() == nInt)
|
||||||
flake.config.settings.insert({setting.name, state.forceInt(*setting.value, *setting.pos)});
|
flake.config.settings.insert({setting.name, state.forceInt(*setting.value, *setting.pos)});
|
||||||
else if (setting.value->type() == nBool)
|
else if (setting.value->type() == nBool)
|
||||||
|
@ -261,7 +271,7 @@ static Flake getFlake(
|
||||||
if (elem->type() != nString)
|
if (elem->type() != nString)
|
||||||
throw TypeError("list element in flake configuration setting '%s' is %s while a string is expected",
|
throw TypeError("list element in flake configuration setting '%s' is %s while a string is expected",
|
||||||
setting.name, showType(*setting.value));
|
setting.name, showType(*setting.value));
|
||||||
ss.push_back(state.forceStringNoCtx(*elem, *setting.pos));
|
ss.emplace_back(state.forceStringNoCtx(*elem, *setting.pos));
|
||||||
}
|
}
|
||||||
flake.config.settings.insert({setting.name, ss});
|
flake.config.settings.insert({setting.name, ss});
|
||||||
}
|
}
|
||||||
|
@ -283,6 +293,11 @@ static Flake getFlake(
|
||||||
return flake;
|
return flake;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
Flake getFlake(EvalState & state, const FlakeRef & originalRef, bool allowLookup, FlakeCache & flakeCache)
|
||||||
|
{
|
||||||
|
return getFlake(state, originalRef, allowLookup, flakeCache, {});
|
||||||
|
}
|
||||||
|
|
||||||
Flake getFlake(EvalState & state, const FlakeRef & originalRef, bool allowLookup)
|
Flake getFlake(EvalState & state, const FlakeRef & originalRef, bool allowLookup)
|
||||||
{
|
{
|
||||||
FlakeCache flakeCache;
|
FlakeCache flakeCache;
|
||||||
|
@ -328,22 +343,12 @@ LockedFlake lockFlake(
|
||||||
|
|
||||||
std::vector<FlakeRef> parents;
|
std::vector<FlakeRef> parents;
|
||||||
|
|
||||||
struct LockParent {
|
|
||||||
/* The path to this parent. */
|
|
||||||
InputPath path;
|
|
||||||
|
|
||||||
/* Whether we are currently inside a top-level lockfile
|
|
||||||
(inputs absolute) or subordinate lockfile (inputs
|
|
||||||
relative). */
|
|
||||||
bool absolute;
|
|
||||||
};
|
|
||||||
|
|
||||||
std::function<void(
|
std::function<void(
|
||||||
const FlakeInputs & flakeInputs,
|
const FlakeInputs & flakeInputs,
|
||||||
std::shared_ptr<Node> node,
|
std::shared_ptr<Node> node,
|
||||||
const InputPath & inputPathPrefix,
|
const InputPath & inputPathPrefix,
|
||||||
std::shared_ptr<const Node> oldNode,
|
std::shared_ptr<const Node> oldNode,
|
||||||
const LockParent & parent,
|
const InputPath & lockRootPath,
|
||||||
const Path & parentPath,
|
const Path & parentPath,
|
||||||
bool trustLock)>
|
bool trustLock)>
|
||||||
computeLocks;
|
computeLocks;
|
||||||
|
@ -353,7 +358,7 @@ LockedFlake lockFlake(
|
||||||
std::shared_ptr<Node> node,
|
std::shared_ptr<Node> node,
|
||||||
const InputPath & inputPathPrefix,
|
const InputPath & inputPathPrefix,
|
||||||
std::shared_ptr<const Node> oldNode,
|
std::shared_ptr<const Node> oldNode,
|
||||||
const LockParent & parent,
|
const InputPath & lockRootPath,
|
||||||
const Path & parentPath,
|
const Path & parentPath,
|
||||||
bool trustLock)
|
bool trustLock)
|
||||||
{
|
{
|
||||||
|
@ -398,17 +403,7 @@ LockedFlake lockFlake(
|
||||||
if (input.follows) {
|
if (input.follows) {
|
||||||
InputPath target;
|
InputPath target;
|
||||||
|
|
||||||
if (parent.absolute && !hasOverride) {
|
target.insert(target.end(), input.follows->begin(), input.follows->end());
|
||||||
target = *input.follows;
|
|
||||||
} else {
|
|
||||||
if (hasOverride) {
|
|
||||||
target = inputPathPrefix;
|
|
||||||
target.pop_back();
|
|
||||||
} else
|
|
||||||
target = parent.path;
|
|
||||||
|
|
||||||
for (auto & i : *input.follows) target.push_back(i);
|
|
||||||
}
|
|
||||||
|
|
||||||
debug("input '%s' follows '%s'", inputPathS, printInputPath(target));
|
debug("input '%s' follows '%s'", inputPathS, printInputPath(target));
|
||||||
node->inputs.insert_or_assign(id, target);
|
node->inputs.insert_or_assign(id, target);
|
||||||
|
@ -481,23 +476,25 @@ LockedFlake lockFlake(
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
auto absoluteFollows(lockRootPath);
|
||||||
|
absoluteFollows.insert(absoluteFollows.end(), follows->begin(), follows->end());
|
||||||
fakeInputs.emplace(i.first, FlakeInput {
|
fakeInputs.emplace(i.first, FlakeInput {
|
||||||
.follows = *follows,
|
.follows = absoluteFollows,
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
LockParent newParent {
|
auto localPath(parentPath);
|
||||||
.path = inputPath,
|
// If this input is a path, recurse it down.
|
||||||
.absolute = true
|
// This allows us to resolve path inputs relative to the current flake.
|
||||||
};
|
if ((*input.ref).input.getType() == "path")
|
||||||
|
localPath = absPath(*input.ref->input.getSourcePath(), parentPath);
|
||||||
computeLocks(
|
computeLocks(
|
||||||
mustRefetch
|
mustRefetch
|
||||||
? getFlake(state, oldLock->lockedRef, false, flakeCache).inputs
|
? getFlake(state, oldLock->lockedRef, false, flakeCache, inputPath).inputs
|
||||||
: fakeInputs,
|
: fakeInputs,
|
||||||
childNode, inputPath, oldLock, newParent, parentPath, !mustRefetch);
|
childNode, inputPath, oldLock, lockRootPath, parentPath, !mustRefetch);
|
||||||
|
|
||||||
} else {
|
} else {
|
||||||
/* We need to create a new lock file entry. So fetch
|
/* We need to create a new lock file entry. So fetch
|
||||||
|
@ -516,7 +513,7 @@ LockedFlake lockFlake(
|
||||||
if (localRef.input.getType() == "path")
|
if (localRef.input.getType() == "path")
|
||||||
localPath = absPath(*input.ref->input.getSourcePath(), parentPath);
|
localPath = absPath(*input.ref->input.getSourcePath(), parentPath);
|
||||||
|
|
||||||
auto inputFlake = getFlake(state, localRef, useRegistries, flakeCache);
|
auto inputFlake = getFlake(state, localRef, useRegistries, flakeCache, inputPath);
|
||||||
|
|
||||||
/* Note: in case of an --override-input, we use
|
/* Note: in case of an --override-input, we use
|
||||||
the *original* ref (input2.ref) for the
|
the *original* ref (input2.ref) for the
|
||||||
|
@@ -537,13 +534,6 @@ LockedFlake lockFlake(
         parents.push_back(*input.ref);
         Finally cleanup([&]() { parents.pop_back(); });

-        // Follows paths from existing inputs in the top-level lockfile are absolute,
-        // whereas paths in subordinate lockfiles are relative to those lockfiles.
-        LockParent newParent {
-            .path = inputPath,
-            .absolute = oldLock ? true : false
-        };
-
         /* Recursively process the inputs of this
            flake. Also, unless we already have this flake
            in the top-level lock file, use this flake's

@@ -554,7 +544,7 @@ LockedFlake lockFlake(
             ? std::dynamic_pointer_cast<const Node>(oldLock)
             : LockFile::read(
                 inputFlake.sourceInfo->actualPath + "/" + inputFlake.lockedRef.subdir + "/flake.lock").root,
-            newParent, localPath, false);
+            oldLock ? lockRootPath : inputPath, localPath, false);
     }

     else {

|
@ -572,17 +562,12 @@ LockedFlake lockFlake(
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
LockParent parent {
|
|
||||||
.path = {},
|
|
||||||
.absolute = true
|
|
||||||
};
|
|
||||||
|
|
||||||
// Bring in the current ref for relative path resolution if we have it
|
// Bring in the current ref for relative path resolution if we have it
|
||||||
auto parentPath = canonPath(flake.sourceInfo->actualPath + "/" + flake.lockedRef.subdir, true);
|
auto parentPath = canonPath(flake.sourceInfo->actualPath + "/" + flake.lockedRef.subdir, true);
|
||||||
|
|
||||||
computeLocks(
|
computeLocks(
|
||||||
flake.inputs, newLockFile.root, {},
|
flake.inputs, newLockFile.root, {},
|
||||||
lockFlags.recreateLockFile ? nullptr : oldLockFile.root, parent, parentPath, false);
|
lockFlags.recreateLockFile ? nullptr : oldLockFile.root, {}, parentPath, false);
|
||||||
|
|
||||||
for (auto & i : lockFlags.inputOverrides)
|
for (auto & i : lockFlags.inputOverrides)
|
||||||
if (!overridesUsed.count(i.first))
|
if (!overridesUsed.count(i.first))
|
||||||
|
@ -629,12 +614,24 @@ LockedFlake lockFlake(
|
||||||
|
|
||||||
newLockFile.write(path);
|
newLockFile.write(path);
|
||||||
|
|
||||||
|
std::optional<std::string> commitMessage = std::nullopt;
|
||||||
|
if (lockFlags.commitLockFile) {
|
||||||
|
std::string cm;
|
||||||
|
|
||||||
|
cm = settings.commitLockFileSummary.get();
|
||||||
|
|
||||||
|
if (cm == "") {
|
||||||
|
cm = fmt("%s: %s", relPath, lockFileExists ? "Update" : "Add");
|
||||||
|
}
|
||||||
|
|
||||||
|
cm += "\n\nFlake lock file updates:\n\n";
|
||||||
|
cm += filterANSIEscapes(diff, true);
|
||||||
|
commitMessage = cm;
|
||||||
|
}
|
||||||
|
|
||||||
topRef.input.markChangedFile(
|
topRef.input.markChangedFile(
|
||||||
(topRef.subdir == "" ? "" : topRef.subdir + "/") + "flake.lock",
|
(topRef.subdir == "" ? "" : topRef.subdir + "/") + "flake.lock",
|
||||||
lockFlags.commitLockFile
|
commitMessage);
|
||||||
? std::optional<std::string>(fmt("%s: %s\n\nFlake lock file changes:\n\n%s",
|
|
||||||
relPath, lockFileExists ? "Update" : "Add", filterANSIEscapes(diff, true)))
|
|
||||||
: std::nullopt);
|
|
||||||
|
|
||||||
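The hunk above assembles the lock-file commit message in two steps: it starts from the commitLockFileSummary setting and, when that is empty, falls back to a generated "<relPath>: Update" or "<relPath>: Add" summary before appending the filtered diff. A rough standalone sketch of that fallback, using plain strings instead of the real Settings and diff machinery (all names here are illustrative):

#include <string>

// Illustrative only: mirrors the fallback logic shown in the diff,
// using plain strings instead of the real Settings/diff objects.
std::string makeLockFileCommitMessage(
    const std::string & configuredSummary, // stands in for settings.commitLockFileSummary
    const std::string & relPath,
    bool lockFileExists,
    const std::string & diff)
{
    std::string cm = configuredSummary;
    if (cm.empty())
        cm = relPath + ": " + (lockFileExists ? "Update" : "Add");
    cm += "\n\nFlake lock file updates:\n\n";
    cm += diff; // the real code strips ANSI escapes from the diff first
    return cm;
}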
/* Rewriting the lockfile changed the top-level
|
/* Rewriting the lockfile changed the top-level
|
||||||
repo, so we should re-read it. FIXME: we could
|
repo, so we should re-read it. FIXME: we could
|
||||||
|
@ -682,7 +679,7 @@ void callFlake(EvalState & state,
|
||||||
auto vTmp1 = state.allocValue();
|
auto vTmp1 = state.allocValue();
|
||||||
auto vTmp2 = state.allocValue();
|
auto vTmp2 = state.allocValue();
|
||||||
|
|
||||||
mkString(*vLocks, lockedFlake.lockFile.to_string());
|
vLocks->mkString(lockedFlake.lockFile.to_string());
|
||||||
|
|
||||||
emitTreeAttrs(
|
emitTreeAttrs(
|
||||||
state,
|
state,
|
||||||
|
@ -692,7 +689,7 @@ void callFlake(EvalState & state,
|
||||||
false,
|
false,
|
||||||
lockedFlake.flake.forceDirty);
|
lockedFlake.flake.forceDirty);
|
||||||
|
|
||||||
mkString(*vRootSubdir, lockedFlake.flake.lockedRef.subdir);
|
vRootSubdir->mkString(lockedFlake.flake.lockedRef.subdir);
|
||||||
|
|
||||||
if (!state.vCallFlake) {
|
if (!state.vCallFlake) {
|
||||||
state.vCallFlake = allocRootValue(state.allocValue());
|
state.vCallFlake = allocRootValue(state.allocValue());
|
||||||
|
@ -710,7 +707,7 @@ static void prim_getFlake(EvalState & state, const Pos & pos, Value * * args, Va
|
||||||
{
|
{
|
||||||
state.requireExperimentalFeatureOnEvaluation(Xp::Flakes, "builtins.getFlake", pos);
|
state.requireExperimentalFeatureOnEvaluation(Xp::Flakes, "builtins.getFlake", pos);
|
||||||
|
|
||||||
auto flakeRefS = state.forceStringNoCtx(*args[0], pos);
|
string flakeRefS(state.forceStringNoCtx(*args[0], pos));
|
||||||
auto flakeRef = parseFlakeRef(flakeRefS, {}, true);
|
auto flakeRef = parseFlakeRef(flakeRefS, {}, true);
|
||||||
if (evalSettings.pureEval && !flakeRef.input.isImmutable())
|
if (evalSettings.pureEval && !flakeRef.input.isImmutable())
|
||||||
throw Error("cannot call 'getFlake' on mutable flake reference '%s', at %s (use --impure to override)", flakeRefS, pos);
|
throw Error("cannot call 'getFlake' on mutable flake reference '%s', at %s (use --impure to override)", flakeRefS, pos);
|
||||||
|
|
|
@ -122,6 +122,28 @@ std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
|
||||||
|
|
||||||
if (isFlake) {
|
if (isFlake) {
|
||||||
|
|
||||||
|
if (!allowMissing && !pathExists(path + "/flake.nix")){
|
||||||
|
notice("path '%s' does not contain a 'flake.nix', searching up",path);
|
||||||
|
|
||||||
|
// Save device to detect filesystem boundary
|
||||||
|
dev_t device = lstat(path).st_dev;
|
||||||
|
bool found = false;
|
||||||
|
while (path != "/") {
|
||||||
|
if (pathExists(path + "/flake.nix")) {
|
||||||
|
found = true;
|
||||||
|
break;
|
||||||
|
} else if (pathExists(path + "/.git"))
|
||||||
|
throw Error("path '%s' is not part of a flake (neither it nor its parent directories contain a 'flake.nix' file)", path);
|
||||||
|
else {
|
||||||
|
if (lstat(path).st_dev != device)
|
||||||
|
throw Error("unable to find a flake before encountering filesystem boundary at '%s'", path);
|
||||||
|
}
|
||||||
|
path = dirOf(path);
|
||||||
|
}
|
||||||
|
if (!found)
|
||||||
|
throw BadURL("could not find a flake.nix file");
|
||||||
|
}
|
||||||
|
|
||||||
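The new parseFlakeRefWithFragment behaviour walks up the directory tree looking for a flake.nix, refusing to cross a .git directory or a filesystem boundary (detected by comparing st_dev). A rough standalone sketch of that search, with simple stand-ins for the project's pathExists/dirOf helpers and POSIX lstat doing the device check:

#include <sys/stat.h>
#include <stdexcept>
#include <string>

// Stand-ins for the project's helpers (assumptions, not the real API surface).
static bool exists(const std::string & p) { struct stat st; return lstat(p.c_str(), &st) == 0; }
static std::string parentOf(const std::string & p) {
    auto pos = p.rfind('/');
    return pos == std::string::npos || pos == 0 ? "/" : p.substr(0, pos);
}

// Walk upwards from 'path' until a flake.nix is found, a .git directory or a
// filesystem boundary is hit, or the root is reached.
std::string findFlakeRoot(std::string path)
{
    struct stat st;
    if (lstat(path.c_str(), &st) != 0) throw std::runtime_error("cannot stat " + path);
    dev_t device = st.st_dev;                      // remember the starting filesystem

    while (path != "/") {
        if (exists(path + "/flake.nix")) return path;
        if (exists(path + "/.git"))
            throw std::runtime_error("path is not part of a flake: " + path);
        if (lstat(path.c_str(), &st) != 0 || st.st_dev != device)
            throw std::runtime_error("hit filesystem boundary at " + path);
        path = parentOf(path);
    }
    throw std::runtime_error("could not find a flake.nix file");
}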
if (!S_ISDIR(lstat(path).st_mode))
|
if (!S_ISDIR(lstat(path).st_mode))
|
||||||
throw BadURL("path '%s' is not a flake (because it's not a directory)", path);
|
throw BadURL("path '%s' is not a flake (because it's not a directory)", path);
|
||||||
|
|
||||||
|
|
|
@ -104,10 +104,10 @@ DrvInfo::Outputs DrvInfo::queryOutputs(bool onlyOutputsToInstall)
|
||||||
/* For each output... */
|
/* For each output... */
|
||||||
for (auto elem : i->value->listItems()) {
|
for (auto elem : i->value->listItems()) {
|
||||||
/* Evaluate the corresponding set. */
|
/* Evaluate the corresponding set. */
|
||||||
string name = state->forceStringNoCtx(*elem, *i->pos);
|
string name(state->forceStringNoCtx(*elem, *i->pos));
|
||||||
Bindings::iterator out = attrs->find(state->symbols.create(name));
|
Bindings::iterator out = attrs->find(state->symbols.create(name));
|
||||||
if (out == attrs->end()) continue; // FIXME: throw error?
|
if (out == attrs->end()) continue; // FIXME: throw error?
|
||||||
state->forceAttrs(*out->value);
|
state->forceAttrs(*out->value, *i->pos);
|
||||||
|
|
||||||
/* And evaluate its ‘outPath’ attribute. */
|
/* And evaluate its ‘outPath’ attribute. */
|
||||||
Bindings::iterator outPath = out->value->attrs->find(state->sOutPath);
|
Bindings::iterator outPath = out->value->attrs->find(state->sOutPath);
|
||||||
|
@ -172,7 +172,7 @@ StringSet DrvInfo::queryMetaNames()
|
||||||
|
|
||||||
bool DrvInfo::checkMeta(Value & v)
|
bool DrvInfo::checkMeta(Value & v)
|
||||||
{
|
{
|
||||||
state->forceValue(v);
|
state->forceValue(v, [&]() { return v.determinePos(noPos); });
|
||||||
if (v.type() == nList) {
|
if (v.type() == nList) {
|
||||||
for (auto elem : v.listItems())
|
for (auto elem : v.listItems())
|
||||||
if (!checkMeta(*elem)) return false;
|
if (!checkMeta(*elem)) return false;
|
||||||
|
@ -254,15 +254,14 @@ bool DrvInfo::queryMetaBool(const string & name, bool def)
|
||||||
void DrvInfo::setMeta(const string & name, Value * v)
|
void DrvInfo::setMeta(const string & name, Value * v)
|
||||||
{
|
{
|
||||||
getMeta();
|
getMeta();
|
||||||
Bindings * old = meta;
|
auto attrs = state->buildBindings(1 + (meta ? meta->size() : 0));
|
||||||
meta = state->allocBindings(1 + (old ? old->size() : 0));
|
|
||||||
Symbol sym = state->symbols.create(name);
|
Symbol sym = state->symbols.create(name);
|
||||||
if (old)
|
if (meta)
|
||||||
for (auto i : *old)
|
for (auto i : *meta)
|
||||||
if (i.name != sym)
|
if (i.name != sym)
|
||||||
meta->push_back(i);
|
attrs.insert(i);
|
||||||
if (v) meta->push_back(Attr(sym, v));
|
if (v) attrs.insert(sym, v);
|
||||||
meta->sort();
|
meta = attrs.finish();
|
||||||
}
|
}
|
||||||
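DrvInfo::setMeta now rebuilds the meta attribute set through the buildBindings helper: copy every existing attribute except the one being replaced, insert the new value (if any), and finish. A standalone analogue of that copy-all-but-one-then-insert pattern over a std::map (the real BindingsBuilder is a different, GC-aware structure):

#include <map>
#include <optional>
#include <string>

// Illustrative analogue of the setMeta rewrite: rebuild the mapping with one
// key replaced, or removed when no new value is supplied.
std::map<std::string, int> setMeta(
    const std::map<std::string, int> & old,
    const std::string & name,
    std::optional<int> newValue)
{
    std::map<std::string, int> result;
    for (const auto & [k, v] : old)
        if (k != name) result.insert({k, v});        // keep everything else
    if (newValue) result.insert({name, *newValue});  // add the replacement, if any
    return result;
}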
|
|
||||||
|
|
||||||
|
@ -279,7 +278,7 @@ static bool getDerivation(EvalState & state, Value & v,
|
||||||
bool ignoreAssertionFailures)
|
bool ignoreAssertionFailures)
|
||||||
{
|
{
|
||||||
try {
|
try {
|
||||||
state.forceValue(v);
|
state.forceValue(v, [&]() { return v.determinePos(noPos); });
|
||||||
if (!state.isDerivation(v)) return true;
|
if (!state.isDerivation(v)) return true;
|
||||||
|
|
||||||
/* Remove spurious duplicates (e.g., a set like `rec { x =
|
/* Remove spurious duplicates (e.g., a set like `rec { x =
|
||||||
|
|
|
@ -37,10 +37,10 @@ class JSONSax : nlohmann::json_sax<json> {
|
||||||
ValueMap attrs;
|
ValueMap attrs;
|
||||||
std::unique_ptr<JSONState> resolve(EvalState & state) override
|
std::unique_ptr<JSONState> resolve(EvalState & state) override
|
||||||
{
|
{
|
||||||
Value & v = parent->value(state);
|
auto attrs2 = state.buildBindings(attrs.size());
|
||||||
state.mkAttrs(v, attrs.size());
|
|
||||||
for (auto & i : attrs)
|
for (auto & i : attrs)
|
||||||
v.attrs->push_back(Attr(i.first, i.second));
|
attrs2.insert(i.first, i.second);
|
||||||
|
parent->value(state).mkAttrs(attrs2.alreadySorted());
|
||||||
return std::move(parent);
|
return std::move(parent);
|
||||||
}
|
}
|
||||||
void add() override { v = nullptr; }
|
void add() override { v = nullptr; }
|
||||||
|
@ -76,45 +76,51 @@ class JSONSax : nlohmann::json_sax<json> {
|
||||||
EvalState & state;
|
EvalState & state;
|
||||||
std::unique_ptr<JSONState> rs;
|
std::unique_ptr<JSONState> rs;
|
||||||
|
|
||||||
template<typename T, typename... Args> inline bool handle_value(T f, Args... args)
|
|
||||||
{
|
|
||||||
f(rs->value(state), args...);
|
|
||||||
rs->add();
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
public:
|
public:
|
||||||
JSONSax(EvalState & state, Value & v) : state(state), rs(new JSONState(&v)) {};
|
JSONSax(EvalState & state, Value & v) : state(state), rs(new JSONState(&v)) {};
|
||||||
|
|
||||||
bool null()
|
bool null()
|
||||||
{
|
{
|
||||||
return handle_value(mkNull);
|
rs->value(state).mkNull();
|
||||||
|
rs->add();
|
||||||
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
bool boolean(bool val)
|
bool boolean(bool val)
|
||||||
{
|
{
|
||||||
return handle_value(mkBool, val);
|
rs->value(state).mkBool(val);
|
||||||
|
rs->add();
|
||||||
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
bool number_integer(number_integer_t val)
|
bool number_integer(number_integer_t val)
|
||||||
{
|
{
|
||||||
return handle_value(mkInt, val);
|
rs->value(state).mkInt(val);
|
||||||
|
rs->add();
|
||||||
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
bool number_unsigned(number_unsigned_t val)
|
bool number_unsigned(number_unsigned_t val)
|
||||||
{
|
{
|
||||||
return handle_value(mkInt, val);
|
rs->value(state).mkInt(val);
|
||||||
|
rs->add();
|
||||||
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
bool number_float(number_float_t val, const string_t & s)
|
bool number_float(number_float_t val, const string_t & s)
|
||||||
{
|
{
|
||||||
return handle_value(mkFloat, val);
|
rs->value(state).mkFloat(val);
|
||||||
|
rs->add();
|
||||||
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
bool string(string_t & val)
|
bool string(string_t & val)
|
||||||
{
|
{
|
||||||
return handle_value<void(Value&, const char*)>(mkString, val.c_str());
|
rs->value(state).mkString(val);
|
||||||
|
rs->add();
|
||||||
|
return true;
|
||||||
}
|
}
|
||||||
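Each scalar callback in JSONSax now follows the same three steps: materialise the value in place with the matching mk* call, advance the parse state with add(), and report success. A self-contained sketch of that shape, with made-up FakeValue/Cursor types standing in for Value and the parser state:

#include <deque>
#include <string>

// Minimal stand-in for Value: only the mk* calls used by the scalar handlers.
struct FakeValue {
    std::string repr;
    void mkNull()                        { repr = "null"; }
    void mkBool(bool b)                  { repr = b ? "true" : "false"; }
    void mkInt(long long n)              { repr = std::to_string(n); }
    void mkString(const std::string & s) { repr = '"' + s + '"'; }
};

// Stand-in for the parser state 'rs': value() hands out the slot currently
// being filled, add() marks it as complete.
struct Cursor {
    std::deque<FakeValue> slots;
    FakeValue * pending = nullptr;
    FakeValue & value() {
        if (!pending) { slots.emplace_back(); pending = &slots.back(); }
        return *pending;
    }
    void add() { pending = nullptr; }
};

// Every scalar callback follows the same shape: fill, advance, return true.
struct ScalarSax {
    Cursor rs;
    bool null()                        { rs.value().mkNull();    rs.add(); return true; }
    bool boolean(bool v)               { rs.value().mkBool(v);   rs.add(); return true; }
    bool number_integer(long long v)   { rs.value().mkInt(v);    rs.add(); return true; }
    bool string(const std::string & v) { rs.value().mkString(v); rs.add(); return true; }
};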
|
|
||||||
#if NLOHMANN_JSON_VERSION_MAJOR >= 3 && NLOHMANN_JSON_VERSION_MINOR >= 8
|
#if NLOHMANN_JSON_VERSION_MAJOR >= 3 && NLOHMANN_JSON_VERSION_MINOR >= 8
|
||||||
bool binary(binary_t&)
|
bool binary(binary_t&)
|
||||||
{
|
{
|
||||||
|
@ -157,7 +163,7 @@ public:
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
void parseJSON(EvalState & state, const string & s_, Value & v)
|
void parseJSON(EvalState & state, const std::string_view & s_, Value & v)
|
||||||
{
|
{
|
||||||
JSONSax parser(state, v);
|
JSONSax parser(state, v);
|
||||||
bool res = json::sax_parse(s_, &parser);
|
bool res = json::sax_parse(s_, &parser);
|
||||||
|
|
|
@ -8,6 +8,6 @@ namespace nix {
|
||||||
|
|
||||||
MakeError(JSONParseError, EvalError);
|
MakeError(JSONParseError, EvalError);
|
||||||
|
|
||||||
void parseJSON(EvalState & state, const string & s, Value & v);
|
void parseJSON(EvalState & state, const std::string_view & s, Value & v);
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -64,29 +64,32 @@ static void adjustLoc(YYLTYPE * loc, const char * s, size_t len)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
// FIXME: optimize
|
// we make use of the fact that the parser receives a private copy of the input
|
||||||
static Expr * unescapeStr(SymbolTable & symbols, const char * s, size_t length)
|
// string and can munge around in it.
|
||||||
|
static StringToken unescapeStr(SymbolTable & symbols, char * s, size_t length)
|
||||||
{
|
{
|
||||||
string t;
|
char * result = s;
|
||||||
t.reserve(length);
|
char * t = s;
|
||||||
char c;
|
char c;
|
||||||
|
// the input string is terminated with *two* NULs, so we can safely take
|
||||||
|
// *one* character after the one being checked against.
|
||||||
while ((c = *s++)) {
|
while ((c = *s++)) {
|
||||||
if (c == '\\') {
|
if (c == '\\') {
|
||||||
assert(*s);
|
|
||||||
c = *s++;
|
c = *s++;
|
||||||
if (c == 'n') t += '\n';
|
if (c == 'n') *t = '\n';
|
||||||
else if (c == 'r') t += '\r';
|
else if (c == 'r') *t = '\r';
|
||||||
else if (c == 't') t += '\t';
|
else if (c == 't') *t = '\t';
|
||||||
else t += c;
|
else *t = c;
|
||||||
}
|
}
|
||||||
else if (c == '\r') {
|
else if (c == '\r') {
|
||||||
/* Normalise CR and CR/LF into LF. */
|
/* Normalise CR and CR/LF into LF. */
|
||||||
t += '\n';
|
*t = '\n';
|
||||||
if (*s == '\n') s++; /* cr/lf */
|
if (*s == '\n') s++; /* cr/lf */
|
||||||
}
|
}
|
||||||
else t += c;
|
else *t = c;
|
||||||
|
t++;
|
||||||
}
|
}
|
||||||
return new ExprString(symbols.create(t));
|
return {result, size_t(t - result)};
|
||||||
}
|
}
|
||||||
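The rewritten unescapeStr no longer builds a fresh std::string: it rewrites the lexer's private copy of the token in place and returns a pointer/length pair, relying on the scanned buffer ending in two NULs so that reading one character past the current one is always safe. A standalone sketch returning a std::string_view instead of the project's StringToken:

#include <cstddef>
#include <string_view>

// In-place unescaping over a writable, double-NUL-terminated buffer.
// The output can only shrink, so writing through 't' never overtakes 's'.
std::string_view unescapeInPlace(char * s)
{
    char * result = s;
    char * t = s;
    char c;
    while ((c = *s++)) {
        if (c == '\\') {
            c = *s++;                 // safe: the buffer ends in "\0\0"
            if (c == 'n') *t = '\n';
            else if (c == 'r') *t = '\r';
            else if (c == 't') *t = '\t';
            else *t = c;
        } else if (c == '\r') {       // normalise CR and CR/LF into LF
            *t = '\n';
            if (*s == '\n') s++;
        } else *t = c;
        t++;
    }
    return {result, std::size_t(t - result)};
}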
|
|
||||||
|
|
||||||
|
@ -139,7 +142,7 @@ or { return OR_KW; }
|
||||||
\/\/ { return UPDATE; }
|
\/\/ { return UPDATE; }
|
||||||
\+\+ { return CONCAT; }
|
\+\+ { return CONCAT; }
|
||||||
|
|
||||||
{ID} { yylval->id = strdup(yytext); return ID; }
|
{ID} { yylval->id = {yytext, (size_t) yyleng}; return ID; }
|
||||||
{INT} { errno = 0;
|
{INT} { errno = 0;
|
||||||
try {
|
try {
|
||||||
yylval->n = boost::lexical_cast<int64_t>(yytext);
|
yylval->n = boost::lexical_cast<int64_t>(yytext);
|
||||||
|
@ -173,7 +176,7 @@ or { return OR_KW; }
|
||||||
/* It is impossible to match strings ending with '$' with one
|
/* It is impossible to match strings ending with '$' with one
|
||||||
regex because trailing contexts are only valid at the end
|
regex because trailing contexts are only valid at the end
|
||||||
of a rule. (A sane but undocumented limitation.) */
|
of a rule. (A sane but undocumented limitation.) */
|
||||||
yylval->e = unescapeStr(data->symbols, yytext, yyleng);
|
yylval->str = unescapeStr(data->symbols, yytext, yyleng);
|
||||||
return STR;
|
return STR;
|
||||||
}
|
}
|
||||||
<STRING>\$\{ { PUSH_STATE(DEFAULT); return DOLLAR_CURLY; }
|
<STRING>\$\{ { PUSH_STATE(DEFAULT); return DOLLAR_CURLY; }
|
||||||
|
@ -188,26 +191,26 @@ or { return OR_KW; }
|
||||||
|
|
||||||
\'\'(\ *\n)? { PUSH_STATE(IND_STRING); return IND_STRING_OPEN; }
|
\'\'(\ *\n)? { PUSH_STATE(IND_STRING); return IND_STRING_OPEN; }
|
||||||
<IND_STRING>([^\$\']|\$[^\{\']|\'[^\'\$])+ {
|
<IND_STRING>([^\$\']|\$[^\{\']|\'[^\'\$])+ {
|
||||||
yylval->e = new ExprIndStr(yytext);
|
yylval->str = {yytext, (size_t) yyleng, true};
|
||||||
return IND_STR;
|
return IND_STR;
|
||||||
}
|
}
|
||||||
<IND_STRING>\'\'\$ |
|
<IND_STRING>\'\'\$ |
|
||||||
<IND_STRING>\$ {
|
<IND_STRING>\$ {
|
||||||
yylval->e = new ExprIndStr("$");
|
yylval->str = {"$", 1};
|
||||||
return IND_STR;
|
return IND_STR;
|
||||||
}
|
}
|
||||||
<IND_STRING>\'\'\' {
|
<IND_STRING>\'\'\' {
|
||||||
yylval->e = new ExprIndStr("''");
|
yylval->str = {"''", 2};
|
||||||
return IND_STR;
|
return IND_STR;
|
||||||
}
|
}
|
||||||
<IND_STRING>\'\'\\{ANY} {
|
<IND_STRING>\'\'\\{ANY} {
|
||||||
yylval->e = unescapeStr(data->symbols, yytext + 2, yyleng - 2);
|
yylval->str = unescapeStr(data->symbols, yytext + 2, yyleng - 2);
|
||||||
return IND_STR;
|
return IND_STR;
|
||||||
}
|
}
|
||||||
<IND_STRING>\$\{ { PUSH_STATE(DEFAULT); return DOLLAR_CURLY; }
|
<IND_STRING>\$\{ { PUSH_STATE(DEFAULT); return DOLLAR_CURLY; }
|
||||||
<IND_STRING>\'\' { POP_STATE(); return IND_STRING_CLOSE; }
|
<IND_STRING>\'\' { POP_STATE(); return IND_STRING_CLOSE; }
|
||||||
<IND_STRING>\' {
|
<IND_STRING>\' {
|
||||||
yylval->e = new ExprIndStr("'");
|
yylval->str = {"'", 1};
|
||||||
return IND_STR;
|
return IND_STR;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -221,14 +224,14 @@ or { return OR_KW; }
|
||||||
<PATH_START>{PATH_SEG} {
|
<PATH_START>{PATH_SEG} {
|
||||||
POP_STATE();
|
POP_STATE();
|
||||||
PUSH_STATE(INPATH_SLASH);
|
PUSH_STATE(INPATH_SLASH);
|
||||||
yylval->path = strdup(yytext);
|
yylval->path = {yytext, (size_t) yyleng};
|
||||||
return PATH;
|
return PATH;
|
||||||
}
|
}
|
||||||
|
|
||||||
<PATH_START>{HPATH_START} {
|
<PATH_START>{HPATH_START} {
|
||||||
POP_STATE();
|
POP_STATE();
|
||||||
PUSH_STATE(INPATH_SLASH);
|
PUSH_STATE(INPATH_SLASH);
|
||||||
yylval->path = strdup(yytext);
|
yylval->path = {yytext, (size_t) yyleng};
|
||||||
return HPATH;
|
return HPATH;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -237,7 +240,7 @@ or { return OR_KW; }
|
||||||
PUSH_STATE(INPATH_SLASH);
|
PUSH_STATE(INPATH_SLASH);
|
||||||
else
|
else
|
||||||
PUSH_STATE(INPATH);
|
PUSH_STATE(INPATH);
|
||||||
yylval->path = strdup(yytext);
|
yylval->path = {yytext, (size_t) yyleng};
|
||||||
return PATH;
|
return PATH;
|
||||||
}
|
}
|
||||||
{HPATH} {
|
{HPATH} {
|
||||||
|
@ -245,7 +248,7 @@ or { return OR_KW; }
|
||||||
PUSH_STATE(INPATH_SLASH);
|
PUSH_STATE(INPATH_SLASH);
|
||||||
else
|
else
|
||||||
PUSH_STATE(INPATH);
|
PUSH_STATE(INPATH);
|
||||||
yylval->path = strdup(yytext);
|
yylval->path = {yytext, (size_t) yyleng};
|
||||||
return HPATH;
|
return HPATH;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -261,7 +264,7 @@ or { return OR_KW; }
|
||||||
PUSH_STATE(INPATH_SLASH);
|
PUSH_STATE(INPATH_SLASH);
|
||||||
else
|
else
|
||||||
PUSH_STATE(INPATH);
|
PUSH_STATE(INPATH);
|
||||||
yylval->e = new ExprString(data->symbols.create(string(yytext)));
|
yylval->str = {yytext, (size_t) yyleng};
|
||||||
return STR;
|
return STR;
|
||||||
}
|
}
|
||||||
<INPATH>{ANY} |
|
<INPATH>{ANY} |
|
||||||
|
@ -280,8 +283,8 @@ or { return OR_KW; }
|
||||||
throw ParseError("path has a trailing slash");
|
throw ParseError("path has a trailing slash");
|
||||||
}
|
}
|
||||||
|
|
||||||
{SPATH} { yylval->path = strdup(yytext); return SPATH; }
|
{SPATH} { yylval->path = {yytext, (size_t) yyleng}; return SPATH; }
|
||||||
{URI} { yylval->uri = strdup(yytext); return URI; }
|
{URI} { yylval->uri = {yytext, (size_t) yyleng}; return URI; }
|
||||||
|
|
||||||
[ \t\r\n]+ /* eat up whitespace */
|
[ \t\r\n]+ /* eat up whitespace */
|
||||||
\#[^\r\n]* /* single-line comments */
|
\#[^\r\n]* /* single-line comments */
|
||||||
|
|
|
@ -190,7 +190,7 @@ void ExprConcatStrings::show(std::ostream & str) const
|
||||||
str << "(";
|
str << "(";
|
||||||
for (auto & i : *es) {
|
for (auto & i : *es) {
|
||||||
if (first) first = false; else str << " + ";
|
if (first) first = false; else str << " + ";
|
||||||
str << i.second;
|
str << *i.second;
|
||||||
}
|
}
|
||||||
str << ")";
|
str << ")";
|
||||||
}
|
}
|
||||||
|
@ -527,7 +527,7 @@ string ExprLambda::showNamePos() const
|
||||||
size_t SymbolTable::totalSize() const
|
size_t SymbolTable::totalSize() const
|
||||||
{
|
{
|
||||||
size_t n = 0;
|
size_t n = 0;
|
||||||
for (auto & i : symbols)
|
for (auto & i : store)
|
||||||
n += i.size();
|
n += i.size();
|
||||||
return n;
|
return n;
|
||||||
}
|
}
|
||||||
|
|
|
@ -98,7 +98,7 @@ struct ExprInt : Expr
|
||||||
{
|
{
|
||||||
NixInt n;
|
NixInt n;
|
||||||
Value v;
|
Value v;
|
||||||
ExprInt(NixInt n) : n(n) { mkInt(v, n); };
|
ExprInt(NixInt n) : n(n) { v.mkInt(n); };
|
||||||
Value * maybeThunk(EvalState & state, Env & env);
|
Value * maybeThunk(EvalState & state, Env & env);
|
||||||
Pos* getPos() { return 0; }
|
Pos* getPos() { return 0; }
|
||||||
COMMON_METHODS
|
COMMON_METHODS
|
||||||
|
@ -108,7 +108,7 @@ struct ExprFloat : Expr
|
||||||
{
|
{
|
||||||
NixFloat nf;
|
NixFloat nf;
|
||||||
Value v;
|
Value v;
|
||||||
ExprFloat(NixFloat nf) : nf(nf) { mkFloat(v, nf); };
|
ExprFloat(NixFloat nf) : nf(nf) { v.mkFloat(nf); };
|
||||||
Value * maybeThunk(EvalState & state, Env & env);
|
Value * maybeThunk(EvalState & state, Env & env);
|
||||||
Pos* getPos() { return 0; }
|
Pos* getPos() { return 0; }
|
||||||
COMMON_METHODS
|
COMMON_METHODS
|
||||||
|
@ -116,22 +116,14 @@ struct ExprFloat : Expr
|
||||||
|
|
||||||
struct ExprString : Expr
|
struct ExprString : Expr
|
||||||
{
|
{
|
||||||
Symbol s;
|
string s;
|
||||||
Value v;
|
Value v;
|
||||||
ExprString(const Symbol & s) : s(s) { mkString(v, s); };
|
ExprString(std::string s) : s(std::move(s)) { v.mkString(this->s.data()); };
|
||||||
Value * maybeThunk(EvalState & state, Env & env);
|
Value * maybeThunk(EvalState & state, Env & env);
|
||||||
Pos* getPos() { return 0; }
|
Pos* getPos() { return 0; }
|
||||||
COMMON_METHODS
|
COMMON_METHODS
|
||||||
};
|
};
|
||||||
|
|
||||||
/* Temporary class used during parsing of indented strings. */
|
|
||||||
struct ExprIndStr : Expr
|
|
||||||
{
|
|
||||||
string s;
|
|
||||||
ExprIndStr(const string & s) : s(s) { };
|
|
||||||
Pos* getPos() { return 0; }
|
|
||||||
};
|
|
||||||
|
|
||||||
struct ExprPath : Expr
|
struct ExprPath : Expr
|
||||||
{
|
{
|
||||||
string s;
|
string s;
|
||||||
|
@ -237,10 +229,25 @@ struct Formal
|
||||||
|
|
||||||
struct Formals
|
struct Formals
|
||||||
{
|
{
|
||||||
typedef std::list<Formal> Formals_;
|
typedef std::vector<Formal> Formals_;
|
||||||
Formals_ formals;
|
Formals_ formals;
|
||||||
std::set<Symbol> argNames; // used during parsing
|
|
||||||
bool ellipsis;
|
bool ellipsis;
|
||||||
|
|
||||||
|
bool has(Symbol arg) const {
|
||||||
|
auto it = std::lower_bound(formals.begin(), formals.end(), arg,
|
||||||
|
[] (const Formal & f, const Symbol & sym) { return f.name < sym; });
|
||||||
|
return it != formals.end() && it->name == arg;
|
||||||
|
}
|
||||||
|
|
||||||
|
std::vector<Formal> lexicographicOrder() const
|
||||||
|
{
|
||||||
|
std::vector<Formal> result(formals.begin(), formals.end());
|
||||||
|
std::sort(result.begin(), result.end(),
|
||||||
|
[] (const Formal & a, const Formal & b) {
|
||||||
|
return std::string_view(a.name) < std::string_view(b.name);
|
||||||
|
});
|
||||||
|
return result;
|
||||||
|
}
|
||||||
};
|
};
|
||||||
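Formals now keeps its entries in a vector that the parser leaves sorted by name, so membership tests use std::lower_bound instead of a separate argNames set, and lexicographicOrder() just sorts a copy for display. A small sketch of that sorted-vector lookup, with a plain string standing in for Symbol:

#include <algorithm>
#include <string>
#include <vector>

struct Formal { std::string name; };

struct SortedFormals {
    std::vector<Formal> formals;      // kept sorted by name by the parser

    bool has(const std::string & arg) const {
        auto it = std::lower_bound(formals.begin(), formals.end(), arg,
            [](const Formal & f, const std::string & sym) { return f.name < sym; });
        return it != formals.end() && it->name == arg;
    }
};

// Usage: SortedFormals f{{{"a"}, {"b"}, {"x"}}}; f.has("b") is true, f.has("z") is false.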
|
|
||||||
struct ExprLambda : Expr
|
struct ExprLambda : Expr
|
||||||
|
@ -253,11 +260,6 @@ struct ExprLambda : Expr
|
||||||
ExprLambda(const Pos & pos, const Symbol & arg, Formals * formals, Expr * body)
|
ExprLambda(const Pos & pos, const Symbol & arg, Formals * formals, Expr * body)
|
||||||
: pos(pos), arg(arg), formals(formals), body(body)
|
: pos(pos), arg(arg), formals(formals), body(body)
|
||||||
{
|
{
|
||||||
if (!arg.empty() && formals && formals->argNames.find(arg) != formals->argNames.end())
|
|
||||||
throw ParseError({
|
|
||||||
.msg = hintfmt("duplicate formal function argument '%1%'", arg),
|
|
||||||
.errPos = pos
|
|
||||||
});
|
|
||||||
};
|
};
|
||||||
void setName(Symbol & name);
|
void setName(Symbol & name);
|
||||||
string showNamePos() const;
|
string showNamePos() const;
|
||||||
|
|
|
@ -16,6 +16,8 @@
|
||||||
#ifndef BISON_HEADER
|
#ifndef BISON_HEADER
|
||||||
#define BISON_HEADER
|
#define BISON_HEADER
|
||||||
|
|
||||||
|
#include <variant>
|
||||||
|
|
||||||
#include "util.hh"
|
#include "util.hh"
|
||||||
|
|
||||||
#include "nixexpr.hh"
|
#include "nixexpr.hh"
|
||||||
|
@ -40,8 +42,22 @@ namespace nix {
|
||||||
{ };
|
{ };
|
||||||
};
|
};
|
||||||
|
|
||||||
|
struct ParserFormals {
|
||||||
|
std::vector<Formal> formals;
|
||||||
|
bool ellipsis = false;
|
||||||
|
};
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// using a C struct allows us to avoid having to define the special
|
||||||
|
// members that using string_view here would implicitly delete.
|
||||||
|
struct StringToken {
|
||||||
|
const char * p;
|
||||||
|
size_t l;
|
||||||
|
bool hasIndentation;
|
||||||
|
operator std::string_view() const { return {p, l}; }
|
||||||
|
};
|
||||||
|
|
||||||
#define YY_DECL int yylex \
|
#define YY_DECL int yylex \
|
||||||
(YYSTYPE * yylval_param, YYLTYPE * yylloc_param, yyscan_t yyscanner, nix::ParseData * data)
|
(YYSTYPE * yylval_param, YYLTYPE * yylloc_param, yyscan_t yyscanner, nix::ParseData * data)
|
||||||
|
|
||||||
|
@ -141,21 +157,46 @@ static void addAttr(ExprAttrs * attrs, AttrPath & attrPath,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
static void addFormal(const Pos & pos, Formals * formals, const Formal & formal)
|
static Formals * toFormals(ParseData & data, ParserFormals * formals,
|
||||||
|
Pos pos = noPos, Symbol arg = {})
|
||||||
{
|
{
|
||||||
if (!formals->argNames.insert(formal.name).second)
|
std::sort(formals->formals.begin(), formals->formals.end(),
|
||||||
|
[] (const auto & a, const auto & b) {
|
||||||
|
return std::tie(a.name, a.pos) < std::tie(b.name, b.pos);
|
||||||
|
});
|
||||||
|
|
||||||
|
std::optional<std::pair<Symbol, Pos>> duplicate;
|
||||||
|
for (size_t i = 0; i + 1 < formals->formals.size(); i++) {
|
||||||
|
if (formals->formals[i].name != formals->formals[i + 1].name)
|
||||||
|
continue;
|
||||||
|
std::pair thisDup{formals->formals[i].name, formals->formals[i + 1].pos};
|
||||||
|
duplicate = std::min(thisDup, duplicate.value_or(thisDup));
|
||||||
|
}
|
||||||
|
if (duplicate)
|
||||||
throw ParseError({
|
throw ParseError({
|
||||||
.msg = hintfmt("duplicate formal function argument '%1%'",
|
.msg = hintfmt("duplicate formal function argument '%1%'", duplicate->first),
|
||||||
formal.name),
|
.errPos = duplicate->second
|
||||||
|
});
|
||||||
|
|
||||||
|
Formals result;
|
||||||
|
result.ellipsis = formals->ellipsis;
|
||||||
|
result.formals = std::move(formals->formals);
|
||||||
|
|
||||||
|
if (arg.set() && result.has(arg))
|
||||||
|
throw ParseError({
|
||||||
|
.msg = hintfmt("duplicate formal function argument '%1%'", arg),
|
||||||
.errPos = pos
|
.errPos = pos
|
||||||
});
|
});
|
||||||
formals->formals.push_front(formal);
|
|
||||||
|
delete formals;
|
||||||
|
return new Formals(std::move(result));
|
||||||
}
|
}
|
||||||
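toFormals sorts the parsed formals by (name, position) and then detects duplicates by comparing adjacent elements, remembering the smallest offending (name, position) pair so the error points at a stable location. The same idea as a standalone function over plain strings and integer positions:

#include <algorithm>
#include <cstddef>
#include <optional>
#include <string>
#include <tuple>
#include <utility>
#include <vector>

struct Formal { std::string name; int pos; };   // 'pos' stands in for the real Pos

// After sorting by (name, pos), duplicates are adjacent; report the smallest one.
std::optional<std::pair<std::string, int>> findDuplicateFormal(std::vector<Formal> formals)
{
    std::sort(formals.begin(), formals.end(),
        [](const Formal & a, const Formal & b) {
            return std::tie(a.name, a.pos) < std::tie(b.name, b.pos);
        });

    std::optional<std::pair<std::string, int>> duplicate;
    for (std::size_t i = 0; i + 1 < formals.size(); i++) {
        if (formals[i].name != formals[i + 1].name) continue;
        std::pair thisDup{formals[i].name, formals[i + 1].pos};
        duplicate = std::min(thisDup, duplicate.value_or(thisDup));
    }
    return duplicate;
}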
|
|
||||||
|
|
||||||
static Expr * stripIndentation(const Pos & pos, SymbolTable & symbols, vector<std::pair<Pos, Expr *> > & es)
|
static Expr * stripIndentation(const Pos & pos, SymbolTable & symbols,
|
||||||
|
vector<std::pair<Pos, std::variant<Expr *, StringToken> > > & es)
|
||||||
{
|
{
|
||||||
if (es.empty()) return new ExprString(symbols.create(""));
|
if (es.empty()) return new ExprString("");
|
||||||
|
|
||||||
/* Figure out the minimum indentation. Note that by design
|
/* Figure out the minimum indentation. Note that by design
|
||||||
whitespace-only final lines are not taken into account. (So
|
whitespace-only final lines are not taken into account. (So
|
||||||
|
@ -164,20 +205,20 @@ static Expr * stripIndentation(const Pos & pos, SymbolTable & symbols, vector<st
|
||||||
size_t minIndent = 1000000;
|
size_t minIndent = 1000000;
|
||||||
size_t curIndent = 0;
|
size_t curIndent = 0;
|
||||||
for (auto & [i_pos, i] : es) {
|
for (auto & [i_pos, i] : es) {
|
||||||
ExprIndStr * e = dynamic_cast<ExprIndStr *>(i);
|
auto * str = std::get_if<StringToken>(&i);
|
||||||
if (!e) {
|
if (!str || !str->hasIndentation) {
|
||||||
/* Anti-quotations end the current start-of-line whitespace. */
|
/* Anti-quotations and escaped characters end the current start-of-line whitespace. */
|
||||||
if (atStartOfLine) {
|
if (atStartOfLine) {
|
||||||
atStartOfLine = false;
|
atStartOfLine = false;
|
||||||
if (curIndent < minIndent) minIndent = curIndent;
|
if (curIndent < minIndent) minIndent = curIndent;
|
||||||
}
|
}
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
for (size_t j = 0; j < e->s.size(); ++j) {
|
for (size_t j = 0; j < str->l; ++j) {
|
||||||
if (atStartOfLine) {
|
if (atStartOfLine) {
|
||||||
if (e->s[j] == ' ')
|
if (str->p[j] == ' ')
|
||||||
curIndent++;
|
curIndent++;
|
||||||
else if (e->s[j] == '\n') {
|
else if (str->p[j] == '\n') {
|
||||||
/* Empty line, doesn't influence minimum
|
/* Empty line, doesn't influence minimum
|
||||||
indentation. */
|
indentation. */
|
||||||
curIndent = 0;
|
curIndent = 0;
|
||||||
|
@ -185,7 +226,7 @@ static Expr * stripIndentation(const Pos & pos, SymbolTable & symbols, vector<st
|
||||||
atStartOfLine = false;
|
atStartOfLine = false;
|
||||||
if (curIndent < minIndent) minIndent = curIndent;
|
if (curIndent < minIndent) minIndent = curIndent;
|
||||||
}
|
}
|
||||||
} else if (e->s[j] == '\n') {
|
} else if (str->p[j] == '\n') {
|
||||||
atStartOfLine = true;
|
atStartOfLine = true;
|
||||||
curIndent = 0;
|
curIndent = 0;
|
||||||
}
|
}
|
||||||
|
@ -197,33 +238,31 @@ static Expr * stripIndentation(const Pos & pos, SymbolTable & symbols, vector<st
|
||||||
atStartOfLine = true;
|
atStartOfLine = true;
|
||||||
size_t curDropped = 0;
|
size_t curDropped = 0;
|
||||||
size_t n = es.size();
|
size_t n = es.size();
|
||||||
for (vector<std::pair<Pos, Expr *> >::iterator i = es.begin(); i != es.end(); ++i, --n) {
|
auto i = es.begin();
|
||||||
ExprIndStr * e = dynamic_cast<ExprIndStr *>(i->second);
|
const auto trimExpr = [&] (Expr * e) {
|
||||||
if (!e) {
|
atStartOfLine = false;
|
||||||
atStartOfLine = false;
|
curDropped = 0;
|
||||||
curDropped = 0;
|
es2->emplace_back(i->first, e);
|
||||||
es2->push_back(*i);
|
};
|
||||||
continue;
|
const auto trimString = [&] (const StringToken & t) {
|
||||||
}
|
|
||||||
|
|
||||||
string s2;
|
string s2;
|
||||||
for (size_t j = 0; j < e->s.size(); ++j) {
|
for (size_t j = 0; j < t.l; ++j) {
|
||||||
if (atStartOfLine) {
|
if (atStartOfLine) {
|
||||||
if (e->s[j] == ' ') {
|
if (t.p[j] == ' ') {
|
||||||
if (curDropped++ >= minIndent)
|
if (curDropped++ >= minIndent)
|
||||||
s2 += e->s[j];
|
s2 += t.p[j];
|
||||||
}
|
}
|
||||||
else if (e->s[j] == '\n') {
|
else if (t.p[j] == '\n') {
|
||||||
curDropped = 0;
|
curDropped = 0;
|
||||||
s2 += e->s[j];
|
s2 += t.p[j];
|
||||||
} else {
|
} else {
|
||||||
atStartOfLine = false;
|
atStartOfLine = false;
|
||||||
curDropped = 0;
|
curDropped = 0;
|
||||||
s2 += e->s[j];
|
s2 += t.p[j];
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
s2 += e->s[j];
|
s2 += t.p[j];
|
||||||
if (e->s[j] == '\n') atStartOfLine = true;
|
if (t.p[j] == '\n') atStartOfLine = true;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -235,7 +274,10 @@ static Expr * stripIndentation(const Pos & pos, SymbolTable & symbols, vector<st
|
||||||
s2 = string(s2, 0, p + 1);
|
s2 = string(s2, 0, p + 1);
|
||||||
}
|
}
|
||||||
|
|
||||||
es2->emplace_back(i->first, new ExprString(symbols.create(s2)));
|
es2->emplace_back(i->first, new ExprString(s2));
|
||||||
|
};
|
||||||
|
for (; i != es.end(); ++i, --n) {
|
||||||
|
std::visit(overloaded { trimExpr, trimString }, i->second);
|
||||||
}
|
}
|
||||||
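stripIndentation now stores each indented-string part as a std::variant<Expr *, StringToken> and dispatches with std::visit over an overloaded set of lambdas, one per alternative. A minimal self-contained sketch of that dispatch, using int and std::string as placeholder alternatives and defining the usual overloaded helper:

#include <iostream>
#include <string>
#include <variant>
#include <vector>

// The classic helper: make a single callable out of several lambdas.
template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;

int main()
{
    using Part = std::variant<int, std::string>;   // stands in for Expr* / StringToken
    std::vector<Part> parts = {std::string("  foo "), 42, std::string("bar")};

    const auto onExpr   = [](int e)                 { std::cout << "expr #" << e << '\n'; };
    const auto onString = [](const std::string & s) { std::cout << "chunk '" << s << "'\n"; };

    for (auto & p : parts)
        std::visit(overloaded{onExpr, onString}, p);  // same dispatch shape as the parser
}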
|
|
||||||
/* If this is a single string, then don't do a concatenation. */
|
/* If this is a single string, then don't do a concatenation. */
|
||||||
|
@ -270,15 +312,17 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParseData * data, const char * err
|
||||||
nix::Expr * e;
|
nix::Expr * e;
|
||||||
nix::ExprList * list;
|
nix::ExprList * list;
|
||||||
nix::ExprAttrs * attrs;
|
nix::ExprAttrs * attrs;
|
||||||
nix::Formals * formals;
|
nix::ParserFormals * formals;
|
||||||
nix::Formal * formal;
|
nix::Formal * formal;
|
||||||
nix::NixInt n;
|
nix::NixInt n;
|
||||||
nix::NixFloat nf;
|
nix::NixFloat nf;
|
||||||
const char * id; // !!! -> Symbol
|
StringToken id; // !!! -> Symbol
|
||||||
char * path;
|
StringToken path;
|
||||||
char * uri;
|
StringToken uri;
|
||||||
|
StringToken str;
|
||||||
std::vector<nix::AttrName> * attrNames;
|
std::vector<nix::AttrName> * attrNames;
|
||||||
std::vector<std::pair<nix::Pos, nix::Expr *> > * string_parts;
|
std::vector<std::pair<nix::Pos, nix::Expr *> > * string_parts;
|
||||||
|
std::vector<std::pair<nix::Pos, std::variant<nix::Expr *, StringToken> > > * ind_string_parts;
|
||||||
}
|
}
|
||||||
|
|
||||||
%type <e> start expr expr_function expr_if expr_op
|
%type <e> start expr expr_function expr_if expr_op
|
||||||
|
@ -288,11 +332,12 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParseData * data, const char * err
|
||||||
%type <formals> formals
|
%type <formals> formals
|
||||||
%type <formal> formal
|
%type <formal> formal
|
||||||
%type <attrNames> attrs attrpath
|
%type <attrNames> attrs attrpath
|
||||||
%type <string_parts> string_parts_interpolated ind_string_parts
|
%type <string_parts> string_parts_interpolated
|
||||||
|
%type <ind_string_parts> ind_string_parts
|
||||||
%type <e> path_start string_parts string_attr
|
%type <e> path_start string_parts string_attr
|
||||||
%type <id> attr
|
%type <id> attr
|
||||||
%token <id> ID ATTRPATH
|
%token <id> ID ATTRPATH
|
||||||
%token <e> STR IND_STR
|
%token <str> STR IND_STR
|
||||||
%token <n> INT
|
%token <n> INT
|
||||||
%token <nf> FLOAT
|
%token <nf> FLOAT
|
||||||
%token <path> PATH HPATH SPATH PATH_END
|
%token <path> PATH HPATH SPATH PATH_END
|
||||||
|
@ -325,11 +370,17 @@ expr_function
|
||||||
: ID ':' expr_function
|
: ID ':' expr_function
|
||||||
{ $$ = new ExprLambda(CUR_POS, data->symbols.create($1), 0, $3); }
|
{ $$ = new ExprLambda(CUR_POS, data->symbols.create($1), 0, $3); }
|
||||||
| '{' formals '}' ':' expr_function
|
| '{' formals '}' ':' expr_function
|
||||||
{ $$ = new ExprLambda(CUR_POS, data->symbols.create(""), $2, $5); }
|
{ $$ = new ExprLambda(CUR_POS, data->symbols.create(""), toFormals(*data, $2), $5); }
|
||||||
| '{' formals '}' '@' ID ':' expr_function
|
| '{' formals '}' '@' ID ':' expr_function
|
||||||
{ $$ = new ExprLambda(CUR_POS, data->symbols.create($5), $2, $7); }
|
{
|
||||||
|
Symbol arg = data->symbols.create($5);
|
||||||
|
$$ = new ExprLambda(CUR_POS, arg, toFormals(*data, $2, CUR_POS, arg), $7);
|
||||||
|
}
|
||||||
| ID '@' '{' formals '}' ':' expr_function
|
| ID '@' '{' formals '}' ':' expr_function
|
||||||
{ $$ = new ExprLambda(CUR_POS, data->symbols.create($1), $4, $7); }
|
{
|
||||||
|
Symbol arg = data->symbols.create($1);
|
||||||
|
$$ = new ExprLambda(CUR_POS, arg, toFormals(*data, $4, CUR_POS, arg), $7);
|
||||||
|
}
|
||||||
| ASSERT expr ';' expr_function
|
| ASSERT expr ';' expr_function
|
||||||
{ $$ = new ExprAssert(CUR_POS, $2, $4); }
|
{ $$ = new ExprAssert(CUR_POS, $2, $4); }
|
||||||
| WITH expr ';' expr_function
|
| WITH expr ';' expr_function
|
||||||
|
@ -398,7 +449,8 @@ expr_select
|
||||||
|
|
||||||
expr_simple
|
expr_simple
|
||||||
: ID {
|
: ID {
|
||||||
if (strcmp($1, "__curPos") == 0)
|
std::string_view s = "__curPos";
|
||||||
|
if ($1.l == s.size() && strncmp($1.p, s.data(), s.size()) == 0)
|
||||||
$$ = new ExprPos(CUR_POS);
|
$$ = new ExprPos(CUR_POS);
|
||||||
else
|
else
|
||||||
$$ = new ExprVar(CUR_POS, data->symbols.create($1));
|
$$ = new ExprVar(CUR_POS, data->symbols.create($1));
|
||||||
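Because StringToken is a pointer/length pair rather than a NUL-terminated C string, the __curPos check can no longer use strcmp; the new rule compares the length first and then the bytes, and the token's operator std::string_view makes the same test a one-liner. A small sketch showing both forms:

#include <cstring>
#include <string_view>

struct Token {
    const char * p;
    std::size_t l;
    operator std::string_view() const { return {p, l}; }
};

// Equivalent checks: explicit length + strncmp, or string_view equality.
bool isCurPos(const Token & t)
{
    constexpr std::string_view s = "__curPos";
    bool explicitCheck = t.l == s.size() && std::strncmp(t.p, s.data(), s.size()) == 0;
    bool viaView = std::string_view(t) == s;
    return explicitCheck && viaView;   // both forms agree
}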
|
@ -415,11 +467,11 @@ expr_simple
|
||||||
$$ = new ExprConcatStrings(CUR_POS, false, $2);
|
$$ = new ExprConcatStrings(CUR_POS, false, $2);
|
||||||
}
|
}
|
||||||
| SPATH {
|
| SPATH {
|
||||||
string path($1 + 1, strlen($1) - 2);
|
string path($1.p + 1, $1.l - 2);
|
||||||
$$ = new ExprCall(CUR_POS,
|
$$ = new ExprCall(CUR_POS,
|
||||||
new ExprVar(data->symbols.create("__findFile")),
|
new ExprVar(data->symbols.create("__findFile")),
|
||||||
{new ExprVar(data->symbols.create("__nixPath")),
|
{new ExprVar(data->symbols.create("__nixPath")),
|
||||||
new ExprString(data->symbols.create(path))});
|
new ExprString(path)});
|
||||||
}
|
}
|
||||||
| URI {
|
| URI {
|
||||||
static bool noURLLiterals = settings.isExperimentalFeatureEnabled(Xp::NoUrlLiterals);
|
static bool noURLLiterals = settings.isExperimentalFeatureEnabled(Xp::NoUrlLiterals);
|
||||||
|
@ -428,7 +480,7 @@ expr_simple
|
||||||
.msg = hintfmt("URL literals are disabled"),
|
.msg = hintfmt("URL literals are disabled"),
|
||||||
.errPos = CUR_POS
|
.errPos = CUR_POS
|
||||||
});
|
});
|
||||||
$$ = new ExprString(data->symbols.create($1));
|
$$ = new ExprString(string($1));
|
||||||
}
|
}
|
||||||
| '(' expr ')' { $$ = $2; }
|
| '(' expr ')' { $$ = $2; }
|
||||||
/* Let expressions `let {..., body = ...}' are just desugared
|
/* Let expressions `let {..., body = ...}' are just desugared
|
||||||
|
@ -443,32 +495,33 @@ expr_simple
|
||||||
;
|
;
|
||||||
|
|
||||||
string_parts
|
string_parts
|
||||||
: STR
|
: STR { $$ = new ExprString(string($1)); }
|
||||||
| string_parts_interpolated { $$ = new ExprConcatStrings(CUR_POS, true, $1); }
|
| string_parts_interpolated { $$ = new ExprConcatStrings(CUR_POS, true, $1); }
|
||||||
| { $$ = new ExprString(data->symbols.create("")); }
|
| { $$ = new ExprString(""); }
|
||||||
;
|
;
|
||||||
|
|
||||||
string_parts_interpolated
|
string_parts_interpolated
|
||||||
: string_parts_interpolated STR { $$ = $1; $1->emplace_back(makeCurPos(@2, data), $2); }
|
: string_parts_interpolated STR
|
||||||
|
{ $$ = $1; $1->emplace_back(makeCurPos(@2, data), new ExprString(string($2))); }
|
||||||
| string_parts_interpolated DOLLAR_CURLY expr '}' { $$ = $1; $1->emplace_back(makeCurPos(@2, data), $3); }
|
| string_parts_interpolated DOLLAR_CURLY expr '}' { $$ = $1; $1->emplace_back(makeCurPos(@2, data), $3); }
|
||||||
| DOLLAR_CURLY expr '}' { $$ = new vector<std::pair<Pos, Expr *> >; $$->emplace_back(makeCurPos(@1, data), $2); }
|
| DOLLAR_CURLY expr '}' { $$ = new vector<std::pair<Pos, Expr *> >; $$->emplace_back(makeCurPos(@1, data), $2); }
|
||||||
| STR DOLLAR_CURLY expr '}' {
|
| STR DOLLAR_CURLY expr '}' {
|
||||||
$$ = new vector<std::pair<Pos, Expr *> >;
|
$$ = new vector<std::pair<Pos, Expr *> >;
|
||||||
$$->emplace_back(makeCurPos(@1, data), $1);
|
$$->emplace_back(makeCurPos(@1, data), new ExprString(string($1)));
|
||||||
$$->emplace_back(makeCurPos(@2, data), $3);
|
$$->emplace_back(makeCurPos(@2, data), $3);
|
||||||
}
|
}
|
||||||
;
|
;
|
||||||
|
|
||||||
path_start
|
path_start
|
||||||
: PATH {
|
: PATH {
|
||||||
Path path(absPath($1, data->basePath));
|
Path path(absPath({$1.p, $1.l}, data->basePath));
|
||||||
/* add back in the trailing '/' to the first segment */
|
/* add back in the trailing '/' to the first segment */
|
||||||
if ($1[strlen($1)-1] == '/' && strlen($1) > 1)
|
if ($1.p[$1.l-1] == '/' && $1.l > 1)
|
||||||
path += "/";
|
path += "/";
|
||||||
$$ = new ExprPath(path);
|
$$ = new ExprPath(path);
|
||||||
}
|
}
|
||||||
| HPATH {
|
| HPATH {
|
||||||
Path path(getHome() + string($1 + 1));
|
Path path(getHome() + string($1.p + 1, $1.l - 1));
|
||||||
$$ = new ExprPath(path);
|
$$ = new ExprPath(path);
|
||||||
}
|
}
|
||||||
;
|
;
|
||||||
|
@ -476,7 +529,7 @@ path_start
|
||||||
ind_string_parts
|
ind_string_parts
|
||||||
: ind_string_parts IND_STR { $$ = $1; $1->emplace_back(makeCurPos(@2, data), $2); }
|
: ind_string_parts IND_STR { $$ = $1; $1->emplace_back(makeCurPos(@2, data), $2); }
|
||||||
| ind_string_parts DOLLAR_CURLY expr '}' { $$ = $1; $1->emplace_back(makeCurPos(@2, data), $3); }
|
| ind_string_parts DOLLAR_CURLY expr '}' { $$ = $1; $1->emplace_back(makeCurPos(@2, data), $3); }
|
||||||
| { $$ = new vector<std::pair<Pos, Expr *> >; }
|
| { $$ = new vector<std::pair<Pos, std::variant<Expr *, StringToken> > >; }
|
||||||
;
|
;
|
||||||
|
|
||||||
binds
|
binds
|
||||||
|
@ -508,7 +561,7 @@ attrs
|
||||||
{ $$ = $1;
|
{ $$ = $1;
|
||||||
ExprString * str = dynamic_cast<ExprString *>($2);
|
ExprString * str = dynamic_cast<ExprString *>($2);
|
||||||
if (str) {
|
if (str) {
|
||||||
$$->push_back(AttrName(str->s));
|
$$->push_back(AttrName(data->symbols.create(str->s)));
|
||||||
delete str;
|
delete str;
|
||||||
} else
|
} else
|
||||||
throw ParseError({
|
throw ParseError({
|
||||||
|
@ -525,7 +578,7 @@ attrpath
|
||||||
{ $$ = $1;
|
{ $$ = $1;
|
||||||
ExprString * str = dynamic_cast<ExprString *>($3);
|
ExprString * str = dynamic_cast<ExprString *>($3);
|
||||||
if (str) {
|
if (str) {
|
||||||
$$->push_back(AttrName(str->s));
|
$$->push_back(AttrName(data->symbols.create(str->s)));
|
||||||
delete str;
|
delete str;
|
||||||
} else
|
} else
|
||||||
$$->push_back(AttrName($3));
|
$$->push_back(AttrName($3));
|
||||||
|
@ -535,7 +588,7 @@ attrpath
|
||||||
{ $$ = new vector<AttrName>;
|
{ $$ = new vector<AttrName>;
|
||||||
ExprString *str = dynamic_cast<ExprString *>($1);
|
ExprString *str = dynamic_cast<ExprString *>($1);
|
||||||
if (str) {
|
if (str) {
|
||||||
$$->push_back(AttrName(str->s));
|
$$->push_back(AttrName(data->symbols.create(str->s)));
|
||||||
delete str;
|
delete str;
|
||||||
} else
|
} else
|
||||||
$$->push_back(AttrName($1));
|
$$->push_back(AttrName($1));
|
||||||
|
@ -544,7 +597,7 @@ attrpath
|
||||||
|
|
||||||
attr
|
attr
|
||||||
: ID { $$ = $1; }
|
: ID { $$ = $1; }
|
||||||
| OR_KW { $$ = "or"; }
|
| OR_KW { $$ = {"or", 2}; }
|
||||||
;
|
;
|
||||||
|
|
||||||
string_attr
|
string_attr
|
||||||
|
@ -559,13 +612,13 @@ expr_list
|
||||||
|
|
||||||
formals
|
formals
|
||||||
: formal ',' formals
|
: formal ',' formals
|
||||||
{ $$ = $3; addFormal(CUR_POS, $$, *$1); }
|
{ $$ = $3; $$->formals.push_back(*$1); }
|
||||||
| formal
|
| formal
|
||||||
{ $$ = new Formals; addFormal(CUR_POS, $$, *$1); $$->ellipsis = false; }
|
{ $$ = new ParserFormals; $$->formals.push_back(*$1); $$->ellipsis = false; }
|
||||||
|
|
|
|
||||||
{ $$ = new Formals; $$->ellipsis = false; }
|
{ $$ = new ParserFormals; $$->ellipsis = false; }
|
||||||
| ELLIPSIS
|
| ELLIPSIS
|
||||||
{ $$ = new Formals; $$->ellipsis = true; }
|
{ $$ = new ParserFormals; $$->ellipsis = true; }
|
||||||
;
|
;
|
||||||
|
|
||||||
formal
|
formal
|
||||||
|
@ -590,8 +643,8 @@ formal
|
||||||
namespace nix {
|
namespace nix {
|
||||||
|
|
||||||
|
|
||||||
Expr * EvalState::parse(const char * text, FileOrigin origin,
|
Expr * EvalState::parse(char * text, size_t length, FileOrigin origin,
|
||||||
const Path & path, const Path & basePath, std::shared_ptr<StaticEnv> & staticEnv)
|
const PathView path, const PathView basePath, std::shared_ptr<StaticEnv> & staticEnv)
|
||||||
{
|
{
|
||||||
yyscan_t scanner;
|
yyscan_t scanner;
|
||||||
ParseData data(*this);
|
ParseData data(*this);
|
||||||
|
@ -610,7 +663,7 @@ Expr * EvalState::parse(const char * text, FileOrigin origin,
|
||||||
data.basePath = basePath;
|
data.basePath = basePath;
|
||||||
|
|
||||||
yylex_init(&scanner);
|
yylex_init(&scanner);
|
||||||
yy_scan_string(text, scanner);
|
yy_scan_buffer(text, length, scanner);
|
||||||
int res = yyparse(scanner, &data);
|
int res = yyparse(scanner, &data);
|
||||||
yylex_destroy(scanner);
|
yylex_destroy(scanner);
|
||||||
|
|
||||||
|
@ -656,26 +709,33 @@ Expr * EvalState::parseExprFromFile(const Path & path)
|
||||||
|
|
||||||
Expr * EvalState::parseExprFromFile(const Path & path, std::shared_ptr<StaticEnv> & staticEnv)
|
Expr * EvalState::parseExprFromFile(const Path & path, std::shared_ptr<StaticEnv> & staticEnv)
|
||||||
{
|
{
|
||||||
return parse(readFile(path).c_str(), foFile, path, dirOf(path), staticEnv);
|
auto buffer = readFile(path);
|
||||||
|
// readFile should have left some extra space for terminators
|
||||||
|
buffer.append("\0\0", 2);
|
||||||
|
return parse(buffer.data(), buffer.size(), foFile, path, dirOf(path), staticEnv);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
Expr * EvalState::parseExprFromString(std::string_view s, const Path & basePath, std::shared_ptr<StaticEnv> & staticEnv)
|
Expr * EvalState::parseExprFromString(std::string s, const Path & basePath, std::shared_ptr<StaticEnv> & staticEnv)
|
||||||
{
|
{
|
||||||
return parse(s.data(), foString, "", basePath, staticEnv);
|
s.append("\0\0", 2);
|
||||||
|
return parse(s.data(), s.size(), foString, "", basePath, staticEnv);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
Expr * EvalState::parseExprFromString(std::string_view s, const Path & basePath)
|
Expr * EvalState::parseExprFromString(std::string s, const Path & basePath)
|
||||||
{
|
{
|
||||||
return parseExprFromString(s, basePath, staticBaseEnv);
|
return parseExprFromString(std::move(s), basePath, staticBaseEnv);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
Expr * EvalState::parseStdin()
|
Expr * EvalState::parseStdin()
|
||||||
{
|
{
|
||||||
//Activity act(*logger, lvlTalkative, format("parsing standard input"));
|
//Activity act(*logger, lvlTalkative, format("parsing standard input"));
|
||||||
return parse(drainFD(0).data(), foStdin, "", absPath("."), staticBaseEnv);
|
auto buffer = drainFD(0);
|
||||||
|
// drainFD should have left some extra space for terminators
|
||||||
|
buffer.append("\0\0", 2);
|
||||||
|
return parse(buffer.data(), buffer.size(), foStdin, "", absPath("."), staticBaseEnv);
|
||||||
}
|
}
|
||||||
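The parser entry points now hand a writable buffer and its length to flex's yy_scan_buffer, which requires the last two bytes of the buffer to be NUL; that is why parseExprFromFile, parseExprFromString and parseStdin all append "\0\0" before calling parse. A sketch of preparing such a buffer (without flex itself):

#include <fstream>
#include <sstream>
#include <string>

// Read a file into a string and leave room for the two NUL terminators that
// flex's yy_scan_buffer() expects at the very end of the scanned buffer.
std::string readForScanning(const std::string & path)
{
    std::ifstream in(path, std::ios::binary);
    std::ostringstream ss;
    ss << in.rdbuf();
    std::string buffer = ss.str();
    buffer.append("\0\0", 2);   // buffer[size-2] and buffer[size-1] must be '\0'
    return buffer;
}
// The scanner is then given buffer.data() and buffer.size(), as in the diff:
//   yy_scan_buffer(text, length, scanner);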
|
|
||||||
|
|
||||||
|
@ -695,24 +755,24 @@ void EvalState::addToSearchPath(const string & s)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
Path EvalState::findFile(const string & path)
|
Path EvalState::findFile(const std::string_view path)
|
||||||
{
|
{
|
||||||
return findFile(searchPath, path);
|
return findFile(searchPath, path);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
Path EvalState::findFile(SearchPath & searchPath, const string & path, const Pos & pos)
|
Path EvalState::findFile(SearchPath & searchPath, const std::string_view path, const Pos & pos)
|
||||||
{
|
{
|
||||||
for (auto & i : searchPath) {
|
for (auto & i : searchPath) {
|
||||||
std::string suffix;
|
std::string suffix;
|
||||||
if (i.first.empty())
|
if (i.first.empty())
|
||||||
suffix = "/" + path;
|
suffix = concatStrings("/", path);
|
||||||
else {
|
else {
|
||||||
auto s = i.first.size();
|
auto s = i.first.size();
|
||||||
if (path.compare(0, s, i.first) != 0 ||
|
if (path.compare(0, s, i.first) != 0 ||
|
||||||
(path.size() > s && path[s] != '/'))
|
(path.size() > s && path[s] != '/'))
|
||||||
continue;
|
continue;
|
||||||
suffix = path.size() == s ? "" : "/" + string(path, s);
|
suffix = path.size() == s ? "" : concatStrings("/", path.substr(s));
|
||||||
}
|
}
|
||||||
auto r = resolveSearchPathElem(i);
|
auto r = resolveSearchPathElem(i);
|
||||||
if (!r.first) continue;
|
if (!r.first) continue;
|
||||||
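findFile matches a lookup path such as nixpkgs/lib against search-path entries of the form prefix=dir: an empty prefix matches everything, otherwise the path must start with the prefix and either end there or continue with a '/', and only the remainder is appended to the entry's directory. A standalone sketch of that matching step over simple string pairs (existence checks and path resolution omitted):

#include <optional>
#include <string>
#include <utility>
#include <vector>

// Each entry is (prefix, directory); an empty prefix matches any path.
using SimpleSearchPath = std::vector<std::pair<std::string, std::string>>;

std::optional<std::string> resolveInSearchPath(const SimpleSearchPath & searchPath,
                                               const std::string & path)
{
    for (auto & [prefix, dir] : searchPath) {
        std::string suffix;
        if (prefix.empty())
            suffix = "/" + path;
        else {
            auto s = prefix.size();
            if (path.compare(0, s, prefix) != 0 ||
                (path.size() > s && path[s] != '/'))
                continue;                  // this entry does not apply
            suffix = path.substr(s);       // empty, or already starts with '/'
        }
        return dir + suffix;               // first matching entry wins
    }
    return std::nullopt;
}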
|
@ -721,7 +781,7 @@ Path EvalState::findFile(SearchPath & searchPath, const string & path, const Pos
|
||||||
}
|
}
|
||||||
|
|
||||||
if (hasPrefix(path, "nix/"))
|
if (hasPrefix(path, "nix/"))
|
||||||
return corepkgsPrefix + path.substr(4);
|
return concatStrings(corepkgsPrefix, path.substr(4));
|
||||||
|
|
||||||
throw ThrownError({
|
throw ThrownError({
|
||||||
.msg = hintfmt(evalSettings.pureEval
|
.msg = hintfmt(evalSettings.pureEval
|
||||||
|
|
File diff suppressed because it is too large
|
@ -7,8 +7,8 @@ namespace nix {
|
||||||
static void prim_unsafeDiscardStringContext(EvalState & state, const Pos & pos, Value * * args, Value & v)
|
static void prim_unsafeDiscardStringContext(EvalState & state, const Pos & pos, Value * * args, Value & v)
|
||||||
{
|
{
|
||||||
PathSet context;
|
PathSet context;
|
||||||
string s = state.coerceToString(pos, *args[0], context);
|
auto s = state.coerceToString(pos, *args[0], context);
|
||||||
mkString(v, s, PathSet());
|
v.mkString(*s);
|
||||||
}
|
}
|
||||||
|
|
||||||
static RegisterPrimOp primop_unsafeDiscardStringContext("__unsafeDiscardStringContext", 1, prim_unsafeDiscardStringContext);
|
static RegisterPrimOp primop_unsafeDiscardStringContext("__unsafeDiscardStringContext", 1, prim_unsafeDiscardStringContext);
|
||||||
|
@ -18,7 +18,7 @@ static void prim_hasContext(EvalState & state, const Pos & pos, Value * * args,
|
||||||
{
|
{
|
||||||
PathSet context;
|
PathSet context;
|
||||||
state.forceString(*args[0], context, pos);
|
state.forceString(*args[0], context, pos);
|
||||||
mkBool(v, !context.empty());
|
v.mkBool(!context.empty());
|
||||||
}
|
}
|
||||||
|
|
||||||
static RegisterPrimOp primop_hasContext("__hasContext", 1, prim_hasContext);
|
static RegisterPrimOp primop_hasContext("__hasContext", 1, prim_hasContext);
|
||||||
|
@ -33,13 +33,13 @@ static RegisterPrimOp primop_hasContext("__hasContext", 1, prim_hasContext);
|
||||||
static void prim_unsafeDiscardOutputDependency(EvalState & state, const Pos & pos, Value * * args, Value & v)
|
static void prim_unsafeDiscardOutputDependency(EvalState & state, const Pos & pos, Value * * args, Value & v)
|
||||||
{
|
{
|
||||||
PathSet context;
|
PathSet context;
|
||||||
string s = state.coerceToString(pos, *args[0], context);
|
auto s = state.coerceToString(pos, *args[0], context);
|
||||||
|
|
||||||
PathSet context2;
|
PathSet context2;
|
||||||
for (auto & p : context)
|
for (auto & p : context)
|
||||||
context2.insert(p.at(0) == '=' ? string(p, 1) : p);
|
context2.insert(p.at(0) == '=' ? string(p, 1) : p);
|
||||||
|
|
||||||
mkString(v, s, context2);
|
v.mkString(*s, context2);
|
||||||
}
|
}
|
||||||
|
|
||||||
static RegisterPrimOp primop_unsafeDiscardOutputDependency("__unsafeDiscardOutputDependency", 1, prim_unsafeDiscardOutputDependency);
|
static RegisterPrimOp primop_unsafeDiscardOutputDependency("__unsafeDiscardOutputDependency", 1, prim_unsafeDiscardOutputDependency);
|
||||||
|
@ -103,27 +103,26 @@ static void prim_getContext(EvalState & state, const Pos & pos, Value * * args,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
state.mkAttrs(v, contextInfos.size());
|
auto attrs = state.buildBindings(contextInfos.size());
|
||||||
|
|
||||||
auto sPath = state.symbols.create("path");
|
auto sPath = state.symbols.create("path");
|
||||||
auto sAllOutputs = state.symbols.create("allOutputs");
|
auto sAllOutputs = state.symbols.create("allOutputs");
|
||||||
for (const auto & info : contextInfos) {
|
for (const auto & info : contextInfos) {
|
||||||
auto & infoVal = *state.allocAttr(v, state.symbols.create(info.first));
|
auto infoAttrs = state.buildBindings(3);
|
||||||
state.mkAttrs(infoVal, 3);
|
|
||||||
if (info.second.path)
|
if (info.second.path)
|
||||||
mkBool(*state.allocAttr(infoVal, sPath), true);
|
infoAttrs.alloc(sPath).mkBool(true);
|
||||||
if (info.second.allOutputs)
|
if (info.second.allOutputs)
|
||||||
mkBool(*state.allocAttr(infoVal, sAllOutputs), true);
|
infoAttrs.alloc(sAllOutputs).mkBool(true);
|
||||||
if (!info.second.outputs.empty()) {
|
if (!info.second.outputs.empty()) {
|
||||||
auto & outputsVal = *state.allocAttr(infoVal, state.sOutputs);
|
auto & outputsVal = infoAttrs.alloc(state.sOutputs);
|
||||||
state.mkList(outputsVal, info.second.outputs.size());
|
state.mkList(outputsVal, info.second.outputs.size());
|
||||||
size_t i = 0;
|
for (const auto & [i, output] : enumerate(info.second.outputs))
|
||||||
for (const auto & output : info.second.outputs)
|
(outputsVal.listElems()[i] = state.allocValue())->mkString(output);
|
||||||
mkString(*(outputsVal.listElems()[i++] = state.allocValue()), output);
|
|
||||||
}
|
}
|
||||||
infoVal.attrs->sort();
|
attrs.alloc(info.first).mkAttrs(infoAttrs);
|
||||||
}
|
}
|
||||||
v.attrs->sort();
|
|
||||||
|
v.mkAttrs(attrs);
|
||||||
}
|
}
|
||||||
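The getContext rewrite drops the manual output counter in favour of an enumerate() helper that yields (index, element) pairs in a range-for loop. The helper actually used here lives in the project's utility headers; the version below is only an illustrative stand-in built on the well-known iterator-wrapper idiom:

#include <cstddef>
#include <iostream>
#include <iterator>
#include <string>
#include <tuple>
#include <utility>
#include <vector>

// Illustrative enumerate(): wraps a range so range-for yields (index, element).
template<typename T,
         typename TIter = decltype(std::begin(std::declval<T>()))>
auto enumerate(T && iterable)
{
    struct iterator {
        std::size_t i;
        TIter iter;
        bool operator!=(const iterator & other) const { return iter != other.iter; }
        void operator++() { ++i; ++iter; }
        auto operator*() const { return std::tie(i, *iter); }
    };
    struct wrapper {
        T iterable;
        auto begin() { return iterator{0, std::begin(iterable)}; }
        auto end()   { return iterator{0, std::end(iterable)}; }
    };
    return wrapper{std::forward<T>(iterable)};
}

int main()
{
    std::vector<std::string> outputs{"out", "dev", "doc"};
    for (const auto & [i, output] : enumerate(outputs))
        std::cout << i << ": " << output << '\n';
}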
|
|
||||||
static RegisterPrimOp primop_getContext("__getContext", 1, prim_getContext);
|
static RegisterPrimOp primop_getContext("__getContext", 1, prim_getContext);
|
||||||
|
@ -182,12 +181,12 @@ static void prim_appendContext(EvalState & state, const Pos & pos, Value * * arg
|
||||||
}
|
}
|
||||||
for (auto elem : iter->value->listItems()) {
|
for (auto elem : iter->value->listItems()) {
|
||||||
auto name = state.forceStringNoCtx(*elem, *iter->pos);
|
auto name = state.forceStringNoCtx(*elem, *iter->pos);
|
||||||
context.insert("!" + name + "!" + string(i.name));
|
context.insert(concatStrings("!", name, "!", i.name));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
mkString(v, orig, context);
|
v.mkString(orig, context);
|
||||||
}
|
}
|
||||||
|
|
||||||
static RegisterPrimOp primop_appendContext("__appendContext", 2, prim_appendContext);
|
static RegisterPrimOp primop_appendContext("__appendContext", 2, prim_appendContext);
|
||||||
|
|
|
@ -12,7 +12,7 @@ static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * ar
|
||||||
std::string url;
|
std::string url;
|
||||||
std::optional<Hash> rev;
|
std::optional<Hash> rev;
|
||||||
std::optional<std::string> ref;
|
std::optional<std::string> ref;
|
||||||
std::string name = "source";
|
std::string_view name = "source";
|
||||||
PathSet context;
|
PathSet context;
|
||||||
|
|
||||||
state.forceValue(*args[0], pos);
|
state.forceValue(*args[0], pos);
|
||||||
|
@ -22,14 +22,14 @@ static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * ar
|
||||||
state.forceAttrs(*args[0], pos);
|
state.forceAttrs(*args[0], pos);
|
||||||
|
|
||||||
for (auto & attr : *args[0]->attrs) {
|
for (auto & attr : *args[0]->attrs) {
|
||||||
string n(attr.name);
|
std::string_view n(attr.name);
|
||||||
if (n == "url")
|
if (n == "url")
|
||||||
url = state.coerceToString(*attr.pos, *attr.value, context, false, false);
|
url = state.coerceToString(*attr.pos, *attr.value, context, false, false).toOwned();
|
||||||
else if (n == "rev") {
|
else if (n == "rev") {
|
||||||
// Ugly: unlike fetchGit, here the "rev" attribute can
|
// Ugly: unlike fetchGit, here the "rev" attribute can
|
||||||
// be both a revision or a branch/tag name.
|
// be both a revision or a branch/tag name.
|
||||||
auto value = state.forceStringNoCtx(*attr.value, *attr.pos);
|
auto value = state.forceStringNoCtx(*attr.value, *attr.pos);
|
||||||
if (std::regex_match(value, revRegex))
|
if (std::regex_match(value.begin(), value.end(), revRegex))
|
||||||
rev = Hash::parseAny(value, htSHA1);
|
rev = Hash::parseAny(value, htSHA1);
|
||||||
else
|
else
|
||||||
ref = value;
|
ref = value;
|
||||||
|
@ -50,7 +50,7 @@ static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * ar
|
||||||
});
|
});
|
||||||
|
|
||||||
} else
|
} else
|
||||||
url = state.coerceToString(pos, *args[0], context, false, false);
|
url = state.coerceToString(pos, *args[0], context, false, false).toOwned();
|
||||||
|
|
||||||
// FIXME: git externals probably can be used to bypass the URI
|
// FIXME: git externals probably can be used to bypass the URI
|
||||||
// whitelist. Ah well.
|
// whitelist. Ah well.
|
||||||
|
@ -62,7 +62,7 @@ static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * ar
|
||||||
fetchers::Attrs attrs;
|
fetchers::Attrs attrs;
|
||||||
attrs.insert_or_assign("type", "hg");
|
attrs.insert_or_assign("type", "hg");
|
||||||
attrs.insert_or_assign("url", url.find("://") != std::string::npos ? url : "file://" + url);
|
attrs.insert_or_assign("url", url.find("://") != std::string::npos ? url : "file://" + url);
|
||||||
attrs.insert_or_assign("name", name);
|
attrs.insert_or_assign("name", string(name));
|
||||||
if (ref) attrs.insert_or_assign("ref", *ref);
|
if (ref) attrs.insert_or_assign("ref", *ref);
|
||||||
if (rev) attrs.insert_or_assign("rev", rev->gitRev());
|
if (rev) attrs.insert_or_assign("rev", rev->gitRev());
|
||||||
auto input = fetchers::Input::fromAttrs(std::move(attrs));
|
auto input = fetchers::Input::fromAttrs(std::move(attrs));
|
||||||
|
@ -70,19 +70,19 @@ static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * ar
|
||||||
// FIXME: use name
|
// FIXME: use name
|
||||||
auto [tree, input2] = input.fetch(state.store);
|
auto [tree, input2] = input.fetch(state.store);
|
||||||
|
|
||||||
state.mkAttrs(v, 8);
|
auto attrs2 = state.buildBindings(8);
|
||||||
auto storePath = state.store->printStorePath(tree.storePath);
|
auto storePath = state.store->printStorePath(tree.storePath);
|
||||||
mkString(*state.allocAttr(v, state.sOutPath), storePath, PathSet({storePath}));
|
attrs2.alloc(state.sOutPath).mkString(storePath, {storePath});
|
||||||
if (input2.getRef())
|
if (input2.getRef())
|
||||||
mkString(*state.allocAttr(v, state.symbols.create("branch")), *input2.getRef());
|
attrs2.alloc("branch").mkString(*input2.getRef());
|
||||||
// Backward compatibility: set 'rev' to
|
// Backward compatibility: set 'rev' to
|
||||||
// 0000000000000000000000000000000000000000 for a dirty tree.
|
// 0000000000000000000000000000000000000000 for a dirty tree.
|
||||||
auto rev2 = input2.getRev().value_or(Hash(htSHA1));
|
auto rev2 = input2.getRev().value_or(Hash(htSHA1));
|
||||||
mkString(*state.allocAttr(v, state.symbols.create("rev")), rev2.gitRev());
|
attrs2.alloc("rev").mkString(rev2.gitRev());
|
||||||
mkString(*state.allocAttr(v, state.symbols.create("shortRev")), std::string(rev2.gitRev(), 0, 12));
|
attrs2.alloc("shortRev").mkString(rev2.gitRev().substr(0, 12));
|
||||||
if (auto revCount = input2.getRevCount())
|
if (auto revCount = input2.getRevCount())
|
||||||
mkInt(*state.allocAttr(v, state.symbols.create("revCount")), *revCount);
|
attrs2.alloc("revCount").mkInt(*revCount);
|
||||||
v.attrs->sort();
|
v.mkAttrs(attrs2);
|
||||||
|
|
||||||
state.allowPath(tree.storePath);
|
state.allowPath(tree.storePath);
|
||||||
}
|
}
|
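
The hunk above (like most of the evaluator changes in this diff) replaces the old state.mkAttrs / state.allocAttr / v.attrs->sort() sequence with state.buildBindings(n), attrs.alloc(...).mk*(...) and a final v.mkAttrs(attrs). Below is a minimal standalone mock of that builder pattern — illustrative only, not the real Nix EvalState/Value/BindingsBuilder classes — showing why the per-call-site sort() disappears: the builder collects the attributes and the final mkAttrs() finalizes and orders them once.

    #include <algorithm>
    #include <iostream>
    #include <string>
    #include <utility>
    #include <vector>

    struct BindingsBuilder {
        std::vector<std::pair<std::string, std::string>> attrs;
        std::string & alloc(std::string name) {          // hand back a slot to fill
            attrs.emplace_back(std::move(name), std::string());
            return attrs.back().second;
        }
    };

    struct Value {
        std::vector<std::pair<std::string, std::string>> attrs;
        void mkAttrs(BindingsBuilder & b) {              // finalize: single sort here
            std::sort(b.attrs.begin(), b.attrs.end());
            attrs = std::move(b.attrs);
        }
    };

    int main() {
        BindingsBuilder b;
        b.alloc("shortRev") = "000000000000";
        b.alloc("outPath") = "/nix/store/example";
        Value v;
        v.mkAttrs(b);                                    // no separate attrs->sort() call
        for (auto & [name, value] : v.attrs)
            std::cout << name << " = " << value << "\n";
    }
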
||||||
|
|
|
@ -21,49 +21,48 @@ void emitTreeAttrs(
|
||||||
{
|
{
|
||||||
assert(input.isImmutable());
|
assert(input.isImmutable());
|
||||||
|
|
||||||
state.mkAttrs(v, 8);
|
auto attrs = state.buildBindings(8);
|
||||||
|
|
||||||
auto storePath = state.store->printStorePath(tree.storePath);
|
auto storePath = state.store->printStorePath(tree.storePath);
|
||||||
|
|
||||||
mkString(*state.allocAttr(v, state.sOutPath), storePath, PathSet({storePath}));
|
attrs.alloc(state.sOutPath).mkString(storePath, {storePath});
|
||||||
|
|
||||||
// FIXME: support arbitrary input attributes.
|
// FIXME: support arbitrary input attributes.
|
||||||
|
|
||||||
auto narHash = input.getNarHash();
|
auto narHash = input.getNarHash();
|
||||||
assert(narHash);
|
assert(narHash);
|
||||||
mkString(*state.allocAttr(v, state.symbols.create("narHash")),
|
attrs.alloc("narHash").mkString(narHash->to_string(SRI, true));
|
||||||
narHash->to_string(SRI, true));
|
|
||||||
|
|
||||||
if (input.getType() == "git")
|
if (input.getType() == "git")
|
||||||
mkBool(*state.allocAttr(v, state.symbols.create("submodules")),
|
attrs.alloc("submodules").mkBool(
|
||||||
fetchers::maybeGetBoolAttr(input.attrs, "submodules").value_or(false));
|
fetchers::maybeGetBoolAttr(input.attrs, "submodules").value_or(false));
|
||||||
|
|
||||||
if (!forceDirty) {
|
if (!forceDirty) {
|
||||||
|
|
||||||
if (auto rev = input.getRev()) {
|
if (auto rev = input.getRev()) {
|
||||||
mkString(*state.allocAttr(v, state.symbols.create("rev")), rev->gitRev());
|
attrs.alloc("rev").mkString(rev->gitRev());
|
||||||
mkString(*state.allocAttr(v, state.symbols.create("shortRev")), rev->gitShortRev());
|
attrs.alloc("shortRev").mkString(rev->gitShortRev());
|
||||||
} else if (emptyRevFallback) {
|
} else if (emptyRevFallback) {
|
||||||
// Backwards compat for `builtins.fetchGit`: dirty repos return an empty sha1 as rev
|
// Backwards compat for `builtins.fetchGit`: dirty repos return an empty sha1 as rev
|
||||||
auto emptyHash = Hash(htSHA1);
|
auto emptyHash = Hash(htSHA1);
|
||||||
mkString(*state.allocAttr(v, state.symbols.create("rev")), emptyHash.gitRev());
|
attrs.alloc("rev").mkString(emptyHash.gitRev());
|
||||||
mkString(*state.allocAttr(v, state.symbols.create("shortRev")), emptyHash.gitShortRev());
|
attrs.alloc("shortRev").mkString(emptyHash.gitShortRev());
|
||||||
}
|
}
|
||||||
|
|
||||||
if (auto revCount = input.getRevCount())
|
if (auto revCount = input.getRevCount())
|
||||||
mkInt(*state.allocAttr(v, state.symbols.create("revCount")), *revCount);
|
attrs.alloc("revCount").mkInt(*revCount);
|
||||||
else if (emptyRevFallback)
|
else if (emptyRevFallback)
|
||||||
mkInt(*state.allocAttr(v, state.symbols.create("revCount")), 0);
|
attrs.alloc("revCount").mkInt(0);
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if (auto lastModified = input.getLastModified()) {
|
if (auto lastModified = input.getLastModified()) {
|
||||||
mkInt(*state.allocAttr(v, state.symbols.create("lastModified")), *lastModified);
|
attrs.alloc("lastModified").mkInt(*lastModified);
|
||||||
mkString(*state.allocAttr(v, state.symbols.create("lastModifiedDate")),
|
attrs.alloc("lastModifiedDate").mkString(
|
||||||
fmt("%s", std::put_time(std::gmtime(&*lastModified), "%Y%m%d%H%M%S")));
|
fmt("%s", std::put_time(std::gmtime(&*lastModified), "%Y%m%d%H%M%S")));
|
||||||
}
|
}
|
||||||
|
|
||||||
v.attrs->sort();
|
v.mkAttrs(attrs);
|
||||||
}
|
}
|
||||||
|
|
||||||
std::string fixURI(std::string uri, EvalState & state, const std::string & defaultScheme = "file")
|
std::string fixURI(std::string uri, EvalState & state, const std::string & defaultScheme = "file")
|
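
For reference, the lastModifiedDate attribute built in the hunk above is just the UTC timestamp rendered through std::put_time with the "%Y%m%d%H%M%S" format. A small self-contained illustration, with a fixed epoch value standing in for the real lastModified:

    #include <ctime>
    #include <iomanip>
    #include <iostream>

    int main() {
        std::time_t lastModified = 0;   // placeholder: 1970-01-01T00:00:00Z
        // Same formatting as emitTreeAttrs above; prints "19700101000000".
        std::cout << std::put_time(std::gmtime(&lastModified), "%Y%m%d%H%M%S") << "\n";
    }
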
||||||
|
@ -126,7 +125,7 @@ static void fetchTree(
|
||||||
if (attr.name == state.sType) continue;
|
if (attr.name == state.sType) continue;
|
||||||
state.forceValue(*attr.value, *attr.pos);
|
state.forceValue(*attr.value, *attr.pos);
|
||||||
if (attr.value->type() == nPath || attr.value->type() == nString) {
|
if (attr.value->type() == nPath || attr.value->type() == nString) {
|
||||||
auto s = state.coerceToString(*attr.pos, *attr.value, context, false, false);
|
auto s = state.coerceToString(*attr.pos, *attr.value, context, false, false).toOwned();
|
||||||
attrs.emplace(attr.name,
|
attrs.emplace(attr.name,
|
||||||
attr.name == "url"
|
attr.name == "url"
|
||||||
? type == "git"
|
? type == "git"
|
||||||
|
@ -152,7 +151,7 @@ static void fetchTree(
|
||||||
|
|
||||||
input = fetchers::Input::fromAttrs(std::move(attrs));
|
input = fetchers::Input::fromAttrs(std::move(attrs));
|
||||||
} else {
|
} else {
|
||||||
auto url = state.coerceToString(pos, *args[0], context, false, false);
|
auto url = state.coerceToString(pos, *args[0], context, false, false).toOwned();
|
||||||
|
|
||||||
if (type == "git") {
|
if (type == "git") {
|
||||||
fetchers::Attrs attrs;
|
fetchers::Attrs attrs;
|
||||||
|
@ -248,7 +247,7 @@ static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
|
||||||
state.allowPath(storePath);
|
state.allowPath(storePath);
|
||||||
|
|
||||||
auto path = state.store->printStorePath(storePath);
|
auto path = state.store->printStorePath(storePath);
|
||||||
mkString(v, path, PathSet({path}));
|
v.mkString(path, PathSet({path}));
|
||||||
}
|
}
|
||||||
|
|
||||||
static void prim_fetchurl(EvalState & state, const Pos & pos, Value * * args, Value & v)
|
static void prim_fetchurl(EvalState & state, const Pos & pos, Value * * args, Value & v)
|
||||||
|
|
|
@ -9,7 +9,7 @@ static void prim_fromTOML(EvalState & state, const Pos & pos, Value * * args, Va
|
||||||
{
|
{
|
||||||
auto toml = state.forceStringNoCtx(*args[0], pos);
|
auto toml = state.forceStringNoCtx(*args[0], pos);
|
||||||
|
|
||||||
std::istringstream tomlStream(toml);
|
std::istringstream tomlStream(string{toml});
|
||||||
|
|
||||||
std::function<void(Value &, toml::value)> visit;
|
std::function<void(Value &, toml::value)> visit;
|
||||||
|
|
||||||
|
@ -24,15 +24,12 @@ static void prim_fromTOML(EvalState & state, const Pos & pos, Value * * args, Va
|
||||||
size_t size = 0;
|
size_t size = 0;
|
||||||
for (auto & i : table) { (void) i; size++; }
|
for (auto & i : table) { (void) i; size++; }
|
||||||
|
|
||||||
state.mkAttrs(v, size);
|
auto attrs = state.buildBindings(size);
|
||||||
|
|
||||||
for(auto & elem: table) {
|
for(auto & elem : table)
|
||||||
|
visit(attrs.alloc(elem.first), elem.second);
|
||||||
|
|
||||||
auto & v2 = *state.allocAttr(v, state.symbols.create(elem.first));
|
v.mkAttrs(attrs);
|
||||||
visit(v2, elem.second);
|
|
||||||
}
|
|
||||||
|
|
||||||
v.attrs->sort();
|
|
||||||
}
|
}
|
||||||
break;;
|
break;;
|
||||||
case toml::value_t::array:
|
case toml::value_t::array:
|
||||||
|
@ -46,16 +43,16 @@ static void prim_fromTOML(EvalState & state, const Pos & pos, Value * * args, Va
|
||||||
}
|
}
|
||||||
break;;
|
break;;
|
||||||
case toml::value_t::boolean:
|
case toml::value_t::boolean:
|
||||||
mkBool(v, toml::get<bool>(t));
|
v.mkBool(toml::get<bool>(t));
|
||||||
break;;
|
break;;
|
||||||
case toml::value_t::integer:
|
case toml::value_t::integer:
|
||||||
mkInt(v, toml::get<int64_t>(t));
|
v.mkInt(toml::get<int64_t>(t));
|
||||||
break;;
|
break;;
|
||||||
case toml::value_t::floating:
|
case toml::value_t::floating:
|
||||||
mkFloat(v, toml::get<NixFloat>(t));
|
v.mkFloat(toml::get<NixFloat>(t));
|
||||||
break;;
|
break;;
|
||||||
case toml::value_t::string:
|
case toml::value_t::string:
|
||||||
mkString(v, toml::get<std::string>(t));
|
v.mkString(toml::get<std::string>(t));
|
||||||
break;;
|
break;;
|
||||||
case toml::value_t::local_datetime:
|
case toml::value_t::local_datetime:
|
||||||
case toml::value_t::offset_datetime:
|
case toml::value_t::offset_datetime:
|
||||||
|
@ -65,7 +62,7 @@ static void prim_fromTOML(EvalState & state, const Pos & pos, Value * * args, Va
|
||||||
throw std::runtime_error("Dates and times are not supported");
|
throw std::runtime_error("Dates and times are not supported");
|
||||||
break;;
|
break;;
|
||||||
case toml::value_t::empty:
|
case toml::value_t::empty:
|
||||||
mkNull(v);
|
v.mkNull();
|
||||||
break;;
|
break;;
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,7 +1,8 @@
|
||||||
#pragma once
|
#pragma once
|
||||||
|
|
||||||
|
#include <list>
|
||||||
#include <map>
|
#include <map>
|
||||||
#include <unordered_set>
|
#include <unordered_map>
|
||||||
|
|
||||||
#include "types.hh"
|
#include "types.hh"
|
||||||
|
|
||||||
|
@ -70,15 +71,21 @@ public:
|
||||||
class SymbolTable
|
class SymbolTable
|
||||||
{
|
{
|
||||||
private:
|
private:
|
||||||
typedef std::unordered_set<string> Symbols;
|
std::unordered_map<std::string_view, Symbol> symbols;
|
||||||
Symbols symbols;
|
std::list<string> store;
|
||||||
|
|
||||||
public:
|
public:
|
||||||
Symbol create(std::string_view s)
|
Symbol create(std::string_view s)
|
||||||
{
|
{
|
||||||
// FIXME: avoid allocation if 's' already exists in the symbol table.
|
// Most symbols are looked up more than once, so we trade off insertion performance
|
||||||
std::pair<Symbols::iterator, bool> res = symbols.emplace(std::string(s));
|
// for lookup performance.
|
||||||
return Symbol(&*res.first);
|
// TODO: could probably be done more efficiently with transparent Hash and Equals
|
||||||
|
// on the original implementation using unordered_set
|
||||||
|
auto it = symbols.find(s);
|
||||||
|
if (it != symbols.end()) return it->second;
|
||||||
|
|
||||||
|
const string & rawSym = store.emplace_back(s);
|
||||||
|
return symbols.emplace(rawSym, Symbol(&rawSym)).first->second;
|
||||||
}
|
}
|
||||||
|
|
||||||
size_t size() const
|
size_t size() const
|
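
The new SymbolTable above keys an unordered_map by string_views that point into strings owned by a std::list; list nodes never move, so the views stay valid as the table grows and each symbol is stored exactly once while lookups stay cheap. A self-contained sketch of that interning scheme (simplified types, not the real Symbol class):

    #include <iostream>
    #include <list>
    #include <string>
    #include <string_view>
    #include <unordered_map>

    class Interner {
        std::unordered_map<std::string_view, const std::string *> symbols;
        std::list<std::string> store;   // stable storage: list nodes never reallocate
    public:
        const std::string * intern(std::string_view s) {
            auto it = symbols.find(s);
            if (it != symbols.end()) return it->second;          // fast path: already known
            const std::string & raw = store.emplace_back(s);     // copy the string once
            return symbols.emplace(raw, &raw).first->second;     // key views the stored copy
        }
        size_t size() const { return store.size(); }
    };

    int main() {
        Interner t;
        auto a = t.intern("outPath");
        auto b = t.intern("outPath");
        std::cout << (a == b) << " " << t.size() << "\n";        // "1 1": same pointer, one copy
    }
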
||||||
|
@ -91,7 +98,7 @@ public:
|
||||||
template<typename T>
|
template<typename T>
|
||||||
void dump(T callback)
|
void dump(T callback)
|
||||||
{
|
{
|
||||||
for (auto & s : symbols)
|
for (auto & s : store)
|
||||||
callback(s);
|
callback(s);
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
|
@ -142,7 +142,7 @@ static void printValueAsXML(EvalState & state, bool strict, bool location,
|
||||||
if (!v.lambda.fun->arg.empty()) attrs["name"] = v.lambda.fun->arg;
|
if (!v.lambda.fun->arg.empty()) attrs["name"] = v.lambda.fun->arg;
|
||||||
if (v.lambda.fun->formals->ellipsis) attrs["ellipsis"] = "1";
|
if (v.lambda.fun->formals->ellipsis) attrs["ellipsis"] = "1";
|
||||||
XMLOpenElement _(doc, "attrspat", attrs);
|
XMLOpenElement _(doc, "attrspat", attrs);
|
||||||
for (auto & i : v.lambda.fun->formals->formals)
|
for (auto & i : v.lambda.fun->formals->lexicographicOrder())
|
||||||
doc.writeEmptyElement("attr", singletonAttrs("name", i.name));
|
doc.writeEmptyElement("attr", singletonAttrs("name", i.name));
|
||||||
} else
|
} else
|
||||||
doc.writeEmptyElement("varpat", singletonAttrs("name", v.lambda.fun->arg));
|
doc.writeEmptyElement("varpat", singletonAttrs("name", v.lambda.fun->arg));
|
||||||
|
|
|
@ -10,6 +10,8 @@
|
||||||
|
|
||||||
namespace nix {
|
namespace nix {
|
||||||
|
|
||||||
|
class BindingsBuilder;
|
||||||
|
|
||||||
|
|
||||||
typedef enum {
|
typedef enum {
|
||||||
tInt = 1,
|
tInt = 1,
|
||||||
|
@ -235,6 +237,17 @@ public:
|
||||||
string.context = context;
|
string.context = context;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
void mkString(std::string_view s);
|
||||||
|
|
||||||
|
void mkString(std::string_view s, const PathSet & context);
|
||||||
|
|
||||||
|
void mkStringMove(const char * s, const PathSet & context);
|
||||||
|
|
||||||
|
inline void mkString(const Symbol & s)
|
||||||
|
{
|
||||||
|
mkString(((const std::string &) s).c_str());
|
||||||
|
}
|
||||||
|
|
||||||
inline void mkPath(const char * s)
|
inline void mkPath(const char * s)
|
||||||
{
|
{
|
||||||
clearValue();
|
clearValue();
|
||||||
|
@ -242,6 +255,8 @@ public:
|
||||||
path = s;
|
path = s;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
void mkPath(std::string_view s);
|
||||||
|
|
||||||
inline void mkNull()
|
inline void mkNull()
|
||||||
{
|
{
|
||||||
clearValue();
|
clearValue();
|
||||||
|
@ -255,6 +270,8 @@ public:
|
||||||
attrs = a;
|
attrs = a;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
Value & mkAttrs(BindingsBuilder & bindings);
|
||||||
|
|
||||||
inline void mkList(size_t size)
|
inline void mkList(size_t size)
|
||||||
{
|
{
|
||||||
clearValue();
|
clearValue();
|
||||||
|
@ -344,7 +361,7 @@ public:
|
||||||
return internalType == tList1 ? 1 : internalType == tList2 ? 2 : bigList.size;
|
return internalType == tList1 ? 1 : internalType == tList2 ? 2 : bigList.size;
|
||||||
}
|
}
|
||||||
|
|
||||||
Pos determinePos(const Pos &pos) const;
|
Pos determinePos(const Pos & pos) const;
|
||||||
|
|
||||||
/* Check whether forcing this value requires a trivial amount of
|
/* Check whether forcing this value requires a trivial amount of
|
||||||
computation. In particular, function applications are
|
computation. In particular, function applications are
|
||||||
|
@ -383,45 +400,6 @@ public:
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
// TODO: Remove these static functions, replace call sites with v.mk* instead
|
|
||||||
static inline void mkInt(Value & v, NixInt n)
|
|
||||||
{
|
|
||||||
v.mkInt(n);
|
|
||||||
}
|
|
||||||
|
|
||||||
static inline void mkFloat(Value & v, NixFloat n)
|
|
||||||
{
|
|
||||||
v.mkFloat(n);
|
|
||||||
}
|
|
||||||
|
|
||||||
static inline void mkBool(Value & v, bool b)
|
|
||||||
{
|
|
||||||
v.mkBool(b);
|
|
||||||
}
|
|
||||||
|
|
||||||
static inline void mkNull(Value & v)
|
|
||||||
{
|
|
||||||
v.mkNull();
|
|
||||||
}
|
|
||||||
|
|
||||||
static inline void mkApp(Value & v, Value & left, Value & right)
|
|
||||||
{
|
|
||||||
v.mkApp(&left, &right);
|
|
||||||
}
|
|
||||||
|
|
||||||
static inline void mkString(Value & v, const Symbol & s)
|
|
||||||
{
|
|
||||||
v.mkString(((const string &) s).c_str());
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
void mkString(Value & v, const char * s);
|
|
||||||
|
|
||||||
|
|
||||||
void mkPath(Value & v, const char * s);
|
|
||||||
|
|
||||||
|
|
||||||
#if HAVE_BOEHMGC
|
#if HAVE_BOEHMGC
|
||||||
typedef std::vector<Value *, traceable_allocator<Value *> > ValueVector;
|
typedef std::vector<Value *, traceable_allocator<Value *> > ValueVector;
|
||||||
typedef std::map<Symbol, Value *, std::less<Symbol>, traceable_allocator<std::pair<const Symbol, Value *> > > ValueMap;
|
typedef std::map<Symbol, Value *, std::less<Symbol>, traceable_allocator<std::pair<const Symbol, Value *> > > ValueMap;
|
||||||
|
|
|
@ -67,18 +67,18 @@ DownloadFileResult downloadFile(
|
||||||
storePath = std::move(cached->storePath);
|
storePath = std::move(cached->storePath);
|
||||||
} else {
|
} else {
|
||||||
StringSink sink;
|
StringSink sink;
|
||||||
dumpString(*res.data, sink);
|
dumpString(res.data, sink);
|
||||||
auto hash = hashString(htSHA256, *res.data);
|
auto hash = hashString(htSHA256, res.data);
|
||||||
ValidPathInfo info {
|
ValidPathInfo info {
|
||||||
store->makeFixedOutputPath(FileIngestionMethod::Flat, hash, name),
|
store->makeFixedOutputPath(FileIngestionMethod::Flat, hash, name),
|
||||||
hashString(htSHA256, *sink.s),
|
hashString(htSHA256, sink.s),
|
||||||
};
|
};
|
||||||
info.narSize = sink.s->size();
|
info.narSize = sink.s.size();
|
||||||
info.ca = FixedOutputHash {
|
info.ca = FixedOutputHash {
|
||||||
.method = FileIngestionMethod::Flat,
|
.method = FileIngestionMethod::Flat,
|
||||||
.hash = hash,
|
.hash = hash,
|
||||||
};
|
};
|
||||||
auto source = StringSource { *sink.s };
|
auto source = StringSource(sink.s);
|
||||||
store->addToStore(info, source, NoRepair, NoCheckSigs);
|
store->addToStore(info, source, NoRepair, NoCheckSigs);
|
||||||
storePath = std::move(info.path);
|
storePath = std::move(info.path);
|
||||||
}
|
}
|
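
The hunk above (and many that follow) rely on StringSink::s now being a plain std::string rather than a ref<std::string>, so call sites read sink.s and sink.s.size() instead of *sink.s and sink.s->size(). A simplified stand-in — not the real Sink hierarchy — showing the resulting call pattern:

    #include <iostream>
    #include <string>
    #include <string_view>

    struct StringSink {
        std::string s;                                   // was: ref<std::string> s
        void operator()(std::string_view data) { s.append(data); }
    };

    int main() {
        StringSink sink;
        sink("nix-archive-1");
        std::cout << sink.s << " (" << sink.s.size() << " bytes)\n";   // no dereference needed
    }
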
||||||
|
|
|
@ -31,7 +31,7 @@ BinaryCacheStore::BinaryCacheStore(const Params & params)
|
||||||
|
|
||||||
StringSink sink;
|
StringSink sink;
|
||||||
sink << narVersionMagic1;
|
sink << narVersionMagic1;
|
||||||
narMagic = *sink.s;
|
narMagic = sink.s;
|
||||||
}
|
}
|
||||||
|
|
||||||
void BinaryCacheStore::init()
|
void BinaryCacheStore::init()
|
||||||
|
@ -68,7 +68,7 @@ void BinaryCacheStore::upsertFile(const std::string & path,
|
||||||
}
|
}
|
||||||
|
|
||||||
void BinaryCacheStore::getFile(const std::string & path,
|
void BinaryCacheStore::getFile(const std::string & path,
|
||||||
Callback<std::shared_ptr<std::string>> callback) noexcept
|
Callback<std::optional<std::string>> callback) noexcept
|
||||||
{
|
{
|
||||||
try {
|
try {
|
||||||
callback(getFile(path));
|
callback(getFile(path));
|
||||||
|
@ -77,9 +77,9 @@ void BinaryCacheStore::getFile(const std::string & path,
|
||||||
|
|
||||||
void BinaryCacheStore::getFile(const std::string & path, Sink & sink)
|
void BinaryCacheStore::getFile(const std::string & path, Sink & sink)
|
||||||
{
|
{
|
||||||
std::promise<std::shared_ptr<std::string>> promise;
|
std::promise<std::optional<std::string>> promise;
|
||||||
getFile(path,
|
getFile(path,
|
||||||
{[&](std::future<std::shared_ptr<std::string>> result) {
|
{[&](std::future<std::optional<std::string>> result) {
|
||||||
try {
|
try {
|
||||||
promise.set_value(result.get());
|
promise.set_value(result.get());
|
||||||
} catch (...) {
|
} catch (...) {
|
||||||
|
@ -89,15 +89,15 @@ void BinaryCacheStore::getFile(const std::string & path, Sink & sink)
|
||||||
sink(*promise.get_future().get());
|
sink(*promise.get_future().get());
|
||||||
}
|
}
|
||||||
|
|
||||||
std::shared_ptr<std::string> BinaryCacheStore::getFile(const std::string & path)
|
std::optional<std::string> BinaryCacheStore::getFile(const std::string & path)
|
||||||
{
|
{
|
||||||
StringSink sink;
|
StringSink sink;
|
||||||
try {
|
try {
|
||||||
getFile(path, sink);
|
getFile(path, sink);
|
||||||
} catch (NoSuchBinaryCacheFile &) {
|
} catch (NoSuchBinaryCacheFile &) {
|
||||||
return nullptr;
|
return std::nullopt;
|
||||||
}
|
}
|
||||||
return sink.s;
|
return std::move(sink.s);
|
||||||
}
|
}
|
||||||
|
|
||||||
std::string BinaryCacheStore::narInfoFileFor(const StorePath & storePath)
|
std::string BinaryCacheStore::narInfoFileFor(const StorePath & storePath)
|
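
getFile and getBuildLog now report a missing file as std::nullopt instead of a null shared_ptr, and return the contents by value. A minimal self-contained sketch of the before/after calling convention (function and path names are placeholders):

    #include <iostream>
    #include <optional>
    #include <string>

    // was: std::shared_ptr<std::string> getFile(...), returning nullptr when absent
    std::optional<std::string> getFile(const std::string & path) {
        if (path == "missing") return std::nullopt;
        return "contents of " + path;
    }

    int main() {
        if (auto data = getFile("log/abc123"))
            std::cout << *data << "\n";
        else
            std::cout << "not found\n";
    }
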
||||||
|
@ -367,11 +367,11 @@ void BinaryCacheStore::queryPathInfoUncached(const StorePath & storePath,
|
||||||
auto callbackPtr = std::make_shared<decltype(callback)>(std::move(callback));
|
auto callbackPtr = std::make_shared<decltype(callback)>(std::move(callback));
|
||||||
|
|
||||||
getFile(narInfoFile,
|
getFile(narInfoFile,
|
||||||
{[=](std::future<std::shared_ptr<std::string>> fut) {
|
{[=](std::future<std::optional<std::string>> fut) {
|
||||||
try {
|
try {
|
||||||
auto data = fut.get();
|
auto data = fut.get();
|
||||||
|
|
||||||
if (!data) return (*callbackPtr)(nullptr);
|
if (!data) return (*callbackPtr)({});
|
||||||
|
|
||||||
stats.narInfoRead++;
|
stats.narInfoRead++;
|
||||||
|
|
||||||
|
@ -429,7 +429,7 @@ StorePath BinaryCacheStore::addTextToStore(const string & name, const string & s
|
||||||
|
|
||||||
StringSink sink;
|
StringSink sink;
|
||||||
dumpString(s, sink);
|
dumpString(s, sink);
|
||||||
auto source = StringSource { *sink.s };
|
StringSource source(sink.s);
|
||||||
return addToStoreCommon(source, repair, CheckSigs, [&](HashResult nar) {
|
return addToStoreCommon(source, repair, CheckSigs, [&](HashResult nar) {
|
||||||
ValidPathInfo info { path, nar.first };
|
ValidPathInfo info { path, nar.first };
|
||||||
info.narSize = nar.second;
|
info.narSize = nar.second;
|
||||||
|
@ -446,11 +446,11 @@ void BinaryCacheStore::queryRealisationUncached(const DrvOutput & id,
|
||||||
|
|
||||||
auto callbackPtr = std::make_shared<decltype(callback)>(std::move(callback));
|
auto callbackPtr = std::make_shared<decltype(callback)>(std::move(callback));
|
||||||
|
|
||||||
Callback<std::shared_ptr<std::string>> newCallback = {
|
Callback<std::optional<std::string>> newCallback = {
|
||||||
[=](std::future<std::shared_ptr<std::string>> fut) {
|
[=](std::future<std::optional<std::string>> fut) {
|
||||||
try {
|
try {
|
||||||
auto data = fut.get();
|
auto data = fut.get();
|
||||||
if (!data) return (*callbackPtr)(nullptr);
|
if (!data) return (*callbackPtr)({});
|
||||||
|
|
||||||
auto realisation = Realisation::fromJSON(
|
auto realisation = Realisation::fromJSON(
|
||||||
nlohmann::json::parse(*data), outputInfoFilePath);
|
nlohmann::json::parse(*data), outputInfoFilePath);
|
||||||
|
@ -490,7 +490,7 @@ void BinaryCacheStore::addSignatures(const StorePath & storePath, const StringSe
|
||||||
writeNarInfo(narInfo);
|
writeNarInfo(narInfo);
|
||||||
}
|
}
|
||||||
|
|
||||||
std::shared_ptr<std::string> BinaryCacheStore::getBuildLog(const StorePath & path)
|
std::optional<std::string> BinaryCacheStore::getBuildLog(const StorePath & path)
|
||||||
{
|
{
|
||||||
auto drvPath = path;
|
auto drvPath = path;
|
||||||
|
|
||||||
|
@ -498,10 +498,10 @@ std::shared_ptr<std::string> BinaryCacheStore::getBuildLog(const StorePath & pat
|
||||||
try {
|
try {
|
||||||
auto info = queryPathInfo(path);
|
auto info = queryPathInfo(path);
|
||||||
// FIXME: add a "Log" field to .narinfo
|
// FIXME: add a "Log" field to .narinfo
|
||||||
if (!info->deriver) return nullptr;
|
if (!info->deriver) return std::nullopt;
|
||||||
drvPath = *info->deriver;
|
drvPath = *info->deriver;
|
||||||
} catch (InvalidPath &) {
|
} catch (InvalidPath &) {
|
||||||
return nullptr;
|
return std::nullopt;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -512,4 +512,14 @@ std::shared_ptr<std::string> BinaryCacheStore::getBuildLog(const StorePath & pat
|
||||||
return getFile(logPath);
|
return getFile(logPath);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
void BinaryCacheStore::addBuildLog(const StorePath & drvPath, std::string_view log)
|
||||||
|
{
|
||||||
|
assert(drvPath.isDerivation());
|
||||||
|
|
||||||
|
upsertFile(
|
||||||
|
"log/" + std::string(drvPath.to_string()),
|
||||||
|
(std::string) log, // FIXME: don't copy
|
||||||
|
"text/plain; charset=utf-8");
|
||||||
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -51,6 +51,7 @@ public:
|
||||||
const std::string & mimeType) = 0;
|
const std::string & mimeType) = 0;
|
||||||
|
|
||||||
void upsertFile(const std::string & path,
|
void upsertFile(const std::string & path,
|
||||||
|
// FIXME: use std::string_view
|
||||||
std::string && data,
|
std::string && data,
|
||||||
const std::string & mimeType);
|
const std::string & mimeType);
|
||||||
|
|
||||||
|
@ -62,10 +63,11 @@ public:
|
||||||
|
|
||||||
/* Fetch the specified file and call the specified callback with
|
/* Fetch the specified file and call the specified callback with
|
||||||
the result. A subclass may implement this asynchronously. */
|
the result. A subclass may implement this asynchronously. */
|
||||||
virtual void getFile(const std::string & path,
|
virtual void getFile(
|
||||||
Callback<std::shared_ptr<std::string>> callback) noexcept;
|
const std::string & path,
|
||||||
|
Callback<std::optional<std::string>> callback) noexcept;
|
||||||
|
|
||||||
std::shared_ptr<std::string> getFile(const std::string & path);
|
std::optional<std::string> getFile(const std::string & path);
|
||||||
|
|
||||||
public:
|
public:
|
||||||
|
|
||||||
|
@ -117,7 +119,9 @@ public:
|
||||||
|
|
||||||
void addSignatures(const StorePath & storePath, const StringSet & sigs) override;
|
void addSignatures(const StorePath & storePath, const StringSet & sigs) override;
|
||||||
|
|
||||||
std::shared_ptr<std::string> getBuildLog(const StorePath & path) override;
|
std::optional<std::string> getBuildLog(const StorePath & path) override;
|
||||||
|
|
||||||
|
void addBuildLog(const StorePath & drvPath, std::string_view log) override;
|
||||||
|
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
|
@ -194,7 +194,7 @@ void DerivationGoal::loadDerivation()
|
||||||
assert(worker.evalStore.isValidPath(drvPath));
|
assert(worker.evalStore.isValidPath(drvPath));
|
||||||
|
|
||||||
/* Get the derivation. */
|
/* Get the derivation. */
|
||||||
drv = std::make_unique<Derivation>(worker.evalStore.derivationFromPath(drvPath));
|
drv = std::make_unique<Derivation>(worker.evalStore.readDerivation(drvPath));
|
||||||
|
|
||||||
haveDerivation();
|
haveDerivation();
|
||||||
}
|
}
|
||||||
|
@ -278,7 +278,7 @@ void DerivationGoal::outputsSubstitutionTried()
|
||||||
|
|
||||||
if (nrFailed > 0 && nrFailed > nrNoSubstituters + nrIncompleteClosure && !settings.tryFallback) {
|
if (nrFailed > 0 && nrFailed > nrNoSubstituters + nrIncompleteClosure && !settings.tryFallback) {
|
||||||
done(BuildResult::TransientFailure,
|
done(BuildResult::TransientFailure,
|
||||||
fmt("some substitutes for the outputs of derivation '%s' failed (usually happens due to networking issues); try '--fallback' to build derivation from source ",
|
Error("some substitutes for the outputs of derivation '%s' failed (usually happens due to networking issues); try '--fallback' to build derivation from source ",
|
||||||
worker.store.printStorePath(drvPath)));
|
worker.store.printStorePath(drvPath)));
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
|
@ -260,6 +260,7 @@ void LocalDerivationGoal::cleanupHookFinally()
|
||||||
void LocalDerivationGoal::cleanupPreChildKill()
|
void LocalDerivationGoal::cleanupPreChildKill()
|
||||||
{
|
{
|
||||||
sandboxMountNamespace = -1;
|
sandboxMountNamespace = -1;
|
||||||
|
sandboxUserNamespace = -1;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@ -906,11 +907,14 @@ void LocalDerivationGoal::startBuilder()
|
||||||
"nobody:x:65534:65534:Nobody:/:/noshell\n",
|
"nobody:x:65534:65534:Nobody:/:/noshell\n",
|
||||||
sandboxUid(), sandboxGid(), settings.sandboxBuildDir));
|
sandboxUid(), sandboxGid(), settings.sandboxBuildDir));
|
||||||
|
|
||||||
/* Save the mount namespace of the child. We have to do this
|
/* Save the mount- and user namespace of the child. We have to do this
|
||||||
*before* the child does a chroot. */
|
*before* the child does a chroot. */
|
||||||
sandboxMountNamespace = open(fmt("/proc/%d/ns/mnt", (pid_t) pid).c_str(), O_RDONLY);
|
sandboxMountNamespace = open(fmt("/proc/%d/ns/mnt", (pid_t) pid).c_str(), O_RDONLY);
|
||||||
if (sandboxMountNamespace.get() == -1)
|
if (sandboxMountNamespace.get() == -1)
|
||||||
throw SysError("getting sandbox mount namespace");
|
throw SysError("getting sandbox mount namespace");
|
||||||
|
sandboxUserNamespace = open(fmt("/proc/%d/ns/user", (pid_t) pid).c_str(), O_RDONLY);
|
||||||
|
if (sandboxUserNamespace.get() == -1)
|
||||||
|
throw SysError("getting sandbox user namespace");
|
||||||
|
|
||||||
/* Signal the builder that we've updated its user namespace. */
|
/* Signal the builder that we've updated its user namespace. */
|
||||||
writeFull(userNamespaceSync.writeSide.get(), "1");
|
writeFull(userNamespaceSync.writeSide.get(), "1");
|
||||||
|
@ -1438,6 +1442,9 @@ void LocalDerivationGoal::addDependency(const StorePath & path)
|
||||||
child process.*/
|
child process.*/
|
||||||
Pid child(startProcess([&]() {
|
Pid child(startProcess([&]() {
|
||||||
|
|
||||||
|
if (usingUserNamespace && (setns(sandboxUserNamespace.get(), 0) == -1))
|
||||||
|
throw SysError("entering sandbox user namespace");
|
||||||
|
|
||||||
if (setns(sandboxMountNamespace.get(), 0) == -1)
|
if (setns(sandboxMountNamespace.get(), 0) == -1)
|
||||||
throw SysError("entering sandbox mount namespace");
|
throw SysError("entering sandbox mount namespace");
|
||||||
|
|
||||||
|
@ -2219,8 +2226,8 @@ void LocalDerivationGoal::registerOutputs()
|
||||||
StringSink sink;
|
StringSink sink;
|
||||||
dumpPath(actualPath, sink);
|
dumpPath(actualPath, sink);
|
||||||
deletePath(actualPath);
|
deletePath(actualPath);
|
||||||
sink.s = make_ref<std::string>(rewriteStrings(*sink.s, outputRewrites));
|
sink.s = rewriteStrings(sink.s, outputRewrites);
|
||||||
StringSource source(*sink.s);
|
StringSource source(sink.s);
|
||||||
restorePath(actualPath, source);
|
restorePath(actualPath, source);
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
@ -2288,7 +2295,7 @@ void LocalDerivationGoal::registerOutputs()
|
||||||
StringSink sink;
|
StringSink sink;
|
||||||
dumpPath(actualPath, sink);
|
dumpPath(actualPath, sink);
|
||||||
RewritingSink rsink2(oldHashPart, std::string(finalPath.hashPart()), nextSink);
|
RewritingSink rsink2(oldHashPart, std::string(finalPath.hashPart()), nextSink);
|
||||||
rsink2(*sink.s);
|
rsink2(sink.s);
|
||||||
rsink2.flush();
|
rsink2.flush();
|
||||||
});
|
});
|
||||||
Path tmpPath = actualPath + ".tmp";
|
Path tmpPath = actualPath + ".tmp";
|
||||||
|
@ -2460,7 +2467,7 @@ void LocalDerivationGoal::registerOutputs()
|
||||||
}
|
}
|
||||||
|
|
||||||
if (curRound == nrRounds) {
|
if (curRound == nrRounds) {
|
||||||
localStore.optimisePath(actualPath); // FIXME: combine with scanForReferences()
|
localStore.optimisePath(actualPath, NoRepair); // FIXME: combine with scanForReferences()
|
||||||
worker.markContentsGood(newInfo.path);
|
worker.markContentsGood(newInfo.path);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -27,9 +27,10 @@ struct LocalDerivationGoal : public DerivationGoal
|
||||||
/* Pipe for synchronising updates to the builder namespaces. */
|
/* Pipe for synchronising updates to the builder namespaces. */
|
||||||
Pipe userNamespaceSync;
|
Pipe userNamespaceSync;
|
||||||
|
|
||||||
/* The mount namespace of the builder, used to add additional
|
/* The mount namespace and user namespace of the builder, used to add additional
|
||||||
paths to the sandbox as a result of recursive Nix calls. */
|
paths to the sandbox as a result of recursive Nix calls. */
|
||||||
AutoCloseFD sandboxMountNamespace;
|
AutoCloseFD sandboxMountNamespace;
|
||||||
|
AutoCloseFD sandboxUserNamespace;
|
||||||
|
|
||||||
/* On Linux, whether we're doing the build in its own user
|
/* On Linux, whether we're doing the build in its own user
|
||||||
namespace. */
|
namespace. */
|
||||||
|
|
|
@ -138,8 +138,8 @@ void PathSubstitutionGoal::tryNext()
|
||||||
only after we've downloaded the path. */
|
only after we've downloaded the path. */
|
||||||
if (!sub->isTrusted && worker.store.pathInfoIsUntrusted(*info))
|
if (!sub->isTrusted && worker.store.pathInfoIsUntrusted(*info))
|
||||||
{
|
{
|
||||||
warn("substituter '%s' does not have a valid signature for path '%s'",
|
warn("the substitute for '%s' from '%s' is not signed by any of the keys in 'trusted-public-keys'",
|
||||||
sub->getUri(), worker.store.printStorePath(storePath));
|
worker.store.printStorePath(storePath), sub->getUri());
|
||||||
tryNext();
|
tryNext();
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
|
@ -69,7 +69,7 @@ struct TunnelLogger : public Logger
|
||||||
|
|
||||||
StringSink buf;
|
StringSink buf;
|
||||||
buf << STDERR_NEXT << (fs.s + "\n");
|
buf << STDERR_NEXT << (fs.s + "\n");
|
||||||
enqueueMsg(*buf.s);
|
enqueueMsg(buf.s);
|
||||||
}
|
}
|
||||||
|
|
||||||
void logEI(const ErrorInfo & ei) override
|
void logEI(const ErrorInfo & ei) override
|
||||||
|
@ -81,7 +81,7 @@ struct TunnelLogger : public Logger
|
||||||
|
|
||||||
StringSink buf;
|
StringSink buf;
|
||||||
buf << STDERR_NEXT << oss.str();
|
buf << STDERR_NEXT << oss.str();
|
||||||
enqueueMsg(*buf.s);
|
enqueueMsg(buf.s);
|
||||||
}
|
}
|
||||||
|
|
||||||
/* startWork() means that we're starting an operation for which we
|
/* startWork() means that we're starting an operation for which we
|
||||||
|
@ -129,7 +129,7 @@ struct TunnelLogger : public Logger
|
||||||
|
|
||||||
StringSink buf;
|
StringSink buf;
|
||||||
buf << STDERR_START_ACTIVITY << act << lvl << type << s << fields << parent;
|
buf << STDERR_START_ACTIVITY << act << lvl << type << s << fields << parent;
|
||||||
enqueueMsg(*buf.s);
|
enqueueMsg(buf.s);
|
||||||
}
|
}
|
||||||
|
|
||||||
void stopActivity(ActivityId act) override
|
void stopActivity(ActivityId act) override
|
||||||
|
@ -137,7 +137,7 @@ struct TunnelLogger : public Logger
|
||||||
if (GET_PROTOCOL_MINOR(clientVersion) < 20) return;
|
if (GET_PROTOCOL_MINOR(clientVersion) < 20) return;
|
||||||
StringSink buf;
|
StringSink buf;
|
||||||
buf << STDERR_STOP_ACTIVITY << act;
|
buf << STDERR_STOP_ACTIVITY << act;
|
||||||
enqueueMsg(*buf.s);
|
enqueueMsg(buf.s);
|
||||||
}
|
}
|
||||||
|
|
||||||
void result(ActivityId act, ResultType type, const Fields & fields) override
|
void result(ActivityId act, ResultType type, const Fields & fields) override
|
||||||
|
@ -145,7 +145,7 @@ struct TunnelLogger : public Logger
|
||||||
if (GET_PROTOCOL_MINOR(clientVersion) < 20) return;
|
if (GET_PROTOCOL_MINOR(clientVersion) < 20) return;
|
||||||
StringSink buf;
|
StringSink buf;
|
||||||
buf << STDERR_RESULT << act << type << fields;
|
buf << STDERR_RESULT << act << type << fields;
|
||||||
enqueueMsg(*buf.s);
|
enqueueMsg(buf.s);
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -468,10 +468,12 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
|
||||||
dontCheckSigs = false;
|
dontCheckSigs = false;
|
||||||
|
|
||||||
logger->startWork();
|
logger->startWork();
|
||||||
FramedSource source(from);
|
{
|
||||||
store->addMultipleToStore(source,
|
FramedSource source(from);
|
||||||
RepairFlag{repair},
|
store->addMultipleToStore(source,
|
||||||
dontCheckSigs ? NoCheckSigs : CheckSigs);
|
RepairFlag{repair},
|
||||||
|
dontCheckSigs ? NoCheckSigs : CheckSigs);
|
||||||
|
}
|
||||||
logger->stopWork();
|
logger->stopWork();
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
|
@ -852,14 +854,14 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
|
||||||
|
|
||||||
else {
|
else {
|
||||||
std::unique_ptr<Source> source;
|
std::unique_ptr<Source> source;
|
||||||
|
StringSink saved;
|
||||||
if (GET_PROTOCOL_MINOR(clientVersion) >= 21)
|
if (GET_PROTOCOL_MINOR(clientVersion) >= 21)
|
||||||
source = std::make_unique<TunnelSource>(from, to);
|
source = std::make_unique<TunnelSource>(from, to);
|
||||||
else {
|
else {
|
||||||
StringSink saved;
|
|
||||||
TeeSource tee { from, saved };
|
TeeSource tee { from, saved };
|
||||||
ParseSink ether;
|
ParseSink ether;
|
||||||
parseDump(ether, tee);
|
parseDump(ether, tee);
|
||||||
source = std::make_unique<StringSource>(std::move(*saved.s));
|
source = std::make_unique<StringSource>(saved.s);
|
||||||
}
|
}
|
||||||
|
|
||||||
logger->startWork();
|
logger->startWork();
|
||||||
|
@ -920,6 +922,22 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
case wopAddBuildLog: {
|
||||||
|
StorePath path{readString(from)};
|
||||||
|
logger->startWork();
|
||||||
|
if (!trusted)
|
||||||
|
throw Error("you are not privileged to add logs");
|
||||||
|
{
|
||||||
|
FramedSource source(from);
|
||||||
|
StringSink sink;
|
||||||
|
source.drainInto(sink);
|
||||||
|
store->addBuildLog(path, sink.s);
|
||||||
|
}
|
||||||
|
logger->stopWork();
|
||||||
|
to << 1;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
default:
|
default:
|
||||||
throw Error("invalid operation %1%", op);
|
throw Error("invalid operation %1%", op);
|
||||||
}
|
}
|
||||||
|
@ -963,7 +981,11 @@ void processConnection(
|
||||||
readInt(from);
|
readInt(from);
|
||||||
}
|
}
|
||||||
|
|
||||||
readInt(from); // obsolete reserveSpace
|
if (GET_PROTOCOL_MINOR(clientVersion) >= 11)
|
||||||
|
readInt(from); // obsolete reserveSpace
|
||||||
|
|
||||||
|
if (GET_PROTOCOL_MINOR(clientVersion) >= 33)
|
||||||
|
to << nixVersion;
|
||||||
|
|
||||||
/* Send startup error messages to the client. */
|
/* Send startup error messages to the client. */
|
||||||
tunnelLogger->startWork();
|
tunnelLogger->startWork();
|
||||||
|
|
|
@ -4,6 +4,7 @@
|
||||||
#include "util.hh"
|
#include "util.hh"
|
||||||
#include "worker-protocol.hh"
|
#include "worker-protocol.hh"
|
||||||
#include "fs-accessor.hh"
|
#include "fs-accessor.hh"
|
||||||
|
#include <boost/container/small_vector.hpp>
|
||||||
|
|
||||||
namespace nix {
|
namespace nix {
|
||||||
|
|
||||||
|
@ -272,7 +273,9 @@ Derivation parseDerivation(const Store & store, std::string && s, std::string_vi
|
||||||
|
|
||||||
static void printString(string & res, std::string_view s)
|
static void printString(string & res, std::string_view s)
|
||||||
{
|
{
|
||||||
char buf[s.size() * 2 + 2];
|
boost::container::small_vector<char, 64 * 1024> buffer;
|
||||||
|
buffer.reserve(s.size() * 2 + 2);
|
||||||
|
char * buf = buffer.data();
|
||||||
char * p = buf;
|
char * p = buf;
|
||||||
*p++ = '"';
|
*p++ = '"';
|
||||||
for (auto c : s)
|
for (auto c : s)
|
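
The printString change above swaps a variable-length stack array (char buf[s.size() * 2 + 2], a non-standard construct whose stack usage grows with the input) for boost::container::small_vector, which keeps a small inline buffer and falls back to the heap beyond that. A minimal usage sketch, assuming Boost is available as the newly added #include suggests:

    #include <boost/container/small_vector.hpp>
    #include <iostream>

    int main() {
        // Inline capacity of 8 elements; only spills to the heap past that.
        boost::container::small_vector<int, 8> v;
        for (int i = 0; i < 20; ++i) v.push_back(i);
        std::cout << v.size() << "\n";   // 20 (heap-backed by now)
    }
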
||||||
|
@ -696,10 +699,10 @@ void writeDerivation(Sink & out, const Store & store, const BasicDerivation & dr
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
std::string hashPlaceholder(const std::string & outputName)
|
std::string hashPlaceholder(const std::string_view outputName)
|
||||||
{
|
{
|
||||||
// FIXME: memoize?
|
// FIXME: memoize?
|
||||||
return "/" + hashString(htSHA256, "nix-output:" + outputName).to_string(Base32, false);
|
return "/" + hashString(htSHA256, concatStrings("nix-output:", outputName)).to_string(Base32, false);
|
||||||
}
|
}
|
||||||
|
|
||||||
std::string downstreamPlaceholder(const Store & store, const StorePath & drvPath, std::string_view outputName)
|
std::string downstreamPlaceholder(const Store & store, const StorePath & drvPath, std::string_view outputName)
|
||||||
|
|
|
@ -236,7 +236,7 @@ void writeDerivation(Sink & out, const Store & store, const BasicDerivation & dr
|
||||||
It is used as a placeholder to allow derivations to refer to their
|
It is used as a placeholder to allow derivations to refer to their
|
||||||
own outputs without needing to use the hash of a derivation in
|
own outputs without needing to use the hash of a derivation in
|
||||||
itself, making the hash near-impossible to calculate. */
|
itself, making the hash near-impossible to calculate. */
|
||||||
std::string hashPlaceholder(const std::string & outputName);
|
std::string hashPlaceholder(const std::string_view outputName);
|
||||||
|
|
||||||
/* This creates an opaque and almost certainly unique string
|
/* This creates an opaque and almost certainly unique string
|
||||||
deterministically from a derivation path and output name.
|
deterministically from a derivation path and output name.
|
||||||
|
|
|
@ -75,20 +75,20 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs)
|
||||||
|
|
||||||
auto references = worker_proto::read(*this, source, Phantom<StorePathSet> {});
|
auto references = worker_proto::read(*this, source, Phantom<StorePathSet> {});
|
||||||
auto deriver = readString(source);
|
auto deriver = readString(source);
|
||||||
auto narHash = hashString(htSHA256, *saved.s);
|
auto narHash = hashString(htSHA256, saved.s);
|
||||||
|
|
||||||
ValidPathInfo info { path, narHash };
|
ValidPathInfo info { path, narHash };
|
||||||
if (deriver != "")
|
if (deriver != "")
|
||||||
info.deriver = parseStorePath(deriver);
|
info.deriver = parseStorePath(deriver);
|
||||||
info.references = references;
|
info.references = references;
|
||||||
info.narSize = saved.s->size();
|
info.narSize = saved.s.size();
|
||||||
|
|
||||||
// Ignore optional legacy signature.
|
// Ignore optional legacy signature.
|
||||||
if (readInt(source) == 1)
|
if (readInt(source) == 1)
|
||||||
readString(source);
|
readString(source);
|
||||||
|
|
||||||
// Can't use underlying source, which would have been exhausted
|
// Can't use underlying source, which would have been exhausted
|
||||||
auto source = StringSource { *saved.s };
|
auto source = StringSource(saved.s);
|
||||||
addToStore(info, source, NoRepair, checkSigs);
|
addToStore(info, source, NoRepair, checkSigs);
|
||||||
|
|
||||||
res.push_back(info.path);
|
res.push_back(info.path);
|
||||||
|
|
|
@ -106,7 +106,7 @@ struct curlFileTransfer : public FileTransfer
|
||||||
this->request.dataCallback(data);
|
this->request.dataCallback(data);
|
||||||
}
|
}
|
||||||
} else
|
} else
|
||||||
this->result.data->append(data);
|
this->result.data.append(data);
|
||||||
})
|
})
|
||||||
{
|
{
|
||||||
if (!request.expectedETag.empty())
|
if (!request.expectedETag.empty())
|
||||||
|
@ -195,7 +195,7 @@ struct curlFileTransfer : public FileTransfer
|
||||||
std::smatch match;
|
std::smatch match;
|
||||||
if (std::regex_match(line, match, statusLine)) {
|
if (std::regex_match(line, match, statusLine)) {
|
||||||
result.etag = "";
|
result.etag = "";
|
||||||
result.data = std::make_shared<std::string>();
|
result.data.clear();
|
||||||
result.bodySize = 0;
|
result.bodySize = 0;
|
||||||
statusMsg = trim(match[1]);
|
statusMsg = trim(match[1]);
|
||||||
acceptRanges = false;
|
acceptRanges = false;
|
||||||
|
@ -340,7 +340,7 @@ struct curlFileTransfer : public FileTransfer
|
||||||
if (writtenToSink)
|
if (writtenToSink)
|
||||||
curl_easy_setopt(req, CURLOPT_RESUME_FROM_LARGE, writtenToSink);
|
curl_easy_setopt(req, CURLOPT_RESUME_FROM_LARGE, writtenToSink);
|
||||||
|
|
||||||
result.data = std::make_shared<std::string>();
|
result.data.clear();
|
||||||
result.bodySize = 0;
|
result.bodySize = 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -434,21 +434,21 @@ struct curlFileTransfer : public FileTransfer
|
||||||
|
|
||||||
attempt++;
|
attempt++;
|
||||||
|
|
||||||
std::shared_ptr<std::string> response;
|
std::optional<std::string> response;
|
||||||
if (errorSink)
|
if (errorSink)
|
||||||
response = errorSink->s;
|
response = std::move(errorSink->s);
|
||||||
auto exc =
|
auto exc =
|
||||||
code == CURLE_ABORTED_BY_CALLBACK && _isInterrupted
|
code == CURLE_ABORTED_BY_CALLBACK && _isInterrupted
|
||||||
? FileTransferError(Interrupted, response, "%s of '%s' was interrupted", request.verb(), request.uri)
|
? FileTransferError(Interrupted, std::move(response), "%s of '%s' was interrupted", request.verb(), request.uri)
|
||||||
: httpStatus != 0
|
: httpStatus != 0
|
||||||
? FileTransferError(err,
|
? FileTransferError(err,
|
||||||
response,
|
std::move(response),
|
||||||
fmt("unable to %s '%s': HTTP error %d ('%s')",
|
fmt("unable to %s '%s': HTTP error %d ('%s')",
|
||||||
request.verb(), request.uri, httpStatus, statusMsg)
|
request.verb(), request.uri, httpStatus, statusMsg)
|
||||||
+ (code == CURLE_OK ? "" : fmt(" (curl error: %s)", curl_easy_strerror(code)))
|
+ (code == CURLE_OK ? "" : fmt(" (curl error: %s)", curl_easy_strerror(code)))
|
||||||
)
|
)
|
||||||
: FileTransferError(err,
|
: FileTransferError(err,
|
||||||
response,
|
std::move(response),
|
||||||
fmt("unable to %s '%s': %s (%d)",
|
fmt("unable to %s '%s': %s (%d)",
|
||||||
request.verb(), request.uri, curl_easy_strerror(code), code));
|
request.verb(), request.uri, curl_easy_strerror(code), code));
|
||||||
|
|
||||||
|
@ -705,7 +705,7 @@ struct curlFileTransfer : public FileTransfer
|
||||||
FileTransferResult res;
|
FileTransferResult res;
|
||||||
if (!s3Res.data)
|
if (!s3Res.data)
|
||||||
throw FileTransferError(NotFound, nullptr, "S3 object '%s' does not exist", request.uri);
|
throw FileTransferError(NotFound, nullptr, "S3 object '%s' does not exist", request.uri);
|
||||||
res.data = s3Res.data;
|
res.data = std::move(*s3Res.data);
|
||||||
callback(std::move(res));
|
callback(std::move(res));
|
||||||
#else
|
#else
|
||||||
throw nix::Error("cannot download '%s' because Nix is not built with S3 support", request.uri);
|
throw nix::Error("cannot download '%s' because Nix is not built with S3 support", request.uri);
|
||||||
|
@ -859,7 +859,7 @@ void FileTransfer::download(FileTransferRequest && request, Sink & sink)
|
||||||
}
|
}
|
||||||
|
|
||||||
template<typename... Args>
|
template<typename... Args>
|
||||||
FileTransferError::FileTransferError(FileTransfer::Error error, std::shared_ptr<string> response, const Args & ... args)
|
FileTransferError::FileTransferError(FileTransfer::Error error, std::optional<std::string> response, const Args & ... args)
|
||||||
: Error(args...), error(error), response(response)
|
: Error(args...), error(error), response(response)
|
||||||
{
|
{
|
||||||
const auto hf = hintfmt(args...);
|
const auto hf = hintfmt(args...);
|
||||||
|
|
|
@ -59,7 +59,7 @@ struct FileTransferRequest
|
||||||
unsigned int baseRetryTimeMs = 250;
|
unsigned int baseRetryTimeMs = 250;
|
||||||
ActivityId parentAct;
|
ActivityId parentAct;
|
||||||
bool decompress = true;
|
bool decompress = true;
|
||||||
std::shared_ptr<std::string> data;
|
std::optional<std::string> data;
|
||||||
std::string mimeType;
|
std::string mimeType;
|
||||||
std::function<void(std::string_view data)> dataCallback;
|
std::function<void(std::string_view data)> dataCallback;
|
||||||
|
|
||||||
|
@ -77,7 +77,7 @@ struct FileTransferResult
|
||||||
bool cached = false;
|
bool cached = false;
|
||||||
std::string etag;
|
std::string etag;
|
||||||
std::string effectiveUri;
|
std::string effectiveUri;
|
||||||
std::shared_ptr<std::string> data;
|
std::string data;
|
||||||
uint64_t bodySize = 0;
|
uint64_t bodySize = 0;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -119,10 +119,10 @@ class FileTransferError : public Error
|
||||||
{
|
{
|
||||||
public:
|
public:
|
||||||
FileTransfer::Error error;
|
FileTransfer::Error error;
|
||||||
std::shared_ptr<string> response; // intentionally optional
|
std::optional<string> response; // intentionally optional
|
||||||
|
|
||||||
template<typename... Args>
|
template<typename... Args>
|
||||||
FileTransferError(FileTransfer::Error error, std::shared_ptr<string> response, const Args & ... args);
|
FileTransferError(FileTransfer::Error error, std::optional<string> response, const Args & ... args);
|
||||||
|
|
||||||
virtual const char* sname() const override { return "FileTransferError"; }
|
virtual const char* sname() const override { return "FileTransferError"; }
|
||||||
};
|
};
|
||||||
|
|
|
@ -966,6 +966,13 @@ public:
|
||||||
|
|
||||||
Setting<bool> acceptFlakeConfig{this, false, "accept-flake-config",
|
Setting<bool> acceptFlakeConfig{this, false, "accept-flake-config",
|
||||||
"Whether to accept nix configuration from a flake without prompting."};
|
"Whether to accept nix configuration from a flake without prompting."};
|
||||||
|
|
||||||
|
Setting<std::string> commitLockFileSummary{
|
||||||
|
this, "", "commit-lockfile-summary",
|
||||||
|
R"(
|
||||||
|
The commit summary to use when committing changed flake lock files. If
|
||||||
|
empty, the summary is generated based on the action performed.
|
||||||
|
)"};
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -126,7 +126,7 @@ protected:
|
||||||
const std::string & mimeType) override
|
const std::string & mimeType) override
|
||||||
{
|
{
|
||||||
auto req = makeRequest(path);
|
auto req = makeRequest(path);
|
||||||
req.data = std::make_shared<string>(StreamToSourceAdapter(istream).drain());
|
req.data = StreamToSourceAdapter(istream).drain();
|
||||||
req.mimeType = mimeType;
|
req.mimeType = mimeType;
|
||||||
try {
|
try {
|
||||||
getFileTransfer()->upload(req);
|
getFileTransfer()->upload(req);
|
||||||
|
@ -159,7 +159,7 @@ protected:
|
||||||
}
|
}
|
||||||
|
|
||||||
void getFile(const std::string & path,
|
void getFile(const std::string & path,
|
||||||
Callback<std::shared_ptr<std::string>> callback) noexcept override
|
Callback<std::optional<std::string>> callback) noexcept override
|
||||||
{
|
{
|
||||||
checkEnabled();
|
checkEnabled();
|
||||||
|
|
||||||
|
@ -170,10 +170,10 @@ protected:
|
||||||
getFileTransfer()->enqueueFileTransfer(request,
|
getFileTransfer()->enqueueFileTransfer(request,
|
||||||
{[callbackPtr, this](std::future<FileTransferResult> result) {
|
{[callbackPtr, this](std::future<FileTransferResult> result) {
|
||||||
try {
|
try {
|
||||||
(*callbackPtr)(result.get().data);
|
(*callbackPtr)(std::move(result.get().data));
|
||||||
} catch (FileTransferError & e) {
|
} catch (FileTransferError & e) {
|
||||||
if (e.error == FileTransfer::NotFound || e.error == FileTransfer::Forbidden)
|
if (e.error == FileTransfer::NotFound || e.error == FileTransfer::Forbidden)
|
||||||
return (*callbackPtr)(std::shared_ptr<std::string>());
|
return (*callbackPtr)({});
|
||||||
maybeDisable();
|
maybeDisable();
|
||||||
callbackPtr->rethrow();
|
callbackPtr->rethrow();
|
||||||
} catch (...) {
|
} catch (...) {
|
||||||
|
|
|
@ -94,7 +94,7 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
|
||||||
conn->sshConn->in.close();
|
conn->sshConn->in.close();
|
||||||
auto msg = conn->from.drain();
|
auto msg = conn->from.drain();
|
||||||
throw Error("'nix-store --serve' protocol mismatch from '%s', got '%s'",
|
throw Error("'nix-store --serve' protocol mismatch from '%s', got '%s'",
|
||||||
host, chomp(*saved.s + msg));
|
host, chomp(saved.s + msg));
|
||||||
}
|
}
|
||||||
conn->remoteVersion = readInt(conn->from);
|
conn->remoteVersion = readInt(conn->from);
|
||||||
if (GET_PROTOCOL_MAJOR(conn->remoteVersion) != 0x200)
|
if (GET_PROTOCOL_MAJOR(conn->remoteVersion) != 0x200)
|
||||||
|
|
|
@ -96,6 +96,7 @@ void LocalBinaryCacheStore::init()
|
||||||
createDirs(binaryCacheDir + "/" + realisationsPrefix);
|
createDirs(binaryCacheDir + "/" + realisationsPrefix);
|
||||||
if (writeDebugInfo)
|
if (writeDebugInfo)
|
||||||
createDirs(binaryCacheDir + "/debuginfo");
|
createDirs(binaryCacheDir + "/debuginfo");
|
||||||
|
createDirs(binaryCacheDir + "/log");
|
||||||
BinaryCacheStore::init();
|
BinaryCacheStore::init();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -87,34 +87,32 @@ void LocalFSStore::narFromPath(const StorePath & path, Sink & sink)
|
||||||
|
|
||||||
const string LocalFSStore::drvsLogDir = "drvs";
|
const string LocalFSStore::drvsLogDir = "drvs";
|
||||||
|
|
||||||
|
std::optional<std::string> LocalFSStore::getBuildLog(const StorePath & path_)
|
||||||
|
|
||||||
std::shared_ptr<std::string> LocalFSStore::getBuildLog(const StorePath & path_)
|
|
||||||
{
|
{
|
||||||
auto path = path_;
|
auto path = path_;
|
||||||
|
|
||||||
if (!path.isDerivation()) {
|
if (!path.isDerivation()) {
|
||||||
try {
|
try {
|
||||||
auto info = queryPathInfo(path);
|
auto info = queryPathInfo(path);
|
||||||
if (!info->deriver) return nullptr;
|
if (!info->deriver) return std::nullopt;
|
||||||
path = *info->deriver;
|
path = *info->deriver;
|
||||||
} catch (InvalidPath &) {
|
} catch (InvalidPath &) {
|
||||||
return nullptr;
|
return std::nullopt;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
auto baseName = std::string(baseNameOf(printStorePath(path)));
|
auto baseName = path.to_string();
|
||||||
|
|
||||||
for (int j = 0; j < 2; j++) {
|
for (int j = 0; j < 2; j++) {
|
||||||
|
|
||||||
Path logPath =
|
Path logPath =
|
||||||
j == 0
|
j == 0
|
||||||
? fmt("%s/%s/%s/%s", logDir, drvsLogDir, string(baseName, 0, 2), string(baseName, 2))
|
? fmt("%s/%s/%s/%s", logDir, drvsLogDir, baseName.substr(0, 2), baseName.substr(2))
|
||||||
: fmt("%s/%s/%s", logDir, drvsLogDir, baseName);
|
: fmt("%s/%s/%s", logDir, drvsLogDir, baseName);
|
||||||
Path logBz2Path = logPath + ".bz2";
|
Path logBz2Path = logPath + ".bz2";
|
||||||
|
|
||||||
if (pathExists(logPath))
|
if (pathExists(logPath))
|
||||||
return std::make_shared<std::string>(readFile(logPath));
|
return readFile(logPath);
|
||||||
|
|
||||||
else if (pathExists(logBz2Path)) {
|
else if (pathExists(logBz2Path)) {
|
||||||
try {
|
try {
|
||||||
|
@ -124,7 +122,7 @@ std::shared_ptr<std::string> LocalFSStore::getBuildLog(const StorePath & path_)
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return nullptr;
|
return std::nullopt;
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -45,7 +45,8 @@ public:
|
||||||
return getRealStoreDir() + "/" + std::string(storePath, storeDir.size() + 1);
|
return getRealStoreDir() + "/" + std::string(storePath, storeDir.size() + 1);
|
||||||
}
|
}
|
||||||
|
|
||||||
std::shared_ptr<std::string> getBuildLog(const StorePath & path) override;
|
std::optional<std::string> getBuildLog(const StorePath & path) override;
|
||||||
|
|
||||||
};
|
};
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -9,6 +9,7 @@
|
||||||
#include "callback.hh"
|
#include "callback.hh"
|
||||||
#include "topo-sort.hh"
|
#include "topo-sort.hh"
|
||||||
#include "finally.hh"
|
#include "finally.hh"
|
||||||
|
#include "compression.hh"
|
||||||
|
|
||||||
#include <iostream>
|
#include <iostream>
|
||||||
#include <algorithm>
|
#include <algorithm>
|
||||||
|
@ -1307,7 +1308,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
|
||||||
|
|
||||||
canonicalisePathMetaData(realPath, -1);
|
canonicalisePathMetaData(realPath, -1);
|
||||||
|
|
||||||
optimisePath(realPath); // FIXME: combine with hashPath()
|
optimisePath(realPath, repair); // FIXME: combine with hashPath()
|
||||||
|
|
||||||
registerValidPath(info);
|
registerValidPath(info);
|
||||||
}
|
}
|
||||||
|
@ -1419,7 +1420,7 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, const string & name,
|
||||||
|
|
||||||
canonicalisePathMetaData(realPath, -1); // FIXME: merge into restorePath
|
canonicalisePathMetaData(realPath, -1); // FIXME: merge into restorePath
|
||||||
|
|
||||||
optimisePath(realPath);
|
optimisePath(realPath, repair);
|
||||||
|
|
||||||
ValidPathInfo info { dstPath, narHash.first };
|
ValidPathInfo info { dstPath, narHash.first };
|
||||||
info.narSize = narHash.second;
|
info.narSize = narHash.second;
|
||||||
|
@ -1461,12 +1462,12 @@ StorePath LocalStore::addTextToStore(const string & name, const string & s,
|
||||||
|
|
||||||
StringSink sink;
|
StringSink sink;
|
||||||
dumpString(s, sink);
|
dumpString(s, sink);
|
||||||
auto narHash = hashString(htSHA256, *sink.s);
|
auto narHash = hashString(htSHA256, sink.s);
|
||||||
|
|
||||||
optimisePath(realPath);
|
optimisePath(realPath, repair);
|
||||||
|
|
||||||
ValidPathInfo info { dstPath, narHash };
|
ValidPathInfo info { dstPath, narHash };
|
||||||
info.narSize = sink.s->size();
|
info.narSize = sink.s.size();
|
||||||
info.references = references;
|
info.references = references;
|
||||||
info.ca = TextHash { .hash = hash };
|
info.ca = TextHash { .hash = hash };
|
||||||
registerValidPath(info);
|
registerValidPath(info);
|
||||||
|
@ -1898,4 +1899,30 @@ FixedOutputHash LocalStore::hashCAPath(
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
void LocalStore::addBuildLog(const StorePath & drvPath, std::string_view log)
|
||||||
|
{
|
||||||
|
assert(drvPath.isDerivation());
|
||||||
|
|
||||||
|
auto baseName = drvPath.to_string();
|
||||||
|
|
||||||
|
auto logPath = fmt("%s/%s/%s/%s.bz2", logDir, drvsLogDir, baseName.substr(0, 2), baseName.substr(2));
|
||||||
|
|
||||||
|
if (pathExists(logPath)) return;
|
||||||
|
|
||||||
|
createDirs(dirOf(logPath));
|
||||||
|
|
||||||
|
auto tmpFile = fmt("%s.tmp.%d", logPath, getpid());
|
||||||
|
|
||||||
|
writeFile(tmpFile, compress("bzip2", log));
|
||||||
|
|
||||||
|
if (rename(tmpFile.c_str(), logPath.c_str()) != 0)
|
||||||
|
throw SysError("renaming '%1%' to '%2%'", tmpFile, logPath);
|
||||||
|
}
|
||||||
|
|
||||||
|
std::optional<std::string> LocalStore::getVersion()
|
||||||
|
{
|
||||||
|
return nixVersion;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
} // namespace nix
|
} // namespace nix
|
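
LocalStore::addBuildLog above compresses the log with the compress("bzip2", ...) helper and installs it atomically: it writes to a "<target>.tmp.<pid>" file and then rename()s it into place, so readers never observe a partially written log. A self-contained sketch of just that temp-file-plus-rename step (compression omitted; writeAtomically is an illustrative name):

    #include <cstdio>
    #include <fstream>
    #include <stdexcept>
    #include <string>
    #include <unistd.h>

    void writeAtomically(const std::string & target, const std::string & data) {
        auto tmpFile = target + ".tmp." + std::to_string(getpid());
        {
            std::ofstream out(tmpFile, std::ios::binary);
            out << data;
            out.close();
            if (!out) throw std::runtime_error("writing '" + tmpFile + "'");
        }
        if (std::rename(tmpFile.c_str(), target.c_str()) != 0)   // atomic on the same filesystem
            throw std::runtime_error("renaming '" + tmpFile + "' to '" + target + "'");
    }

    int main() {
        writeAtomically("drv.log.bz2", "log contents would go here\n");
    }
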
||||||
|
|
|
@@ -172,8 +172,9 @@ public:
     void optimiseStore() override;

-    /* Optimise a single store path. */
-    void optimisePath(const Path & path);
+    /* Optimise a single store path. Optionally, test the encountered
+       symlinks for corruption. */
+    void optimisePath(const Path & path, RepairFlag repair);

     bool verifyStore(bool checkContents, RepairFlag repair) override;

@@ -210,6 +211,8 @@ public:
     void queryRealisationUncached(const DrvOutput&,
         Callback<std::shared_ptr<const Realisation>> callback) noexcept override;

+    std::optional<std::string> getVersion() override;
+
 private:

     int getSchema();

@@ -253,7 +256,7 @@ private:
     InodeHash loadInodeHash();
     Strings readDirectoryIgnoringInodes(const Path & path, const InodeHash & inodeHash);
-    void optimisePath_(Activity * act, OptimiseStats & stats, const Path & path, InodeHash & inodeHash);
+    void optimisePath_(Activity * act, OptimiseStats & stats, const Path & path, InodeHash & inodeHash, RepairFlag repair);

     // Internal versions that are not wrapped in retry_sqlite.
     bool isValidPath_(State & state, const StorePath & path);

@@ -279,6 +282,8 @@ private:
         const std::string_view pathHash
     );

+    void addBuildLog(const StorePath & drvPath, std::string_view log) override;
+
     friend struct LocalDerivationGoal;
     friend struct PathSubstitutionGoal;
     friend struct SubstitutionGoal;
@@ -56,8 +56,8 @@ bool DrvName::matches(const DrvName & n)
 }


-string nextComponent(string::const_iterator & p,
-    const string::const_iterator end)
+std::string_view nextComponent(std::string_view::const_iterator & p,
+    const std::string_view::const_iterator end)
 {
     /* Skip any dots and dashes (component separators). */
     while (p != end && (*p == '.' || *p == '-')) ++p;

@@ -67,18 +67,18 @@ string nextComponent(string::const_iterator & p,
     /* If the first character is a digit, consume the longest sequence
        of digits. Otherwise, consume the longest sequence of
        non-digit, non-separator characters. */
-    string s;
+    auto s = p;
     if (isdigit(*p))
-        while (p != end && isdigit(*p)) s += *p++;
+        while (p != end && isdigit(*p)) p++;
     else
         while (p != end && (!isdigit(*p) && *p != '.' && *p != '-'))
-            s += *p++;
+            p++;

-    return s;
+    return {s, size_t(p - s)};
 }


-static bool componentsLT(const string & c1, const string & c2)
+static bool componentsLT(const std::string_view c1, const std::string_view c2)
 {
     auto n1 = string2Int<int>(c1);
     auto n2 = string2Int<int>(c2);

@@ -94,14 +94,14 @@ static bool componentsLT(const string & c1, const string & c2)
 }


-int compareVersions(const string & v1, const string & v2)
+int compareVersions(const std::string_view v1, const std::string_view v2)
 {
-    string::const_iterator p1 = v1.begin();
-    string::const_iterator p2 = v2.begin();
+    auto p1 = v1.begin();
+    auto p2 = v2.begin();

     while (p1 != v1.end() || p2 != v2.end()) {
-        string c1 = nextComponent(p1, v1.end());
-        string c2 = nextComponent(p2, v2.end());
+        auto c1 = nextComponent(p1, v1.end());
+        auto c2 = nextComponent(p2, v2.end());
         if (componentsLT(c1, c2)) return -1;
         else if (componentsLT(c2, c1)) return 1;
     }

@@ -27,9 +27,9 @@ private:

     typedef list<DrvName> DrvNames;

-string nextComponent(string::const_iterator & p,
-    const string::const_iterator end);
-int compareVersions(const string & v1, const string & v2);
+std::string_view nextComponent(std::string_view::const_iterator & p,
+    const std::string_view::const_iterator end);
+int compareVersions(const std::string_view v1, const std::string_view v2);
 DrvNames drvNamesFromArgs(const Strings & opArgs);

 }
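For reference, here is a small self-contained sketch of the component-wise comparison that nextComponent/compareVersions implement: a version string is split into maximal runs of digits or of non-digit, non-separator characters, and components are compared numerically when both sides are numbers. This is an illustrative re-implementation only; the real componentsLT has additional rules (for example around missing components and "pre" releases) that are not shown here.

#include <cctype>
#include <charconv>
#include <iostream>
#include <optional>
#include <string_view>
#include <vector>

// Split a version string into components: maximal runs of digits or of
// non-digit, non-separator characters; '.' and '-' only separate.
static std::vector<std::string_view> components(std::string_view v)
{
    std::vector<std::string_view> res;
    size_t i = 0;
    while (i < v.size()) {
        while (i < v.size() && (v[i] == '.' || v[i] == '-')) ++i;
        if (i == v.size()) break;
        size_t start = i;
        if (std::isdigit((unsigned char) v[i]))
            while (i < v.size() && std::isdigit((unsigned char) v[i])) ++i;
        else
            while (i < v.size() && !std::isdigit((unsigned char) v[i])
                   && v[i] != '.' && v[i] != '-') ++i;
        res.push_back(v.substr(start, i - start));
    }
    return res;
}

static std::optional<int> toInt(std::string_view s)
{
    int n = 0;
    auto [p, ec] = std::from_chars(s.data(), s.data() + s.size(), n);
    if (ec != std::errc() || p != s.data() + s.size()) return std::nullopt;
    return n;
}

// Compare two components: numerically if both are numbers, lexically otherwise.
static bool lessThan(std::string_view a, std::string_view b)
{
    auto na = toInt(a), nb = toInt(b);
    if (na && nb) return *na < *nb;
    return a < b;
}

int main()
{
    auto a = components("1.10-pre2");   // {"1", "10", "pre", "2"}
    auto b = components("1.9");         // {"1", "9"}
    // The second component compares 10 > 9 numerically, not lexically.
    std::cout << (lessThan(b[1], a[1]) ? "1.10 is newer\n" : "1.9 is newer\n");
}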
@@ -28,7 +28,7 @@ struct NarMember

 struct NarAccessor : public FSAccessor
 {
-    std::shared_ptr<const std::string> nar;
+    std::optional<const std::string> nar;

     GetNarBytes getNarBytes;

@@ -104,7 +104,7 @@ struct NarAccessor : public FSAccessor
         }
     };

-    NarAccessor(ref<const std::string> nar) : nar(nar)
+    NarAccessor(std::string && _nar) : nar(_nar)
     {
         StringSource source(*nar);
         NarIndexer indexer(*this, source);

@@ -224,9 +224,9 @@ struct NarAccessor : public FSAccessor
     }
 };

-ref<FSAccessor> makeNarAccessor(ref<const std::string> nar)
+ref<FSAccessor> makeNarAccessor(std::string && nar)
 {
-    return make_ref<NarAccessor>(nar);
+    return make_ref<NarAccessor>(std::move(nar));
 }

 ref<FSAccessor> makeNarAccessor(Source & source)

@@ -10,7 +10,7 @@ struct Source;

 /* Return an object that provides access to the contents of a NAR
    file. */
-ref<FSAccessor> makeNarAccessor(ref<const std::string> nar);
+ref<FSAccessor> makeNarAccessor(std::string && nar);

 ref<FSAccessor> makeNarAccessor(Source & source);
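The accessor now takes the NAR contents by rvalue reference and owns them, instead of sharing a ref<const std::string>. A minimal sketch of that ownership-transfer pattern; the BufferReader name and its field are invented for illustration.

#include <iostream>
#include <optional>
#include <string>
#include <utility>

// Owns a (potentially large) buffer; callers hand it over with std::move,
// so no copy of the data is made.
struct BufferReader
{
    std::optional<std::string> buf;

    explicit BufferReader(std::string && b) : buf(std::move(b)) { }

    size_t size() const { return buf->size(); }
};

int main()
{
    std::string nar(1 << 20, 'x');       // pretend this is a big NAR
    BufferReader r(std::move(nar));      // transfers ownership, no copy
    std::cout << r.size() << "\n";       // 1048576
    std::cout << nar.size() << "\n";     // moved-from string, typically 0
}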
@@ -26,7 +26,7 @@ static void makeWritable(const Path & path)
 struct MakeReadOnly
 {
     Path path;
-    MakeReadOnly(const Path & path) : path(path) { }
+    MakeReadOnly(const PathView path) : path(path) { }
     ~MakeReadOnly()
     {
         try {

@@ -88,7 +88,7 @@ Strings LocalStore::readDirectoryIgnoringInodes(const Path & path, const InodeHa

 void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
-    const Path & path, InodeHash & inodeHash)
+    const Path & path, InodeHash & inodeHash, RepairFlag repair)
 {
     checkInterrupt();

@@ -110,7 +110,7 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
     if (S_ISDIR(st.st_mode)) {
         Strings names = readDirectoryIgnoringInodes(path, inodeHash);
         for (auto & i : names)
-            optimisePath_(act, stats, path + "/" + i, inodeHash);
+            optimisePath_(act, stats, path + "/" + i, inodeHash, repair);
         return;
     }

@@ -151,7 +151,20 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
     /* Check if this is a known hash. */
     Path linkPath = linksDir + "/" + hash.to_string(Base32, false);

-retry:
+    /* Maybe delete the link, if it has been corrupted. */
+    if (pathExists(linkPath)) {
+        auto stLink = lstat(linkPath);
+        if (st.st_size != stLink.st_size
+            || (repair && hash != hashPath(htSHA256, linkPath).first))
+        {
+            // XXX: Consider overwriting linkPath with our valid version.
+            warn("removing corrupted link '%s'", linkPath);
+            warn("There may be more corrupted paths."
+                "\nYou should run `nix-store --verify --check-contents --repair` to fix them all");
+            unlink(linkPath.c_str());
+        }
+    }
+
     if (!pathExists(linkPath)) {
         /* Nope, create a hard link in the links directory. */
         if (link(path.c_str(), linkPath.c_str()) == 0) {

@@ -187,23 +200,18 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
         return;
     }

-    if (st.st_size != stLink.st_size) {
-        warn("removing corrupted link '%s'", linkPath);
-        unlink(linkPath.c_str());
-        goto retry;
-    }
-
     printMsg(lvlTalkative, format("linking '%1%' to '%2%'") % path % linkPath);

     /* Make the containing directory writable, but only if it's not
        the store itself (we don't want or need to mess with its
        permissions). */
-    bool mustToggle = dirOf(path) != realStoreDir.get();
-    if (mustToggle) makeWritable(dirOf(path));
+    const Path dirOfPath(dirOf(path));
+    bool mustToggle = dirOfPath != realStoreDir.get();
+    if (mustToggle) makeWritable(dirOfPath);

     /* When we're done, make the directory read-only again and reset
        its timestamp back to 0. */
-    MakeReadOnly makeReadOnly(mustToggle ? dirOf(path) : "");
+    MakeReadOnly makeReadOnly(mustToggle ? dirOfPath : "");

     Path tempLink = (format("%1%/.tmp-link-%2%-%3%")
         % realStoreDir % getpid() % random()).str();

@@ -260,7 +268,7 @@ void LocalStore::optimiseStore(OptimiseStats & stats)
         if (!isValidPath(i)) continue; /* path was GC'ed, probably */
         {
             Activity act(*logger, lvlTalkative, actUnknown, fmt("optimising path '%s'", printStorePath(i)));
-            optimisePath_(&act, stats, realStoreDir + "/" + std::string(i.to_string()), inodeHash);
+            optimisePath_(&act, stats, realStoreDir + "/" + std::string(i.to_string()), inodeHash, NoRepair);
         }
         done++;
         act.progress(done, paths.size());

@@ -278,12 +286,12 @@ void LocalStore::optimiseStore()
         stats.filesLinked);
 }

-void LocalStore::optimisePath(const Path & path)
+void LocalStore::optimisePath(const Path & path, RepairFlag repair)
 {
     OptimiseStats stats;
     InodeHash inodeHash;

-    if (settings.autoOptimiseStore) optimisePath_(nullptr, stats, path, inodeHash);
+    if (settings.autoOptimiseStore) optimisePath_(nullptr, stats, path, inodeHash, repair);
 }
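optimisePath_ deduplicates store files by hard-linking identical contents into a links directory keyed by content hash, and (with the new repair flag) drops a link whose contents no longer match. A toy sketch of that hard-link deduplication idea, using std::filesystem and a stand-in hash rather than the SHA-256 the real store uses; all names here are illustrative.

#include <filesystem>
#include <fstream>
#include <functional>
#include <iostream>
#include <sstream>
#include <string>

namespace fs = std::filesystem;

// Toy stand-in for a content hash (the real store uses SHA-256).
static std::string contentKey(const fs::path & p)
{
    std::ifstream in(p, std::ios::binary);
    std::stringstream ss;
    ss << in.rdbuf();
    return std::to_string(std::hash<std::string>{}(ss.str()));
}

// Deduplicate `file` against a links directory: if a file with the same
// content key already exists there, replace `file` with a hard link to it;
// otherwise register `file` as the canonical copy.
static void dedup(const fs::path & linksDir, const fs::path & file)
{
    fs::create_directories(linksDir);
    fs::path linkPath = linksDir / contentKey(file);

    if (!fs::exists(linkPath)) {
        fs::create_hard_link(file, linkPath);   // first copy becomes canonical
        return;
    }
    if (fs::equivalent(file, linkPath)) return; // already the same inode

    fs::path tmp = file.string() + ".tmp-link";
    fs::create_hard_link(linkPath, tmp);        // link to the canonical copy
    fs::rename(tmp, file);                      // atomically replace the duplicate
}

int main()
{
    std::ofstream("a.txt") << "same contents\n";
    std::ofstream("b.txt") << "same contents\n";
    dedup(".links", "a.txt");
    dedup(".links", "b.txt");
    std::cout << fs::hard_link_count("a.txt") << "\n";  // 3: a.txt, b.txt, .links entry
}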
@@ -170,7 +170,7 @@ std::string writeStructuredAttrsShell(const nlohmann::json & json)

     auto handleSimpleType = [](const nlohmann::json & value) -> std::optional<std::string> {
         if (value.is_string())
-            return shellEscape(value);
+            return shellEscape(value.get<std::string_view>());

         if (value.is_number()) {
             auto f = value.get<float>();

@@ -78,7 +78,7 @@ Realisation Realisation::fromJSON(
         auto fieldIterator = json.find(fieldName);
         if (fieldIterator == json.end())
             return std::nullopt;
-        return *fieldIterator;
+        return {*fieldIterator};
     };
     auto getField = [&](std::string fieldName) -> std::string {
         if (auto field = getOptionalField(fieldName))
@@ -22,9 +22,18 @@ Path RemoteFSAccessor::makeCacheFile(std::string_view hashPart, const std::strin
     return fmt("%s/%s.%s", cacheDir, hashPart, ext);
 }

-void RemoteFSAccessor::addToCache(std::string_view hashPart, const std::string & nar,
-    ref<FSAccessor> narAccessor)
+ref<FSAccessor> RemoteFSAccessor::addToCache(std::string_view hashPart, std::string && nar)
 {
+    if (cacheDir != "") {
+        try {
+            /* FIXME: do this asynchronously. */
+            writeFile(makeCacheFile(hashPart, "nar"), nar);
+        } catch (...) {
+            ignoreException();
+        }
+    }
+
+    auto narAccessor = makeNarAccessor(std::move(nar));
     nars.emplace(hashPart, narAccessor);

     if (cacheDir != "") {

@@ -33,14 +42,12 @@ void RemoteFSAccessor::addToCache(std::string_view hashPart, const std::string &
             JSONPlaceholder jsonRoot(str);
             listNar(jsonRoot, narAccessor, "", true);
             writeFile(makeCacheFile(hashPart, "ls"), str.str());
-
-            /* FIXME: do this asynchronously. */
-            writeFile(makeCacheFile(hashPart, "nar"), nar);
-
         } catch (...) {
             ignoreException();
         }
     }
+
+    return narAccessor;
 }

 std::pair<ref<FSAccessor>, Path> RemoteFSAccessor::fetch(const Path & path_, bool requireValidPath)

@@ -55,7 +62,6 @@ std::pair<ref<FSAccessor>, Path> RemoteFSAccessor::fetch(const Path & path_, boo
     auto i = nars.find(std::string(storePath.hashPart()));
     if (i != nars.end()) return {i->second, restPath};

-    StringSink sink;
     std::string listing;
     Path cacheFile;

@@ -86,19 +92,15 @@ std::pair<ref<FSAccessor>, Path> RemoteFSAccessor::fetch(const Path & path_, boo
         } catch (SysError &) { }

         try {
-            *sink.s = nix::readFile(cacheFile);
-
-            auto narAccessor = makeNarAccessor(sink.s);
+            auto narAccessor = makeNarAccessor(nix::readFile(cacheFile));
             nars.emplace(storePath.hashPart(), narAccessor);
             return {narAccessor, restPath};

         } catch (SysError &) { }
     }

+    StringSink sink;
     store->narFromPath(storePath, sink);
-    auto narAccessor = makeNarAccessor(sink.s);
-    addToCache(storePath.hashPart(), *sink.s, narAccessor);
-    return {narAccessor, restPath};
+    return {addToCache(storePath.hashPart(), std::move(sink.s)), restPath};
 }

 FSAccessor::Stat RemoteFSAccessor::stat(const Path & path)

@@ -20,8 +20,7 @@ class RemoteFSAccessor : public FSAccessor

     Path makeCacheFile(std::string_view hashPart, const std::string & ext);

-    void addToCache(std::string_view hashPart, const std::string & nar,
-        ref<FSAccessor> narAccessor);
+    ref<FSAccessor> addToCache(std::string_view hashPart, std::string && nar);

 public:
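fetch() is a memoising lookup: return the accessor already in nars if present, otherwise download the NAR, build an accessor, and remember it. A minimal sketch of that cache-then-fetch shape with invented names:

#include <iostream>
#include <map>
#include <memory>
#include <string>

// Minimal memoising fetch: look up an existing handle, otherwise build one
// (here: just wrap the downloaded bytes) and remember it for next time.
struct Fetcher
{
    std::map<std::string, std::shared_ptr<std::string>> cache;

    std::shared_ptr<std::string> fetch(const std::string & key)
    {
        if (auto i = cache.find(key); i != cache.end())
            return i->second;                        // cache hit
        auto data = std::make_shared<std::string>(download(key));
        cache.emplace(key, data);                    // remember for later
        return data;
    }

    // Stand-in for the expensive remote fetch.
    static std::string download(const std::string & key)
    {
        std::cout << "downloading " << key << "\n";
        return "contents of " + key;
    }
};

int main()
{
    Fetcher f;
    f.fetch("abc");   // downloads
    f.fetch("abc");   // served from the in-memory cache
}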
@@ -172,7 +172,7 @@ void RemoteStore::initConnection(Connection & conn)
            it. */
         conn.closeWrite();
         auto msg = conn.from.drain();
-        throw Error("protocol mismatch, got '%s'", chomp(*saved.s + msg));
+        throw Error("protocol mismatch, got '%s'", chomp(saved.s + msg));
     }

     conn.from >> conn.daemonVersion;

@@ -188,7 +188,12 @@ void RemoteStore::initConnection(Connection & conn)
     }

     if (GET_PROTOCOL_MINOR(conn.daemonVersion) >= 11)
-        conn.to << false;
+        conn.to << false; // obsolete reserveSpace
+
+    if (GET_PROTOCOL_MINOR(conn.daemonVersion) >= 33) {
+        conn.to.flush();
+        conn.daemonNixVersion = readString(conn.from);
+    }

     auto ex = conn.processStderr();
     if (ex) std::rethrow_exception(ex);

@@ -908,6 +913,25 @@ void RemoteStore::queryMissing(const std::vector<DerivedPath> & targets,
 }


+void RemoteStore::addBuildLog(const StorePath & drvPath, std::string_view log)
+{
+    auto conn(getConnection());
+    conn->to << wopAddBuildLog << drvPath.to_string();
+    StringSource source(log);
+    conn.withFramedSink([&](Sink & sink) {
+        source.drainInto(sink);
+    });
+    readInt(conn->from);
+}
+
+
+std::optional<std::string> RemoteStore::getVersion()
+{
+    auto conn(getConnection());
+    return conn->daemonNixVersion;
+}
+
+
 void RemoteStore::connect()
 {
     auto conn(getConnection());
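addBuildLog streams the log through a StringSource into the daemon connection's framed sink. Below is a self-contained sketch of the underlying source-to-sink copy loop; the Source/Sink interfaces here are illustrative stand-ins, not the library's own classes.

#include <algorithm>
#include <cstring>
#include <iostream>
#include <string>
#include <string_view>

// Illustrative Source/Sink interfaces: a Source yields bytes in chunks,
// a Sink consumes them. drainInto() is the generic copy loop.
struct Sink
{
    virtual void operator()(std::string_view data) = 0;
    virtual ~Sink() = default;
};

struct Source
{
    virtual size_t read(char * buf, size_t len) = 0;   // returns 0 at EOF
    virtual ~Source() = default;

    void drainInto(Sink & sink)
    {
        char buf[4096];
        while (size_t n = read(buf, sizeof(buf)))
            sink({buf, n});
    }
};

struct StringSource : Source
{
    std::string_view s;
    size_t pos = 0;
    StringSource(std::string_view s) : s(s) { }
    size_t read(char * buf, size_t len) override
    {
        size_t n = std::min(len, s.size() - pos);
        std::memcpy(buf, s.data() + pos, n);
        pos += n;
        return n;
    }
};

struct CountingSink : Sink
{
    size_t total = 0;
    void operator()(std::string_view data) override { total += data.size(); }
};

int main()
{
    StringSource src("the build log contents");
    CountingSink dst;
    src.drainInto(dst);
    std::cout << dst.total << " bytes streamed\n";
}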
@@ -116,6 +116,10 @@ public:
         StorePathSet & willBuild, StorePathSet & willSubstitute, StorePathSet & unknown,
         uint64_t & downloadSize, uint64_t & narSize) override;

+    void addBuildLog(const StorePath & drvPath, std::string_view log) override;
+
+    std::optional<std::string> getVersion() override;
+
     void connect() override;

     unsigned int getProtocol() override;

@@ -127,6 +131,7 @@ public:
         FdSink to;
         FdSource from;
         unsigned int daemonVersion;
+        std::optional<std::string> daemonNixVersion;
         std::chrono::time_point<std::chrono::steady_clock> startTime;

         virtual ~Connection();
@@ -385,7 +385,7 @@ struct S3BinaryCacheStoreImpl : virtual S3BinaryCacheStoreConfig, public virtual
         auto compress = [&](std::string compression)
         {
             auto compressed = nix::compress(compression, StreamToSourceAdapter(istream).drain());
-            return std::make_shared<std::stringstream>(std::move(*compressed));
+            return std::make_shared<std::stringstream>(std::move(compressed));
         };

         if (narinfoCompression != "" && hasSuffix(path, ".narinfo"))

@@ -4,6 +4,8 @@

 #include "ref.hh"

+#include <optional>
+
 namespace Aws { namespace Client { class ClientConfiguration; } }
 namespace Aws { namespace S3 { class S3Client; } }

@@ -20,7 +22,7 @@ struct S3Helper

 struct FileTransferResult
 {
-    std::shared_ptr<std::string> data;
+    std::optional<std::string> data;
     unsigned int durationMs;
 };
|
||||||
copyPaths(srcStore, dstStore, closure, repair, checkSigs, substitute);
|
copyPaths(srcStore, dstStore, closure, repair, checkSigs, substitute);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
void copyClosure(
|
||||||
|
Store & srcStore,
|
||||||
|
Store & dstStore,
|
||||||
|
const StorePathSet & storePaths,
|
||||||
|
RepairFlag repair,
|
||||||
|
CheckSigsFlag checkSigs,
|
||||||
|
SubstituteFlag substitute)
|
||||||
|
{
|
||||||
|
if (&srcStore == &dstStore) return;
|
||||||
|
|
||||||
|
StorePathSet closure;
|
||||||
|
srcStore.computeFSClosure(storePaths, closure);
|
||||||
|
copyPaths(srcStore, dstStore, closure, repair, checkSigs, substitute);
|
||||||
|
}
|
||||||
|
|
||||||
std::optional<ValidPathInfo> decodeValidPathInfo(const Store & store, std::istream & str, std::optional<HashResult> hashGiven)
|
std::optional<ValidPathInfo> decodeValidPathInfo(const Store & store, std::istream & str, std::optional<HashResult> hashGiven)
|
||||||
{
|
{
|
||||||
std::string path;
|
std::string path;
|
||||||
|
|
|
@ -724,8 +724,11 @@ public:
|
||||||
|
|
||||||
/* Return the build log of the specified store path, if available,
|
/* Return the build log of the specified store path, if available,
|
||||||
or null otherwise. */
|
or null otherwise. */
|
||||||
virtual std::shared_ptr<std::string> getBuildLog(const StorePath & path)
|
virtual std::optional<std::string> getBuildLog(const StorePath & path)
|
||||||
{ return nullptr; }
|
{ return std::nullopt; }
|
||||||
|
|
||||||
|
virtual void addBuildLog(const StorePath & path, std::string_view log)
|
||||||
|
{ unsupported("addBuildLog"); }
|
||||||
|
|
||||||
/* Hack to allow long-running processes like hydra-queue-runner to
|
/* Hack to allow long-running processes like hydra-queue-runner to
|
||||||
occasionally flush their path info cache. */
|
occasionally flush their path info cache. */
|
||||||
|
@ -762,6 +765,9 @@ public:
|
||||||
* (a no-op when there’s no daemon)
|
* (a no-op when there’s no daemon)
|
||||||
*/
|
*/
|
||||||
virtual void setOptions() { }
|
virtual void setOptions() { }
|
||||||
|
|
||||||
|
virtual std::optional<std::string> getVersion() { return {}; }
|
||||||
|
|
||||||
protected:
|
protected:
|
||||||
|
|
||||||
Stats stats;
|
Stats stats;
|
||||||
|
@ -812,6 +818,13 @@ void copyClosure(
|
||||||
CheckSigsFlag checkSigs = CheckSigs,
|
CheckSigsFlag checkSigs = CheckSigs,
|
||||||
SubstituteFlag substitute = NoSubstitute);
|
SubstituteFlag substitute = NoSubstitute);
|
||||||
|
|
||||||
|
void copyClosure(
|
||||||
|
Store & srcStore, Store & dstStore,
|
||||||
|
const StorePathSet & paths,
|
||||||
|
RepairFlag repair = NoRepair,
|
||||||
|
CheckSigsFlag checkSigs = CheckSigs,
|
||||||
|
SubstituteFlag substitute = NoSubstitute);
|
||||||
|
|
||||||
/* Remove the temporary roots file for this process. Any temporary
|
/* Remove the temporary roots file for this process. Any temporary
|
||||||
root becomes garbage after this point unless it has been registered
|
root becomes garbage after this point unless it has been registered
|
||||||
as a (permanent) root. */
|
as a (permanent) root. */
|
||||||
|
|
|
@ -9,7 +9,7 @@ namespace nix {
|
||||||
#define WORKER_MAGIC_1 0x6e697863
|
#define WORKER_MAGIC_1 0x6e697863
|
||||||
#define WORKER_MAGIC_2 0x6478696f
|
#define WORKER_MAGIC_2 0x6478696f
|
||||||
|
|
||||||
#define PROTOCOL_VERSION (1 << 8 | 32)
|
#define PROTOCOL_VERSION (1 << 8 | 33)
|
||||||
#define GET_PROTOCOL_MAJOR(x) ((x) & 0xff00)
|
#define GET_PROTOCOL_MAJOR(x) ((x) & 0xff00)
|
||||||
#define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff)
|
#define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff)
|
||||||
|
|
||||||
|
@ -56,6 +56,7 @@ typedef enum {
|
||||||
wopRegisterDrvOutput = 42,
|
wopRegisterDrvOutput = 42,
|
||||||
wopQueryRealisation = 43,
|
wopQueryRealisation = 43,
|
||||||
wopAddMultipleToStore = 44,
|
wopAddMultipleToStore = 44,
|
||||||
|
wopAddBuildLog = 45,
|
||||||
} WorkerOp;
|
} WorkerOp;
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -93,13 +93,12 @@ static void dump(const Path & path, Sink & sink, PathFilter & filter)
|
||||||
debug(format("removing case hack suffix from '%1%'") % (path + "/" + i.name));
|
debug(format("removing case hack suffix from '%1%'") % (path + "/" + i.name));
|
||||||
name.erase(pos);
|
name.erase(pos);
|
||||||
}
|
}
|
||||||
if (unhacked.find(name) != unhacked.end())
|
if (!unhacked.emplace(name, i.name).second)
|
||||||
throw Error("file name collision in between '%1%' and '%2%'",
|
throw Error("file name collision in between '%1%' and '%2%'",
|
||||||
(path + "/" + unhacked[name]),
|
(path + "/" + unhacked[name]),
|
||||||
(path + "/" + i.name));
|
(path + "/" + i.name));
|
||||||
unhacked[name] = i.name;
|
|
||||||
} else
|
} else
|
||||||
unhacked[i.name] = i.name;
|
unhacked.emplace(i.name, i.name);
|
||||||
|
|
||||||
for (auto & i : unhacked)
|
for (auto & i : unhacked)
|
||||||
if (filter(path + "/" + i.first)) {
|
if (filter(path + "/" + i.first)) {
|
||||||
|
|
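The collision check now uses the boolean returned by map::emplace instead of a separate find() followed by an insert, so each name is looked up only once. A small standalone example of that idiom:

#include <iostream>
#include <map>
#include <stdexcept>
#include <string>

int main()
{
    std::map<std::string, std::string> seen;

    auto add = [&](const std::string & key, const std::string & original) {
        // emplace() inserts only if the key is new and tells us, in one
        // lookup, whether a collision occurred -- no separate find() needed.
        if (!seen.emplace(key, original).second)
            throw std::runtime_error(
                "file name collision between '" + seen[key] + "' and '" + original + "'");
    };

    add("readme", "README~nix~case~hack~1");
    try {
        add("readme", "readme");            // second entry with the same key
    } catch (const std::exception & e) {
        std::cout << e.what() << "\n";
    }
}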