Merge remote-tracking branch 'upstream/master' into derivation-header-include-order

commit 9357512d73

--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -10,15 +10,8 @@ jobs:
     runs-on: ${{ matrix.os }}
     steps:
     - uses: actions/checkout@v2
+      with:
+        fetch-depth: 0
     - uses: cachix/install-nix-action@v10
-    - run: nix-build release.nix --arg nix '{ outPath = ./.; revCount = 123; shortRev = "abcdefgh"; }' --arg systems '[ builtins.currentSystem ]' -A installerScript -A perlBindings
-  macos_perf_test:
-    runs-on: macos-latest
-    steps:
-    - name: Disable syspolicy assessments
-      run: |
-        spctl --status
-        sudo spctl --master-disable
-    - uses: actions/checkout@v2
-    - uses: cachix/install-nix-action@v10
-    - run: nix-build release.nix --arg nix '{ outPath = ./.; revCount = 123; shortRev = "abcdefgh"; }' --arg systems '[ builtins.currentSystem ]' -A installerScript -A perlBindings
+    #- run: nix flake check
+    - run: nix-build -A checks.$(if [[ `uname` = Linux ]]; then echo x86_64-linux; else echo x86_64-darwin; fi)
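
The rewritten workflow builds the flake's per-system `checks` output instead of `release.nix`. The same check can be reproduced locally; a sketch using only commands that appear in this diff (the flake-compat `default.nix` added below is what makes the classic CLI work from a checkout):

```console
$ nix-build -A checks.x86_64-linux    # what CI runs on a Linux runner
$ nix-build -A checks.x86_64-darwin   # what CI runs on a macOS runner
```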

--- a/Makefile
+++ b/Makefile
@@ -11,6 +11,7 @@ makefiles = \
   src/resolve-system-dependencies/local.mk \
   scripts/local.mk \
   corepkgs/local.mk \
+  misc/bash/local.mk \
   misc/systemd/local.mk \
   misc/launchd/local.mk \
   misc/upstart/local.mk \

--- a/Makefile.config.in
+++ b/Makefile.config.in
@@ -19,6 +19,7 @@ LIBLZMA_LIBS = @LIBLZMA_LIBS@
 OPENSSL_LIBS = @OPENSSL_LIBS@
 PACKAGE_NAME = @PACKAGE_NAME@
 PACKAGE_VERSION = @PACKAGE_VERSION@
+SHELL = @bash@
 SODIUM_LIBS = @SODIUM_LIBS@
 SQLITE3_LIBS = @SQLITE3_LIBS@
 bash = @bash@

--- a/README.md
+++ b/README.md
@@ -12,7 +12,7 @@ for more details.
 On Linux and macOS the easiest way to Install Nix is to run the following shell command
 (as a user other than root):
 
-```
+```console
 $ curl -L https://nixos.org/nix/install | sh
 ```
 
@@ -20,27 +20,8 @@ Information on additional installation methods is available on the [Nix download
 
 ## Building And Developing
 
-### Building Nix
-
-You can build Nix using one of the targets provided by [release.nix](./release.nix):
-
-```
-$ nix-build ./release.nix -A build.aarch64-linux
-$ nix-build ./release.nix -A build.x86_64-darwin
-$ nix-build ./release.nix -A build.i686-linux
-$ nix-build ./release.nix -A build.x86_64-linux
-```
-
-### Development Environment
-
-You can use the provided `shell.nix` to get a working development environment:
-
-```
-$ nix-shell
-$ ./bootstrap.sh
-$ ./configure
-$ make
-```
+See our [Hacking guide](https://hydra.nixos.org/job/nix/master/build.x86_64-linux/latest/download-by-type/doc/manual#chap-hacking) in our manual for instruction on how to
+build nix from source with nix-build or how to get a development environment.
 
 ## Additional Resources
 

--- a/configure.ac
+++ b/configure.ac
@@ -123,6 +123,7 @@ AC_PATH_PROG(flex, flex, false)
 AC_PATH_PROG(bison, bison, false)
 AC_PATH_PROG(dot, dot)
 AC_PATH_PROG(lsof, lsof, lsof)
+NEED_PROG(jq, jq)
 
 
 AC_SUBST(coreutils, [$(dirname $(type -p cat))])

--- /dev/null
+++ b/default.nix
@@ -0,0 +1,3 @@
+(import (fetchTarball https://github.com/edolstra/flake-compat/archive/master.tar.gz) {
+  src = ./.;
+}).defaultNix
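
This shim keeps the classic CLI working: flake-compat evaluates `flake.nix` and exposes its outputs as `defaultNix`, so a plain `nix-build` still succeeds for users without flake support (the `shell.nix` change later in this diff does the same via `shellNix`). A minimal sketch of the resulting usage; the second command appears verbatim in the hacking manual below:

```console
$ nix-build                                 # builds the flake's default package
$ nix-build -A defaultPackage.x86_64-linux  # pick a platform explicitly
```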

--- a/doc/manual/command-ref/conf-file.xml
+++ b/doc/manual/command-ref/conf-file.xml
@@ -370,34 +370,6 @@ false</literal>.</para>
 
 </varlistentry>
 
-<varlistentry xml:id="conf-hashed-mirrors"><term><literal>hashed-mirrors</literal></term>
-
-  <listitem><para>A list of web servers used by
-  <function>builtins.fetchurl</function> to obtain files by
-  hash. The default is
-  <literal>http://tarballs.nixos.org/</literal>. Given a hash type
-  <replaceable>ht</replaceable> and a base-16 hash
-  <replaceable>h</replaceable>, Nix will try to download the file
-  from
-  <literal>hashed-mirror/<replaceable>ht</replaceable>/<replaceable>h</replaceable></literal>.
-  This allows files to be downloaded even if they have disappeared
-  from their original URI. For example, given the default mirror
-  <literal>http://tarballs.nixos.org/</literal>, when building the derivation
-
-<programlisting>
-builtins.fetchurl {
-  url = "https://example.org/foo-1.2.3.tar.xz";
-  sha256 = "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae";
-}
-</programlisting>
-
-  Nix will attempt to download this file from
-  <literal>http://tarballs.nixos.org/sha256/2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae</literal>
-  first. If it is not available there, if will try the original URI.</para></listitem>
-
-</varlistentry>
-
-
 <varlistentry xml:id="conf-http-connections"><term><literal>http-connections</literal></term>
 
 <listitem><para>The maximum number of parallel TCP connections

--- a/doc/manual/expressions/builder-syntax.xml
+++ /dev/null
@@ -1,119 +0,0 @@
-<section xmlns="http://docbook.org/ns/docbook"
-         xmlns:xlink="http://www.w3.org/1999/xlink"
-         xmlns:xi="http://www.w3.org/2001/XInclude"
-         version="5.0"
-         xml:id='sec-builder-syntax'>
-
-<title>Builder Syntax</title>
-
-<example xml:id='ex-hello-builder'><title>Build script for GNU Hello
-(<filename>builder.sh</filename>)</title>
-<programlisting>
-source $stdenv/setup <co xml:id='ex-hello-builder-co-1' />
-
-PATH=$perl/bin:$PATH <co xml:id='ex-hello-builder-co-2' />
-
-tar xvfz $src <co xml:id='ex-hello-builder-co-3' />
-cd hello-*
-./configure --prefix=$out <co xml:id='ex-hello-builder-co-4' />
-make <co xml:id='ex-hello-builder-co-5' />
-make install</programlisting>
-</example>
-
-<para><xref linkend='ex-hello-builder' /> shows the builder referenced
-from Hello's Nix expression (stored in
-<filename>pkgs/applications/misc/hello/ex-1/builder.sh</filename>).
-The builder can actually be made a lot shorter by using the
-<emphasis>generic builder</emphasis> functions provided by
-<varname>stdenv</varname>, but here we write out the build steps to
-elucidate what a builder does. It performs the following
-steps:</para>
-
-<calloutlist>
-
-  <callout arearefs='ex-hello-builder-co-1'>
-
-    <para>When Nix runs a builder, it initially completely clears the
-    environment (except for the attributes declared in the
-    derivation). For instance, the <envar>PATH</envar> variable is
-    empty<footnote><para>Actually, it's initialised to
-    <filename>/path-not-set</filename> to prevent Bash from setting it
-    to a default value.</para></footnote>. This is done to prevent
-    undeclared inputs from being used in the build process. If for
-    example the <envar>PATH</envar> contained
-    <filename>/usr/bin</filename>, then you might accidentally use
-    <filename>/usr/bin/gcc</filename>.</para>
-
-    <para>So the first step is to set up the environment. This is
-    done by calling the <filename>setup</filename> script of the
-    standard environment. The environment variable
-    <envar>stdenv</envar> points to the location of the standard
-    environment being used. (It wasn't specified explicitly as an
-    attribute in <xref linkend='ex-hello-nix' />, but
-    <varname>mkDerivation</varname> adds it automatically.)</para>
-
-  </callout>
-
-  <callout arearefs='ex-hello-builder-co-2'>
-
-    <para>Since Hello needs Perl, we have to make sure that Perl is in
-    the <envar>PATH</envar>. The <envar>perl</envar> environment
-    variable points to the location of the Perl package (since it
-    was passed in as an attribute to the derivation), so
-    <filename><replaceable>$perl</replaceable>/bin</filename> is the
-    directory containing the Perl interpreter.</para>
-
-  </callout>
-
-  <callout arearefs='ex-hello-builder-co-3'>
-
-    <para>Now we have to unpack the sources. The
-    <varname>src</varname> attribute was bound to the result of
-    fetching the Hello source tarball from the network, so the
-    <envar>src</envar> environment variable points to the location in
-    the Nix store to which the tarball was downloaded. After
-    unpacking, we <command>cd</command> to the resulting source
-    directory.</para>
-
-    <para>The whole build is performed in a temporary directory
-    created in <varname>/tmp</varname>, by the way. This directory is
-    removed after the builder finishes, so there is no need to clean
-    up the sources afterwards. Also, the temporary directory is
-    always newly created, so you don't have to worry about files from
-    previous builds interfering with the current build.</para>
-
-  </callout>
-
-  <callout arearefs='ex-hello-builder-co-4'>
-
-    <para>GNU Hello is a typical Autoconf-based package, so we first
-    have to run its <filename>configure</filename> script. In Nix
-    every package is stored in a separate location in the Nix store,
-    for instance
-    <filename>/nix/store/9a54ba97fb71b65fda531012d0443ce2-hello-2.1.1</filename>.
-    Nix computes this path by cryptographically hashing all attributes
-    of the derivation. The path is passed to the builder through the
-    <envar>out</envar> environment variable. So here we give
-    <filename>configure</filename> the parameter
-    <literal>--prefix=$out</literal> to cause Hello to be installed in
-    the expected location.</para>
-
-  </callout>
-
-  <callout arearefs='ex-hello-builder-co-5'>
-
-    <para>Finally we build Hello (<literal>make</literal>) and install
-    it into the location specified by <envar>out</envar>
-    (<literal>make install</literal>).</para>
-
-  </callout>
-
-</calloutlist>
-
-<para>If you are wondering about the absence of error checking on the
-result of various commands called in the builder: this is because the
-shell script is evaluated with Bash's <option>-e</option> option,
-which causes the script to be aborted if any command fails without an
-error check.</para>
-
-</section>

--- a/doc/manual/hacking.xml
+++ b/doc/manual/hacking.xml
@@ -7,15 +7,34 @@
 <para>This section provides some notes on how to hack on Nix. To get
 the latest version of Nix from GitHub:
 <screen>
-$ git clone git://github.com/NixOS/nix.git
+$ git clone https://github.com/NixOS/nix.git
 $ cd nix
 </screen>
 </para>
 
-<para>To build it and its dependencies:
+<para>To build Nix for the current operating system/architecture use
+
 <screen>
-$ nix-build release.nix -A build.x86_64-linux
+$ nix-build
 </screen>
+
+or if you have a flakes-enabled nix:
+
+<screen>
+$ nix build
+</screen>
+
+This will build <literal>defaultPackage</literal> attribute defined in the <literal>flake.nix</literal> file.
+
+To build for other platforms add one of the following suffixes to it: aarch64-linux,
+i686-linux, x86_64-darwin, x86_64-linux.
+
+i.e.
+
+<screen>
+nix-build -A defaultPackage.x86_64-linux
+</screen>
+
 </para>
 
 <para>To build all dependencies and start a shell in which all
@@ -27,13 +46,27 @@ $ nix-shell
 To build Nix itself in this shell:
 <screen>
 [nix-shell]$ ./bootstrap.sh
-[nix-shell]$ configurePhase
-[nix-shell]$ make
+[nix-shell]$ ./configure $configureFlags
+[nix-shell]$ make -j $NIX_BUILD_CORES
 </screen>
 To install it in <literal>$(pwd)/inst</literal> and test it:
 <screen>
 [nix-shell]$ make install
 [nix-shell]$ make installcheck
+[nix-shell]$ ./inst/bin/nix --version
+nix (Nix) 2.4
+</screen>
+
+If you have a flakes-enabled nix you can replace:
+
+<screen>
+$ nix-shell
+</screen>
+
+by:
+
+<screen>
+$ nix develop
 </screen>
 
 </para>

--- /dev/null
+++ b/flake.lock
@@ -0,0 +1,26 @@
+{
+  "nodes": {
+    "nixpkgs": {
+      "locked": {
+        "lastModified": 1591633336,
+        "narHash": "sha256-oVXv4xAnDJB03LvZGbC72vSVlIbbJr8tpjEW5o/Fdek=",
+        "owner": "NixOS",
+        "repo": "nixpkgs",
+        "rev": "70717a337f7ae4e486ba71a500367cad697e5f09",
+        "type": "github"
+      },
+      "original": {
+        "id": "nixpkgs",
+        "ref": "nixos-20.03-small",
+        "type": "indirect"
+      }
+    },
+    "root": {
+      "inputs": {
+        "nixpkgs": "nixpkgs"
+      }
+    }
+  },
+  "root": "root",
+  "version": 6
+}
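
The lock file is plain JSON, so the pinned nixpkgs revision can be inspected with the Nix builtins alone; a small sketch:

```console
$ nix-instantiate --eval -E \
    '(builtins.fromJSON (builtins.readFile ./flake.lock)).nodes.nixpkgs.locked.rev'
"70717a337f7ae4e486ba71a500367cad697e5f09"
```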

--- /dev/null
+++ b/flake.nix
@@ -0,0 +1,443 @@
+{
+  description = "The purely functional package manager";
+
+  inputs.nixpkgs.url = "nixpkgs/nixos-20.03-small";
+
+  outputs = { self, nixpkgs }:
+
+    let
+
+      version = builtins.readFile ./.version + versionSuffix;
+      versionSuffix =
+        if officialRelease
+        then ""
+        else "pre${builtins.substring 0 8 (self.lastModifiedDate or self.lastModified)}_${self.shortRev or "dirty"}";
+
+      officialRelease = false;
+
+      systems = [ "x86_64-linux" "i686-linux" "x86_64-darwin" "aarch64-linux" ];
+
+      forAllSystems = f: nixpkgs.lib.genAttrs systems (system: f system);
+
+      # Memoize nixpkgs for different platforms for efficiency.
+      nixpkgsFor = forAllSystems (system:
+        import nixpkgs {
+          inherit system;
+          overlays = [ self.overlay ];
+        }
+      );
+
+      commonDeps = pkgs: with pkgs; rec {
+        # Use "busybox-sandbox-shell" if present,
+        # if not (legacy) fallback and hope it's sufficient.
+        sh = pkgs.busybox-sandbox-shell or (busybox.override {
+          useMusl = true;
+          enableStatic = true;
+          enableMinimal = true;
+          extraConfig = ''
+            CONFIG_FEATURE_FANCY_ECHO y
+            CONFIG_FEATURE_SH_MATH y
+            CONFIG_FEATURE_SH_MATH_64 y
+
+            CONFIG_ASH y
+            CONFIG_ASH_OPTIMIZE_FOR_SIZE y
+
+            CONFIG_ASH_ALIAS y
+            CONFIG_ASH_BASH_COMPAT y
+            CONFIG_ASH_CMDCMD y
+            CONFIG_ASH_ECHO y
+            CONFIG_ASH_GETOPTS y
+            CONFIG_ASH_INTERNAL_GLOB y
+            CONFIG_ASH_JOB_CONTROL y
+            CONFIG_ASH_PRINTF y
+            CONFIG_ASH_TEST y
+          '';
+        });
+
+        configureFlags =
+          lib.optionals stdenv.isLinux [
+            "--with-sandbox-shell=${sh}/bin/busybox"
+          ];
+
+        buildDeps =
+          [ bison
+            flex
+            libxml2
+            libxslt
+            docbook5
+            docbook_xsl_ns
+            autoconf-archive
+            autoreconfHook
+
+            curl
+            bzip2 xz brotli zlib editline
+            openssl pkgconfig sqlite
+            libarchive
+            boost
+            (if lib.versionAtLeast lib.version "20.03pre"
+             then nlohmann_json
+             else nlohmann_json.override { multipleHeaders = true; })
+            nlohmann_json
+
+            # Tests
+            git
+            mercurial
+            jq
+            gmock
+          ]
+          ++ lib.optionals stdenv.isLinux [libseccomp utillinuxMinimal]
+          ++ lib.optional (stdenv.isLinux || stdenv.isDarwin) libsodium
+          ++ lib.optional (stdenv.isLinux || stdenv.isDarwin)
+            (aws-sdk-cpp.override {
+              apis = ["s3" "transfer"];
+              customMemoryManagement = false;
+            });
+
+        propagatedDeps =
+          [ (boehmgc.override { enableLargeConfig = true; })
+          ];
+
+        perlDeps =
+          [ perl
+            perlPackages.DBDSQLite
+          ];
+      };
+
+    in {
+
+      # A Nixpkgs overlay that overrides the 'nix' and
+      # 'nix.perl-bindings' packages.
+      overlay = final: prev: {
+
+        nix = with final; with commonDeps pkgs; (stdenv.mkDerivation {
+          name = "nix-${version}";
+
+          src = self;
+
+          VERSION_SUFFIX = versionSuffix;
+
+          outputs = [ "out" "dev" "doc" ];
+
+          buildInputs = buildDeps;
+
+          propagatedBuildInputs = propagatedDeps;
+
+          preConfigure =
+            ''
+              # Copy libboost_context so we don't get all of Boost in our closure.
+              # https://github.com/NixOS/nixpkgs/issues/45462
+              mkdir -p $out/lib
+              cp -pd ${boost}/lib/{libboost_context*,libboost_thread*,libboost_system*} $out/lib
+              rm -f $out/lib/*.a
+              ${lib.optionalString stdenv.isLinux ''
+                chmod u+w $out/lib/*.so.*
+                patchelf --set-rpath $out/lib:${stdenv.cc.cc.lib}/lib $out/lib/libboost_thread.so.*
+              ''}
+            '';
+
+          configureFlags = configureFlags ++
+            [ "--sysconfdir=/etc" ];
+
+          enableParallelBuilding = true;
+
+          makeFlags = "profiledir=$(out)/etc/profile.d";
+
+          doCheck = true;
+
+          installFlags = "sysconfdir=$(out)/etc";
+
+          postInstall = ''
+            mkdir -p $doc/nix-support
+            echo "doc manual $doc/share/doc/nix/manual" >> $doc/nix-support/hydra-build-products
+          '';
+
+          doInstallCheck = true;
+          installCheckFlags = "sysconfdir=$(out)/etc";
+
+          separateDebugInfo = true;
+        }) // {
+
+          perl-bindings = with final; stdenv.mkDerivation {
+            name = "nix-perl-${version}";
+
+            src = self;
+
+            buildInputs =
+              [ autoconf-archive
+                autoreconfHook
+                nix
+                curl
+                bzip2
+                xz
+                pkgconfig
+                pkgs.perl
+                boost
+              ]
+              ++ lib.optional (stdenv.isLinux || stdenv.isDarwin) libsodium;
+
+            configureFlags = ''
+              --with-dbi=${perlPackages.DBI}/${pkgs.perl.libPrefix}
+              --with-dbd-sqlite=${perlPackages.DBDSQLite}/${pkgs.perl.libPrefix}
+            '';
+
+            enableParallelBuilding = true;
+
+            postUnpack = "sourceRoot=$sourceRoot/perl";
+          };
+
+        };
+
+      };
+
+      hydraJobs = {
+
+        # Binary package for various platforms.
+        build = nixpkgs.lib.genAttrs systems (system: nixpkgsFor.${system}.nix);
+
+        # Perl bindings for various platforms.
+        perlBindings = nixpkgs.lib.genAttrs systems (system: nixpkgsFor.${system}.nix.perl-bindings);
+
+        # Binary tarball for various platforms, containing a Nix store
+        # with the closure of 'nix' package, and the second half of
+        # the installation script.
+        binaryTarball = nixpkgs.lib.genAttrs systems (system:
+
+          with nixpkgsFor.${system};
+
+          let
+            installerClosureInfo = closureInfo { rootPaths = [ nix cacert ]; };
+          in
+
+          runCommand "nix-binary-tarball-${version}"
+            { #nativeBuildInputs = lib.optional (system != "aarch64-linux") shellcheck;
+              meta.description = "Distribution-independent Nix bootstrap binaries for ${system}";
+            }
+            ''
+              cp ${installerClosureInfo}/registration $TMPDIR/reginfo
+              substitute ${./scripts/install-nix-from-closure.sh} $TMPDIR/install \
+                --subst-var-by nix ${nix} \
+                --subst-var-by cacert ${cacert}
+
+              substitute ${./scripts/install-darwin-multi-user.sh} $TMPDIR/install-darwin-multi-user.sh \
+                --subst-var-by nix ${nix} \
+                --subst-var-by cacert ${cacert}
+              substitute ${./scripts/install-systemd-multi-user.sh} $TMPDIR/install-systemd-multi-user.sh \
+                --subst-var-by nix ${nix} \
+                --subst-var-by cacert ${cacert}
+              substitute ${./scripts/install-multi-user.sh} $TMPDIR/install-multi-user \
+                --subst-var-by nix ${nix} \
+                --subst-var-by cacert ${cacert}
+
+              if type -p shellcheck; then
+                # SC1090: Don't worry about not being able to find
+                # $nix/etc/profile.d/nix.sh
+                shellcheck --exclude SC1090 $TMPDIR/install
+                shellcheck $TMPDIR/install-darwin-multi-user.sh
+                shellcheck $TMPDIR/install-systemd-multi-user.sh
+
+                # SC1091: Don't panic about not being able to source
+                # /etc/profile
+                # SC2002: Ignore "useless cat" "error", when loading
+                # .reginfo, as the cat is a much cleaner
+                # implementation, even though it is "useless"
+                # SC2116: Allow ROOT_HOME=$(echo ~root) for resolving
+                # root's home directory
+                shellcheck --external-sources \
+                  --exclude SC1091,SC2002,SC2116 $TMPDIR/install-multi-user
+              fi
+
+              chmod +x $TMPDIR/install
+              chmod +x $TMPDIR/install-darwin-multi-user.sh
+              chmod +x $TMPDIR/install-systemd-multi-user.sh
+              chmod +x $TMPDIR/install-multi-user
+              dir=nix-${version}-${system}
+              fn=$out/$dir.tar.xz
+              mkdir -p $out/nix-support
+              echo "file binary-dist $fn" >> $out/nix-support/hydra-build-products
+              tar cvfJ $fn \
+                --owner=0 --group=0 --mode=u+rw,uga+r \
+                --absolute-names \
+                --hard-dereference \
+                --transform "s,$TMPDIR/install,$dir/install," \
+                --transform "s,$TMPDIR/reginfo,$dir/.reginfo," \
+                --transform "s,$NIX_STORE,$dir/store,S" \
+                $TMPDIR/install $TMPDIR/install-darwin-multi-user.sh \
+                $TMPDIR/install-systemd-multi-user.sh \
+                $TMPDIR/install-multi-user $TMPDIR/reginfo \
+                $(cat ${installerClosureInfo}/store-paths)
+            '');
+
+        # The first half of the installation script. This is uploaded
+        # to https://nixos.org/nix/install. It downloads the binary
+        # tarball for the user's system and calls the second half of the
+        # installation script.
+        installerScript =
+          with nixpkgsFor.x86_64-linux;
+          runCommand "installer-script"
+            { buildInputs = [ nix ];
+            }
+            ''
+              mkdir -p $out/nix-support
+
+              substitute ${./scripts/install.in} $out/install \
+                ${pkgs.lib.concatMapStrings
+                  (system: "--replace '@binaryTarball_${system}@' $(nix --experimental-features nix-command hash-file --base16 --type sha256 ${self.hydraJobs.binaryTarball.${system}}/*.tar.xz) ")
+                  [ "x86_64-linux" "i686-linux" "x86_64-darwin" "aarch64-linux" ]
+                } \
+                --replace '@nixVersion@' ${version}
+
+              echo "file installer $out/install" >> $out/nix-support/hydra-build-products
+            '';
+
+        # Line coverage analysis.
+        coverage =
+          with nixpkgsFor.x86_64-linux;
+          with commonDeps pkgs;
+
+          releaseTools.coverageAnalysis {
+            name = "nix-coverage-${version}";
+
+            src = self;
+
+            enableParallelBuilding = true;
+
+            buildInputs = buildDeps ++ propagatedDeps;
+
+            dontInstall = false;
+
+            doInstallCheck = true;
+
+            lcovFilter = [ "*/boost/*" "*-tab.*" ];
+
+            # We call `dot', and even though we just use it to
+            # syntax-check generated dot files, it still requires some
+            # fonts. So provide those.
+            FONTCONFIG_FILE = texFunctions.fontsConf;
+
+            # To test building without precompiled headers.
+            makeFlagsArray = [ "PRECOMPILE_HEADERS=0" ];
+          };
+
+        # System tests.
+        tests.remoteBuilds = import ./tests/remote-builds.nix {
+          system = "x86_64-linux";
+          inherit nixpkgs;
+          inherit (self) overlay;
+        };
+
+        tests.nix-copy-closure = import ./tests/nix-copy-closure.nix {
+          system = "x86_64-linux";
+          inherit nixpkgs;
+          inherit (self) overlay;
+        };
+
+        tests.githubFlakes = (import ./tests/github-flakes.nix rec {
+          system = "x86_64-linux";
+          inherit nixpkgs;
+          inherit (self) overlay;
+        });
+
+        tests.setuid = nixpkgs.lib.genAttrs
+          ["i686-linux" "x86_64-linux"]
+          (system:
+           import ./tests/setuid.nix rec {
+             inherit nixpkgs system;
+             inherit (self) overlay;
+           });
+
+        # Test whether the binary tarball works in an Ubuntu system.
+        tests.binaryTarball =
+          with nixpkgsFor.x86_64-linux;
+          vmTools.runInLinuxImage (runCommand "nix-binary-tarball-test"
+            { diskImage = vmTools.diskImages.ubuntu1204x86_64;
+            }
+            ''
+              set -x
+              useradd -m alice
+              su - alice -c 'tar xf ${self.hydraJobs.binaryTarball.x86_64-linux}/*.tar.*'
+              mkdir /dest-nix
+              mount -o bind /dest-nix /nix # Provide a writable /nix.
+              chown alice /nix
+              su - alice -c '_NIX_INSTALLER_TEST=1 ./nix-*/install'
+              su - alice -c 'nix-store --verify'
+              su - alice -c 'PAGER= nix-store -qR ${self.hydraJobs.build.x86_64-linux}'
+
+              # Check whether 'nix upgrade-nix' works.
+              cat > /tmp/paths.nix <<EOF
+              {
+                x86_64-linux = "${self.hydraJobs.build.x86_64-linux}";
+              }
+              EOF
+              su - alice -c 'nix --experimental-features nix-command upgrade-nix -vvv --nix-store-paths-url file:///tmp/paths.nix'
+              (! [ -L /home/alice/.profile-1-link ])
+              su - alice -c 'PAGER= nix-store -qR ${self.hydraJobs.build.x86_64-linux}'
+
+              mkdir -p $out/nix-support
+              touch $out/nix-support/hydra-build-products
+              umount /nix
+            '');
+
+        /*
+        # Check whether we can still evaluate all of Nixpkgs.
+        tests.evalNixpkgs =
+          import (nixpkgs + "/pkgs/top-level/make-tarball.nix") {
+            # FIXME: fix pkgs/top-level/make-tarball.nix in NixOS to not require a revCount.
+            inherit nixpkgs;
+            pkgs = nixpkgsFor.x86_64-linux;
+            officialRelease = false;
+          };
+
+        # Check whether we can still evaluate NixOS.
+        tests.evalNixOS =
+          with nixpkgsFor.x86_64-linux;
+          runCommand "eval-nixos" { buildInputs = [ nix ]; }
+            ''
+              export NIX_STATE_DIR=$TMPDIR
+
+              nix-instantiate ${nixpkgs}/nixos/release-combined.nix -A tested --dry-run \
+                --arg nixpkgs '{ outPath = ${nixpkgs}; revCount = 123; shortRev = "abcdefgh"; }'
+
+              touch $out
+            '';
+        */
+
+      };
+
+      checks = forAllSystems (system: {
+        binaryTarball = self.hydraJobs.binaryTarball.${system};
+        perlBindings = self.hydraJobs.perlBindings.${system};
+      });
+
+      packages = forAllSystems (system: {
+        inherit (nixpkgsFor.${system}) nix;
+      });
+
+      defaultPackage = forAllSystems (system: self.packages.${system}.nix);
+
+      devShell = forAllSystems (system:
+        with nixpkgsFor.${system};
+        with commonDeps pkgs;
+
+        stdenv.mkDerivation {
+          name = "nix";
+
+          buildInputs = buildDeps ++ propagatedDeps ++ perlDeps;
+
+          inherit configureFlags;
+
+          enableParallelBuilding = true;
+
+          installFlags = "sysconfdir=$(out)/etc";
+
+          shellHook =
+            ''
+              export prefix=$(pwd)/inst
+              configureFlags+=" --prefix=$prefix"
+              PKG_CONFIG_PATH=$prefix/lib/pkgconfig:$PKG_CONFIG_PATH
+              PATH=$prefix/bin:$PATH
+              unset PYTHONPATH
+            '';
+        });
+
+    };
+}
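
Taken together, the outputs above map onto the commands used elsewhere in this diff; a sketch, assuming a flakes-enabled nix:

```console
$ nix build         # builds defaultPackage for the current system
$ nix develop       # enters devShell, the flakes replacement for nix-shell
$ nix flake check   # builds the per-system 'checks' outputs
```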

--- a/local.mk
+++ b/local.mk
@@ -8,7 +8,7 @@ clean-files += Makefile.config
 
 GLOBAL_CXXFLAGS += -Wno-deprecated-declarations
 
-$(foreach i, config.h $(call rwildcard, src/lib*, *.hh), \
+$(foreach i, config.h $(wildcard src/lib*/*.hh), \
   $(eval $(call install-file-in, $(i), $(includedir)/nix, 0644)))
 
 $(GCH) $(PCH): src/libutil/util.hh config.h
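
The old `rwildcard` call installed headers from any depth under `src/lib*`, while `$(wildcard src/lib*/*.hh)` matches direct children only. A sketch of the difference against a hypothetical checkout layout (`flake/flakeref.hh` is a nested header referenced later in this diff):

```console
$ ls src/libexpr/eval.hh                  # direct child: matched by both patterns
$ find src/lib* -mindepth 2 -name '*.hh'  # nested headers: matched only by the old rwildcard
src/libexpr/flake/flakeref.hh
```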

--- /dev/null
+++ b/misc/bash/completion.sh
@@ -0,0 +1,19 @@
+function _complete_nix {
+    local -a words
+    local cword cur
+    _get_comp_words_by_ref -n ':=&' words cword cur
+    local have_type
+    while IFS= read -r line; do
+        if [[ -z $have_type ]]; then
+            have_type=1
+            if [[ $line = filenames ]]; then
+                compopt -o filenames
+            fi
+        else
+            COMPREPLY+=("$line")
+        fi
+    done < <(NIX_GET_COMPLETIONS=$cword "${words[@]}")
+    __ltrim_colon_completions "$cur"
+}
+
+complete -F _complete_nix nix
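
The completion function re-runs the command line with `NIX_GET_COMPLETIONS` set to the index of the word being completed; the first output line names the completion type (only `filenames` is acted upon here) and every following line becomes a candidate. A sketch of the protocol with hypothetical output, the shape inferred from the read loop above:

```console
$ NIX_GET_COMPLETIONS=1 nix b
no-filenames
build
bundle
```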

--- /dev/null
+++ b/misc/bash/local.mk
@@ -0,0 +1 @@
+$(eval $(call install-file-as, $(d)/completion.sh, $(datarootdir)/bash-completion/completions/nix, 0644))

--- a/mk/precompiled-headers.mk
+++ b/mk/precompiled-headers.mk
@@ -21,13 +21,13 @@ clean-files += $(GCH) $(PCH)
 
 ifeq ($(PRECOMPILE_HEADERS), 1)
 
-  ifeq ($(CXX), g++)
+  ifeq ($(findstring g++,$(CXX)), g++)
 
     GLOBAL_CXXFLAGS_PCH += -include $(buildprefix)precompiled-headers.h -Winvalid-pch
 
     GLOBAL_ORDER_AFTER += $(GCH)
 
-  else ifeq ($(CXX), clang++)
+  else ifeq ($(findstring clang++,$(CXX)), clang++)
 
     GLOBAL_CXXFLAGS_PCH += -include-pch $(PCH) -Winvalid-pch
 
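
`$(CXX)` is often not the bare compiler name: wrappers and cross toolchains yield values like `ccache g++` or a target-prefixed `aarch64-unknown-linux-gnu-g++`, which an exact `ifeq` comparison rejects, while `findstring` does a substring match. Hypothetical invocations illustrating the difference:

```console
$ make PRECOMPILE_HEADERS=1 CXX=g++                           # detected before and after
$ make PRECOMPILE_HEADERS=1 CXX=aarch64-unknown-linux-gnu-g++ # detected only via findstring
$ make PRECOMPILE_HEADERS=1 CXX='ccache clang++'              # likewise for the clang branch
```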

--- a/perl/lib/Nix/Store.xs
+++ b/perl/lib/Nix/Store.xs
@@ -80,7 +80,7 @@ SV * queryReferences(char * path)
 SV * queryPathHash(char * path)
     PPCODE:
         try {
-            auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash.to_string(Base32, true);
+            auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash->to_string(Base32, true);
             XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
         } catch (Error & e) {
             croak("%s", e.what());
@@ -106,7 +106,7 @@ SV * queryPathInfo(char * path, int base32)
                 XPUSHs(&PL_sv_undef);
             else
                 XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(*info->deriver).c_str(), 0)));
-            auto s = info->narHash.to_string(base32 ? Base32 : Base16, true);
+            auto s = info->narHash->to_string(base32 ? Base32 : Base16, true);
             XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
             mXPUSHi(info->registrationTime);
             mXPUSHi(info->narSize);
@@ -304,7 +304,10 @@ SV * derivationFromPath(char * drvPath)
 
         HV * outputs = newHV();
         for (auto & i : drv.outputs)
-            hv_store(outputs, i.first.c_str(), i.first.size(), newSVpv(store()->printStorePath(i.second.path).c_str(), 0), 0);
+            hv_store(
+                outputs, i.first.c_str(), i.first.size(),
+                newSVpv(store()->printStorePath(i.second.path(*store(), drv.name)).c_str(), 0),
+                0);
         hv_stores(hash, "outputs", newRV((SV *) outputs));
 
         AV * inputDrvs = newAV();

--- a/release-common.nix
+++ /dev/null
@@ -1,82 +0,0 @@
-{ pkgs }:
-
-with pkgs;
-
-rec {
-  # Use "busybox-sandbox-shell" if present,
-  # if not (legacy) fallback and hope it's sufficient.
-  sh = pkgs.busybox-sandbox-shell or (busybox.override {
-    useMusl = true;
-    enableStatic = true;
-    enableMinimal = true;
-    extraConfig = ''
-      CONFIG_FEATURE_FANCY_ECHO y
-      CONFIG_FEATURE_SH_MATH y
-      CONFIG_FEATURE_SH_MATH_64 y
-
-      CONFIG_ASH y
-      CONFIG_ASH_OPTIMIZE_FOR_SIZE y
-
-      CONFIG_ASH_ALIAS y
-      CONFIG_ASH_BASH_COMPAT y
-      CONFIG_ASH_CMDCMD y
-      CONFIG_ASH_ECHO y
-      CONFIG_ASH_GETOPTS y
-      CONFIG_ASH_INTERNAL_GLOB y
-      CONFIG_ASH_JOB_CONTROL y
-      CONFIG_ASH_PRINTF y
-      CONFIG_ASH_TEST y
-    '';
-  });
-
-  configureFlags =
-    lib.optionals stdenv.isLinux [
-      "--with-sandbox-shell=${sh}/bin/busybox"
-    ];
-
-  buildDeps =
-    [ bison
-      flex
-      libxml2
-      libxslt
-      docbook5
-      docbook_xsl_ns
-      autoconf-archive
-      autoreconfHook
-
-      curl
-      bzip2 xz brotli zlib editline
-      openssl pkgconfig sqlite
-      libarchive
-      boost
-      nlohmann_json
-
-      # Tests
-      git
-      mercurial
-      gmock
-    ]
-    ++ lib.optionals stdenv.isLinux [libseccomp utillinuxMinimal]
-    ++ lib.optional (stdenv.isLinux || stdenv.isDarwin) libsodium
-    ++ lib.optional (stdenv.isLinux || stdenv.isDarwin)
-      ((aws-sdk-cpp.override {
-        apis = ["s3" "transfer"];
-        customMemoryManagement = false;
-      }).overrideDerivation (args: {
-        /*
-        patches = args.patches or [] ++ [ (fetchpatch {
-          url = https://github.com/edolstra/aws-sdk-cpp/commit/3e07e1f1aae41b4c8b340735ff9e8c735f0c063f.patch;
-          sha256 = "1pij0v449p166f9l29x7ppzk8j7g9k9mp15ilh5qxp29c7fnvxy2";
-        }) ];
-        */
-      }));
-
-  propagatedDeps =
-    [ (boehmgc.override { enableLargeConfig = true; })
-    ];
-
-  perlDeps =
-    [ perl
-      perlPackages.DBDSQLite
-    ];
-}

--- a/release.nix
+++ /dev/null
@@ -1,303 +0,0 @@
-{ nix ? builtins.fetchGit ./.
-, nixpkgs ? builtins.fetchTarball https://github.com/NixOS/nixpkgs/archive/nixos-20.03-small.tar.gz
-, officialRelease ? false
-, systems ? [ "x86_64-linux" "i686-linux" "x86_64-darwin" "aarch64-linux" ]
-}:
-
-let
-
-  pkgs = import nixpkgs { system = builtins.currentSystem or "x86_64-linux"; };
-
-  version =
-    builtins.readFile ./.version
-    + (if officialRelease then "" else "pre${toString nix.revCount}_${nix.shortRev}");
-
-  jobs = rec {
-
-    build = pkgs.lib.genAttrs systems (system:
-
-      let pkgs = import nixpkgs { inherit system; }; in
-
-      with pkgs;
-
-      with import ./release-common.nix { inherit pkgs; };
-
-      stdenv.mkDerivation {
-        name = "nix-${version}";
-
-        src = nix;
-
-        outputs = [ "out" "dev" "doc" ];
-
-        buildInputs = buildDeps;
-
-        propagatedBuildInputs = propagatedDeps;
-
-        preConfigure =
-          ''
-            # Copy libboost_context so we don't get all of Boost in our closure.
-            # https://github.com/NixOS/nixpkgs/issues/45462
-            mkdir -p $out/lib
-            cp -pd ${boost}/lib/{libboost_context*,libboost_thread*,libboost_system*} $out/lib
-            rm -f $out/lib/*.a
-            ${lib.optionalString stdenv.isLinux ''
-              chmod u+w $out/lib/*.so.*
-              patchelf --set-rpath $out/lib:${stdenv.cc.cc.lib}/lib $out/lib/libboost_thread.so.*
-            ''}
-
-            (cd perl; autoreconf --install --force --verbose)
-          '';
-
-        configureFlags = configureFlags ++
-          [ "--sysconfdir=/etc" ];
-
-        enableParallelBuilding = true;
-
-        makeFlags = "profiledir=$(out)/etc/profile.d";
-
-        installFlags = "sysconfdir=$(out)/etc";
-
-        postInstall = ''
-          mkdir -p $doc/nix-support
-          echo "doc manual $doc/share/doc/nix/manual" >> $doc/nix-support/hydra-build-products
-        '';
-
-        doCheck = true;
-
-        doInstallCheck = true;
-        installCheckFlags = "sysconfdir=$(out)/etc";
-
-        separateDebugInfo = true;
-      });
-
-
-    perlBindings = pkgs.lib.genAttrs systems (system:
-
-      let pkgs = import nixpkgs { inherit system; }; in with pkgs;
-
-      releaseTools.nixBuild {
-        name = "nix-perl-${version}";
-
-        src = nix;
-
-        buildInputs =
-          [ autoconf-archive
-            autoreconfHook
-            jobs.build.${system}
-            curl
-            bzip2
-            xz
-            pkgconfig
-            pkgs.perl
-            boost
-          ]
-          ++ lib.optional (stdenv.isLinux || stdenv.isDarwin) libsodium;
-
-        configureFlags = ''
-          --with-dbi=${perlPackages.DBI}/${pkgs.perl.libPrefix}
-          --with-dbd-sqlite=${perlPackages.DBDSQLite}/${pkgs.perl.libPrefix}
-        '';
-
-        enableParallelBuilding = true;
-
-        postUnpack = "sourceRoot=$sourceRoot/perl";
-      });
-
-
-    binaryTarball = pkgs.lib.genAttrs systems (system:
-
-      with import nixpkgs { inherit system; };
-
-      let
-        toplevel = builtins.getAttr system jobs.build;
-        installerClosureInfo = closureInfo { rootPaths = [ toplevel cacert ]; };
-      in
-
-      runCommand "nix-binary-tarball-${version}"
-        { #nativeBuildInputs = lib.optional (system != "aarch64-linux") shellcheck;
-          meta.description = "Distribution-independent Nix bootstrap binaries for ${system}";
-        }
-        ''
-          cp ${installerClosureInfo}/registration $TMPDIR/reginfo
-          cp ${./scripts/create-darwin-volume.sh} $TMPDIR/create-darwin-volume.sh
-          substitute ${./scripts/install-nix-from-closure.sh} $TMPDIR/install \
-            --subst-var-by nix ${toplevel} \
-            --subst-var-by cacert ${cacert}
-          substitute ${./scripts/install-darwin-multi-user.sh} $TMPDIR/install-darwin-multi-user.sh \
-            --subst-var-by nix ${toplevel} \
-            --subst-var-by cacert ${cacert}
-          substitute ${./scripts/install-systemd-multi-user.sh} $TMPDIR/install-systemd-multi-user.sh \
-            --subst-var-by nix ${toplevel} \
-            --subst-var-by cacert ${cacert}
-          substitute ${./scripts/install-multi-user.sh} $TMPDIR/install-multi-user \
-            --subst-var-by nix ${toplevel} \
-            --subst-var-by cacert ${cacert}
-
-          if type -p shellcheck; then
-            # SC1090: Don't worry about not being able to find
-            # $nix/etc/profile.d/nix.sh
-            shellcheck --exclude SC1090 $TMPDIR/install
-            shellcheck $TMPDIR/create-darwin-volume.sh
-            shellcheck $TMPDIR/install-darwin-multi-user.sh
-            shellcheck $TMPDIR/install-systemd-multi-user.sh
-
-            # SC1091: Don't panic about not being able to source
-            # /etc/profile
-            # SC2002: Ignore "useless cat" "error", when loading
-            # .reginfo, as the cat is a much cleaner
-            # implementation, even though it is "useless"
-            # SC2116: Allow ROOT_HOME=$(echo ~root) for resolving
-            # root's home directory
-            shellcheck --external-sources \
-              --exclude SC1091,SC2002,SC2116 $TMPDIR/install-multi-user
-          fi
-
-          chmod +x $TMPDIR/install
-          chmod +x $TMPDIR/create-darwin-volume.sh
-          chmod +x $TMPDIR/install-darwin-multi-user.sh
-          chmod +x $TMPDIR/install-systemd-multi-user.sh
-          chmod +x $TMPDIR/install-multi-user
-          dir=nix-${version}-${system}
-          fn=$out/$dir.tar.xz
-          mkdir -p $out/nix-support
-          echo "file binary-dist $fn" >> $out/nix-support/hydra-build-products
-          tar cvfJ $fn \
-            --owner=0 --group=0 --mode=u+rw,uga+r \
-            --absolute-names \
-            --hard-dereference \
-            --transform "s,$TMPDIR/install,$dir/install," \
-            --transform "s,$TMPDIR/create-darwin-volume.sh,$dir/create-darwin-volume.sh," \
-            --transform "s,$TMPDIR/reginfo,$dir/.reginfo," \
-            --transform "s,$NIX_STORE,$dir/store,S" \
-            $TMPDIR/install \
-            $TMPDIR/create-darwin-volume.sh \
-            $TMPDIR/install-darwin-multi-user.sh \
-            $TMPDIR/install-systemd-multi-user.sh \
-            $TMPDIR/install-multi-user \
-            $TMPDIR/reginfo \
-            $(cat ${installerClosureInfo}/store-paths)
-        '');
-
-
-    coverage =
-      with pkgs;
-
-      with import ./release-common.nix { inherit pkgs; };
-
-      releaseTools.coverageAnalysis {
-        name = "nix-coverage-${version}";
-
-        src = nix;
-
-        enableParallelBuilding = true;
-
-        buildInputs = buildDeps ++ propagatedDeps;
-
-        dontInstall = false;
-
-        doInstallCheck = true;
-
-        lcovFilter = [ "*/boost/*" "*-tab.*" ];
-
-        # We call `dot', and even though we just use it to
-        # syntax-check generated dot files, it still requires some
-        # fonts. So provide those.
-        FONTCONFIG_FILE = texFunctions.fontsConf;
-
-        # To test building without precompiled headers.
-        makeFlagsArray = [ "PRECOMPILE_HEADERS=0" ];
-      };
-
-
-    # System tests.
-    tests.remoteBuilds = (import ./tests/remote-builds.nix rec {
-      inherit nixpkgs;
-      nix = build.x86_64-linux; system = "x86_64-linux";
-    });
-
-    tests.nix-copy-closure = (import ./tests/nix-copy-closure.nix rec {
-      inherit nixpkgs;
-      nix = build.x86_64-linux; system = "x86_64-linux";
-    });
-
-    tests.setuid = pkgs.lib.genAttrs
-      ["i686-linux" "x86_64-linux"]
-      (system:
-        import ./tests/setuid.nix rec {
-          inherit nixpkgs;
-          nix = build.${system}; inherit system;
-        });
-
-    tests.binaryTarball =
-      with import nixpkgs { system = "x86_64-linux"; };
-      vmTools.runInLinuxImage (runCommand "nix-binary-tarball-test"
-        { diskImage = vmTools.diskImages.ubuntu1204x86_64;
-        }
-        ''
-          set -x
-          useradd -m alice
-          su - alice -c 'tar xf ${binaryTarball.x86_64-linux}/*.tar.*'
-          mkdir /dest-nix
-          mount -o bind /dest-nix /nix # Provide a writable /nix.
-          chown alice /nix
-          su - alice -c '_NIX_INSTALLER_TEST=1 ./nix-*/install'
-          su - alice -c 'nix-store --verify'
-          su - alice -c 'PAGER= nix-store -qR ${build.x86_64-linux}'
-
-          # Check whether 'nix upgrade-nix' works.
-          cat > /tmp/paths.nix <<EOF
-          {
-            x86_64-linux = "${build.x86_64-linux}";
-          }
-          EOF
-          su - alice -c 'nix --experimental-features nix-command upgrade-nix -vvv --nix-store-paths-url file:///tmp/paths.nix'
-          (! [ -L /home/alice/.profile-1-link ])
-          su - alice -c 'PAGER= nix-store -qR ${build.x86_64-linux}'
-
-          mkdir -p $out/nix-support
-          touch $out/nix-support/hydra-build-products
-          umount /nix
-        ''); # */
-
-    /*
-    tests.evalNixpkgs =
-      import (nixpkgs + "/pkgs/top-level/make-tarball.nix") {
-        inherit nixpkgs;
-        inherit pkgs;
-        nix = build.x86_64-linux;
-        officialRelease = false;
-      };
-
-    tests.evalNixOS =
-      pkgs.runCommand "eval-nixos" { buildInputs = [ build.x86_64-linux ]; }
-        ''
-          export NIX_STATE_DIR=$TMPDIR
-
-          nix-instantiate ${nixpkgs}/nixos/release-combined.nix -A tested --dry-run \
-            --arg nixpkgs '{ outPath = ${nixpkgs}; revCount = 123; shortRev = "abcdefgh"; }'
-
-          touch $out
-        '';
-    */
-
-
-    installerScript =
-      pkgs.runCommand "installer-script"
-        { buildInputs = [ build.${builtins.currentSystem or "x86_64-linux"} ]; }
-        ''
-          mkdir -p $out/nix-support
-
-          substitute ${./scripts/install.in} $out/install \
-            ${pkgs.lib.concatMapStrings
-              (system: "--replace '@binaryTarball_${system}@' $(nix --experimental-features nix-command hash-file --base16 --type sha256 ${binaryTarball.${system}}/*.tar.xz) ")
-              systems
-            } \
-            --replace '@nixVersion@' ${version}
-
-          echo "file installer $out/install" >> $out/nix-support/hydra-build-products
-        '';
-
-  };
-
-
-in jobs
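
The deleted jobset survives as the flake's `hydraJobs` attribute, so old invocations have direct flake counterparts; a rough sketch, with the new-style syntax assumed for a flakes-enabled nix:

```console
$ nix-build release.nix -A build.x86_64-linux   # before this change
$ nix build .#hydraJobs.build.x86_64-linux      # after, via flake.nix
```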

--- a/scripts/install-multi-user.sh
+++ b/scripts/install-multi-user.sh
@@ -37,6 +37,8 @@ readonly PROFILE_NIX_FILE="$NIX_ROOT/var/nix/profiles/default/etc/profile.d/nix-
 
 readonly NIX_INSTALLED_NIX="@nix@"
 readonly NIX_INSTALLED_CACERT="@cacert@"
+#readonly NIX_INSTALLED_NIX="/nix/store/j8dbv5w6jl34caywh2ygdy88knx1mdf7-nix-2.3.6"
+#readonly NIX_INSTALLED_CACERT="/nix/store/7dxhzymvy330i28ii676fl1pqwcahv2f-nss-cacert-3.49.2"
 readonly EXTRACTED_NIX_PATH="$(dirname "$0")"
 
 readonly ROOT_HOME=$(echo ~root)
@@ -69,9 +71,11 @@ uninstall_directions() {
     subheader "Uninstalling nix:"
     local step=0
 
-    if poly_service_installed_check; then
+    if [ -e /run/systemd/system ] && poly_service_installed_check; then
         step=$((step + 1))
         poly_service_uninstall_directions "$step"
+    else
+        step=$((step + 1))
     fi
 
     for profile_target in "${PROFILE_TARGETS[@]}"; do
@@ -250,6 +254,8 @@ function finish_success {
         echo "But fetching the nixpkgs channel failed. (Are you offline?)"
         echo "To try again later, run \"sudo -i nix-channel --update nixpkgs\"."
     fi
+
+    if [ -e /run/systemd/system ]; then
     cat <<EOF
 
 Before Nix will work in your existing shells, you'll need to close
@@ -264,6 +270,26 @@ hesitate:
 
 $(contactme)
 EOF
+    else
+        cat <<EOF
+
+Before Nix will work in your existing shells, you'll need to close
+them and open them again. Other than that, you should be ready to go.
+
+Try it! Open a new terminal, and type:
+
+  $ sudo nix-daemon
+  $ nix-shell -p nix-info --run "nix-info -m"
+
+Additionally, you may want to add nix-daemon to your init-system.
+
+Thank you for using this installer. If you have any feedback, don't
+hesitate:
+
+$(contactme)
+EOF
+    fi
+
 }
 
 
@@ -664,12 +690,8 @@ main() {
         # shellcheck source=./install-darwin-multi-user.sh
         . "$EXTRACTED_NIX_PATH/install-darwin-multi-user.sh"
     elif [ "$(uname -s)" = "Linux" ]; then
-        if [ -e /run/systemd/system ]; then
-            # shellcheck source=./install-systemd-multi-user.sh
-            . "$EXTRACTED_NIX_PATH/install-systemd-multi-user.sh"
-        else
-            failure "Sorry, the multi-user installation requires systemd on Linux (detected using /run/systemd/system)"
-        fi
+        # shellcheck source=./install-systemd-multi-user.sh
+        . "$EXTRACTED_NIX_PATH/install-systemd-multi-user.sh" # most of this works on non-systemd distros also
     else
         failure "Sorry, I don't know what to do on $(uname)"
     fi
@@ -702,7 +724,10 @@ main() {
 
     setup_default_profile
     place_nix_configuration
-    poly_configure_nix_daemon_service
+
+    if [ -e /run/systemd/system ]; then
+        poly_configure_nix_daemon_service
+    fi
 
     trap finish_success EXIT
 }
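
All of the new branches key off the same probe: `/run/systemd/system` exists exactly when systemd is the running init (the check documented in sd_booted(3)), which lets the installer proceed on non-systemd Linux instead of aborting. The probe in isolation:

```console
$ [ -e /run/systemd/system ] && echo "systemd is PID 1" || echo "other init system"
```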

--- a/scripts/install-nix-from-closure.sh
+++ b/scripts/install-nix-from-closure.sh
@@ -35,7 +35,7 @@ fi
 # Determine if we could use the multi-user installer or not
 if [ "$(uname -s)" = "Darwin" ]; then
     echo "Note: a multi-user installation is possible. See https://nixos.org/nix/manual/#sect-multi-user-installation" >&2
-elif [ "$(uname -s)" = "Linux" ] && [ -e /run/systemd/system ]; then
+elif [ "$(uname -s)" = "Linux" ]; then
     echo "Note: a multi-user installation is possible. See https://nixos.org/nix/manual/#sect-multi-user-installation" >&2
 fi
 
@@ -122,7 +122,7 @@ if [ "$(uname -s)" = "Darwin" ]; then
 fi
 
 if [ "$INSTALL_MODE" = "daemon" ]; then
-    printf '\e[1;31mSwitching to the Daemon-based Installer\e[0m\n'
+    printf '\e[1;31mSwitching to the Multi-user Installer\e[0m\n'
     exec "$self/install-multi-user"
     exit 0
 fi
@@ -207,7 +207,7 @@ if [ -z "$NIX_INSTALLER_NO_MODIFY_PROFILE" ]; then
     if [ -w "$fn" ]; then
         if ! grep -q "$p" "$fn"; then
             echo "modifying $fn..." >&2
-            echo "if [ -e $p ]; then . $p; fi # added by Nix installer" >> "$fn"
+            echo -e "\nif [ -e $p ]; then . $p; fi # added by Nix installer" >> "$fn"
         fi
         added=1
         break
@@ -218,7 +218,7 @@ if [ -z "$NIX_INSTALLER_NO_MODIFY_PROFILE" ]; then
     if [ -w "$fn" ]; then
         if ! grep -q "$p" "$fn"; then
             echo "modifying $fn..." >&2
-            echo "if [ -e $p ]; then . $p; fi # added by Nix installer" >> "$fn"
+            echo -e "\nif [ -e $p ]; then . $p; fi # added by Nix installer" >> "$fn"
         fi
         added=1
         break
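
The switch to `echo -e "\n..."` guards against a shell profile that lacks a trailing newline, where a plain append glues the snippet onto the profile's last line. A minimal reproduction with a hypothetical profile file:

```console
$ printf '%s' 'export EDITOR=vi' > profile   # deliberately no trailing newline
$ echo 'source /nix/nix.sh' >> profile
$ tail -n 1 profile
export EDITOR=visource /nix/nix.sh           # the two lines fused together
```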

--- a/shell.nix
+++ b/shell.nix
@@ -1,25 +1,3 @@
-{ useClang ? false }:
-
-with import (builtins.fetchTarball https://github.com/NixOS/nixpkgs/archive/nixos-20.03-small.tar.gz) {};
-
-with import ./release-common.nix { inherit pkgs; };
-
-(if useClang then clangStdenv else stdenv).mkDerivation {
-  name = "nix";
-
-  buildInputs = buildDeps ++ propagatedDeps ++ perlDeps;
-
-  inherit configureFlags;
-
-  enableParallelBuilding = true;
-
-  installFlags = "sysconfdir=$(out)/etc";
-
-  shellHook =
-    ''
-      export prefix=$(pwd)/inst
-      configureFlags+=" --prefix=$prefix"
-      PKG_CONFIG_PATH=$prefix/lib/pkgconfig:$PKG_CONFIG_PATH
-      PATH=$prefix/bin:$PATH
-    '';
-}
+(import (fetchTarball https://github.com/edolstra/flake-compat/archive/master.tar.gz) {
+  src = ./.;
+}).shellNix
src/build-remote/build-remote.cc

@@ -33,7 +33,7 @@ std::string escapeUri(std::string uri)

 static string currentLoad;

-static AutoCloseFD openSlotLock(const Machine & m, unsigned long long slot)
+static AutoCloseFD openSlotLock(const Machine & m, uint64_t slot)
 {
     return openLockFile(fmt("%s/%s-%d", currentLoad, escapeUri(m.storeUri), slot), true);
 }

@@ -119,7 +119,7 @@ static int _main(int argc, char * * argv)
             bool rightType = false;

             Machine * bestMachine = nullptr;
-            unsigned long long bestLoad = 0;
+            uint64_t bestLoad = 0;
             for (auto & m : machines) {
                 debug("considering building on remote machine '%s'", m.storeUri);

@@ -130,8 +130,8 @@ static int _main(int argc, char * * argv)
                     m.mandatoryMet(requiredFeatures)) {
                     rightType = true;
                     AutoCloseFD free;
-                    unsigned long long load = 0;
-                    for (unsigned long long slot = 0; slot < m.maxJobs; ++slot) {
+                    uint64_t load = 0;
+                    for (uint64_t slot = 0; slot < m.maxJobs; ++slot) {
                         auto slotLock = openSlotLock(m, slot);
                         if (lockFile(slotLock.get(), ltWrite, false)) {
                             if (!free) {
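The hunk above (cut off mid-loop) probes one lock file per build slot with a non-blocking write lock: a slot whose lock is held elsewhere counts toward the machine's load, and the first lockable slot is kept for the new build. A minimal, self-contained sketch of that pattern, using plain POSIX `flock(2)` in place of Nix's `openLockFile`/`lockFile` helpers; the path prefix and `maxJobs` value below are illustrative:

```cpp
#include <fcntl.h>
#include <sys/file.h>
#include <unistd.h>
#include <cstdint>
#include <cstdio>
#include <string>

int main()
{
    uint64_t maxJobs = 4;   // illustrative; Nix reads this from the machines file
    uint64_t load = 0;      // number of slots currently held by other builds
    int freeFd = -1;        // fd of the first slot we managed to lock

    for (uint64_t slot = 0; slot < maxJobs; ++slot) {
        std::string path = "/tmp/example-machine-" + std::to_string(slot);
        int fd = open(path.c_str(), O_RDWR | O_CREAT | O_CLOEXEC, 0600);
        if (fd < 0) return 1;
        if (flock(fd, LOCK_EX | LOCK_NB) == 0) {
            if (freeFd == -1)
                freeFd = fd;   // keep the lock on the first free slot for our build
            else
                close(fd);     // release: we only need one slot
        } else {
            ++load;            // slot is busy: another process holds the lock
            close(fd);
        }
    }

    printf("load: %llu of %llu slots busy\n",
        (unsigned long long) load, (unsigned long long) maxJobs);
    if (freeFd != -1) close(freeFd);
    return 0;
}
```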
src/libexpr/common-eval-args.cc

@@ -4,6 +4,8 @@
 #include "util.hh"
 #include "eval.hh"
 #include "fetchers.hh"
+#include "registry.hh"
+#include "flake/flakeref.hh"
 #include "store-api.hh"

 namespace nix {

@@ -31,6 +33,27 @@ MixEvalArgs::MixEvalArgs()
         .labels = {"path"},
         .handler = {[&](std::string s) { searchPath.push_back(s); }}
     });
+
+    addFlag({
+        .longName = "impure",
+        .description = "allow access to mutable paths and repositories",
+        .handler = {[&]() {
+            evalSettings.pureEval = false;
+        }},
+    });
+
+    addFlag({
+        .longName = "override-flake",
+        .description = "override a flake registry value",
+        .labels = {"original-ref", "resolved-ref"},
+        .handler = {[&](std::string _from, std::string _to) {
+            auto from = parseFlakeRef(_from, absPath("."));
+            auto to = parseFlakeRef(_to, absPath("."));
+            fetchers::Attrs extraAttrs;
+            if (to.subdir != "") extraAttrs["dir"] = to.subdir;
+            fetchers::overrideRegistry(from.input, to.input, extraAttrs);
+        }}
+    });
 }

 Bindings * MixEvalArgs::getAutoArgs(EvalState & state)

@@ -53,7 +76,7 @@ Path lookupFileArg(EvalState & state, string s)
     if (isUri(s)) {
         return state.store->toRealPath(
             fetchers::downloadTarball(
-                state.store, resolveUri(s), "source", false).storePath);
+                state.store, resolveUri(s), "source", false).first.storePath);
     } else if (s.size() > 2 && s.at(0) == '<' && s.at(s.size() - 1) == '>') {
         Path p = s.substr(1, s.size() - 2);
         return state.findFile(p);
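Usage note: `--impure` switches off pure evaluation for the current invocation, and `--override-flake <original-ref> <resolved-ref>` (e.g. `--override-flake nixpkgs /path/to/local/nixpkgs`, an illustrative pair) rewrites a registry entry for the duration of the command; a subdirectory on the replacement ref travels along as the `dir` fetcher attribute. The `.first.storePath` change in `lookupFileArg` appears to track `fetchers::downloadTarball` now returning a pair whose first element is the downloaded tree.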
616 src/libexpr/eval-cache.cc (new file)

@@ -0,0 +1,616 @@
+#include "eval-cache.hh"
+#include "sqlite.hh"
+#include "eval.hh"
+#include "eval-inline.hh"
+#include "store-api.hh"
+
+namespace nix::eval_cache {
+
+static const char * schema = R"sql(
+create table if not exists Attributes (
+    parent  integer not null,
+    name    text,
+    type    integer not null,
+    value   text,
+    context text,
+    primary key (parent, name)
+);
+)sql";
+
+struct AttrDb
+{
+    std::atomic_bool failed{false};
+
+    struct State
+    {
+        SQLite db;
+        SQLiteStmt insertAttribute;
+        SQLiteStmt insertAttributeWithContext;
+        SQLiteStmt queryAttribute;
+        SQLiteStmt queryAttributes;
+        std::unique_ptr<SQLiteTxn> txn;
+    };
+
+    std::unique_ptr<Sync<State>> _state;
+
+    AttrDb(const Hash & fingerprint)
+        : _state(std::make_unique<Sync<State>>())
+    {
+        auto state(_state->lock());
+
+        Path cacheDir = getCacheDir() + "/nix/eval-cache-v2";
+        createDirs(cacheDir);
+
+        Path dbPath = cacheDir + "/" + fingerprint.to_string(Base16, false) + ".sqlite";
+
+        state->db = SQLite(dbPath);
+        state->db.isCache();
+        state->db.exec(schema);
+
+        state->insertAttribute.create(state->db,
+            "insert or replace into Attributes(parent, name, type, value) values (?, ?, ?, ?)");
+
+        state->insertAttributeWithContext.create(state->db,
+            "insert or replace into Attributes(parent, name, type, value, context) values (?, ?, ?, ?, ?)");
+
+        state->queryAttribute.create(state->db,
+            "select rowid, type, value, context from Attributes where parent = ? and name = ?");
+
+        state->queryAttributes.create(state->db,
+            "select name from Attributes where parent = ?");
+
+        state->txn = std::make_unique<SQLiteTxn>(state->db);
+    }
+
+    ~AttrDb()
+    {
+        try {
+            auto state(_state->lock());
+            if (!failed)
+                state->txn->commit();
+            state->txn.reset();
+        } catch (...) {
+            ignoreException();
+        }
+    }
+
+    template<typename F>
+    AttrId doSQLite(F && fun)
+    {
+        if (failed) return 0;
+        try {
+            return fun();
+        } catch (SQLiteError &) {
+            ignoreException();
+            failed = true;
+            return 0;
+        }
+    }
+
+    AttrId setAttrs(
+        AttrKey key,
+        const std::vector<Symbol> & attrs)
+    {
+        return doSQLite([&]()
+        {
+            auto state(_state->lock());
+
+            state->insertAttribute.use()
+                (key.first)
+                (key.second)
+                (AttrType::FullAttrs)
+                (0, false).exec();
+
+            AttrId rowId = state->db.getLastInsertedRowId();
+            assert(rowId);
+
+            for (auto & attr : attrs)
+                state->insertAttribute.use()
+                    (rowId)
+                    (attr)
+                    (AttrType::Placeholder)
+                    (0, false).exec();
+
+            return rowId;
+        });
+    }
+
+    AttrId setString(
+        AttrKey key,
+        std::string_view s,
+        const char * * context = nullptr)
+    {
+        return doSQLite([&]()
+        {
+            auto state(_state->lock());
+
+            if (context) {
+                std::string ctx;
+                for (const char * * p = context; *p; ++p) {
+                    if (p != context) ctx.push_back(' ');
+                    ctx.append(*p);
+                }
+                state->insertAttributeWithContext.use()
+                    (key.first)
+                    (key.second)
+                    (AttrType::String)
+                    (s)
+                    (ctx).exec();
+            } else {
+                state->insertAttribute.use()
+                    (key.first)
+                    (key.second)
+                    (AttrType::String)
+                    (s).exec();
+            }
+
+            return state->db.getLastInsertedRowId();
+        });
+    }
+
+    AttrId setBool(
+        AttrKey key,
+        bool b)
+    {
+        return doSQLite([&]()
+        {
+            auto state(_state->lock());
+
+            state->insertAttribute.use()
+                (key.first)
+                (key.second)
+                (AttrType::Bool)
+                (b ? 1 : 0).exec();
+
+            return state->db.getLastInsertedRowId();
+        });
+    }
+
+    AttrId setPlaceholder(AttrKey key)
+    {
+        return doSQLite([&]()
+        {
+            auto state(_state->lock());
+
+            state->insertAttribute.use()
+                (key.first)
+                (key.second)
+                (AttrType::Placeholder)
+                (0, false).exec();
+
+            return state->db.getLastInsertedRowId();
+        });
+    }
+
+    AttrId setMissing(AttrKey key)
+    {
+        return doSQLite([&]()
+        {
+            auto state(_state->lock());
+
+            state->insertAttribute.use()
+                (key.first)
+                (key.second)
+                (AttrType::Missing)
+                (0, false).exec();
+
+            return state->db.getLastInsertedRowId();
+        });
+    }
+
+    AttrId setMisc(AttrKey key)
+    {
+        return doSQLite([&]()
+        {
+            auto state(_state->lock());
+
+            state->insertAttribute.use()
+                (key.first)
+                (key.second)
+                (AttrType::Misc)
+                (0, false).exec();
+
+            return state->db.getLastInsertedRowId();
+        });
+    }
+
+    AttrId setFailed(AttrKey key)
+    {
+        return doSQLite([&]()
+        {
+            auto state(_state->lock());
+
+            state->insertAttribute.use()
+                (key.first)
+                (key.second)
+                (AttrType::Failed)
+                (0, false).exec();
+
+            return state->db.getLastInsertedRowId();
+        });
+    }
+
+    std::optional<std::pair<AttrId, AttrValue>> getAttr(
+        AttrKey key,
+        SymbolTable & symbols)
+    {
+        auto state(_state->lock());
+
+        auto queryAttribute(state->queryAttribute.use()(key.first)(key.second));
+        if (!queryAttribute.next()) return {};
+
+        auto rowId = (AttrType) queryAttribute.getInt(0);
+        auto type = (AttrType) queryAttribute.getInt(1);
+
+        switch (type) {
+            case AttrType::Placeholder:
+                return {{rowId, placeholder_t()}};
+            case AttrType::FullAttrs: {
+                // FIXME: expensive, should separate this out.
+                std::vector<Symbol> attrs;
+                auto queryAttributes(state->queryAttributes.use()(rowId));
+                while (queryAttributes.next())
+                    attrs.push_back(symbols.create(queryAttributes.getStr(0)));
+                return {{rowId, attrs}};
+            }
+            case AttrType::String: {
+                std::vector<std::pair<Path, std::string>> context;
+                if (!queryAttribute.isNull(3))
+                    for (auto & s : tokenizeString<std::vector<std::string>>(queryAttribute.getStr(3), ";"))
+                        context.push_back(decodeContext(s));
+                return {{rowId, string_t{queryAttribute.getStr(2), context}}};
+            }
+            case AttrType::Bool:
+                return {{rowId, queryAttribute.getInt(2) != 0}};
+            case AttrType::Missing:
+                return {{rowId, missing_t()}};
+            case AttrType::Misc:
+                return {{rowId, misc_t()}};
+            case AttrType::Failed:
+                return {{rowId, failed_t()}};
+            default:
+                throw Error("unexpected type in evaluation cache");
+        }
+    }
+};
+
+static std::shared_ptr<AttrDb> makeAttrDb(const Hash & fingerprint)
+{
+    try {
+        return std::make_shared<AttrDb>(fingerprint);
+    } catch (SQLiteError &) {
+        ignoreException();
+        return nullptr;
+    }
+}
+
+EvalCache::EvalCache(
+    std::optional<std::reference_wrapper<const Hash>> useCache,
+    EvalState & state,
+    RootLoader rootLoader)
+    : db(useCache ? makeAttrDb(*useCache) : nullptr)
+    , state(state)
+    , rootLoader(rootLoader)
+{
+}
+
+Value * EvalCache::getRootValue()
+{
+    if (!value) {
+        debug("getting root value");
+        value = allocRootValue(rootLoader());
+    }
+    return *value;
+}
+
+std::shared_ptr<AttrCursor> EvalCache::getRoot()
+{
+    return std::make_shared<AttrCursor>(ref(shared_from_this()), std::nullopt);
+}
+
+AttrCursor::AttrCursor(
+    ref<EvalCache> root,
+    Parent parent,
+    Value * value,
+    std::optional<std::pair<AttrId, AttrValue>> && cachedValue)
+    : root(root), parent(parent), cachedValue(std::move(cachedValue))
+{
+    if (value)
+        _value = allocRootValue(value);
+}
+
+AttrKey AttrCursor::getKey()
+{
+    if (!parent)
+        return {0, root->state.sEpsilon};
+    if (!parent->first->cachedValue) {
+        parent->first->cachedValue = root->db->getAttr(
+            parent->first->getKey(), root->state.symbols);
+        assert(parent->first->cachedValue);
+    }
+    return {parent->first->cachedValue->first, parent->second};
+}
+
+Value & AttrCursor::getValue()
+{
+    if (!_value) {
+        if (parent) {
+            auto & vParent = parent->first->getValue();
+            root->state.forceAttrs(vParent);
+            auto attr = vParent.attrs->get(parent->second);
+            if (!attr)
+                throw Error("attribute '%s' is unexpectedly missing", getAttrPathStr());
+            _value = allocRootValue(attr->value);
+        } else
+            _value = allocRootValue(root->getRootValue());
+    }
+    return **_value;
+}
+
+std::vector<Symbol> AttrCursor::getAttrPath() const
+{
+    if (parent) {
+        auto attrPath = parent->first->getAttrPath();
+        attrPath.push_back(parent->second);
+        return attrPath;
+    } else
+        return {};
+}
+
+std::vector<Symbol> AttrCursor::getAttrPath(Symbol name) const
+{
+    auto attrPath = getAttrPath();
+    attrPath.push_back(name);
+    return attrPath;
+}
+
+std::string AttrCursor::getAttrPathStr() const
+{
+    return concatStringsSep(".", getAttrPath());
+}
+
+std::string AttrCursor::getAttrPathStr(Symbol name) const
+{
+    return concatStringsSep(".", getAttrPath(name));
+}
+
+Value & AttrCursor::forceValue()
+{
+    debug("evaluating uncached attribute %s", getAttrPathStr());
+
+    auto & v = getValue();
+
+    try {
+        root->state.forceValue(v);
+    } catch (EvalError &) {
+        debug("setting '%s' to failed", getAttrPathStr());
+        if (root->db)
+            cachedValue = {root->db->setFailed(getKey()), failed_t()};
+        throw;
+    }
+
+    if (root->db && (!cachedValue || std::get_if<placeholder_t>(&cachedValue->second))) {
+        if (v.type == tString)
+            cachedValue = {root->db->setString(getKey(), v.string.s, v.string.context), v.string.s};
+        else if (v.type == tPath)
+            cachedValue = {root->db->setString(getKey(), v.path), v.path};
+        else if (v.type == tBool)
+            cachedValue = {root->db->setBool(getKey(), v.boolean), v.boolean};
+        else if (v.type == tAttrs)
+            ; // FIXME: do something?
+        else
+            cachedValue = {root->db->setMisc(getKey()), misc_t()};
+    }
+
+    return v;
+}
+
+std::shared_ptr<AttrCursor> AttrCursor::maybeGetAttr(Symbol name)
+{
+    if (root->db) {
+        if (!cachedValue)
+            cachedValue = root->db->getAttr(getKey(), root->state.symbols);
+
+        if (cachedValue) {
+            if (auto attrs = std::get_if<std::vector<Symbol>>(&cachedValue->second)) {
+                for (auto & attr : *attrs)
+                    if (attr == name)
+                        return std::make_shared<AttrCursor>(root, std::make_pair(shared_from_this(), name));
+                return nullptr;
+            } else if (std::get_if<placeholder_t>(&cachedValue->second)) {
+                auto attr = root->db->getAttr({cachedValue->first, name}, root->state.symbols);
+                if (attr) {
+                    if (std::get_if<missing_t>(&attr->second))
+                        return nullptr;
+                    else if (std::get_if<failed_t>(&attr->second))
+                        throw EvalError("cached failure of attribute '%s'", getAttrPathStr(name));
+                    else
+                        return std::make_shared<AttrCursor>(root,
+                            std::make_pair(shared_from_this(), name), nullptr, std::move(attr));
+                }
+                // Incomplete attrset, so need to fall thru and
+                // evaluate to see whether 'name' exists
+            } else
+                return nullptr;
+                //throw TypeError("'%s' is not an attribute set", getAttrPathStr());
+        }
+    }
+
+    auto & v = forceValue();
+
+    if (v.type != tAttrs)
+        return nullptr;
+        //throw TypeError("'%s' is not an attribute set", getAttrPathStr());
+
+    auto attr = v.attrs->get(name);
+
+    if (!attr) {
+        if (root->db) {
+            if (!cachedValue)
+                cachedValue = {root->db->setPlaceholder(getKey()), placeholder_t()};
+            root->db->setMissing({cachedValue->first, name});
+        }
+        return nullptr;
+    }
+
+    std::optional<std::pair<AttrId, AttrValue>> cachedValue2;
+    if (root->db) {
+        if (!cachedValue)
+            cachedValue = {root->db->setPlaceholder(getKey()), placeholder_t()};
+        cachedValue2 = {root->db->setPlaceholder({cachedValue->first, name}), placeholder_t()};
+    }
+
+    return std::make_shared<AttrCursor>(
+        root, std::make_pair(shared_from_this(), name), attr->value, std::move(cachedValue2));
+}
+
+std::shared_ptr<AttrCursor> AttrCursor::maybeGetAttr(std::string_view name)
+{
+    return maybeGetAttr(root->state.symbols.create(name));
+}
+
+std::shared_ptr<AttrCursor> AttrCursor::getAttr(Symbol name)
+{
+    auto p = maybeGetAttr(name);
+    if (!p)
+        throw Error("attribute '%s' does not exist", getAttrPathStr(name));
+    return p;
+}
+
+std::shared_ptr<AttrCursor> AttrCursor::getAttr(std::string_view name)
+{
+    return getAttr(root->state.symbols.create(name));
+}
+
+std::shared_ptr<AttrCursor> AttrCursor::findAlongAttrPath(const std::vector<Symbol> & attrPath)
+{
+    auto res = shared_from_this();
+    for (auto & attr : attrPath) {
+        res = res->maybeGetAttr(attr);
+        if (!res) return {};
+    }
+    return res;
+}
+
+std::string AttrCursor::getString()
+{
+    if (root->db) {
+        if (!cachedValue)
+            cachedValue = root->db->getAttr(getKey(), root->state.symbols);
+        if (cachedValue && !std::get_if<placeholder_t>(&cachedValue->second)) {
+            if (auto s = std::get_if<string_t>(&cachedValue->second)) {
+                debug("using cached string attribute '%s'", getAttrPathStr());
+                return s->first;
+            } else
+                throw TypeError("'%s' is not a string", getAttrPathStr());
+        }
+    }
+
+    auto & v = forceValue();
+
+    if (v.type != tString && v.type != tPath)
+        throw TypeError("'%s' is not a string but %s", getAttrPathStr(), showType(v.type));
+
+    return v.type == tString ? v.string.s : v.path;
+}
+
+string_t AttrCursor::getStringWithContext()
+{
+    if (root->db) {
+        if (!cachedValue)
+            cachedValue = root->db->getAttr(getKey(), root->state.symbols);
+        if (cachedValue && !std::get_if<placeholder_t>(&cachedValue->second)) {
+            if (auto s = std::get_if<string_t>(&cachedValue->second)) {
+                debug("using cached string attribute '%s'", getAttrPathStr());
+                return *s;
+            } else
+                throw TypeError("'%s' is not a string", getAttrPathStr());
+        }
+    }
+
+    auto & v = forceValue();
+
+    if (v.type == tString)
+        return {v.string.s, v.getContext()};
+    else if (v.type == tPath)
+        return {v.path, {}};
+    else
+        throw TypeError("'%s' is not a string but %s", getAttrPathStr(), showType(v.type));
+}
+
+bool AttrCursor::getBool()
+{
+    if (root->db) {
+        if (!cachedValue)
+            cachedValue = root->db->getAttr(getKey(), root->state.symbols);
+        if (cachedValue && !std::get_if<placeholder_t>(&cachedValue->second)) {
+            if (auto b = std::get_if<bool>(&cachedValue->second)) {
+                debug("using cached Boolean attribute '%s'", getAttrPathStr());
+                return *b;
+            } else
+                throw TypeError("'%s' is not a Boolean", getAttrPathStr());
+        }
+    }
+
+    auto & v = forceValue();
+
+    if (v.type != tBool)
+        throw TypeError("'%s' is not a Boolean", getAttrPathStr());
+
+    return v.boolean;
+}
+
+std::vector<Symbol> AttrCursor::getAttrs()
+{
+    if (root->db) {
+        if (!cachedValue)
+            cachedValue = root->db->getAttr(getKey(), root->state.symbols);
+        if (cachedValue && !std::get_if<placeholder_t>(&cachedValue->second)) {
+            if (auto attrs = std::get_if<std::vector<Symbol>>(&cachedValue->second)) {
+                debug("using cached attrset attribute '%s'", getAttrPathStr());
+                return *attrs;
+            } else
+                throw TypeError("'%s' is not an attribute set", getAttrPathStr());
+        }
+    }
+
+    auto & v = forceValue();
+
+    if (v.type != tAttrs)
+        throw TypeError("'%s' is not an attribute set", getAttrPathStr());
+
+    std::vector<Symbol> attrs;
+    for (auto & attr : *getValue().attrs)
+        attrs.push_back(attr.name);
+    std::sort(attrs.begin(), attrs.end(), [](const Symbol & a, const Symbol & b) {
+        return (const string &) a < (const string &) b;
+    });
+
+    if (root->db)
+        cachedValue = {root->db->setAttrs(getKey(), attrs), attrs};
+
+    return attrs;
+}
+
+bool AttrCursor::isDerivation()
+{
+    auto aType = maybeGetAttr("type");
+    return aType && aType->getString() == "derivation";
+}
+
+StorePath AttrCursor::forceDerivation()
+{
+    auto aDrvPath = getAttr(root->state.sDrvPath);
+    auto drvPath = root->state.store->parseStorePath(aDrvPath->getString());
+    if (!root->state.store->isValidPath(drvPath) && !settings.readOnlyMode) {
+        /* The eval cache contains 'drvPath', but the actual path has
+           been garbage-collected. So force it to be regenerated. */
+        aDrvPath->forceValue();
+        if (!root->state.store->isValidPath(drvPath))
+            throw Error("don't know how to recreate store derivation '%s'!",
+                root->state.store->printStorePath(drvPath));
+    }
+    return drvPath;
+}
+
+}
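To illustrate the schema (the rows below are hypothetical): a cached lookup of `packages.hello` chains rows through `rowid`/`parent`, starting from the root key `{0, sEpsilon}`:

    rowid | parent | name       | type            | value
    1     | 0      | ""         | 0 (Placeholder) |
    2     | 1      | "packages" | 0 (Placeholder) |
    3     | 2      | "hello"    | 2 (String)      | /nix/store/…-hello

Because inserts use `insert or replace` on the `(parent, name)` primary key, a placeholder row is upgraded in place (e.g. to `String` or `FullAttrs`) once the attribute is actually forced.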
121 src/libexpr/eval-cache.hh (new file)

@@ -0,0 +1,121 @@
+#pragma once
+
+#include "sync.hh"
+#include "hash.hh"
+#include "eval.hh"
+
+#include <functional>
+#include <variant>
+
+namespace nix::eval_cache {
+
+class AttrDb;
+class AttrCursor;
+
+class EvalCache : public std::enable_shared_from_this<EvalCache>
+{
+    friend class AttrCursor;
+
+    std::shared_ptr<AttrDb> db;
+    EvalState & state;
+    typedef std::function<Value *()> RootLoader;
+    RootLoader rootLoader;
+    RootValue value;
+
+    Value * getRootValue();
+
+public:
+
+    EvalCache(
+        std::optional<std::reference_wrapper<const Hash>> useCache,
+        EvalState & state,
+        RootLoader rootLoader);
+
+    std::shared_ptr<AttrCursor> getRoot();
+};
+
+enum AttrType {
+    Placeholder = 0,
+    FullAttrs = 1,
+    String = 2,
+    Missing = 3,
+    Misc = 4,
+    Failed = 5,
+    Bool = 6,
+};
+
+struct placeholder_t {};
+struct missing_t {};
+struct misc_t {};
+struct failed_t {};
+typedef uint64_t AttrId;
+typedef std::pair<AttrId, Symbol> AttrKey;
+typedef std::pair<std::string, std::vector<std::pair<Path, std::string>>> string_t;
+
+typedef std::variant<
+    std::vector<Symbol>,
+    string_t,
+    placeholder_t,
+    missing_t,
+    misc_t,
+    failed_t,
+    bool
+> AttrValue;
+
+class AttrCursor : public std::enable_shared_from_this<AttrCursor>
+{
+    friend class EvalCache;
+
+    ref<EvalCache> root;
+    typedef std::optional<std::pair<std::shared_ptr<AttrCursor>, Symbol>> Parent;
+    Parent parent;
+    RootValue _value;
+    std::optional<std::pair<AttrId, AttrValue>> cachedValue;
+
+    AttrKey getKey();
+
+    Value & getValue();
+
+public:
+
+    AttrCursor(
+        ref<EvalCache> root,
+        Parent parent,
+        Value * value = nullptr,
+        std::optional<std::pair<AttrId, AttrValue>> && cachedValue = {});
+
+    std::vector<Symbol> getAttrPath() const;
+
+    std::vector<Symbol> getAttrPath(Symbol name) const;
+
+    std::string getAttrPathStr() const;
+
+    std::string getAttrPathStr(Symbol name) const;
+
+    std::shared_ptr<AttrCursor> maybeGetAttr(Symbol name);
+
+    std::shared_ptr<AttrCursor> maybeGetAttr(std::string_view name);
+
+    std::shared_ptr<AttrCursor> getAttr(Symbol name);
+
+    std::shared_ptr<AttrCursor> getAttr(std::string_view name);
+
+    std::shared_ptr<AttrCursor> findAlongAttrPath(const std::vector<Symbol> & attrPath);
+
+    std::string getString();
+
+    string_t getStringWithContext();
+
+    bool getBool();
+
+    std::vector<Symbol> getAttrs();
+
+    bool isDerivation();
+
+    Value & forceValue();
+
+    /* Force creation of the .drv file in the Nix store. */
+    StorePath forceDerivation();
+};
+
+}
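A hedged sketch of how a caller might drive this API. The names `state`, `fingerprint`, and `vFlakeOutputs` are assumptions supplied by the caller, not part of the diff; `EvalCache` uses `enable_shared_from_this`, so it has to live in a `shared_ptr`:

```cpp
#include "eval-cache.hh"

using namespace nix;
using namespace nix::eval_cache;

// Sketch only: `state` is an existing EvalState, `fingerprint` a Hash
// identifying the evaluation root, and `vFlakeOutputs` a hypothetical
// pre-built root value used only on a cache miss.
std::shared_ptr<AttrCursor> lookupHello(
    EvalState & state, const Hash & fingerprint, Value * vFlakeOutputs)
{
    auto cache = std::make_shared<EvalCache>(
        std::cref(fingerprint),
        state,
        [=]() { return vFlakeOutputs; }); // invoked lazily when SQLite can't answer

    // Walk 'packages.hello'; returns null without forcing evaluation
    // if the cache already knows the attribute is missing.
    return cache->getRoot()->findAlongAttrPath(
        { state.symbols.create("packages"),
          state.symbols.create("hello") });
}
```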
src/libexpr/eval.cc

@@ -199,6 +199,18 @@ string showType(const Value & v)
     }
 }

+
+bool Value::isTrivial() const
+{
+    return
+        type != tApp
+        && type != tPrimOpApp
+        && (type != tThunk
+            || (dynamic_cast<ExprAttrs *>(thunk.expr)
+                && ((ExprAttrs *) thunk.expr)->dynamicAttrs.empty())
+            || dynamic_cast<ExprLambda *>(thunk.expr));
+}
+
 #if HAVE_BOEHMGC
 /* Called when the Boehm GC runs out of memory. */
 static void * oomHandler(size_t requested)
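In other words, a value counts as "trivial" here when forcing it cannot run arbitrary code: it is already in weak head normal form, or it is a thunk for a literal attribute set without dynamic attributes, or for a lambda. The flake code added below relies on this to read `flake.nix` metadata without evaluating arbitrary expressions.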
@@ -337,6 +349,9 @@ EvalState::EvalState(const Strings & _searchPath, ref<Store> store)
     , sOutputHashAlgo(symbols.create("outputHashAlgo"))
     , sOutputHashMode(symbols.create("outputHashMode"))
     , sRecurseForDerivations(symbols.create("recurseForDerivations"))
+    , sDescription(symbols.create("description"))
+    , sSelf(symbols.create("self"))
+    , sEpsilon(symbols.create(""))
     , repair(NoRepair)
     , store(store)
     , baseEnv(allocEnv(128))

@@ -782,7 +797,7 @@ Value * ExprPath::maybeThunk(EvalState & state, Env & env)
 }

-void EvalState::evalFile(const Path & path_, Value & v)
+void EvalState::evalFile(const Path & path_, Value & v, bool mustBeTrivial)
 {
     auto path = checkSourcePath(path_);

@@ -811,6 +826,11 @@ void EvalState::evalFile(const Path & path_, Value & v)
     fileParseCache[path2] = e;

     try {
+        // Enforce that 'flake.nix' is a direct attrset, not a
+        // computation.
+        if (mustBeTrivial &&
+            !(dynamic_cast<ExprAttrs *>(e)))
+            throw Error("file '%s' must be an attribute set", path);
         eval(e, v);
     } catch (Error & e) {
         addErrorTrace(e, "while evaluating the file '%1%':", path2);

@@ -1586,6 +1606,18 @@ string EvalState::forceString(Value & v, const Pos & pos)
     }
 }

+
+/* Decode a context string ‘!<name>!<path>’ into a pair <path,
+   name>. */
+std::pair<string, string> decodeContext(std::string_view s)
+{
+    if (s.at(0) == '!') {
+        size_t index = s.find("!", 1);
+        return {std::string(s.substr(index + 1)), std::string(s.substr(1, index - 1))};
+    } else
+        return {s.at(0) == '/' ? std::string(s) : std::string(s.substr(1)), ""};
+}
+
 void copyContext(const Value & v, PathSet & context)
 {
     if (v.string.context)
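Worked example: `decodeContext("!out!/nix/store/<hash>-foo.drv")` finds the second `!` and returns the pair `("/nix/store/<hash>-foo.drv", "out")`; a context string that is a plain store path (leading `/`) decodes to itself with an empty name.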
@@ -1594,6 +1626,17 @@ void copyContext(const Value & v, PathSet & context)
 }

+
+std::vector<std::pair<Path, std::string>> Value::getContext()
+{
+    std::vector<std::pair<Path, std::string>> res;
+    assert(type == tString);
+    if (string.context)
+        for (const char * * p = string.context; *p; ++p)
+            res.push_back(decodeContext(*p));
+    return res;
+}
+
 string EvalState::forceString(Value & v, PathSet & context, const Pos & pos)
 {
     string s = forceString(v, pos);
src/libexpr/eval.hh

@@ -4,13 +4,13 @@
 #include "value.hh"
 #include "nixexpr.hh"
 #include "symbol-table.hh"
-#include "hash.hh"
 #include "config.hh"

 #include <regex>
 #include <map>
 #include <optional>
 #include <unordered_map>
+#include <mutex>

 namespace nix {

@@ -75,7 +75,8 @@ public:
         sFile, sLine, sColumn, sFunctor, sToString,
         sRight, sWrong, sStructuredAttrs, sBuilder, sArgs,
         sOutputHash, sOutputHashAlgo, sOutputHashMode,
-        sRecurseForDerivations;
+        sRecurseForDerivations,
+        sDescription, sSelf, sEpsilon;
     Symbol sDerivationNix;

     /* If set, force copying files to the Nix store even if they

@@ -90,6 +91,7 @@ public:

     const ref<Store> store;

+
 private:

     SrcToStore srcToStore;

@@ -152,8 +154,9 @@ public:
     Expr * parseStdin();

     /* Evaluate an expression read from the given file to normal
-       form. */
-    void evalFile(const Path & path, Value & v);
+       form. Optionally enforce that the top-level expression is
+       trivial (i.e. doesn't require arbitrary computation). */
+    void evalFile(const Path & path, Value & v, bool mustBeTrivial = false);

     void resetFileCache();

@@ -330,7 +333,7 @@ string showType(const Value & v);

 /* Decode a context string ‘!<name>!<path>’ into a pair <path,
    name>. */
-std::pair<string, string> decodeContext(const string & s);
+std::pair<string, string> decodeContext(std::string_view s);

 /* If `path' refers to a directory, then append "/default.nix". */
 Path resolveExprPath(Path path);
56 src/libexpr/flake/call-flake.nix (new file)

@@ -0,0 +1,56 @@
+lockFileStr: rootSrc: rootSubdir:
+
+let
+
+  lockFile = builtins.fromJSON lockFileStr;
+
+  allNodes =
+    builtins.mapAttrs
+      (key: node:
+        let
+
+          sourceInfo =
+            if key == lockFile.root
+            then rootSrc
+            else fetchTree (node.info or {} // removeAttrs node.locked ["dir"]);
+
+          subdir = if key == lockFile.root then rootSubdir else node.locked.dir or "";
+
+          flake = import (sourceInfo + (if subdir != "" then "/" else "") + subdir + "/flake.nix");
+
+          inputs = builtins.mapAttrs
+            (inputName: inputSpec: allNodes.${resolveInput inputSpec})
+            (node.inputs or {});
+
+          # Resolve an input spec into a node name. An input spec is
+          # either a node name, or a 'follows' path from the root
+          # node.
+          resolveInput = inputSpec:
+            if builtins.isList inputSpec
+            then getInputByPath lockFile.root inputSpec
+            else inputSpec;
+
+          # Follow an input path (e.g. ["dwarffs" "nixpkgs"]) from the
+          # root node, returning the final node.
+          getInputByPath = nodeName: path:
+            if path == []
+            then nodeName
+            else
+              getInputByPath
+                # Since this could be a 'follows' input, call resolveInput.
+                (resolveInput lockFile.nodes.${nodeName}.inputs.${builtins.head path})
+                (builtins.tail path);
+
+          outputs = flake.outputs (inputs // { self = result; });
+
+          result = outputs // sourceInfo // { inherit inputs; inherit outputs; inherit sourceInfo; };
+        in
+          if node.flake or true then
+            assert builtins.isFunction flake.outputs;
+            result
+          else
+            sourceInfo
+      )
+      lockFile.nodes;
+
+in allNodes.${lockFile.root}
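Worked example of the resolution helpers above: if the lock file records `dwarffs`'s `nixpkgs` input as the 'follows' path `["nixpkgs"]`, `resolveInput` sees a list and hands it to `getInputByPath`, which walks it from the root node: the head element `"nixpkgs"` resolves to the root's own `nixpkgs` node, so `dwarffs` and the root flake end up sharing a single locked `nixpkgs`.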
609
src/libexpr/flake/flake.cc
Normal file
609
src/libexpr/flake/flake.cc
Normal file
|
@ -0,0 +1,609 @@
|
||||||
|
#include "flake.hh"
|
||||||
|
#include "lockfile.hh"
|
||||||
|
#include "primops.hh"
|
||||||
|
#include "eval-inline.hh"
|
||||||
|
#include "store-api.hh"
|
||||||
|
#include "fetchers.hh"
|
||||||
|
#include "finally.hh"
|
||||||
|
|
||||||
|
namespace nix {
|
||||||
|
|
||||||
|
using namespace flake;
|
||||||
|
|
||||||
|
namespace flake {
|
||||||
|
|
||||||
|
typedef std::pair<Tree, FlakeRef> FetchedFlake;
|
||||||
|
typedef std::vector<std::pair<FlakeRef, FetchedFlake>> FlakeCache;
|
||||||
|
|
||||||
|
static std::optional<FetchedFlake> lookupInFlakeCache(
|
||||||
|
const FlakeCache & flakeCache,
|
||||||
|
const FlakeRef & flakeRef)
|
||||||
|
{
|
||||||
|
// FIXME: inefficient.
|
||||||
|
for (auto & i : flakeCache) {
|
||||||
|
if (flakeRef == i.first) {
|
||||||
|
debug("mapping '%s' to previously seen input '%s' -> '%s",
|
||||||
|
flakeRef, i.first, i.second.second);
|
||||||
|
return i.second;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return std::nullopt;
|
||||||
|
}
|
||||||
|
|
||||||
|
static std::tuple<fetchers::Tree, FlakeRef, FlakeRef> fetchOrSubstituteTree(
|
||||||
|
EvalState & state,
|
||||||
|
const FlakeRef & originalRef,
|
||||||
|
bool allowLookup,
|
||||||
|
FlakeCache & flakeCache)
|
||||||
|
{
|
||||||
|
auto fetched = lookupInFlakeCache(flakeCache, originalRef);
|
||||||
|
FlakeRef resolvedRef = originalRef;
|
||||||
|
|
||||||
|
if (!fetched) {
|
||||||
|
if (originalRef.input.isDirect()) {
|
||||||
|
fetched.emplace(originalRef.fetchTree(state.store));
|
||||||
|
} else {
|
||||||
|
if (allowLookup) {
|
||||||
|
resolvedRef = originalRef.resolve(state.store);
|
||||||
|
auto fetchedResolved = lookupInFlakeCache(flakeCache, originalRef);
|
||||||
|
if (!fetchedResolved) fetchedResolved.emplace(resolvedRef.fetchTree(state.store));
|
||||||
|
flakeCache.push_back({resolvedRef, fetchedResolved.value()});
|
||||||
|
fetched.emplace(fetchedResolved.value());
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
throw Error("'%s' is an indirect flake reference, but registry lookups are not allowed", originalRef);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
flakeCache.push_back({originalRef, fetched.value()});
|
||||||
|
}
|
||||||
|
|
||||||
|
auto [tree, lockedRef] = fetched.value();
|
||||||
|
|
||||||
|
debug("got tree '%s' from '%s'",
|
||||||
|
state.store->printStorePath(tree.storePath), lockedRef);
|
||||||
|
|
||||||
|
if (state.allowedPaths)
|
||||||
|
state.allowedPaths->insert(tree.actualPath);
|
||||||
|
|
||||||
|
assert(!originalRef.input.getNarHash() || tree.storePath == originalRef.input.computeStorePath(*state.store));
|
||||||
|
|
||||||
|
return {std::move(tree), resolvedRef, lockedRef};
|
||||||
|
}
|
||||||
|
|
||||||
|
static void expectType(EvalState & state, ValueType type,
|
||||||
|
Value & value, const Pos & pos)
|
||||||
|
{
|
||||||
|
if (value.type == tThunk && value.isTrivial())
|
||||||
|
state.forceValue(value, pos);
|
||||||
|
if (value.type != type)
|
||||||
|
throw Error("expected %s but got %s at %s",
|
||||||
|
showType(type), showType(value.type), pos);
|
||||||
|
}
|
||||||
|
|
||||||
|
static std::map<FlakeId, FlakeInput> parseFlakeInputs(
|
||||||
|
EvalState & state, Value * value, const Pos & pos);
|
||||||
|
|
||||||
|
static FlakeInput parseFlakeInput(EvalState & state,
|
||||||
|
const std::string & inputName, Value * value, const Pos & pos)
|
||||||
|
{
|
||||||
|
expectType(state, tAttrs, *value, pos);
|
||||||
|
|
||||||
|
FlakeInput input;
|
||||||
|
|
||||||
|
auto sInputs = state.symbols.create("inputs");
|
||||||
|
auto sUrl = state.symbols.create("url");
|
||||||
|
auto sFlake = state.symbols.create("flake");
|
||||||
|
auto sFollows = state.symbols.create("follows");
|
||||||
|
|
||||||
|
fetchers::Attrs attrs;
|
||||||
|
std::optional<std::string> url;
|
||||||
|
|
||||||
|
for (nix::Attr attr : *(value->attrs)) {
|
||||||
|
try {
|
||||||
|
if (attr.name == sUrl) {
|
||||||
|
expectType(state, tString, *attr.value, *attr.pos);
|
||||||
|
url = attr.value->string.s;
|
||||||
|
attrs.emplace("url", *url);
|
||||||
|
} else if (attr.name == sFlake) {
|
||||||
|
expectType(state, tBool, *attr.value, *attr.pos);
|
||||||
|
input.isFlake = attr.value->boolean;
|
||||||
|
} else if (attr.name == sInputs) {
|
||||||
|
input.overrides = parseFlakeInputs(state, attr.value, *attr.pos);
|
||||||
|
} else if (attr.name == sFollows) {
|
||||||
|
expectType(state, tString, *attr.value, *attr.pos);
|
||||||
|
input.follows = parseInputPath(attr.value->string.s);
|
||||||
|
} else {
|
||||||
|
state.forceValue(*attr.value);
|
||||||
|
if (attr.value->type == tString)
|
||||||
|
attrs.emplace(attr.name, attr.value->string.s);
|
||||||
|
else
|
||||||
|
throw TypeError("flake input attribute '%s' is %s while a string is expected",
|
||||||
|
attr.name, showType(*attr.value));
|
||||||
|
}
|
||||||
|
} catch (Error & e) {
|
||||||
|
e.addTrace(*attr.pos, hintfmt("in flake attribute '%s'", attr.name));
|
||||||
|
throw;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (attrs.count("type"))
|
||||||
|
try {
|
||||||
|
input.ref = FlakeRef::fromAttrs(attrs);
|
||||||
|
} catch (Error & e) {
|
||||||
|
e.addTrace(pos, hintfmt("in flake input"));
|
||||||
|
throw;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
attrs.erase("url");
|
||||||
|
if (!attrs.empty())
|
||||||
|
throw Error("unexpected flake input attribute '%s', at %s", attrs.begin()->first, pos);
|
||||||
|
if (url)
|
||||||
|
input.ref = parseFlakeRef(*url, {}, true);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!input.follows && !input.ref)
|
||||||
|
input.ref = FlakeRef::fromAttrs({{"type", "indirect"}, {"id", inputName}});
|
||||||
|
|
||||||
|
return input;
|
||||||
|
}
|
||||||
|
|
||||||
|
static std::map<FlakeId, FlakeInput> parseFlakeInputs(
|
||||||
|
EvalState & state, Value * value, const Pos & pos)
|
||||||
|
{
|
||||||
|
std::map<FlakeId, FlakeInput> inputs;
|
||||||
|
|
||||||
|
expectType(state, tAttrs, *value, pos);
|
||||||
|
|
||||||
|
for (nix::Attr & inputAttr : *(*value).attrs) {
|
||||||
|
inputs.emplace(inputAttr.name,
|
||||||
|
parseFlakeInput(state,
|
||||||
|
inputAttr.name,
|
||||||
|
inputAttr.value,
|
||||||
|
*inputAttr.pos));
|
||||||
|
}
|
||||||
|
|
||||||
|
return inputs;
|
||||||
|
}
|
||||||
|
|
||||||
|
static Flake getFlake(
|
||||||
|
EvalState & state,
|
||||||
|
const FlakeRef & originalRef,
|
||||||
|
bool allowLookup,
|
||||||
|
FlakeCache & flakeCache)
|
||||||
|
{
|
||||||
|
auto [sourceInfo, resolvedRef, lockedRef] = fetchOrSubstituteTree(
|
||||||
|
state, originalRef, allowLookup, flakeCache);
|
||||||
|
|
||||||
|
// Guard against symlink attacks.
|
||||||
|
auto flakeFile = canonPath(sourceInfo.actualPath + "/" + lockedRef.subdir + "/flake.nix");
|
||||||
|
if (!isInDir(flakeFile, sourceInfo.actualPath))
|
||||||
|
throw Error("'flake.nix' file of flake '%s' escapes from '%s'",
|
||||||
|
lockedRef, state.store->printStorePath(sourceInfo.storePath));
|
||||||
|
|
||||||
|
Flake flake {
|
||||||
|
.originalRef = originalRef,
|
||||||
|
.resolvedRef = resolvedRef,
|
||||||
|
.lockedRef = lockedRef,
|
||||||
|
.sourceInfo = std::make_shared<fetchers::Tree>(std::move(sourceInfo))
|
||||||
|
};
|
||||||
|
|
||||||
|
if (!pathExists(flakeFile))
|
||||||
|
throw Error("source tree referenced by '%s' does not contain a '%s/flake.nix' file", lockedRef, lockedRef.subdir);
|
||||||
|
|
||||||
|
Value vInfo;
|
||||||
|
state.evalFile(flakeFile, vInfo, true); // FIXME: symlink attack
|
||||||
|
|
||||||
|
expectType(state, tAttrs, vInfo, Pos(foFile, state.symbols.create(flakeFile), 0, 0));
|
||||||
|
|
||||||
|
auto sEdition = state.symbols.create("edition"); // FIXME: remove soon
|
||||||
|
|
||||||
|
if (vInfo.attrs->get(sEdition))
|
||||||
|
warn("flake '%s' has deprecated attribute 'edition'", lockedRef);
|
||||||
|
|
||||||
|
if (auto description = vInfo.attrs->get(state.sDescription)) {
|
||||||
|
expectType(state, tString, *description->value, *description->pos);
|
||||||
|
flake.description = description->value->string.s;
|
||||||
|
}
|
||||||
|
|
||||||
|
auto sInputs = state.symbols.create("inputs");
|
||||||
|
|
||||||
|
if (auto inputs = vInfo.attrs->get(sInputs))
|
||||||
|
flake.inputs = parseFlakeInputs(state, inputs->value, *inputs->pos);
|
||||||
|
|
||||||
|
auto sOutputs = state.symbols.create("outputs");
|
||||||
|
|
||||||
|
if (auto outputs = vInfo.attrs->get(sOutputs)) {
|
||||||
|
expectType(state, tLambda, *outputs->value, *outputs->pos);
|
||||||
|
flake.vOutputs = allocRootValue(outputs->value);
|
||||||
|
|
||||||
|
if ((*flake.vOutputs)->lambda.fun->matchAttrs) {
|
||||||
|
for (auto & formal : (*flake.vOutputs)->lambda.fun->formals->formals) {
|
||||||
|
if (formal.name != state.sSelf)
|
||||||
|
flake.inputs.emplace(formal.name, FlakeInput {
|
||||||
|
.ref = parseFlakeRef(formal.name)
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
} else
|
||||||
|
throw Error("flake '%s' lacks attribute 'outputs'", lockedRef);
|
||||||
|
|
||||||
|
for (auto & attr : *vInfo.attrs) {
|
||||||
|
if (attr.name != sEdition &&
|
||||||
|
attr.name != state.sDescription &&
|
||||||
|
attr.name != sInputs &&
|
||||||
|
attr.name != sOutputs)
|
||||||
|
throw Error("flake '%s' has an unsupported attribute '%s', at %s",
|
||||||
|
lockedRef, attr.name, *attr.pos);
|
||||||
|
}
|
||||||
|
|
||||||
|
return flake;
|
||||||
|
}
|
||||||
|
|
||||||
|
Flake getFlake(EvalState & state, const FlakeRef & originalRef, bool allowLookup)
|
||||||
|
{
|
||||||
|
FlakeCache flakeCache;
|
||||||
|
return getFlake(state, originalRef, allowLookup, flakeCache);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Compute an in-memory lock file for the specified top-level flake,
|
||||||
|
and optionally write it to file, it the flake is writable. */
|
||||||
|
LockedFlake lockFlake(
|
||||||
|
EvalState & state,
|
||||||
|
const FlakeRef & topRef,
|
||||||
|
const LockFlags & lockFlags)
|
||||||
|
{
|
||||||
|
settings.requireExperimentalFeature("flakes");
|
||||||
|
|
||||||
|
FlakeCache flakeCache;
|
||||||
|
|
||||||
|
auto flake = getFlake(state, topRef, lockFlags.useRegistries, flakeCache);
|
||||||
|
|
||||||
|
// FIXME: symlink attack
|
||||||
|
auto oldLockFile = LockFile::read(
|
||||||
|
flake.sourceInfo->actualPath + "/" + flake.lockedRef.subdir + "/flake.lock");
|
||||||
|
|
||||||
|
debug("old lock file: %s", oldLockFile);
|
||||||
|
|
||||||
|
// FIXME: check whether all overrides are used.
|
||||||
|
std::map<InputPath, FlakeInput> overrides;
|
||||||
|
std::set<InputPath> overridesUsed, updatesUsed;
|
||||||
|
|
||||||
|
for (auto & i : lockFlags.inputOverrides)
|
||||||
|
overrides.insert_or_assign(i.first, FlakeInput { .ref = i.second });
|
||||||
|
|
||||||
|
LockFile newLockFile;
|
||||||
|
|
||||||
|
std::vector<FlakeRef> parents;
|
||||||
|
|
||||||
|
std::function<void(
|
||||||
|
const FlakeInputs & flakeInputs,
|
||||||
|
std::shared_ptr<Node> node,
|
||||||
|
const InputPath & inputPathPrefix,
|
||||||
|
std::shared_ptr<const Node> oldNode)>
|
||||||
|
computeLocks;
|
||||||
|
|
||||||
|
computeLocks = [&](
|
||||||
|
const FlakeInputs & flakeInputs,
|
||||||
|
std::shared_ptr<Node> node,
|
||||||
|
const InputPath & inputPathPrefix,
|
||||||
|
std::shared_ptr<const Node> oldNode)
|
||||||
|
{
|
||||||
|
debug("computing lock file node '%s'", printInputPath(inputPathPrefix));
|
||||||
|
|
||||||
|
/* Get the overrides (i.e. attributes of the form
|
||||||
|
'inputs.nixops.inputs.nixpkgs.url = ...'). */
|
||||||
|
// FIXME: check this
|
||||||
|
for (auto & [id, input] : flake.inputs) {
|
||||||
|
for (auto & [idOverride, inputOverride] : input.overrides) {
|
||||||
|
auto inputPath(inputPathPrefix);
|
||||||
|
inputPath.push_back(id);
|
||||||
|
inputPath.push_back(idOverride);
|
||||||
|
overrides.insert_or_assign(inputPath, inputOverride);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Go over the flake inputs, resolve/fetch them if
|
||||||
|
necessary (i.e. if they're new or the flakeref changed
|
||||||
|
from what's in the lock file). */
|
||||||
|
for (auto & [id, input2] : flakeInputs) {
|
||||||
|
auto inputPath(inputPathPrefix);
|
||||||
|
inputPath.push_back(id);
|
||||||
|
auto inputPathS = printInputPath(inputPath);
|
||||||
|
debug("computing input '%s'", inputPathS);
|
||||||
|
|
||||||
|
/* Do we have an override for this input from one of the
|
||||||
|
ancestors? */
|
||||||
|
auto i = overrides.find(inputPath);
|
||||||
|
bool hasOverride = i != overrides.end();
|
||||||
|
if (hasOverride) overridesUsed.insert(inputPath);
|
||||||
|
auto & input = hasOverride ? i->second : input2;
|
||||||
|
|
||||||
|
/* Resolve 'follows' later (since it may refer to an input
|
||||||
|
path we haven't processed yet. */
|
||||||
|
if (input.follows) {
|
||||||
|
InputPath target;
|
||||||
|
if (hasOverride || input.absolute)
|
||||||
|
/* 'follows' from an override is relative to the
|
||||||
|
root of the graph. */
|
||||||
|
target = *input.follows;
|
||||||
|
else {
|
||||||
|
/* Otherwise, it's relative to the current flake. */
|
||||||
|
target = inputPathPrefix;
|
||||||
|
for (auto & i : *input.follows) target.push_back(i);
|
||||||
|
}
|
||||||
|
debug("input '%s' follows '%s'", inputPathS, printInputPath(target));
|
||||||
|
node->inputs.insert_or_assign(id, target);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
assert(input.ref);
|
||||||
|
|
||||||
|
/* Do we have an entry in the existing lock file? And we
|
||||||
|
don't have a --update-input flag for this input? */
|
||||||
|
std::shared_ptr<LockedNode> oldLock;
|
||||||
|
|
||||||
|
updatesUsed.insert(inputPath);
|
||||||
|
|
||||||
|
if (oldNode && !lockFlags.inputUpdates.count(inputPath))
|
||||||
|
if (auto oldLock2 = get(oldNode->inputs, id))
|
||||||
|
if (auto oldLock3 = std::get_if<0>(&*oldLock2))
|
||||||
|
oldLock = *oldLock3;
|
||||||
|
|
||||||
|
if (oldLock
|
||||||
|
&& oldLock->originalRef == *input.ref
|
||||||
|
&& !hasOverride)
|
||||||
|
{
|
||||||
|
debug("keeping existing input '%s'", inputPathS);
|
||||||
|
|
||||||
|
/* Copy the input from the old lock since its flakeref
|
||||||
|
didn't change and there is no override from a
|
||||||
|
higher level flake. */
|
||||||
|
auto childNode = std::make_shared<LockedNode>(
|
||||||
|
oldLock->lockedRef, oldLock->originalRef, oldLock->isFlake);
|
||||||
|
|
||||||
|
node->inputs.insert_or_assign(id, childNode);
|
||||||
|
|
||||||
|
/* If we have an --update-input flag for an input
|
||||||
|
of this input, then we must fetch the flake to
|
||||||
|
to update it. */
|
||||||
|
auto lb = lockFlags.inputUpdates.lower_bound(inputPath);
|
||||||
|
|
||||||
|
auto hasChildUpdate =
|
||||||
|
lb != lockFlags.inputUpdates.end()
|
||||||
|
&& lb->size() > inputPath.size()
|
||||||
|
&& std::equal(inputPath.begin(), inputPath.end(), lb->begin());
|
||||||
|
|
||||||
|
if (hasChildUpdate) {
|
||||||
|
auto inputFlake = getFlake(
|
||||||
|
state, oldLock->lockedRef, false, flakeCache);
|
||||||
|
computeLocks(inputFlake.inputs, childNode, inputPath, oldLock);
|
||||||
|
} else {
|
||||||
|
/* No need to fetch this flake, we can be
|
||||||
|
lazy. However there may be new overrides on the
|
||||||
|
inputs of this flake, so we need to check
|
||||||
|
those. */
|
||||||
|
FlakeInputs fakeInputs;
|
||||||
|
|
||||||
|
for (auto & i : oldLock->inputs) {
|
||||||
|
if (auto lockedNode = std::get_if<0>(&i.second)) {
|
||||||
|
fakeInputs.emplace(i.first, FlakeInput {
|
||||||
|
.ref = (*lockedNode)->originalRef,
|
||||||
|
.isFlake = (*lockedNode)->isFlake,
|
||||||
|
});
|
||||||
|
} else if (auto follows = std::get_if<1>(&i.second)) {
|
||||||
|
fakeInputs.emplace(i.first, FlakeInput {
|
||||||
|
.follows = *follows,
|
||||||
|
.absolute = true
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
                    computeLocks(fakeInputs, childNode, inputPath, oldLock);
                }

            } else {
                /* We need to create a new lock file entry. So fetch
                   this input. */
                debug("creating new input '%s'", inputPathS);

                if (!lockFlags.allowMutable && !input.ref->input.isImmutable())
                    throw Error("cannot update flake input '%s' in pure mode", inputPathS);

                if (input.isFlake) {
                    auto inputFlake = getFlake(state, *input.ref, lockFlags.useRegistries, flakeCache);

                    /* Note: in case of an --override-input, we use
                       the *original* ref (input2.ref) for the
                       "original" field, rather than the
                       override. This ensures that the override isn't
                       nuked the next time we update the lock
                       file. That is, overrides are sticky unless you
                       use --no-write-lock-file. */
                    auto childNode = std::make_shared<LockedNode>(
                        inputFlake.lockedRef, input2.ref ? *input2.ref : *input.ref);

                    node->inputs.insert_or_assign(id, childNode);

                    /* Guard against circular flake imports. */
                    for (auto & parent : parents)
                        if (parent == *input.ref)
                            throw Error("found circular import of flake '%s'", parent);
                    parents.push_back(*input.ref);
                    Finally cleanup([&]() { parents.pop_back(); });

                    /* Recursively process the inputs of this
                       flake. Also, unless we already have this flake
                       in the top-level lock file, use this flake's
                       own lock file. */
                    computeLocks(
                        inputFlake.inputs, childNode, inputPath,
                        oldLock
                        ? std::dynamic_pointer_cast<const Node>(oldLock)
                        : LockFile::read(
                            inputFlake.sourceInfo->actualPath + "/" + inputFlake.lockedRef.subdir + "/flake.lock").root);
                }

                else {
                    auto [sourceInfo, resolvedRef, lockedRef] = fetchOrSubstituteTree(
                        state, *input.ref, lockFlags.useRegistries, flakeCache);
                    node->inputs.insert_or_assign(id,
                        std::make_shared<LockedNode>(lockedRef, *input.ref, false));
                }
            }
        }
    };

    computeLocks(
        flake.inputs, newLockFile.root, {},
        lockFlags.recreateLockFile ? nullptr : oldLockFile.root);

    for (auto & i : lockFlags.inputOverrides)
        if (!overridesUsed.count(i.first))
            warn("the flag '--override-input %s %s' does not match any input",
                printInputPath(i.first), i.second);

    for (auto & i : lockFlags.inputUpdates)
        if (!updatesUsed.count(i))
            warn("the flag '--update-input %s' does not match any input", printInputPath(i));

    /* Check 'follows' inputs. */
    newLockFile.check();

    debug("new lock file: %s", newLockFile);

    /* Check whether we need to / can write the new lock file. */
    if (!(newLockFile == oldLockFile)) {

        auto diff = LockFile::diff(oldLockFile, newLockFile);

        if (lockFlags.writeLockFile) {
            if (auto sourcePath = topRef.input.getSourcePath()) {
                if (!newLockFile.isImmutable()) {
                    if (settings.warnDirty)
                        warn("will not write lock file of flake '%s' because it has a mutable input", topRef);
                } else {
                    if (!lockFlags.updateLockFile)
                        throw Error("flake '%s' requires lock file changes but they're not allowed due to '--no-update-lock-file'", topRef);

                    auto relPath = (topRef.subdir == "" ? "" : topRef.subdir + "/") + "flake.lock";

                    auto path = *sourcePath + "/" + relPath;

                    bool lockFileExists = pathExists(path);

                    if (lockFileExists) {
                        auto s = chomp(diff);
                        if (s.empty())
                            warn("updating lock file '%s'", path);
                        else
                            warn("updating lock file '%s':\n%s", path, s);
                    } else
                        warn("creating lock file '%s'", path);

                    newLockFile.write(path);

                    topRef.input.markChangedFile(
                        (topRef.subdir == "" ? "" : topRef.subdir + "/") + "flake.lock",
                        lockFlags.commitLockFile
                        ? std::optional<std::string>(fmt("%s: %s\n\nFlake input changes:\n\n%s",
                                relPath, lockFileExists ? "Update" : "Add", diff))
                        : std::nullopt);

                    /* Rewriting the lockfile changed the top-level
                       repo, so we should re-read it. FIXME: we could
                       also just clear the 'rev' field... */
                    auto prevLockedRef = flake.lockedRef;
                    FlakeCache dummyCache;
                    flake = getFlake(state, topRef, lockFlags.useRegistries, dummyCache);

                    if (lockFlags.commitLockFile &&
                        flake.lockedRef.input.getRev() &&
                        prevLockedRef.input.getRev() != flake.lockedRef.input.getRev())
                        warn("committed new revision '%s'", flake.lockedRef.input.getRev()->gitRev());

                    /* Make sure that we picked up the change,
                       i.e. the tree should usually be dirty
                       now. Corner case: we could have reverted from a
                       dirty to a clean tree! */
                    if (flake.lockedRef.input == prevLockedRef.input
                        && !flake.lockedRef.input.isImmutable())
                        throw Error("'%s' did not change after I updated its 'flake.lock' file; is 'flake.lock' under version control?", flake.originalRef);
                }
            } else
                throw Error("cannot write modified lock file of flake '%s' (use '--no-write-lock-file' to ignore)", topRef);
        } else
            warn("not writing modified lock file of flake '%s':\n%s", topRef, chomp(diff));
    }

    return LockedFlake { .flake = std::move(flake), .lockFile = std::move(newLockFile) };
}
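For orientation, here is a minimal sketch of how a caller might drive `lockFlake` with the `LockFlags` declared in flake.hh below. The flake reference used is arbitrary, and an initialised `EvalState` is assumed rather than shown:

```cpp
// Illustrative sketch only; mirrors the declarations in
// src/libexpr/flake/flake.hh and flakeref.hh from this commit.
#include "flake/flake.hh"

using namespace nix;
using namespace nix::flake;

LockedFlake lockReadOnly(EvalState & state)
{
    LockFlags flags;
    flags.updateLockFile = false;  // fail if flake.lock would need changes
    flags.writeLockFile = false;   // never touch the on-disk lock file
    return lockFlake(state, parseFlakeRef("github:NixOS/nixpkgs"), flags);
}
```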

void callFlake(EvalState & state,
    const LockedFlake & lockedFlake,
    Value & vRes)
{
    auto vLocks = state.allocValue();
    auto vRootSrc = state.allocValue();
    auto vRootSubdir = state.allocValue();
    auto vTmp1 = state.allocValue();
    auto vTmp2 = state.allocValue();

    mkString(*vLocks, lockedFlake.lockFile.to_string());

    emitTreeAttrs(state, *lockedFlake.flake.sourceInfo, lockedFlake.flake.lockedRef.input, *vRootSrc);

    mkString(*vRootSubdir, lockedFlake.flake.lockedRef.subdir);

    static RootValue vCallFlake = nullptr;

    if (!vCallFlake) {
        vCallFlake = allocRootValue(state.allocValue());
        state.eval(state.parseExprFromString(
            #include "call-flake.nix.gen.hh"
            , "/"), **vCallFlake);
    }

    state.callFunction(**vCallFlake, *vLocks, *vTmp1, noPos);
    state.callFunction(*vTmp1, *vRootSrc, *vTmp2, noPos);
    state.callFunction(*vTmp2, *vRootSubdir, vRes, noPos);
}

static void prim_getFlake(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
    auto flakeRefS = state.forceStringNoCtx(*args[0], pos);
    auto flakeRef = parseFlakeRef(flakeRefS, {}, true);
    if (evalSettings.pureEval && !flakeRef.input.isImmutable())
        throw Error("cannot call 'getFlake' on mutable flake reference '%s', at %s (use --impure to override)", flakeRefS, pos);

    callFlake(state,
        lockFlake(state, flakeRef,
            LockFlags {
                .updateLockFile = false,
                .useRegistries = !evalSettings.pureEval,
                .allowMutable = !evalSettings.pureEval,
            }),
        v);
}

static RegisterPrimOp r2("__getFlake", 1, prim_getFlake, "flakes");

}

Fingerprint LockedFlake::getFingerprint() const
{
    // FIXME: as an optimization, if the flake contains a lock file
    // and we haven't changed it, then it's sufficient to use
    // flake.sourceInfo.storePath for the fingerprint.
    return hashString(htSHA256,
        fmt("%s;%d;%d;%s",
            flake.sourceInfo->storePath.to_string(),
            flake.lockedRef.input.getRevCount().value_or(0),
            flake.lockedRef.input.getLastModified().value_or(0),
            lockFile));
}

Flake::~Flake() { }

}
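The fingerprint above is intended as a cache key. A hypothetical consumer might key an in-memory evaluation cache on it like the sketch below; the cache itself and the `Base16` rendering are assumptions for illustration, not part of this commit:

```cpp
// Hypothetical cache keyed on LockedFlake::getFingerprint().
// Fingerprint is just a Hash (see flake.hh below), rendered here as hex.
std::map<std::string, Value *> flakeEvalCache;

Value * lookupCachedOutputs(const nix::flake::LockedFlake & lockedFlake)
{
    auto key = lockedFlake.getFingerprint().to_string(nix::Base16, false);
    auto i = flakeEvalCache.find(key);
    return i == flakeEvalCache.end() ? nullptr : i->second;
}
```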
111  src/libexpr/flake/flake.hh  Normal file
@@ -0,0 +1,111 @@
#pragma once

#include "types.hh"
#include "flakeref.hh"
#include "lockfile.hh"
#include "value.hh"

namespace nix {

class EvalState;

namespace fetchers { struct Tree; }

namespace flake {

struct FlakeInput;

typedef std::map<FlakeId, FlakeInput> FlakeInputs;

struct FlakeInput
{
    std::optional<FlakeRef> ref;
    bool isFlake = true;
    std::optional<InputPath> follows;
    bool absolute = false; // whether 'follows' is relative to the flake root
    FlakeInputs overrides;
};

struct Flake
{
    FlakeRef originalRef;
    FlakeRef resolvedRef;
    FlakeRef lockedRef;
    std::optional<std::string> description;
    std::shared_ptr<const fetchers::Tree> sourceInfo;
    FlakeInputs inputs;
    RootValue vOutputs;
    ~Flake();
};

Flake getFlake(EvalState & state, const FlakeRef & flakeRef, bool allowLookup);

/* Fingerprint of a locked flake; used as a cache key. */
typedef Hash Fingerprint;

struct LockedFlake
{
    Flake flake;
    LockFile lockFile;

    Fingerprint getFingerprint() const;
};

struct LockFlags
{
    /* Whether to ignore the existing lock file, creating a new one
       from scratch. */
    bool recreateLockFile = false;

    /* Whether to update the lock file at all. If set to false, any
       needed change to the lock file (e.g. when an input has been
       added to flake.nix) is a fatal error. */
    bool updateLockFile = true;

    /* Whether to write the lock file to disk. If set to true and
       changes to the lock file are needed while the flake is not
       writable (i.e. is not a local Git working tree or similar),
       you get a fatal error. If set to false, Nix will use the
       modified lock file in memory only, without writing it to
       disk. */
    bool writeLockFile = true;

    /* Whether to use the registries to look up indirect flake
       references like 'nixpkgs'. */
    bool useRegistries = true;

    /* Whether mutable flake references (i.e. those without a Git
       revision or similar) without a corresponding lock are
       allowed. Mutable flake references with a lock are always
       allowed. */
    bool allowMutable = true;

    /* Whether to commit changes to flake.lock. */
    bool commitLockFile = false;

    /* Flake inputs to be overridden. */
    std::map<InputPath, FlakeRef> inputOverrides;

    /* Flake inputs to be updated. This means that any existing lock
       for those inputs will be ignored. */
    std::set<InputPath> inputUpdates;
};

LockedFlake lockFlake(
    EvalState & state,
    const FlakeRef & flakeRef,
    const LockFlags & lockFlags);

void callFlake(
    EvalState & state,
    const LockedFlake & lockedFlake,
    Value & v);

}

void emitTreeAttrs(
    EvalState & state,
    const fetchers::Tree & tree,
    const fetchers::Input & input,
    Value & v, bool emptyRevFallback = false);

}
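The `inputOverrides` and `inputUpdates` fields map directly onto the `--override-input` and `--update-input` flags handled in flake.cc above. A sketch of populating them (flake references and input names are illustrative):

```cpp
// Sketch: roughly what `--override-input nixpkgs github:NixOS/nixpkgs/nixos-20.03
// --update-input home-manager` would translate to.
nix::flake::LockFlags flags;
flags.inputOverrides.emplace(
    nix::flake::parseInputPath("nixpkgs"),
    nix::parseFlakeRef("github:NixOS/nixpkgs/nixos-20.03"));
flags.inputUpdates.insert(nix::flake::parseInputPath("home-manager"));
```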
204  src/libexpr/flake/flakeref.cc  Normal file
@@ -0,0 +1,204 @@
#include "flakeref.hh"
#include "store-api.hh"
#include "url.hh"
#include "fetchers.hh"
#include "registry.hh"

namespace nix {

#if 0
// 'dir' path elements cannot start with a '.'. We also reject
// potentially dangerous characters like ';'.
const static std::string subDirElemRegex = "(?:[a-zA-Z0-9_-]+[a-zA-Z0-9._-]*)";
const static std::string subDirRegex = subDirElemRegex + "(?:/" + subDirElemRegex + ")*";
#endif

std::string FlakeRef::to_string() const
{
    auto url = input.toURL();
    if (subdir != "")
        url.query.insert_or_assign("dir", subdir);
    return url.to_string();
}

fetchers::Attrs FlakeRef::toAttrs() const
{
    auto attrs = input.toAttrs();
    if (subdir != "")
        attrs.emplace("dir", subdir);
    return attrs;
}

std::ostream & operator << (std::ostream & str, const FlakeRef & flakeRef)
{
    str << flakeRef.to_string();
    return str;
}

bool FlakeRef::operator ==(const FlakeRef & other) const
{
    return input == other.input && subdir == other.subdir;
}

FlakeRef FlakeRef::resolve(ref<Store> store) const
{
    auto [input2, extraAttrs] = lookupInRegistries(store, input);
    return FlakeRef(std::move(input2), fetchers::maybeGetStrAttr(extraAttrs, "dir").value_or(subdir));
}

FlakeRef parseFlakeRef(
    const std::string & url, const std::optional<Path> & baseDir, bool allowMissing)
{
    auto [flakeRef, fragment] = parseFlakeRefWithFragment(url, baseDir, allowMissing);
    if (fragment != "")
        throw Error("unexpected fragment '%s' in flake reference '%s'", fragment, url);
    return flakeRef;
}

std::optional<FlakeRef> maybeParseFlakeRef(
    const std::string & url, const std::optional<Path> & baseDir)
{
    try {
        return parseFlakeRef(url, baseDir);
    } catch (Error &) {
        return {};
    }
}

std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
    const std::string & url, const std::optional<Path> & baseDir, bool allowMissing)
{
    using namespace fetchers;

    static std::string fnRegex = "[0-9a-zA-Z-._~!$&'\"()*+,;=]+";

    static std::regex pathUrlRegex(
        "(/?" + fnRegex + "(?:/" + fnRegex + ")*/?)"
        + "(?:\\?(" + queryRegex + "))?"
        + "(?:#(" + queryRegex + "))?",
        std::regex::ECMAScript);

    static std::regex flakeRegex(
        "((" + flakeIdRegexS + ")(?:/(?:" + refAndOrRevRegex + "))?)"
        + "(?:#(" + queryRegex + "))?",
        std::regex::ECMAScript);

    std::smatch match;

    /* Check if 'url' is a flake ID. This is an abbreviated syntax for
       'flake:<flake-id>?ref=<ref>&rev=<rev>'. */

    if (std::regex_match(url, match, flakeRegex)) {
        auto parsedURL = ParsedURL{
            .url = url,
            .base = "flake:" + std::string(match[1]),
            .scheme = "flake",
            .authority = "",
            .path = match[1],
        };

        return std::make_pair(
            FlakeRef(Input::fromURL(parsedURL), ""),
            percentDecode(std::string(match[6])));
    }

    else if (std::regex_match(url, match, pathUrlRegex)) {
        std::string path = match[1];
        std::string fragment = percentDecode(std::string(match[3]));

        if (baseDir) {
            /* Check if 'url' is a path (either absolute or relative
               to 'baseDir'). If so, search upward to the root of the
               repo (i.e. the directory containing .git). */

            path = absPath(path, baseDir, true);

            if (!S_ISDIR(lstat(path).st_mode))
                throw BadURL("path '%s' is not a flake (because it's not a directory)", path);

            if (!allowMissing && !pathExists(path + "/flake.nix"))
                throw BadURL("path '%s' is not a flake (because it doesn't contain a 'flake.nix' file)", path);

            auto flakeRoot = path;
            std::string subdir;

            while (flakeRoot != "/") {
                if (pathExists(flakeRoot + "/.git")) {
                    auto base = std::string("git+file://") + flakeRoot;

                    auto parsedURL = ParsedURL{
                        .url = base, // FIXME
                        .base = base,
                        .scheme = "git+file",
                        .authority = "",
                        .path = flakeRoot,
                        .query = decodeQuery(match[2]),
                    };

                    if (subdir != "") {
                        if (parsedURL.query.count("dir"))
                            throw Error("flake URL '%s' has an inconsistent 'dir' parameter", url);
                        parsedURL.query.insert_or_assign("dir", subdir);
                    }

                    if (pathExists(flakeRoot + "/.git/shallow"))
                        parsedURL.query.insert_or_assign("shallow", "1");

                    return std::make_pair(
                        FlakeRef(Input::fromURL(parsedURL), get(parsedURL.query, "dir").value_or("")),
                        fragment);
                }

                subdir = std::string(baseNameOf(flakeRoot)) + (subdir.empty() ? "" : "/" + subdir);
                flakeRoot = dirOf(flakeRoot);
            }

        } else {
            if (!hasPrefix(path, "/"))
                throw BadURL("flake reference '%s' is not an absolute path", url);
            path = canonPath(path);
        }

        fetchers::Attrs attrs;
        attrs.insert_or_assign("type", "path");
        attrs.insert_or_assign("path", path);

        return std::make_pair(FlakeRef(Input::fromAttrs(std::move(attrs)), ""), fragment);
    }

    else {
        auto parsedURL = parseURL(url);
        std::string fragment;
        std::swap(fragment, parsedURL.fragment);
        return std::make_pair(
            FlakeRef(Input::fromURL(parsedURL), get(parsedURL.query, "dir").value_or("")),
            fragment);
    }
}

std::optional<std::pair<FlakeRef, std::string>> maybeParseFlakeRefWithFragment(
    const std::string & url, const std::optional<Path> & baseDir)
{
    try {
        return parseFlakeRefWithFragment(url, baseDir);
    } catch (Error & e) {
        return {};
    }
}

FlakeRef FlakeRef::fromAttrs(const fetchers::Attrs & attrs)
{
    auto attrs2(attrs);
    attrs2.erase("dir");
    return FlakeRef(
        fetchers::Input::fromAttrs(std::move(attrs2)),
        fetchers::maybeGetStrAttr(attrs, "dir").value_or(""));
}

std::pair<fetchers::Tree, FlakeRef> FlakeRef::fetchTree(ref<Store> store) const
{
    auto [tree, lockedInput] = input.fetch(store);
    return {std::move(tree), FlakeRef(std::move(lockedInput), subdir)};
}

}
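A quick sanity check of the parsing entry points defined above (a sketch; the exact rendered form of the resulting reference depends on the fetcher chosen for the scheme):

```cpp
// Both the flake-ID shorthand and full URL forms parse; '#fragments'
// are split off by parseFlakeRefWithFragment.
auto [ref1, frag1] = nix::parseFlakeRefWithFragment("nixpkgs/release-20.03#hello");
// ref1 is an indirect (flake:) reference carrying ref=release-20.03; frag1 == "hello"

auto [ref2, frag2] = nix::parseFlakeRefWithFragment("github:NixOS/nixpkgs#hello");
// ref2 is a github: reference; frag2 == "hello"
```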
53  src/libexpr/flake/flakeref.hh  Normal file
@@ -0,0 +1,53 @@
#pragma once

#include "types.hh"
#include "hash.hh"
#include "fetchers.hh"

#include <variant>

namespace nix {

class Store;

typedef std::string FlakeId;

struct FlakeRef
{
    fetchers::Input input;

    Path subdir;

    bool operator==(const FlakeRef & other) const;

    FlakeRef(fetchers::Input && input, const Path & subdir)
        : input(std::move(input)), subdir(subdir)
    { }

    // FIXME: change to operator <<.
    std::string to_string() const;

    fetchers::Attrs toAttrs() const;

    FlakeRef resolve(ref<Store> store) const;

    static FlakeRef fromAttrs(const fetchers::Attrs & attrs);

    std::pair<fetchers::Tree, FlakeRef> fetchTree(ref<Store> store) const;
};

std::ostream & operator << (std::ostream & str, const FlakeRef & flakeRef);

FlakeRef parseFlakeRef(
    const std::string & url, const std::optional<Path> & baseDir = {}, bool allowMissing = false);

std::optional<FlakeRef> maybeParseFlakeRef(
    const std::string & url, const std::optional<Path> & baseDir = {});

std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
    const std::string & url, const std::optional<Path> & baseDir = {}, bool allowMissing = false);

std::optional<std::pair<FlakeRef, std::string>> maybeParseFlakeRefWithFragment(
    const std::string & url, const std::optional<Path> & baseDir = {});

}
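As a usage note, `FlakeRef::resolve` is what turns an indirect reference into a concrete one via the registries. A sketch, assuming a `ref<Store> store` is at hand:

```cpp
// Sketch: resolving the indirect reference 'nixpkgs' through the registries.
auto indirect = nix::parseFlakeRef("nixpkgs");  // parses via the flake: scheme
auto resolved = indirect.resolve(store);        // e.g. a github: reference,
                                                // depending on registry contents
```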
338  src/libexpr/flake/lockfile.cc  Normal file
@@ -0,0 +1,338 @@
#include "lockfile.hh"
#include "store-api.hh"

#include <nlohmann/json.hpp>

namespace nix::flake {

FlakeRef getFlakeRef(
    const nlohmann::json & json,
    const char * attr,
    const char * info)
{
    auto i = json.find(attr);
    if (i != json.end()) {
        auto attrs = jsonToAttrs(*i);
        // FIXME: remove when we drop support for version 5.
        if (info) {
            auto j = json.find(info);
            if (j != json.end()) {
                for (auto k : jsonToAttrs(*j))
                    attrs.insert_or_assign(k.first, k.second);
            }
        }
        return FlakeRef::fromAttrs(attrs);
    }

    throw Error("attribute '%s' missing in lock file", attr);
}

LockedNode::LockedNode(const nlohmann::json & json)
    : lockedRef(getFlakeRef(json, "locked", "info"))
    , originalRef(getFlakeRef(json, "original", nullptr))
    , isFlake(json.find("flake") != json.end() ? (bool) json["flake"] : true)
{
    if (!lockedRef.input.isImmutable())
        throw Error("lockfile contains mutable lock '%s'", attrsToJson(lockedRef.input.toAttrs()));
}

StorePath LockedNode::computeStorePath(Store & store) const
{
    return lockedRef.input.computeStorePath(store);
}

std::shared_ptr<Node> LockFile::findInput(const InputPath & path)
{
    auto pos = root;

    if (!pos) return {};

    for (auto & elem : path) {
        if (auto i = get(pos->inputs, elem)) {
            if (auto node = std::get_if<0>(&*i))
                pos = *node;
            else if (auto follows = std::get_if<1>(&*i)) {
                pos = findInput(*follows);
                if (!pos) return {};
            }
        } else
            return {};
    }

    return pos;
}

LockFile::LockFile(const nlohmann::json & json, const Path & path)
{
    auto version = json.value("version", 0);
    if (version < 5 || version > 7)
        throw Error("lock file '%s' has unsupported version %d", path, version);

    std::unordered_map<std::string, std::shared_ptr<Node>> nodeMap;

    std::function<void(Node & node, const nlohmann::json & jsonNode)> getInputs;

    getInputs = [&](Node & node, const nlohmann::json & jsonNode)
    {
        if (jsonNode.find("inputs") == jsonNode.end()) return;
        for (auto & i : jsonNode["inputs"].items()) {
            if (i.value().is_array()) {
                InputPath path;
                for (auto & j : i.value())
                    path.push_back(j);
                node.inputs.insert_or_assign(i.key(), path);
            } else {
                std::string inputKey = i.value();
                auto k = nodeMap.find(inputKey);
                if (k == nodeMap.end()) {
                    auto jsonNode2 = json["nodes"][inputKey];
                    auto input = std::make_shared<LockedNode>(jsonNode2);
                    k = nodeMap.insert_or_assign(inputKey, input).first;
                    getInputs(*input, jsonNode2);
                }
                if (auto child = std::dynamic_pointer_cast<LockedNode>(k->second))
                    node.inputs.insert_or_assign(i.key(), child);
                else
                    // FIXME: replace by follows node
                    throw Error("lock file contains cycle to root node");
            }
        }
    };

    std::string rootKey = json["root"];
    nodeMap.insert_or_assign(rootKey, root);
    getInputs(*root, json["nodes"][rootKey]);

    // FIXME: check that there are no cycles in version >= 7. Cycles
    // between inputs are only possible using 'follows' indirections.
    // Once we drop support for version <= 6, we can simplify the code
    // a bit since we don't need to worry about cycles.
}

nlohmann::json LockFile::toJson() const
{
    nlohmann::json nodes;
    std::unordered_map<std::shared_ptr<const Node>, std::string> nodeKeys;
    std::unordered_set<std::string> keys;

    std::function<std::string(const std::string & key, std::shared_ptr<const Node> node)> dumpNode;

    dumpNode = [&](std::string key, std::shared_ptr<const Node> node) -> std::string
    {
        auto k = nodeKeys.find(node);
        if (k != nodeKeys.end())
            return k->second;

        if (!keys.insert(key).second) {
            for (int n = 2; ; ++n) {
                auto k = fmt("%s_%d", key, n);
                if (keys.insert(k).second) {
                    key = k;
                    break;
                }
            }
        }

        nodeKeys.insert_or_assign(node, key);

        auto n = nlohmann::json::object();

        if (!node->inputs.empty()) {
            auto inputs = nlohmann::json::object();
            for (auto & i : node->inputs) {
                if (auto child = std::get_if<0>(&i.second)) {
                    inputs[i.first] = dumpNode(i.first, *child);
                } else if (auto follows = std::get_if<1>(&i.second)) {
                    auto arr = nlohmann::json::array();
                    for (auto & x : *follows)
                        arr.push_back(x);
                    inputs[i.first] = std::move(arr);
                }
            }
            n["inputs"] = std::move(inputs);
        }

        if (auto lockedNode = std::dynamic_pointer_cast<const LockedNode>(node)) {
            n["original"] = fetchers::attrsToJson(lockedNode->originalRef.toAttrs());
            n["locked"] = fetchers::attrsToJson(lockedNode->lockedRef.toAttrs());
            if (!lockedNode->isFlake) n["flake"] = false;
        }

        nodes[key] = std::move(n);

        return key;
    };

    nlohmann::json json;
    json["version"] = 7;
    json["root"] = dumpNode("root", root);
    json["nodes"] = std::move(nodes);

    return json;
}

std::string LockFile::to_string() const
{
    return toJson().dump(2);
}

LockFile LockFile::read(const Path & path)
{
    if (!pathExists(path)) return LockFile();
    return LockFile(nlohmann::json::parse(readFile(path)), path);
}

std::ostream & operator <<(std::ostream & stream, const LockFile & lockFile)
{
    stream << lockFile.toJson().dump(2);
    return stream;
}

void LockFile::write(const Path & path) const
{
    createDirs(dirOf(path));
    writeFile(path, fmt("%s\n", *this));
}

bool LockFile::isImmutable() const
{
    std::unordered_set<std::shared_ptr<const Node>> nodes;

    std::function<void(std::shared_ptr<const Node> node)> visit;

    visit = [&](std::shared_ptr<const Node> node)
    {
        if (!nodes.insert(node).second) return;
        for (auto & i : node->inputs)
            if (auto child = std::get_if<0>(&i.second))
                visit(*child);
    };

    visit(root);

    for (auto & i : nodes) {
        if (i == root) continue;
        auto lockedNode = std::dynamic_pointer_cast<const LockedNode>(i);
        if (lockedNode && !lockedNode->lockedRef.input.isImmutable()) return false;
    }

    return true;
}

bool LockFile::operator ==(const LockFile & other) const
{
    // FIXME: slow
    return toJson() == other.toJson();
}

InputPath parseInputPath(std::string_view s)
{
    InputPath path;

    for (auto & elem : tokenizeString<std::vector<std::string>>(s, "/")) {
        if (!std::regex_match(elem, flakeIdRegex))
            throw UsageError("invalid flake input path element '%s'", elem);
        path.push_back(elem);
    }

    return path;
}

std::map<InputPath, Node::Edge> LockFile::getAllInputs() const
{
    std::unordered_set<std::shared_ptr<Node>> done;
    std::map<InputPath, Node::Edge> res;

    std::function<void(const InputPath & prefix, std::shared_ptr<Node> node)> recurse;

    recurse = [&](const InputPath & prefix, std::shared_ptr<Node> node)
    {
        if (!done.insert(node).second) return;

        for (auto &[id, input] : node->inputs) {
            auto inputPath(prefix);
            inputPath.push_back(id);
            res.emplace(inputPath, input);
            if (auto child = std::get_if<0>(&input))
                recurse(inputPath, *child);
        }
    };

    recurse({}, root);

    return res;
}

std::ostream & operator <<(std::ostream & stream, const Node::Edge & edge)
{
    if (auto node = std::get_if<0>(&edge))
        stream << "'" << (*node)->lockedRef << "'";
    else if (auto follows = std::get_if<1>(&edge))
        stream << fmt("follows '%s'", printInputPath(*follows));
    return stream;
}

static bool equals(const Node::Edge & e1, const Node::Edge & e2)
{
    if (auto n1 = std::get_if<0>(&e1))
        if (auto n2 = std::get_if<0>(&e2))
            return (*n1)->lockedRef == (*n2)->lockedRef;
    if (auto f1 = std::get_if<1>(&e1))
        if (auto f2 = std::get_if<1>(&e2))
            return *f1 == *f2;
    return false;
}

std::string LockFile::diff(const LockFile & oldLocks, const LockFile & newLocks)
{
    auto oldFlat = oldLocks.getAllInputs();
    auto newFlat = newLocks.getAllInputs();

    auto i = oldFlat.begin();
    auto j = newFlat.begin();
    std::string res;

    while (i != oldFlat.end() || j != newFlat.end()) {
        if (j != newFlat.end() && (i == oldFlat.end() || i->first > j->first)) {
            res += fmt("* Added '%s': %s\n", printInputPath(j->first), j->second);
            ++j;
        } else if (i != oldFlat.end() && (j == newFlat.end() || i->first < j->first)) {
            res += fmt("* Removed '%s'\n", printInputPath(i->first));
            ++i;
        } else {
            if (!equals(i->second, j->second)) {
                res += fmt("* Updated '%s': %s -> %s\n",
                    printInputPath(i->first),
                    i->second,
                    j->second);
            }
            ++i;
            ++j;
        }
    }

    return res;
}

void LockFile::check()
{
    auto inputs = getAllInputs();

    for (auto & [inputPath, input] : inputs) {
        if (auto follows = std::get_if<1>(&input)) {
            if (!follows->empty() && !get(inputs, *follows))
                throw Error("input '%s' follows a non-existent input '%s'",
                    printInputPath(inputPath),
                    printInputPath(*follows));
        }
    }
}

std::string printInputPath(const InputPath & path)
{
    return concatStringsSep("/", path);
}

}
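For reference, the version-7 format that `LockFile::toJson()` emits looks roughly like this, with abridged, illustrative values:

```cpp
// Shape of a version-7 flake.lock as produced by LockFile::toJson().
// A 'follows' edge is serialised as an input-path array; node edges as
// node keys. (Illustrative only.)
//
// {
//   "version": 7,
//   "root": "root",
//   "nodes": {
//     "root": {
//       "inputs": { "nixpkgs": "nixpkgs", "bar": [ "nixpkgs" ] }
//     },
//     "nixpkgs": {
//       "original": { "id": "nixpkgs", "type": "indirect" },
//       "locked": { "type": "github", "owner": "NixOS", "repo": "nixpkgs", "rev": "..." }
//     }
//   }
// }
```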
85  src/libexpr/flake/lockfile.hh  Normal file
@@ -0,0 +1,85 @@
#pragma once

#include "flakeref.hh"

#include <nlohmann/json_fwd.hpp>

namespace nix {
class Store;
struct StorePath;
}

namespace nix::flake {

using namespace fetchers;

typedef std::vector<FlakeId> InputPath;

struct LockedNode;

/* A node in the lock file. It has outgoing edges to other nodes (its
   inputs). Only the root node has this type; all other nodes have
   type LockedNode. */
struct Node : std::enable_shared_from_this<Node>
{
    typedef std::variant<std::shared_ptr<LockedNode>, InputPath> Edge;

    std::map<FlakeId, Edge> inputs;

    virtual ~Node() { }
};

/* A non-root node in the lock file. */
struct LockedNode : Node
{
    FlakeRef lockedRef, originalRef;
    bool isFlake = true;

    LockedNode(
        const FlakeRef & lockedRef,
        const FlakeRef & originalRef,
        bool isFlake = true)
        : lockedRef(lockedRef), originalRef(originalRef), isFlake(isFlake)
    { }

    LockedNode(const nlohmann::json & json);

    StorePath computeStorePath(Store & store) const;
};

struct LockFile
{
    std::shared_ptr<Node> root = std::make_shared<Node>();

    LockFile() {};
    LockFile(const nlohmann::json & json, const Path & path);

    nlohmann::json toJson() const;

    std::string to_string() const;

    static LockFile read(const Path & path);

    void write(const Path & path) const;

    bool isImmutable() const;

    bool operator ==(const LockFile & other) const;

    std::shared_ptr<Node> findInput(const InputPath & path);

    std::map<InputPath, Node::Edge> getAllInputs() const;

    static std::string diff(const LockFile & oldLocks, const LockFile & newLocks);

    /* Check that every 'follows' input target exists. */
    void check();
};

std::ostream & operator <<(std::ostream & stream, const LockFile & lockFile);

InputPath parseInputPath(std::string_view s);

std::string printInputPath(const InputPath & path);

}
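To make the `Node::Edge` variant concrete, here is a sketch of building a two-input graph by hand, where `bar` follows `foo`. The two `FlakeRef` values are assumed to exist and are not shown:

```cpp
using namespace nix::flake;

// `locked` and `original` are assumed, pre-constructed FlakeRefs.
auto root = std::make_shared<Node>();
auto foo = std::make_shared<LockedNode>(locked, original);
root->inputs.insert_or_assign("foo", Node::Edge { foo });                 // node edge
root->inputs.insert_or_assign("bar", Node::Edge { InputPath { "foo" } }); // follows edge
```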

src/libexpr/get-drvs.cc
@@ -39,7 +39,7 @@ DrvInfo::DrvInfo(EvalState & state, ref<Store> store, const std::string & drvPathWithOutputs)
     if (i == drv.outputs.end())
         throw Error("derivation '%s' does not have output '%s'", store->printStorePath(drvPath), outputName);

-    outPath = store->printStorePath(i->second.path);
+    outPath = store->printStorePath(i->second.path(*store, drv.name));
 }
src/libexpr/local.mk
@@ -4,7 +4,12 @@ libexpr_NAME = libnixexpr

 libexpr_DIR := $(d)

-libexpr_SOURCES := $(wildcard $(d)/*.cc) $(wildcard $(d)/primops/*.cc) $(d)/lexer-tab.cc $(d)/parser-tab.cc
+libexpr_SOURCES := \
+  $(wildcard $(d)/*.cc) \
+  $(wildcard $(d)/primops/*.cc) \
+  $(wildcard $(d)/flake/*.cc) \
+  $(d)/lexer-tab.cc \
+  $(d)/parser-tab.cc

 libexpr_CXXFLAGS += -I src/libutil -I src/libstore -I src/libfetchers -I src/libmain -I src/libexpr

@@ -34,4 +39,9 @@ dist-files += $(d)/parser-tab.cc $(d)/parser-tab.hh $(d)/lexer-tab.cc $(d)/lexer

 $(eval $(call install-file-in, $(d)/nix-expr.pc, $(prefix)/lib/pkgconfig, 0644))

+$(foreach i, $(wildcard src/libexpr/flake/*.hh), \
+  $(eval $(call install-file-in, $(i), $(includedir)/nix/flake, 0644)))
+
 $(d)/primops.cc: $(d)/imported-drv-to-derivation.nix.gen.hh
+
+$(d)/flake/flake.cc: $(d)/flake/call-flake.nix.gen.hh
src/libexpr/eval.cc
@@ -719,7 +719,7 @@ std::pair<bool, std::string> EvalState::resolveSearchPathElem(const SearchPathElem & elem)
     if (isUri(elem.second)) {
         try {
             res = { true, store->toRealPath(fetchers::downloadTarball(
-                store, resolveUri(elem.second), "source", false).storePath) };
+                store, resolveUri(elem.second), "source", false).first.storePath) };
         } catch (FileTransferError & e) {
             logWarning({
                 .name = "Entry download",
src/libexpr/primops.cc
@@ -30,18 +30,6 @@ namespace nix {
 *************************************************************/


-/* Decode a context string ‘!<name>!<path>’ into a pair <path,
-   name>. */
-std::pair<string, string> decodeContext(const string & s)
-{
-    if (s.at(0) == '!') {
-        size_t index = s.find("!", 1);
-        return std::pair<string, string>(string(s, index + 1), string(s, 1, index - 1));
-    } else
-        return std::pair<string, string>(s.at(0) == '/' ? s : string(s, 1), "");
-}
-
-
 InvalidPathError::InvalidPathError(const Path & path) :
     EvalError("path '%s' is not valid", path), path(path) {}

@@ -64,7 +52,7 @@ void EvalState::realiseContext(const PathSet & context)
             DerivationOutputs::iterator i = drv.outputs.find(outputName);
             if (i == drv.outputs.end())
                 throw Error("derivation '%s' does not have an output named '%s'", ctxS, outputName);
-            allowedPaths->insert(store->printStorePath(i->second.path));
+            allowedPaths->insert(store->printStorePath(i->second.path(*store, drv.name)));
         }
     }
 }

@@ -77,7 +65,7 @@ void EvalState::realiseContext(const PathSet & context)

     /* For performance, prefetch all substitute info. */
     StorePathSet willBuild, willSubstitute, unknown;
-    unsigned long long downloadSize, narSize;
+    uint64_t downloadSize, narSize;
     store->queryMissing(drvs, willBuild, willSubstitute, unknown, downloadSize, narSize);

     store->buildPaths(drvs);

@@ -103,8 +91,17 @@ static void prim_scopedImport(EvalState & state, const Pos & pos, Value * * args
     Path realPath = state.checkSourcePath(state.toRealPath(path, context));

     // FIXME
-    if (state.store->isStorePath(path) && state.store->isValidPath(state.store->parseStorePath(path)) && isDerivation(path)) {
-        Derivation drv = state.store->readDerivation(state.store->parseStorePath(path));
+    auto isValidDerivationInStore = [&]() -> std::optional<StorePath> {
+        if (!state.store->isStorePath(path))
+            return std::nullopt;
+        auto storePath = state.store->parseStorePath(path);
+        if (!(state.store->isValidPath(storePath) && isDerivation(path)))
+            return std::nullopt;
+        return storePath;
+    };
+    if (auto optStorePath = isValidDerivationInStore()) {
+        auto storePath = *optStorePath;
+        Derivation drv = state.store->readDerivation(storePath);
         Value & w = *state.allocValue();
         state.mkAttrs(w, 3 + drv.outputs.size());
         Value * v2 = state.allocAttr(w, state.sDrvPath);

@@ -118,7 +115,7 @@ static void prim_scopedImport(EvalState & state, const Pos & pos, Value * * args

         for (const auto & o : drv.outputs) {
             v2 = state.allocAttr(w, state.symbols.create(o.first));
-            mkString(*v2, state.store->printStorePath(o.second.path), {"!" + o.first + "!" + path});
+            mkString(*v2, state.store->printStorePath(o.second.path(*state.store, drv.name)), {"!" + o.first + "!" + path});
             outputsVal->listElems()[outputs_index] = state.allocValue();
             mkString(*(outputsVal->listElems()[outputs_index++]), o.first);
         }

@@ -582,6 +579,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *

     /* Build the derivation expression by processing the attributes. */
     Derivation drv;
+    drv.name = drvName;

     PathSet context;

@@ -776,11 +774,12 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
                 auto outPath = state.store->makeFixedOutputPath(ingestionMethod, h, drvName);
                 if (!jsonObject) drv.env["out"] = state.store->printStorePath(outPath);
                 drv.outputs.insert_or_assign("out", DerivationOutput {
-                    .path = std::move(outPath),
+                    .output = DerivationOutputFixed {
                     .hash = FixedOutputHash {
                         .method = ingestionMethod,
                         .hash = std::move(h),
                     },
+                    },
                 });
             }

@@ -795,8 +794,9 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
                 if (!jsonObject) drv.env[i] = "";
                 drv.outputs.insert_or_assign(i,
                     DerivationOutput {
+                        .output = DerivationOutputInputAddressed {
                         .path = StorePath::dummy,
-                        .hash = std::optional<FixedOutputHash> {},
+                        },
                     });
             }

@@ -807,8 +807,9 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
                 if (!jsonObject) drv.env[i] = state.store->printStorePath(outPath);
                 drv.outputs.insert_or_assign(i,
                     DerivationOutput {
+                        .output = DerivationOutputInputAddressed {
                         .path = std::move(outPath),
-                        .hash = std::optional<FixedOutputHash>(),
+                        },
                     });
             }
         }
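The three hunks above all follow from the same change in libstore: `DerivationOutput` became a sum type, and the output path is now computed on demand. Below is a sketch of the shape these hunks assume; the real definition lives in src/libstore/derivations.hh and may differ in detail:

```cpp
// Sketch of the DerivationOutput shape assumed by the hunks above.
struct DerivationOutputInputAddressed { StorePath path; };
struct DerivationOutputFixed { FixedOutputHash hash; };

struct DerivationOutput
{
    std::variant<DerivationOutputInputAddressed, DerivationOutputFixed> output;
    // Computes the output path; for fixed-output derivations this is
    // derived from the hash and the derivation name, hence the new
    // (store, drvName) arguments seen at every call site.
    StorePath path(const Store & store, std::string_view drvName) const;
};
```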
@@ -829,7 +830,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
     mkString(*state.allocAttr(v, state.sDrvPath), drvPathS, {"=" + drvPathS});
     for (auto & i : drv.outputs) {
         mkString(*state.allocAttr(v, state.symbols.create(i.first)),
-            state.store->printStorePath(i.second.path), {"!" + i.first + "!" + drvPathS});
+            state.store->printStorePath(i.second.path(*state.store, drv.name)), {"!" + i.first + "!" + drvPathS});
     }
     v.attrs->sort();
 }

@@ -1123,7 +1124,7 @@ static void prim_toFile(EvalState & state, const Pos & pos, Value * * args, Value & v)


 static void addPath(EvalState & state, const Pos & pos, const string & name, const Path & path_,
-    Value * filterFun, FileIngestionMethod method, const Hash & expectedHash, Value & v)
+    Value * filterFun, FileIngestionMethod method, const std::optional<Hash> expectedHash, Value & v)
 {
     const auto path = evalSettings.pureEval && expectedHash ?
         path_ :

@@ -1154,7 +1155,7 @@ static void addPath(EvalState & state, const Pos & pos, const string & name, const Path & path_,

     std::optional<StorePath> expectedStorePath;
     if (expectedHash)
-        expectedStorePath = state.store->makeFixedOutputPath(method, expectedHash, name);
+        expectedStorePath = state.store->makeFixedOutputPath(method, *expectedHash, name);
     Path dstPath;
     if (!expectedHash || !state.store->isValidPath(*expectedStorePath)) {
         dstPath = state.store->printStorePath(settings.readOnlyMode

@@ -1188,7 +1189,7 @@ static void prim_filterSource(EvalState & state, const Pos & pos, Value * * args, Value & v)
             .errPos = pos
         });

-    addPath(state, pos, std::string(baseNameOf(path)), path, args[0], FileIngestionMethod::Recursive, Hash(), v);
+    addPath(state, pos, std::string(baseNameOf(path)), path, args[0], FileIngestionMethod::Recursive, std::nullopt, v);
 }

 static void prim_path(EvalState & state, const Pos & pos, Value * * args, Value & v)

@@ -1198,7 +1199,7 @@ static void prim_path(EvalState & state, const Pos & pos, Value * * args, Value & v)
     string name;
     Value * filterFun = nullptr;
     auto method = FileIngestionMethod::Recursive;
-    Hash expectedHash;
+    std::optional<Hash> expectedHash;

     for (auto & attr : *args[0]->attrs) {
         const string & n(attr.name);
91  src/libexpr/primops/fetchGit.cc  (deleted)
@@ -1,91 +0,0 @@
-#include "primops.hh"
-#include "eval-inline.hh"
-#include "store-api.hh"
-#include "hash.hh"
-#include "fetchers.hh"
-#include "url.hh"
-
-namespace nix {
-
-static void prim_fetchGit(EvalState & state, const Pos & pos, Value * * args, Value & v)
-{
-    std::string url;
-    std::optional<std::string> ref;
-    std::optional<Hash> rev;
-    std::string name = "source";
-    bool fetchSubmodules = false;
-    PathSet context;
-
-    state.forceValue(*args[0]);
-
-    if (args[0]->type == tAttrs) {
-
-        state.forceAttrs(*args[0], pos);
-
-        for (auto & attr : *args[0]->attrs) {
-            string n(attr.name);
-            if (n == "url")
-                url = state.coerceToString(*attr.pos, *attr.value, context, false, false);
-            else if (n == "ref")
-                ref = state.forceStringNoCtx(*attr.value, *attr.pos);
-            else if (n == "rev")
-                rev = Hash(state.forceStringNoCtx(*attr.value, *attr.pos), htSHA1);
-            else if (n == "name")
-                name = state.forceStringNoCtx(*attr.value, *attr.pos);
-            else if (n == "submodules")
-                fetchSubmodules = state.forceBool(*attr.value, *attr.pos);
-            else
-                throw EvalError({
-                    .hint = hintfmt("unsupported argument '%s' to 'fetchGit'", attr.name),
-                    .errPos = *attr.pos
-                });
-        }
-
-        if (url.empty())
-            throw EvalError({
-                .hint = hintfmt("'url' argument required"),
-                .errPos = pos
-            });
-
-    } else
-        url = state.coerceToString(pos, *args[0], context, false, false);
-
-    // FIXME: git externals probably can be used to bypass the URI
-    // whitelist. Ah well.
-    state.checkURI(url);
-
-    if (evalSettings.pureEval && !rev)
-        throw Error("in pure evaluation mode, 'fetchGit' requires a Git revision");
-
-    fetchers::Attrs attrs;
-    attrs.insert_or_assign("type", "git");
-    attrs.insert_or_assign("url", url.find("://") != std::string::npos ? url : "file://" + url);
-    if (ref) attrs.insert_or_assign("ref", *ref);
-    if (rev) attrs.insert_or_assign("rev", rev->gitRev());
-    if (fetchSubmodules) attrs.insert_or_assign("submodules", true);
-    auto input = fetchers::inputFromAttrs(attrs);
-
-    // FIXME: use name?
-    auto [tree, input2] = input->fetchTree(state.store);
-
-    state.mkAttrs(v, 8);
-    auto storePath = state.store->printStorePath(tree.storePath);
-    mkString(*state.allocAttr(v, state.sOutPath), storePath, PathSet({storePath}));
-    // Backward compatibility: set 'rev' to
-    // 0000000000000000000000000000000000000000 for a dirty tree.
-    auto rev2 = input2->getRev().value_or(Hash(htSHA1));
-    mkString(*state.allocAttr(v, state.symbols.create("rev")), rev2.gitRev());
-    mkString(*state.allocAttr(v, state.symbols.create("shortRev")), rev2.gitShortRev());
-    // Backward compatibility: set 'revCount' to 0 for a dirty tree.
-    mkInt(*state.allocAttr(v, state.symbols.create("revCount")),
-        tree.info.revCount.value_or(0));
-    mkBool(*state.allocAttr(v, state.symbols.create("submodules")), fetchSubmodules);
-    v.attrs->sort();
-
-    if (state.allowedPaths)
-        state.allowedPaths->insert(tree.actualPath);
-}
-
-static RegisterPrimOp r("fetchGit", 1, prim_fetchGit);
-
-}
src/libexpr/primops/fetchMercurial.cc
@@ -65,23 +65,23 @@ static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * args, Value & v)
     attrs.insert_or_assign("url", url.find("://") != std::string::npos ? url : "file://" + url);
     if (ref) attrs.insert_or_assign("ref", *ref);
     if (rev) attrs.insert_or_assign("rev", rev->gitRev());
-    auto input = fetchers::inputFromAttrs(attrs);
+    auto input = fetchers::Input::fromAttrs(std::move(attrs));

     // FIXME: use name
-    auto [tree, input2] = input->fetchTree(state.store);
+    auto [tree, input2] = input.fetch(state.store);

     state.mkAttrs(v, 8);
     auto storePath = state.store->printStorePath(tree.storePath);
     mkString(*state.allocAttr(v, state.sOutPath), storePath, PathSet({storePath}));
-    if (input2->getRef())
-        mkString(*state.allocAttr(v, state.symbols.create("branch")), *input2->getRef());
+    if (input2.getRef())
+        mkString(*state.allocAttr(v, state.symbols.create("branch")), *input2.getRef());
     // Backward compatibility: set 'rev' to
     // 0000000000000000000000000000000000000000 for a dirty tree.
-    auto rev2 = input2->getRev().value_or(Hash(htSHA1));
+    auto rev2 = input2.getRev().value_or(Hash(htSHA1));
     mkString(*state.allocAttr(v, state.symbols.create("rev")), rev2.gitRev());
     mkString(*state.allocAttr(v, state.symbols.create("shortRev")), std::string(rev2.gitRev(), 0, 12));
-    if (tree.info.revCount)
-        mkInt(*state.allocAttr(v, state.symbols.create("revCount")), *tree.info.revCount);
+    if (auto revCount = input2.getRevCount())
+        mkInt(*state.allocAttr(v, state.symbols.create("revCount")), *revCount);
     v.attrs->sort();

     if (state.allowedPaths)
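The pattern in this hunk recurs throughout the commit: the fetcher API moved from heap-allocated `Input` objects to a plain value type. In summary, taken from the hunks themselves:

```cpp
// Before: inputs were shared pointers with a fetchTree() method.
//   std::shared_ptr<const fetchers::Input> input = fetchers::inputFromAttrs(attrs);
//   auto [tree, input2] = input->fetchTree(state.store);
//
// After: Input is a value constructed by static factories, fetched with fetch().
//   fetchers::Input input = fetchers::Input::fromAttrs(std::move(attrs));
//   auto [tree, input2] = input.fetch(state.store);
```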
src/libexpr/primops/fetchTree.cc
@@ -3,6 +3,7 @@
 #include "store-api.hh"
 #include "fetchers.hh"
 #include "filetransfer.hh"
+#include "registry.hh"

 #include <ctime>
 #include <iomanip>

@@ -12,39 +13,73 @@ namespace nix {
 void emitTreeAttrs(
     EvalState & state,
     const fetchers::Tree & tree,
-    std::shared_ptr<const fetchers::Input> input,
-    Value & v)
+    const fetchers::Input & input,
+    Value & v,
+    bool emptyRevFallback)
 {
+    assert(input.isImmutable());
+
     state.mkAttrs(v, 8);

     auto storePath = state.store->printStorePath(tree.storePath);

     mkString(*state.allocAttr(v, state.sOutPath), storePath, PathSet({storePath}));

-    assert(tree.info.narHash);
-    mkString(*state.allocAttr(v, state.symbols.create("narHash")),
-        tree.info.narHash.to_string(SRI, true));
-
-    if (input->getRev()) {
-        mkString(*state.allocAttr(v, state.symbols.create("rev")), input->getRev()->gitRev());
-        mkString(*state.allocAttr(v, state.symbols.create("shortRev")), input->getRev()->gitShortRev());
+    // FIXME: support arbitrary input attributes.
+
+    auto narHash = input.getNarHash();
+    assert(narHash);
+    mkString(*state.allocAttr(v, state.symbols.create("narHash")),
+        narHash->to_string(SRI, true));
+
+    if (auto rev = input.getRev()) {
+        mkString(*state.allocAttr(v, state.symbols.create("rev")), rev->gitRev());
+        mkString(*state.allocAttr(v, state.symbols.create("shortRev")), rev->gitShortRev());
+    } else if (emptyRevFallback) {
+        // Backwards compat for `builtins.fetchGit`: dirty repos return an empty sha1 as rev
+        auto emptyHash = Hash(htSHA1);
+        mkString(*state.allocAttr(v, state.symbols.create("rev")), emptyHash.gitRev());
+        mkString(*state.allocAttr(v, state.symbols.create("shortRev")), emptyHash.gitRev());
     }

-    if (tree.info.revCount)
-        mkInt(*state.allocAttr(v, state.symbols.create("revCount")), *tree.info.revCount);
+    if (input.getType() == "git")
+        mkBool(*state.allocAttr(v, state.symbols.create("submodules")), maybeGetBoolAttr(input.attrs, "submodules").value_or(false));

-    if (tree.info.lastModified)
-        mkString(*state.allocAttr(v, state.symbols.create("lastModified")),
-            fmt("%s", std::put_time(std::gmtime(&*tree.info.lastModified), "%Y%m%d%H%M%S")));
+    if (auto revCount = input.getRevCount())
+        mkInt(*state.allocAttr(v, state.symbols.create("revCount")), *revCount);
+    else if (emptyRevFallback)
+        mkInt(*state.allocAttr(v, state.symbols.create("revCount")), 0);
+
+    if (auto lastModified = input.getLastModified()) {
+        mkInt(*state.allocAttr(v, state.symbols.create("lastModified")), *lastModified);
+        mkString(*state.allocAttr(v, state.symbols.create("lastModifiedDate")),
+            fmt("%s", std::put_time(std::gmtime(&*lastModified), "%Y%m%d%H%M%S")));
+    }

     v.attrs->sort();
 }

-static void prim_fetchTree(EvalState & state, const Pos & pos, Value * * args, Value & v)
+std::string fixURI(std::string uri, EvalState &state)
 {
-    settings.requireExperimentalFeature("flakes");
+    state.checkURI(uri);
+    return uri.find("://") != std::string::npos ? uri : "file://" + uri;
+}

-    std::shared_ptr<const fetchers::Input> input;
+void addURI(EvalState &state, fetchers::Attrs &attrs, Symbol name, std::string v)
+{
+    string n(name);
+    attrs.emplace(name, n == "url" ? fixURI(v, state) : v);
+}
+
+static void fetchTree(
+    EvalState &state,
+    const Pos &pos,
+    Value **args,
+    Value &v,
+    const std::optional<std::string> type,
+    bool emptyRevFallback = false
+) {
+    fetchers::Input input;
     PathSet context;

     state.forceValue(*args[0]);

@@ -56,35 +91,65 @@ static void prim_fetchTree(EvalState & state, const Pos & pos, Value * * args, Value & v)

         for (auto & attr : *args[0]->attrs) {
             state.forceValue(*attr.value);
-            if (attr.value->type == tString)
-                attrs.emplace(attr.name, attr.value->string.s);
+            if (attr.value->type == tPath || attr.value->type == tString)
+                addURI(
+                    state,
+                    attrs,
+                    attr.name,
+                    state.coerceToString(*attr.pos, *attr.value, context, false, false)
+                );
+            else if (attr.value->type == tString)
+                addURI(state, attrs, attr.name, attr.value->string.s);
             else if (attr.value->type == tBool)
-                attrs.emplace(attr.name, attr.value->boolean);
+                attrs.emplace(attr.name, fetchers::Explicit<bool>{attr.value->boolean});
+            else if (attr.value->type == tInt)
+                attrs.emplace(attr.name, attr.value->integer);
             else
-                throw TypeError("fetchTree argument '%s' is %s while a string or Boolean is expected",
+                throw TypeError("fetchTree argument '%s' is %s while a string, Boolean or integer is expected",
                     attr.name, showType(*attr.value));
         }

+        if (type)
+            attrs.emplace("type", type.value());
+
         if (!attrs.count("type"))
             throw Error({
                 .hint = hintfmt("attribute 'type' is missing in call to 'fetchTree'"),
                 .errPos = pos
             });

-        input = fetchers::inputFromAttrs(attrs);
-    } else
-        input = fetchers::inputFromURL(state.coerceToString(pos, *args[0], context, false, false));
+        input = fetchers::Input::fromAttrs(std::move(attrs));
+    } else {
+        auto url = fixURI(state.coerceToString(pos, *args[0], context, false, false), state);

-    if (evalSettings.pureEval && !input->isImmutable())
-        throw Error("in pure evaluation mode, 'fetchTree' requires an immutable input");
+        if (type == "git") {
+            fetchers::Attrs attrs;
+            attrs.emplace("type", "git");
+            attrs.emplace("url", url);
+            input = fetchers::Input::fromAttrs(std::move(attrs));
+        } else {
+            input = fetchers::Input::fromURL(url);
+        }
+    }

-    // FIXME: use fetchOrSubstituteTree
-    auto [tree, input2] = input->fetchTree(state.store);
+    if (!evalSettings.pureEval && !input.isDirect())
+        input = lookupInRegistries(state.store, input).first;
+
+    if (evalSettings.pureEval && !input.isImmutable())
+        throw Error("in pure evaluation mode, 'fetchTree' requires an immutable input, at %s", pos);
+
+    auto [tree, input2] = input.fetch(state.store);

     if (state.allowedPaths)
         state.allowedPaths->insert(tree.actualPath);

-    emitTreeAttrs(state, tree, input2, v);
+    emitTreeAttrs(state, tree, input2, v, emptyRevFallback);
+}
+
+static void prim_fetchTree(EvalState & state, const Pos & pos, Value * * args, Value & v)
+{
|
settings.requireExperimentalFeature("flakes");
|
||||||
|
fetchTree(state, pos, args, v, std::nullopt);
|
||||||
}
|
}
|
||||||
|
|
||||||
static RegisterPrimOp r("fetchTree", 1, prim_fetchTree);
|
static RegisterPrimOp r("fetchTree", 1, prim_fetchTree);
|
||||||
|
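To make the attribute plumbing in src/libexpr/primops/fetchTree.cc concrete, the following standalone sketch (illustrative only, not code from this commit; `toFetcherAttrs` is a hypothetical name) mirrors how the primop converts an evaluated attribute set into `fetchers::Attrs`: strings and paths become the `std::string` alternative of the variant, Booleans become `Explicit<bool>`, and integers take the unsigned integer alternative.

```cpp
// Hypothetical sketch of the conversion performed by fetchTree() above;
// toFetcherAttrs() is not a real Nix function.
#include <cstdint>
#include <map>
#include <string>
#include <variant>

template<typename T> struct Explicit { T t; };  // mirrors fetchers::Explicit

using Attr  = std::variant<std::string, uint64_t, Explicit<bool>>;
using Attrs = std::map<std::string, Attr>;

Attrs toFetcherAttrs()
{
    Attrs attrs;
    attrs.emplace("type", std::string("git"));                       // tString
    attrs.emplace("url", std::string("https://example.org/repo.git")); // tPath/tString, via addURI/fixURI
    attrs.emplace("shallow", Explicit<bool>{true});                  // tBool -> Explicit<bool>
    attrs.emplace("revCount", uint64_t(123));                        // tInt -> uint64_t
    return attrs;
}
```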
@@ -136,7 +201,7 @@ static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,

     auto storePath =
         unpack
-        ? fetchers::downloadTarball(state.store, *url, name, (bool) expectedHash).storePath
+        ? fetchers::downloadTarball(state.store, *url, name, (bool) expectedHash).first.storePath
         : fetchers::downloadFile(state.store, *url, name, (bool) expectedHash).storePath;

     auto path = state.store->toRealPath(storePath);

@@ -147,7 +212,7 @@ static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
             : hashFile(htSHA256, path);
         if (hash != *expectedHash)
             throw Error((unsigned int) 102, "hash mismatch in file downloaded from '%s':\n wanted: %s\n got: %s",
-                *url, expectedHash->to_string(Base32, true), hash.to_string(Base32, true));
+                *url, expectedHash->to_string(Base32, true), hash->to_string(Base32, true));
     }

     if (state.allowedPaths)

@@ -166,7 +231,13 @@ static void prim_fetchTarball(EvalState & state, const Pos & pos, Value * * args
     fetch(state, pos, args, v, "fetchTarball", true, "source");
 }

+static void prim_fetchGit(EvalState &state, const Pos &pos, Value **args, Value &v)
+{
+    fetchTree(state, pos, args, v, "git", true);
+}
+
 static RegisterPrimOp r2("__fetchurl", 1, prim_fetchurl);
 static RegisterPrimOp r3("fetchTarball", 1, prim_fetchTarball);
+static RegisterPrimOp r4("fetchGit", 1, prim_fetchGit);

 }
--- a/src/libexpr/symbol-table.hh
+++ b/src/libexpr/symbol-table.hh
@@ -28,6 +28,12 @@ public:
         return s == s2.s;
     }

+    // FIXME: remove
+    bool operator == (std::string_view s2) const
+    {
+        return s->compare(s2) == 0;
+    }
+
     bool operator != (const Symbol & s2) const
     {
         return s != s2.s;

@@ -68,9 +74,10 @@ private:
     Symbols symbols;

 public:
-    Symbol create(const string & s)
+    Symbol create(std::string_view s)
     {
-        std::pair<Symbols::iterator, bool> res = symbols.insert(s);
+        // FIXME: avoid allocation if 's' already exists in the symbol table.
+        std::pair<Symbols::iterator, bool> res = symbols.emplace(std::string(s));
        return Symbol(&*res.first);
     }
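The new `create(std::string_view)` leaves a FIXME about the unconditional `std::string` allocation. One conventional way to resolve it, sketched below, is heterogeneous lookup through a transparent comparator, which lets an ordered set of strings be probed with a `string_view` directly. This is only an assumption about how the FIXME could be addressed, not something the change itself does.

```cpp
// Sketch only: probe the set with a string_view first, and allocate a
// std::string only when the symbol is genuinely new.
#include <set>
#include <string>
#include <string_view>

using Symbols = std::set<std::string, std::less<>>;  // std::less<void> enables heterogeneous find

const std::string * intern(Symbols & symbols, std::string_view s)
{
    auto it = symbols.find(s);  // no std::string constructed here
    if (it == symbols.end())
        it = symbols.emplace(std::string(s)).first;  // allocate only on first sight
    return &*it;
}
```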
--- a/src/libexpr/value.hh
+++ b/src/libexpr/value.hh
@@ -166,6 +166,13 @@ struct Value
     {
         return type == tList1 ? 1 : type == tList2 ? 2 : bigList.size;
     }
+
+    /* Check whether forcing this value requires a trivial amount of
+       computation. In particular, function applications are
+       non-trivial. */
+    bool isTrivial() const;
+
+    std::vector<std::pair<Path, std::string>> getContext();
 };
--- a/src/libfetchers/attrs.cc
+++ b/src/libfetchers/attrs.cc
@@ -27,7 +27,7 @@ nlohmann::json attrsToJson(const Attrs & attrs)
 {
     nlohmann::json json;
     for (auto & attr : attrs) {
-        if (auto v = std::get_if<int64_t>(&attr.second)) {
+        if (auto v = std::get_if<uint64_t>(&attr.second)) {
             json[attr.first] = *v;
         } else if (auto v = std::get_if<std::string>(&attr.second)) {
             json[attr.first] = *v;

@@ -55,16 +55,16 @@ std::string getStrAttr(const Attrs & attrs, const std::string & name)
     return *s;
 }

-std::optional<int64_t> maybeGetIntAttr(const Attrs & attrs, const std::string & name)
+std::optional<uint64_t> maybeGetIntAttr(const Attrs & attrs, const std::string & name)
 {
     auto i = attrs.find(name);
     if (i == attrs.end()) return {};
-    if (auto v = std::get_if<int64_t>(&i->second))
+    if (auto v = std::get_if<uint64_t>(&i->second))
         return *v;
     throw Error("input attribute '%s' is not an integer", name);
 }

-int64_t getIntAttr(const Attrs & attrs, const std::string & name)
+uint64_t getIntAttr(const Attrs & attrs, const std::string & name)
 {
     auto s = maybeGetIntAttr(attrs, name);
     if (!s)

@@ -76,8 +76,8 @@ std::optional<bool> maybeGetBoolAttr(const Attrs & attrs, const std::string & na
 {
     auto i = attrs.find(name);
     if (i == attrs.end()) return {};
-    if (auto v = std::get_if<int64_t>(&i->second))
-        return *v;
+    if (auto v = std::get_if<Explicit<bool>>(&i->second))
+        return v->t;
     throw Error("input attribute '%s' is not a Boolean", name);
 }

@@ -93,7 +93,7 @@ std::map<std::string, std::string> attrsToQuery(const Attrs & attrs)
 {
     std::map<std::string, std::string> query;
     for (auto & attr : attrs) {
-        if (auto v = std::get_if<int64_t>(&attr.second)) {
+        if (auto v = std::get_if<uint64_t>(&attr.second)) {
             query.insert_or_assign(attr.first, fmt("%d", *v));
         } else if (auto v = std::get_if<std::string>(&attr.second)) {
             query.insert_or_assign(attr.first, *v);
--- a/src/libfetchers/attrs.hh
+++ b/src/libfetchers/attrs.hh
@@ -13,9 +13,14 @@ namespace nix::fetchers {
 template<typename T>
 struct Explicit {
     T t;
+
+    bool operator ==(const Explicit<T> & other) const
+    {
+        return t == other.t;
+    }
 };

-typedef std::variant<std::string, int64_t, Explicit<bool>> Attr;
+typedef std::variant<std::string, uint64_t, Explicit<bool>> Attr;
 typedef std::map<std::string, Attr> Attrs;

 Attrs jsonToAttrs(const nlohmann::json & json);

@@ -26,9 +31,9 @@ std::optional<std::string> maybeGetStrAttr(const Attrs & attrs, const std::strin

 std::string getStrAttr(const Attrs & attrs, const std::string & name);

-std::optional<int64_t> maybeGetIntAttr(const Attrs & attrs, const std::string & name);
+std::optional<uint64_t> maybeGetIntAttr(const Attrs & attrs, const std::string & name);

-int64_t getIntAttr(const Attrs & attrs, const std::string & name);
+uint64_t getIntAttr(const Attrs & attrs, const std::string & name);

 std::optional<bool> maybeGetBoolAttr(const Attrs & attrs, const std::string & name);
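The diff does not spell out why the Boolean alternative of `Attr` is a wrapper type rather than a bare `bool`; the usual motivation, assumed here, is that a distinct type keeps Booleans from being confused with the other alternatives when constructing or probing the variant. A minimal self-contained sketch:

```cpp
// Minimal sketch (assumed rationale, not stated in the commit) of why the
// Boolean alternative is the distinct type Explicit<bool>: the alternatives
// stay unambiguous, so a stored 1 is never mistaken for a stored true.
#include <cstdint>
#include <iostream>
#include <map>
#include <string>
#include <variant>

template<typename T> struct Explicit { T t; };

using Attr  = std::variant<std::string, uint64_t, Explicit<bool>>;
using Attrs = std::map<std::string, Attr>;

int main()
{
    Attrs attrs;
    attrs.emplace("shallow", Explicit<bool>{true});  // deliberately a Boolean
    attrs.emplace("revCount", uint64_t(1));          // deliberately an integer

    if (auto b = std::get_if<Explicit<bool>>(&attrs.at("shallow")))
        std::cout << "shallow is a Boolean: " << b->t << "\n";
    if (std::get_if<Explicit<bool>>(&attrs.at("revCount")) == nullptr)
        std::cout << "revCount is not a Boolean\n";  // maybeGetBoolAttr would throw
    return 0;
}
```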
--- a/src/libfetchers/fetchers.cc
+++ b/src/libfetchers/fetchers.cc
@@ -5,71 +5,265 @@

 namespace nix::fetchers {

-std::unique_ptr<std::vector<std::unique_ptr<InputScheme>>> inputSchemes = nullptr;
+std::unique_ptr<std::vector<std::shared_ptr<InputScheme>>> inputSchemes = nullptr;

-void registerInputScheme(std::unique_ptr<InputScheme> && inputScheme)
+void registerInputScheme(std::shared_ptr<InputScheme> && inputScheme)
 {
-    if (!inputSchemes) inputSchemes = std::make_unique<std::vector<std::unique_ptr<InputScheme>>>();
+    if (!inputSchemes) inputSchemes = std::make_unique<std::vector<std::shared_ptr<InputScheme>>>();
     inputSchemes->push_back(std::move(inputScheme));
 }

-std::unique_ptr<Input> inputFromURL(const ParsedURL & url)
+Input Input::fromURL(const std::string & url)
+{
+    return fromURL(parseURL(url));
+}
+
+static void fixupInput(Input & input)
+{
+    // Check common attributes.
+    input.getType();
+    input.getRef();
+    if (input.getRev())
+        input.immutable = true;
+    input.getRevCount();
+    input.getLastModified();
+    if (input.getNarHash())
+        input.immutable = true;
+}
+
+Input Input::fromURL(const ParsedURL & url)
 {
     for (auto & inputScheme : *inputSchemes) {
         auto res = inputScheme->inputFromURL(url);
-        if (res) return res;
+        if (res) {
+            res->scheme = inputScheme;
+            fixupInput(*res);
+            return std::move(*res);
+        }
     }

     throw Error("input '%s' is unsupported", url.url);
 }

-std::unique_ptr<Input> inputFromURL(const std::string & url)
+Input Input::fromAttrs(Attrs && attrs)
 {
-    return inputFromURL(parseURL(url));
+    for (auto & inputScheme : *inputSchemes) {
+        auto res = inputScheme->inputFromAttrs(attrs);
+        if (res) {
+            res->scheme = inputScheme;
+            fixupInput(*res);
+            return std::move(*res);
+        }
+    }
+
+    Input input;
+    input.attrs = attrs;
+    fixupInput(input);
+    return input;
 }

-std::unique_ptr<Input> inputFromAttrs(const Attrs & attrs)
+ParsedURL Input::toURL() const
 {
-    auto attrs2(attrs);
-    attrs2.erase("narHash");
-    for (auto & inputScheme : *inputSchemes) {
-        auto res = inputScheme->inputFromAttrs(attrs2);
-        if (res) {
-            if (auto narHash = maybeGetStrAttr(attrs, "narHash"))
-                // FIXME: require SRI hash.
-                res->narHash = newHashAllowEmpty(*narHash, {});
-            return res;
-        }
-    }
-    throw Error("input '%s' is unsupported", attrsToJson(attrs));
+    if (!scheme)
+        throw Error("cannot show unsupported input '%s'", attrsToJson(attrs));
+    return scheme->toURL(*this);
+}
+
+std::string Input::to_string() const
+{
+    return toURL().to_string();
 }

 Attrs Input::toAttrs() const
 {
-    auto attrs = toAttrsInternal();
-    if (narHash)
-        attrs.emplace("narHash", narHash->to_string(SRI, true));
-    attrs.emplace("type", type());
     return attrs;
 }

-std::pair<Tree, std::shared_ptr<const Input>> Input::fetchTree(ref<Store> store) const
+bool Input::hasAllInfo() const
 {
-    auto [tree, input] = fetchTreeInternal(store);
+    return getNarHash() && scheme && scheme->hasAllInfo(*this);
+}
+
+bool Input::operator ==(const Input & other) const
+{
+    return attrs == other.attrs;
+}
+
+bool Input::contains(const Input & other) const
+{
+    if (*this == other) return true;
+    auto other2(other);
+    other2.attrs.erase("ref");
+    other2.attrs.erase("rev");
+    if (*this == other2) return true;
+    return false;
+}
+
+std::pair<Tree, Input> Input::fetch(ref<Store> store) const
+{
+    if (!scheme)
+        throw Error("cannot fetch unsupported input '%s'", attrsToJson(toAttrs()));
+
+    /* The tree may already be in the Nix store, or it could be
+       substituted (which is often faster than fetching from the
+       original source). So check that. */
+    if (hasAllInfo()) {
+        try {
+            auto storePath = computeStorePath(*store);
+
+            store->ensurePath(storePath);
+
+            debug("using substituted/cached input '%s' in '%s'",
+                to_string(), store->printStorePath(storePath));
+
+            auto actualPath = store->toRealPath(storePath);
+
+            return {fetchers::Tree(std::move(actualPath), std::move(storePath)), *this};
+        } catch (Error & e) {
+            debug("substitution of input '%s' failed: %s", to_string(), e.what());
+        }
+    }
+
+    auto [tree, input] = scheme->fetch(store, *this);

     if (tree.actualPath == "")
         tree.actualPath = store->toRealPath(tree.storePath);

-    if (!tree.info.narHash)
-        tree.info.narHash = store->queryPathInfo(tree.storePath)->narHash;
+    auto narHash = store->queryPathInfo(tree.storePath)->narHash;
+    input.attrs.insert_or_assign("narHash", narHash->to_string(SRI, true));

-    if (input->narHash)
-        assert(input->narHash == tree.info.narHash);
+    if (auto prevNarHash = getNarHash()) {
+        if (narHash != *prevNarHash)
+            throw Error((unsigned int) 102, "NAR hash mismatch in input '%s' (%s), expected '%s', got '%s'",
+                to_string(), tree.actualPath, prevNarHash->to_string(SRI, true), narHash->to_string(SRI, true));
+    }

-    if (narHash && narHash != input->narHash)
-        throw Error("NAR hash mismatch in input '%s' (%s), expected '%s', got '%s'",
-            to_string(), tree.actualPath, narHash->to_string(SRI, true), input->narHash->to_string(SRI, true));
+    if (auto prevLastModified = getLastModified()) {
+        if (input.getLastModified() != prevLastModified)
+            throw Error("'lastModified' attribute mismatch in input '%s', expected %d",
+                input.to_string(), *prevLastModified);
+    }
+
+    if (auto prevRevCount = getRevCount()) {
+        if (input.getRevCount() != prevRevCount)
+            throw Error("'revCount' attribute mismatch in input '%s', expected %d",
+                input.to_string(), *prevRevCount);
+    }
+
+    input.immutable = true;
+
+    assert(input.hasAllInfo());

     return {std::move(tree), input};
 }

+Input Input::applyOverrides(
+    std::optional<std::string> ref,
+    std::optional<Hash> rev) const
+{
+    if (!scheme) return *this;
+    return scheme->applyOverrides(*this, ref, rev);
+}
+
+void Input::clone(const Path & destDir) const
+{
+    assert(scheme);
+    scheme->clone(*this, destDir);
+}
+
+std::optional<Path> Input::getSourcePath() const
+{
+    assert(scheme);
+    return scheme->getSourcePath(*this);
+}
+
+void Input::markChangedFile(
+    std::string_view file,
+    std::optional<std::string> commitMsg) const
+{
+    assert(scheme);
+    return scheme->markChangedFile(*this, file, commitMsg);
+}
+
+StorePath Input::computeStorePath(Store & store) const
+{
+    auto narHash = getNarHash();
+    if (!narHash)
+        throw Error("cannot compute store path for mutable input '%s'", to_string());
+    return store.makeFixedOutputPath(FileIngestionMethod::Recursive, *narHash, "source");
+}
+
+std::string Input::getType() const
+{
+    return getStrAttr(attrs, "type");
+}
+
+std::optional<Hash> Input::getNarHash() const
+{
+    if (auto s = maybeGetStrAttr(attrs, "narHash"))
+        // FIXME: require SRI hash.
+        return newHashAllowEmpty(*s, htSHA256);
+    return {};
+}
+
+std::optional<std::string> Input::getRef() const
+{
+    if (auto s = maybeGetStrAttr(attrs, "ref"))
+        return *s;
+    return {};
+}
+
+std::optional<Hash> Input::getRev() const
+{
+    if (auto s = maybeGetStrAttr(attrs, "rev"))
+        return Hash(*s, htSHA1);
+    return {};
+}
+
+std::optional<uint64_t> Input::getRevCount() const
+{
+    if (auto n = maybeGetIntAttr(attrs, "revCount"))
+        return *n;
+    return {};
+}
+
+std::optional<time_t> Input::getLastModified() const
+{
+    if (auto n = maybeGetIntAttr(attrs, "lastModified"))
+        return *n;
+    return {};
+}
+
+ParsedURL InputScheme::toURL(const Input & input)
+{
+    throw Error("don't know how to convert input '%s' to a URL", attrsToJson(input.attrs));
+}
+
+Input InputScheme::applyOverrides(
+    const Input & input,
+    std::optional<std::string> ref,
+    std::optional<Hash> rev)
+{
+    if (ref)
+        throw Error("don't know how to set branch/tag name of input '%s' to '%s'", input.to_string(), *ref);
+    if (rev)
+        throw Error("don't know how to set revision of input '%s' to '%s'", input.to_string(), rev->gitRev());
+    return input;
+}
+
+std::optional<Path> InputScheme::getSourcePath(const Input & input)
+{
+    return {};
+}
+
+void InputScheme::markChangedFile(const Input & input, std::string_view file, std::optional<std::string> commitMsg)
+{
+    assert(false);
+}
+
+void InputScheme::clone(const Input & input, const Path & destDir)
+{
+    throw Error("do not know how to clone input '%s'", input.to_string());
+}
+
 }
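At a call site, the reworked value-type API composes roughly as in this sketch (illustrative only, not code from the commit; the URL is a placeholder):

```cpp
#include <cassert>
#include "fetchers.hh"

// Illustrative only: how a caller might drive the new API.
void fetchExample(nix::ref<nix::Store> store)
{
    using namespace nix::fetchers;

    // Parsing attaches the matching InputScheme and validates the common
    // attributes; a "rev" or "narHash" marks the input immutable.
    auto input = Input::fromURL("git+https://example.org/repo.git?ref=master");

    // fetch() first tries the substitution fast path (computeStorePath()
    // plus ensurePath()) when hasAllInfo(), then delegates to the scheme.
    auto [tree, lockedInput] = input.fetch(store);

    // The returned Input is "locked": narHash (and, for Git, rev/revCount/
    // lastModified) is pinned, so it round-trips through toAttrs()/toURL().
    assert(lockedInput.isImmutable());
    assert(lockedInput.getNarHash());
}
```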
--- a/src/libfetchers/fetchers.hh
+++ b/src/libfetchers/fetchers.hh
@@ -3,7 +3,6 @@
 #include "types.hh"
 #include "hash.hh"
 #include "path.hh"
-#include "tree-info.hh"
 #include "attrs.hh"
 #include "url.hh"

@@ -13,73 +12,101 @@ namespace nix { class Store; }

 namespace nix::fetchers {

-struct Input;
-
 struct Tree
 {
     Path actualPath;
     StorePath storePath;
-    TreeInfo info;
+    Tree(Path && actualPath, StorePath && storePath) : actualPath(actualPath), storePath(std::move(storePath)) {}
 };

-struct Input : std::enable_shared_from_this<Input>
+struct InputScheme;
+
+struct Input
 {
-    std::optional<Hash> narHash; // FIXME: implement
+    friend class InputScheme;

-    virtual std::string type() const = 0;
+    std::shared_ptr<InputScheme> scheme; // note: can be null
+    Attrs attrs;
+    bool immutable = false;
+    bool direct = true;

-    virtual ~Input() { }
+public:
+    static Input fromURL(const std::string & url);

-    virtual bool operator ==(const Input & other) const { return false; }
+    static Input fromURL(const ParsedURL & url);

-    /* Check whether this is a "direct" input, that is, not
-       one that goes through a registry. */
-    virtual bool isDirect() const { return true; }
+    static Input fromAttrs(Attrs && attrs);

-    /* Check whether this is an "immutable" input, that is,
-       one that contains a commit hash or content hash. */
-    virtual bool isImmutable() const { return (bool) narHash; }
+    ParsedURL toURL() const;

-    virtual bool contains(const Input & other) const { return false; }
-
-    virtual std::optional<std::string> getRef() const { return {}; }
-
-    virtual std::optional<Hash> getRev() const { return {}; }
-
-    virtual ParsedURL toURL() const = 0;
-
-    std::string to_string() const
-    {
-        return toURL().to_string();
-    }
+    std::string to_string() const;

     Attrs toAttrs() const;

-    std::pair<Tree, std::shared_ptr<const Input>> fetchTree(ref<Store> store) const;
+    /* Check whether this is a "direct" input, that is, not
+       one that goes through a registry. */
+    bool isDirect() const { return direct; }

-private:
+    /* Check whether this is an "immutable" input, that is,
+       one that contains a commit hash or content hash. */
+    bool isImmutable() const { return immutable; }

-    virtual std::pair<Tree, std::shared_ptr<const Input>> fetchTreeInternal(ref<Store> store) const = 0;
+    bool hasAllInfo() const;

-    virtual Attrs toAttrsInternal() const = 0;
+    bool operator ==(const Input & other) const;
+
+    bool contains(const Input & other) const;
+
+    std::pair<Tree, Input> fetch(ref<Store> store) const;
+
+    Input applyOverrides(
+        std::optional<std::string> ref,
+        std::optional<Hash> rev) const;
+
+    void clone(const Path & destDir) const;
+
+    std::optional<Path> getSourcePath() const;
+
+    void markChangedFile(
+        std::string_view file,
+        std::optional<std::string> commitMsg) const;
+
+    StorePath computeStorePath(Store & store) const;
+
+    // Convenience functions for common attributes.
+    std::string getType() const;
+    std::optional<Hash> getNarHash() const;
+    std::optional<std::string> getRef() const;
+    std::optional<Hash> getRev() const;
+    std::optional<uint64_t> getRevCount() const;
+    std::optional<time_t> getLastModified() const;
 };

 struct InputScheme
 {
-    virtual ~InputScheme() { }
+    virtual std::optional<Input> inputFromURL(const ParsedURL & url) = 0;

-    virtual std::unique_ptr<Input> inputFromURL(const ParsedURL & url) = 0;
+    virtual std::optional<Input> inputFromAttrs(const Attrs & attrs) = 0;

-    virtual std::unique_ptr<Input> inputFromAttrs(const Attrs & attrs) = 0;
+    virtual ParsedURL toURL(const Input & input);
+
+    virtual bool hasAllInfo(const Input & input) = 0;
+
+    virtual Input applyOverrides(
+        const Input & input,
+        std::optional<std::string> ref,
+        std::optional<Hash> rev);
+
+    virtual void clone(const Input & input, const Path & destDir);
+
+    virtual std::optional<Path> getSourcePath(const Input & input);
+
+    virtual void markChangedFile(const Input & input, std::string_view file, std::optional<std::string> commitMsg);
+
+    virtual std::pair<Tree, Input> fetch(ref<Store> store, const Input & input) = 0;
 };

-std::unique_ptr<Input> inputFromURL(const ParsedURL & url);
-
-std::unique_ptr<Input> inputFromURL(const std::string & url);
-
-std::unique_ptr<Input> inputFromAttrs(const Attrs & attrs);
-
-void registerInputScheme(std::unique_ptr<InputScheme> && fetcher);
+void registerInputScheme(std::shared_ptr<InputScheme> && fetcher);

 struct DownloadFileResult
 {

@@ -94,7 +121,7 @@ DownloadFileResult downloadFile(
     const std::string & name,
     bool immutable);

-Tree downloadTarball(
+std::pair<Tree, time_t> downloadTarball(
     ref<Store> store,
     const std::string & url,
     const std::string & name,
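Concretely, a fetcher now implements the `InputScheme` interface above rather than subclassing `Input`. A skeletal scheme for a hypothetical `example` input type (purely illustrative; `fetch` is deliberately left unimplemented, and registration mirrors what git.cc below does with `OnStartup`):

```cpp
#include "fetchers.hh"

using namespace nix;
using namespace nix::fetchers;

// Hypothetical scheme recognizing "example" inputs; illustration only.
struct ExampleInputScheme : InputScheme
{
    std::optional<Input> inputFromURL(const ParsedURL & url) override
    {
        if (url.scheme != "example") return {};
        Input input;
        input.attrs.insert_or_assign("type", std::string("example"));
        input.attrs.insert_or_assign("url", url.to_string());
        return input;
    }

    std::optional<Input> inputFromAttrs(const Attrs & attrs) override
    {
        if (maybeGetStrAttr(attrs, "type") != "example") return {};
        Input input;
        input.attrs = attrs;
        return input;
    }

    bool hasAllInfo(const Input & input) override
    {
        // Nothing beyond the NAR hash is needed to reproduce this input.
        return true;
    }

    std::pair<Tree, Input> fetch(ref<Store> store, const Input & input) override
    {
        throw Error("fetching 'example' inputs is left out of this sketch");
    }
};

static auto rExample = OnStartup([] {
    registerInputScheme(std::make_shared<ExampleInputScheme>());
});
```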
--- a/src/libfetchers/git.cc
+++ b/src/libfetchers/git.cc
@@ -22,80 +22,152 @@ static bool isNotDotGitDirectory(const Path & path)
     return not std::regex_match(path, gitDirRegex);
 }

-struct GitInput : Input
+struct GitInputScheme : InputScheme
 {
-    ParsedURL url;
-    std::optional<std::string> ref;
-    std::optional<Hash> rev;
-    bool shallow = false;
-    bool submodules = false;
-
-    GitInput(const ParsedURL & url) : url(url)
-    { }
-
-    std::string type() const override { return "git"; }
-
-    bool operator ==(const Input & other) const override
+    std::optional<Input> inputFromURL(const ParsedURL & url) override
     {
-        auto other2 = dynamic_cast<const GitInput *>(&other);
-        return
-            other2
-            && url == other2->url
-            && rev == other2->rev
-            && ref == other2->ref;
-    }
-
-    bool isImmutable() const override
-    {
-        return (bool) rev || narHash;
-    }
-
-    std::optional<std::string> getRef() const override { return ref; }
-
-    std::optional<Hash> getRev() const override { return rev; }
-
-    ParsedURL toURL() const override
-    {
-        ParsedURL url2(url);
-        if (url2.scheme != "git") url2.scheme = "git+" + url2.scheme;
-        if (rev) url2.query.insert_or_assign("rev", rev->gitRev());
-        if (ref) url2.query.insert_or_assign("ref", *ref);
-        if (shallow) url2.query.insert_or_assign("shallow", "1");
-        return url2;
-    }
-
-    Attrs toAttrsInternal() const override
-    {
+        if (url.scheme != "git" &&
+            url.scheme != "git+http" &&
+            url.scheme != "git+https" &&
+            url.scheme != "git+ssh" &&
+            url.scheme != "git+file") return {};
+
+        auto url2(url);
+        if (hasPrefix(url2.scheme, "git+")) url2.scheme = std::string(url2.scheme, 4);
+        url2.query.clear();
+
         Attrs attrs;
-        attrs.emplace("url", url.to_string());
-        if (ref)
-            attrs.emplace("ref", *ref);
-        if (rev)
-            attrs.emplace("rev", rev->gitRev());
-        if (shallow)
-            attrs.emplace("shallow", true);
-        if (submodules)
-            attrs.emplace("submodules", true);
-        return attrs;
+        attrs.emplace("type", "git");
+
+        for (auto &[name, value] : url.query) {
+            if (name == "rev" || name == "ref")
+                attrs.emplace(name, value);
+            else if (name == "shallow")
+                attrs.emplace(name, Explicit<bool> { value == "1" });
+            else
+                url2.query.emplace(name, value);
+        }
+
+        attrs.emplace("url", url2.to_string());
+
+        return inputFromAttrs(attrs);
     }

-    std::pair<bool, std::string> getActualUrl() const
+    std::optional<Input> inputFromAttrs(const Attrs & attrs) override
+    {
+        if (maybeGetStrAttr(attrs, "type") != "git") return {};
+
+        for (auto & [name, value] : attrs)
+            if (name != "type" && name != "url" && name != "ref" && name != "rev" && name != "shallow" && name != "submodules" && name != "lastModified" && name != "revCount" && name != "narHash")
+                throw Error("unsupported Git input attribute '%s'", name);
+
+        parseURL(getStrAttr(attrs, "url"));
+        maybeGetBoolAttr(attrs, "shallow");
+        maybeGetBoolAttr(attrs, "submodules");
+
+        if (auto ref = maybeGetStrAttr(attrs, "ref")) {
+            if (std::regex_search(*ref, badGitRefRegex))
+                throw BadURL("invalid Git branch/tag name '%s'", *ref);
+        }
+
+        Input input;
+        input.attrs = attrs;
+        return input;
+    }
+
+    ParsedURL toURL(const Input & input) override
+    {
+        auto url = parseURL(getStrAttr(input.attrs, "url"));
+        if (url.scheme != "git") url.scheme = "git+" + url.scheme;
+        if (auto rev = input.getRev()) url.query.insert_or_assign("rev", rev->gitRev());
+        if (auto ref = input.getRef()) url.query.insert_or_assign("ref", *ref);
+        if (maybeGetBoolAttr(input.attrs, "shallow").value_or(false))
+            url.query.insert_or_assign("shallow", "1");
+        return url;
+    }
+
+    bool hasAllInfo(const Input & input) override
+    {
+        bool maybeDirty = !input.getRef();
+        bool shallow = maybeGetBoolAttr(input.attrs, "shallow").value_or(false);
+        return
+            maybeGetIntAttr(input.attrs, "lastModified")
+            && (shallow || maybeDirty || maybeGetIntAttr(input.attrs, "revCount"));
+    }
+
+    Input applyOverrides(
+        const Input & input,
+        std::optional<std::string> ref,
+        std::optional<Hash> rev) override
+    {
+        auto res(input);
+        if (rev) res.attrs.insert_or_assign("rev", rev->gitRev());
+        if (ref) res.attrs.insert_or_assign("ref", *ref);
+        if (!res.getRef() && res.getRev())
+            throw Error("Git input '%s' has a commit hash but no branch/tag name", res.to_string());
+        return res;
+    }
+
+    void clone(const Input & input, const Path & destDir) override
+    {
+        auto [isLocal, actualUrl] = getActualUrl(input);
+
+        Strings args = {"clone"};
+
+        args.push_back(actualUrl);
+
+        if (auto ref = input.getRef()) {
+            args.push_back("--branch");
+            args.push_back(*ref);
+        }
+
+        if (input.getRev()) throw Error("cloning a specific revision is not implemented");
+
+        args.push_back(destDir);
+
+        runProgram("git", true, args);
+    }
+
+    std::optional<Path> getSourcePath(const Input & input) override
+    {
+        auto url = parseURL(getStrAttr(input.attrs, "url"));
+        if (url.scheme == "file" && !input.getRef() && !input.getRev())
+            return url.path;
+        return {};
+    }
+
+    void markChangedFile(const Input & input, std::string_view file, std::optional<std::string> commitMsg) override
+    {
+        auto sourcePath = getSourcePath(input);
+        assert(sourcePath);
+
+        runProgram("git", true,
+            { "-C", *sourcePath, "add", "--force", "--intent-to-add", "--", std::string(file) });
+
+        if (commitMsg)
+            runProgram("git", true,
+                { "-C", *sourcePath, "commit", std::string(file), "-m", *commitMsg });
+    }
+
+    std::pair<bool, std::string> getActualUrl(const Input & input) const
     {
         // Don't clone file:// URIs (but otherwise treat them the
         // same as remote URIs, i.e. don't use the working tree or
         // HEAD).
         static bool forceHttp = getEnv("_NIX_FORCE_HTTP") == "1"; // for testing
+        auto url = parseURL(getStrAttr(input.attrs, "url"));
         bool isLocal = url.scheme == "file" && !forceHttp;
         return {isLocal, isLocal ? url.path : url.base};
     }

-    std::pair<Tree, std::shared_ptr<const Input>> fetchTreeInternal(nix::ref<Store> store) const override
+    std::pair<Tree, Input> fetch(ref<Store> store, const Input & _input) override
     {
         auto name = "source";

-        auto input = std::make_shared<GitInput>(*this);
+        Input input(_input);

-        assert(!rev || rev->type == htSHA1);
+        bool shallow = maybeGetBoolAttr(input.attrs, "shallow").value_or(false);
+        bool submodules = maybeGetBoolAttr(input.attrs, "submodules").value_or(false);

         std::string cacheType = "git";
         if (shallow) cacheType += "-shallow";

@@ -106,39 +178,35 @@ struct GitInput : Input
             return Attrs({
                 {"type", cacheType},
                 {"name", name},
-                {"rev", input->rev->gitRev()},
+                {"rev", input.getRev()->gitRev()},
             });
         };

         auto makeResult = [&](const Attrs & infoAttrs, StorePath && storePath)
-            -> std::pair<Tree, std::shared_ptr<const Input>>
+            -> std::pair<Tree, Input>
         {
-            assert(input->rev);
-            assert(!rev || rev == input->rev);
+            assert(input.getRev());
+            assert(!_input.getRev() || _input.getRev() == input.getRev());
+            if (!shallow)
+                input.attrs.insert_or_assign("revCount", getIntAttr(infoAttrs, "revCount"));
+            input.attrs.insert_or_assign("lastModified", getIntAttr(infoAttrs, "lastModified"));
             return {
-                Tree {
-                    .actualPath = store->toRealPath(storePath),
-                    .storePath = std::move(storePath),
-                    .info = TreeInfo {
-                        .revCount = shallow ? std::nullopt : std::optional(getIntAttr(infoAttrs, "revCount")),
-                        .lastModified = getIntAttr(infoAttrs, "lastModified"),
-                    },
-                },
+                Tree(store->toRealPath(storePath), std::move(storePath)),
                 input
             };
         };

-        if (rev) {
+        if (input.getRev()) {
             if (auto res = getCache()->lookup(store, getImmutableAttrs()))
                 return makeResult(res->first, std::move(res->second));
         }

-        auto [isLocal, actualUrl_] = getActualUrl();
+        auto [isLocal, actualUrl_] = getActualUrl(input);
         auto actualUrl = actualUrl_; // work around clang bug

         // If this is a local directory and no ref or revision is
         // given, then allow the use of an unclean working tree.
-        if (!input->ref && !input->rev && isLocal) {
+        if (!input.getRef() && !input.getRev() && isLocal) {
             bool clean = false;

             /* Check whether this repo has any commits. There are

@@ -197,35 +265,35 @@ struct GitInput : Input

             auto storePath = store->addToStore("source", actualUrl, FileIngestionMethod::Recursive, htSHA256, filter);

-            auto tree = Tree {
-                .actualPath = store->printStorePath(storePath),
-                .storePath = std::move(storePath),
-                .info = TreeInfo {
-                    // FIXME: maybe we should use the timestamp of the last
-                    // modified dirty file?
-                    .lastModified = haveCommits ? std::stoull(runProgram("git", true, { "-C", actualUrl, "log", "-1", "--format=%ct", "HEAD" })) : 0,
-                }
-            };
+            // FIXME: maybe we should use the timestamp of the last
+            // modified dirty file?
+            input.attrs.insert_or_assign(
+                "lastModified",
+                haveCommits ? std::stoull(runProgram("git", true, { "-C", actualUrl, "log", "-1", "--format=%ct", "HEAD" })) : 0);

-            return {std::move(tree), input};
+            return {
+                Tree(store->printStorePath(storePath), std::move(storePath)),
+                input
+            };
         }
     }

-        if (!input->ref) input->ref = isLocal ? readHead(actualUrl) : "master";
+        if (!input.getRef()) input.attrs.insert_or_assign("ref", isLocal ? readHead(actualUrl) : "master");

         Attrs mutableAttrs({
             {"type", cacheType},
             {"name", name},
             {"url", actualUrl},
-            {"ref", *input->ref},
+            {"ref", *input.getRef()},
         });

         Path repoDir;

         if (isLocal) {

-            if (!input->rev)
-                input->rev = Hash(chomp(runProgram("git", true, { "-C", actualUrl, "rev-parse", *input->ref })), htSHA1);
+            if (!input.getRev())
+                input.attrs.insert_or_assign("rev",
+                    Hash(chomp(runProgram("git", true, { "-C", actualUrl, "rev-parse", *input.getRef() })), htSHA1).gitRev());

             repoDir = actualUrl;

@@ -233,8 +301,8 @@ struct GitInput : Input

             if (auto res = getCache()->lookup(store, mutableAttrs)) {
                 auto rev2 = Hash(getStrAttr(res->first, "rev"), htSHA1);
-                if (!rev || rev == rev2) {
-                    input->rev = rev2;
+                if (!input.getRev() || input.getRev() == rev2) {
+                    input.attrs.insert_or_assign("rev", rev2.gitRev());
                     return makeResult(res->first, std::move(res->second));
                 }
             }

@@ -248,18 +316,18 @@ struct GitInput : Input
             }

             Path localRefFile =
-                input->ref->compare(0, 5, "refs/") == 0
-                ? cacheDir + "/" + *input->ref
-                : cacheDir + "/refs/heads/" + *input->ref;
+                input.getRef()->compare(0, 5, "refs/") == 0
+                ? cacheDir + "/" + *input.getRef()
+                : cacheDir + "/refs/heads/" + *input.getRef();

             bool doFetch;
             time_t now = time(0);

             /* If a rev was specified, we need to fetch if it's not in the
                repo. */
-            if (input->rev) {
+            if (input.getRev()) {
                 try {
-                    runProgram("git", true, { "-C", repoDir, "cat-file", "-e", input->rev->gitRev() });
+                    runProgram("git", true, { "-C", repoDir, "cat-file", "-e", input.getRev()->gitRev() });
                     doFetch = false;
                 } catch (ExecError & e) {
                     if (WIFEXITED(e.status)) {

@@ -282,9 +350,10 @@ struct GitInput : Input
                 // FIXME: git stderr messes up our progress indicator, so
                 // we're using --quiet for now. Should process its stderr.
                 try {
-                    auto fetchRef = input->ref->compare(0, 5, "refs/") == 0
-                        ? *input->ref
-                        : "refs/heads/" + *input->ref;
+                    auto ref = input.getRef();
+                    auto fetchRef = ref->compare(0, 5, "refs/") == 0
+                        ? *ref
+                        : "refs/heads/" + *ref;
                     runProgram("git", true, { "-C", repoDir, "fetch", "--quiet", "--force", "--", actualUrl, fmt("%s:%s", fetchRef, fetchRef) });
                 } catch (Error & e) {
                     if (!pathExists(localRefFile)) throw;

@@ -300,8 +369,8 @@ struct GitInput : Input
                 utimes(localRefFile.c_str(), times);
             }

-            if (!input->rev)
-                input->rev = Hash(chomp(readFile(localRefFile)), htSHA1);
+            if (!input.getRev())
+                input.attrs.insert_or_assign("rev", Hash(chomp(readFile(localRefFile)), htSHA1).gitRev());
         }

         bool isShallow = chomp(runProgram("git", true, { "-C", repoDir, "rev-parse", "--is-shallow-repository" })) == "true";

@@ -311,7 +380,7 @@ struct GitInput : Input

         // FIXME: check whether rev is an ancestor of ref.

-        printTalkative("using revision %s of repo '%s'", input->rev->gitRev(), actualUrl);
+        printTalkative("using revision %s of repo '%s'", input.getRev()->gitRev(), actualUrl);

         /* Now that we know the ref, check again whether we have it in
            the store. */

@@ -333,7 +402,7 @@ struct GitInput : Input
             runProgram("git", true, { "-C", tmpDir, "fetch", "--quiet", "--force",
                 "--update-head-ok", "--", repoDir, "refs/*:refs/*" });

-            runProgram("git", true, { "-C", tmpDir, "checkout", "--quiet", input->rev->gitRev() });
+            runProgram("git", true, { "-C", tmpDir, "checkout", "--quiet", input.getRev()->gitRev() });
             runProgram("git", true, { "-C", tmpDir, "remote", "add", "origin", actualUrl });
             runProgram("git", true, { "-C", tmpDir, "submodule", "--quiet", "update", "--init", "--recursive" });

@@ -342,7 +411,7 @@ struct GitInput : Input
             // FIXME: should pipe this, or find some better way to extract a
             // revision.
             auto source = sinkToSource([&](Sink & sink) {
-                RunOptions gitOptions("git", { "-C", repoDir, "archive", input->rev->gitRev() });
+                RunOptions gitOptions("git", { "-C", repoDir, "archive", input.getRev()->gitRev() });
                 gitOptions.standardOut = &sink;
                 runProgram2(gitOptions);
             });

@@ -352,18 +421,18 @@ struct GitInput : Input

         auto storePath = store->addToStore(name, tmpDir, FileIngestionMethod::Recursive, htSHA256, filter);

-        auto lastModified = std::stoull(runProgram("git", true, { "-C", repoDir, "log", "-1", "--format=%ct", input->rev->gitRev() }));
+        auto lastModified = std::stoull(runProgram("git", true, { "-C", repoDir, "log", "-1", "--format=%ct", input.getRev()->gitRev() }));

         Attrs infoAttrs({
-            {"rev", input->rev->gitRev()},
+            {"rev", input.getRev()->gitRev()},
             {"lastModified", lastModified},
         });

         if (!shallow)
             infoAttrs.insert_or_assign("revCount",
-                std::stoull(runProgram("git", true, { "-C", repoDir, "rev-list", "--count", input->rev->gitRev() })));
+                std::stoull(runProgram("git", true, { "-C", repoDir, "rev-list", "--count", input.getRev()->gitRev() })));

-        if (!this->rev)
+        if (!_input.getRev())
             getCache()->add(
                 store,
                 mutableAttrs,

@@ -382,60 +451,6 @@ struct GitInput : Input
     }
 };

-struct GitInputScheme : InputScheme
-{
-    std::unique_ptr<Input> inputFromURL(const ParsedURL & url) override
-    {
-        if (url.scheme != "git" &&
-            url.scheme != "git+http" &&
-            url.scheme != "git+https" &&
-            url.scheme != "git+ssh" &&
-            url.scheme != "git+file") return nullptr;
-
-        auto url2(url);
-        if (hasPrefix(url2.scheme, "git+")) url2.scheme = std::string(url2.scheme, 4);
-        url2.query.clear();
-
-        Attrs attrs;
-        attrs.emplace("type", "git");
-
-        for (auto &[name, value] : url.query) {
-            if (name == "rev" || name == "ref")
-                attrs.emplace(name, value);
-            else
-                url2.query.emplace(name, value);
-        }
-
-        attrs.emplace("url", url2.to_string());
-
-        return inputFromAttrs(attrs);
-    }
-
-    std::unique_ptr<Input> inputFromAttrs(const Attrs & attrs) override
-    {
-        if (maybeGetStrAttr(attrs, "type") != "git") return {};
-
-        for (auto & [name, value] : attrs)
-            if (name != "type" && name != "url" && name != "ref" && name != "rev" && name != "shallow" && name != "submodules")
-                throw Error("unsupported Git input attribute '%s'", name);
-
-        auto input = std::make_unique<GitInput>(parseURL(getStrAttr(attrs, "url")));
-        if (auto ref = maybeGetStrAttr(attrs, "ref")) {
-            if (std::regex_search(*ref, badGitRefRegex))
-                throw BadURL("invalid Git branch/tag name '%s'", *ref);
-            input->ref = *ref;
-        }
-        if (auto rev = maybeGetStrAttr(attrs, "rev"))
-            input->rev = Hash(*rev, htSHA1);
-
-        input->shallow = maybeGetBoolAttr(attrs, "shallow").value_or(false);
-
-        input->submodules = maybeGetBoolAttr(attrs, "submodules").value_or(false);
-
-        return input;
-    }
-};
-
 static auto r1 = OnStartup([] { registerInputScheme(std::make_unique<GitInputScheme>()); });

 }
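A minimal sketch of the override semantics the Git scheme above implements (illustrative only; `pinGitInput` is a hypothetical helper, not part of the commit):

```cpp
#include <optional>
#include <string>
#include "fetchers.hh"

// Illustrative wrapper around Input::applyOverrides for Git inputs.
nix::fetchers::Input pinGitInput(
    const nix::fetchers::Input & input,
    std::optional<std::string> ref,
    std::optional<nix::Hash> rev)
{
    // Delegates to GitInputScheme::applyOverrides: the attrs are copied,
    // "rev"/"ref" are overwritten, and a rev without any ref is rejected
    // ("Git input ... has a commit hash but no branch/tag name").
    auto pinned = input.applyOverrides(ref, rev);

    // Once a rev is present, fixupInput()/fetch() treat the input as
    // immutable, so pure evaluation accepts it.
    return pinned;
}
```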
--- a/src/libfetchers/github.cc
+++ b/src/libfetchers/github.cc
@@ -8,81 +8,142 @@

 namespace nix::fetchers {

-std::regex ownerRegex("[a-zA-Z][a-zA-Z0-9_-]*", std::regex::ECMAScript);
-std::regex repoRegex("[a-zA-Z][a-zA-Z0-9_-]*", std::regex::ECMAScript);
+// A GitHub or GitLab URL
+const static std::string urlRegexS = "[a-zA-Z0-9.]*"; // FIXME: check
+std::regex urlRegex(urlRegexS, std::regex::ECMAScript);

-struct GitHubInput : Input
+struct GitArchiveInputScheme : InputScheme
 {
-    std::string owner;
-    std::string repo;
-    std::optional<std::string> ref;
-    std::optional<Hash> rev;
+    virtual std::string type() = 0;

-    std::string type() const override { return "github"; }
+    std::optional<Input> inputFromURL(const ParsedURL & url) override
+    {
+        if (url.scheme != type()) return {};

-    bool operator ==(const Input & other) const override
-    {
-        auto other2 = dynamic_cast<const GitHubInput *>(&other);
-        return
-            other2
-            && owner == other2->owner
-            && repo == other2->repo
-            && rev == other2->rev
-            && ref == other2->ref;
-    }
+        auto path = tokenizeString<std::vector<std::string>>(url.path, "/");

-    bool isImmutable() const override
-    {
-        return (bool) rev || narHash;
-    }
+        std::optional<Hash> rev;
+        std::optional<std::string> ref;
+        std::optional<std::string> host_url;

-    std::optional<std::string> getRef() const override { return ref; }
+        if (path.size() == 2) {
+        } else if (path.size() == 3) {
+            if (std::regex_match(path[2], revRegex))
+                rev = Hash(path[2], htSHA1);
+            else if (std::regex_match(path[2], refRegex))
+                ref = path[2];
+            else
+                throw BadURL("in URL '%s', '%s' is not a commit hash or branch/tag name", url.url, path[2]);
+        } else
+            throw BadURL("URL '%s' is invalid", url.url);

-    std::optional<Hash> getRev() const override { return rev; }
+        for (auto &[name, value] : url.query) {
+            if (name == "rev") {
+                if (rev)
+                    throw BadURL("URL '%s' contains multiple commit hashes", url.url);
+                rev = Hash(value, htSHA1);
+            }
+            else if (name == "ref") {
+                if (!std::regex_match(value, refRegex))
+                    throw BadURL("URL '%s' contains an invalid branch/tag name", url.url);
+                if (ref)
+                    throw BadURL("URL '%s' contains multiple branch/tag names", url.url);
+                ref = value;
+            }
+            else if (name == "url") {
+                if (!std::regex_match(value, urlRegex))
+                    throw BadURL("URL '%s' contains an invalid instance url", url.url);
+                host_url = value;
+            }
+            // FIXME: barf on unsupported attributes
+        }

-    ParsedURL toURL() const override
+        if (ref && rev)
+            throw BadURL("URL '%s' contains both a commit hash and a branch/tag name %s %s", url.url, *ref, rev->gitRev());
+
+        Input input;
+        input.attrs.insert_or_assign("type", type());
+        input.attrs.insert_or_assign("owner", path[0]);
+        input.attrs.insert_or_assign("repo", path[1]);
+        if (rev) input.attrs.insert_or_assign("rev", rev->gitRev());
+        if (ref) input.attrs.insert_or_assign("ref", *ref);
+        if (host_url) input.attrs.insert_or_assign("url", *host_url);
+
+        return input;
+    }
+
+    std::optional<Input> inputFromAttrs(const Attrs & attrs) override
     {
+        if (maybeGetStrAttr(attrs, "type") != type()) return {};
+
+        for (auto & [name, value] : attrs)
+            if (name != "type" && name != "owner" && name != "repo" && name != "ref" && name != "rev" && name != "narHash" && name != "lastModified")
+                throw Error("unsupported input attribute '%s'", name);
+
+        getStrAttr(attrs, "owner");
+        getStrAttr(attrs, "repo");
+
+        Input input;
+        input.attrs = attrs;
+        return input;
+    }
+
+    ParsedURL toURL(const Input & input) override
+    {
+        auto owner = getStrAttr(input.attrs, "owner");
+        auto repo = getStrAttr(input.attrs, "repo");
+        auto ref = input.getRef();
+        auto rev = input.getRev();
         auto path = owner + "/" + repo;
         assert(!(ref && rev));
         if (ref) path += "/" + *ref;
         if (rev) path += "/" + rev->to_string(Base16, false);
         return ParsedURL {
-            .scheme = "github",
+            .scheme = type(),
             .path = path,
         };
     }

-    Attrs toAttrsInternal() const override
+    bool hasAllInfo(const Input & input) override
     {
-        Attrs attrs;
-        attrs.emplace("owner", owner);
-        attrs.emplace("repo", repo);
-        if (ref)
-            attrs.emplace("ref", *ref);
-        if (rev)
-            attrs.emplace("rev", rev->gitRev());
-        return attrs;
+        return input.getRev() && maybeGetIntAttr(input.attrs, "lastModified");
     }

-    std::pair<Tree, std::shared_ptr<const Input>> fetchTreeInternal(nix::ref<Store> store) const override
+    Input applyOverrides(
+        const Input & _input,
+        std::optional<std::string> ref,
+        std::optional<Hash> rev) override
     {
-        auto rev = this->rev;
-        auto ref = this->ref.value_or("master");
-
-        if (!rev) {
-            auto url = fmt("https://api.github.com/repos/%s/%s/commits/%s",
-                owner, repo, ref);
-            auto json = nlohmann::json::parse(
-                readFile(
-                    store->toRealPath(
-                        downloadFile(store, url, "source", false).storePath)));
-            rev = Hash(std::string { json["sha"] }, htSHA1);
-            debug("HEAD revision for '%s' is %s", url, rev->gitRev());
-        }
-
-        auto input = std::make_shared<GitHubInput>(*this);
-        input->ref = {};
-        input->rev = *rev;
+        auto input(_input);
+        if (rev && ref)
+            throw BadURL("cannot apply both a commit hash (%s) and a branch/tag name ('%s') to input '%s'",
+                rev->gitRev(), *ref, input.to_string());
+        if (rev) {
+            input.attrs.insert_or_assign("rev", rev->gitRev());
+            input.attrs.erase("ref");
+        }
+        if (ref) {
+            input.attrs.insert_or_assign("ref", *ref);
+            input.attrs.erase("rev");
+        }
+        return input;
+    }
+
+    virtual Hash getRevFromRef(nix::ref<Store> store, const Input & input) const = 0;
+
+    virtual std::string getDownloadUrl(const Input & input) const = 0;
+
+    std::pair<Tree, Input> fetch(ref<Store> store, const Input & _input) override
+    {
+        Input input(_input);
+
+        if (!maybeGetStrAttr(input.attrs, "ref")) input.attrs.insert_or_assign("ref", "HEAD");
+
+        auto rev = input.getRev();
+        if (!rev) rev = getRevFromRef(store, input);
+
+        input.attrs.erase("ref");
+        input.attrs.insert_or_assign("rev", rev->gitRev());

         Attrs immutableAttrs({
             {"type", "git-tarball"},

@@ -90,36 +151,25 @@ struct GitHubInput : Input
         });

         if (auto res = getCache()->lookup(store, immutableAttrs)) {
+            input.attrs.insert_or_assign("lastModified", getIntAttr(res->first, "lastModified"));
             return {
-                Tree{
-                    .actualPath = store->toRealPath(res->second),
-                    .storePath = std::move(res->second),
-                    .info = TreeInfo {
-                        .lastModified = getIntAttr(res->first, "lastModified"),
-                    },
-                },
+                Tree(store->toRealPath(res->second), std::move(res->second)),
                 input
             };
         }

-        // FIXME: use regular /archive URLs instead? api.github.com
-        // might have stricter rate limits.
-
-        auto url = fmt("https://api.github.com/repos/%s/%s/tarball/%s",
-            owner, repo, rev->to_string(Base16, false));
-
-        std::string accessToken = settings.githubAccessToken.get();
-        if (accessToken != "")
-            url += "?access_token=" + accessToken;
-
-        auto tree = downloadTarball(store, url, "source", true);
+        auto url = getDownloadUrl(input);
+
+        auto [tree, lastModified] = downloadTarball(store, url, "source", true);
+
+        input.attrs.insert_or_assign("lastModified", lastModified);

         getCache()->add(
|
||||||
store,
|
store,
|
||||||
immutableAttrs,
|
immutableAttrs,
|
||||||
{
|
{
|
||||||
{"rev", rev->gitRev()},
|
{"rev", rev->gitRev()},
|
||||||
{"lastModified", *tree.info.lastModified}
|
{"lastModified", lastModified}
|
||||||
},
|
},
|
||||||
tree.storePath,
|
tree.storePath,
|
||||||
true);
|
true);
|
||||||
|
@ -128,68 +178,96 @@ struct GitHubInput : Input
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
struct GitHubInputScheme : InputScheme
|
struct GitHubInputScheme : GitArchiveInputScheme
|
||||||
{
|
{
|
||||||
std::unique_ptr<Input> inputFromURL(const ParsedURL & url) override
|
std::string type() override { return "github"; }
|
||||||
|
|
||||||
|
Hash getRevFromRef(nix::ref<Store> store, const Input & input) const override
|
||||||
{
|
{
|
||||||
if (url.scheme != "github") return nullptr;
|
auto host_url = maybeGetStrAttr(input.attrs, "url").value_or("github.com");
|
||||||
|
auto url = fmt("https://api.%s/repos/%s/%s/commits/%s", // FIXME: check
|
||||||
auto path = tokenizeString<std::vector<std::string>>(url.path, "/");
|
host_url, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), *input.getRef());
|
||||||
auto input = std::make_unique<GitHubInput>();
|
auto json = nlohmann::json::parse(
|
||||||
|
readFile(
|
||||||
if (path.size() == 2) {
|
store->toRealPath(
|
||||||
} else if (path.size() == 3) {
|
downloadFile(store, url, "source", false).storePath)));
|
||||||
if (std::regex_match(path[2], revRegex))
|
auto rev = Hash(std::string { json["sha"] }, htSHA1);
|
||||||
input->rev = Hash(path[2], htSHA1);
|
debug("HEAD revision for '%s' is %s", url, rev.gitRev());
|
||||||
else if (std::regex_match(path[2], refRegex))
|
return rev;
|
||||||
input->ref = path[2];
|
|
||||||
else
|
|
||||||
throw BadURL("in GitHub URL '%s', '%s' is not a commit hash or branch/tag name", url.url, path[2]);
|
|
||||||
} else
|
|
||||||
throw BadURL("GitHub URL '%s' is invalid", url.url);
|
|
||||||
|
|
||||||
for (auto &[name, value] : url.query) {
|
|
||||||
if (name == "rev") {
|
|
||||||
if (input->rev)
|
|
||||||
throw BadURL("GitHub URL '%s' contains multiple commit hashes", url.url);
|
|
||||||
input->rev = Hash(value, htSHA1);
|
|
||||||
}
|
|
||||||
else if (name == "ref") {
|
|
||||||
if (!std::regex_match(value, refRegex))
|
|
||||||
throw BadURL("GitHub URL '%s' contains an invalid branch/tag name", url.url);
|
|
||||||
if (input->ref)
|
|
||||||
throw BadURL("GitHub URL '%s' contains multiple branch/tag names", url.url);
|
|
||||||
input->ref = value;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if (input->ref && input->rev)
|
std::string getDownloadUrl(const Input & input) const override
|
||||||
throw BadURL("GitHub URL '%s' contains both a commit hash and a branch/tag name", url.url);
|
|
||||||
|
|
||||||
input->owner = path[0];
|
|
||||||
input->repo = path[1];
|
|
||||||
|
|
||||||
return input;
|
|
||||||
}
|
|
||||||
|
|
||||||
std::unique_ptr<Input> inputFromAttrs(const Attrs & attrs) override
|
|
||||||
{
|
{
|
||||||
if (maybeGetStrAttr(attrs, "type") != "github") return {};
|
// FIXME: use regular /archive URLs instead? api.github.com
|
||||||
|
// might have stricter rate limits.
|
||||||
|
auto host_url = maybeGetStrAttr(input.attrs, "url").value_or("github.com");
|
||||||
|
auto url = fmt("https://api.%s/repos/%s/%s/tarball/%s", // FIXME: check if this is correct for self hosted instances
|
||||||
|
host_url, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"),
|
||||||
|
input.getRev()->to_string(Base16, false));
|
||||||
|
|
||||||
for (auto & [name, value] : attrs)
|
std::string accessToken = settings.githubAccessToken.get();
|
||||||
if (name != "type" && name != "owner" && name != "repo" && name != "ref" && name != "rev")
|
if (accessToken != "")
|
||||||
throw Error("unsupported GitHub input attribute '%s'", name);
|
url += "?access_token=" + accessToken;
|
||||||
|
|
||||||
auto input = std::make_unique<GitHubInput>();
|
return url;
|
||||||
input->owner = getStrAttr(attrs, "owner");
|
}
|
||||||
input->repo = getStrAttr(attrs, "repo");
|
|
||||||
input->ref = maybeGetStrAttr(attrs, "ref");
|
void clone(const Input & input, const Path & destDir) override
|
||||||
if (auto rev = maybeGetStrAttr(attrs, "rev"))
|
{
|
||||||
input->rev = Hash(*rev, htSHA1);
|
auto host_url = maybeGetStrAttr(input.attrs, "url").value_or("github.com");
|
||||||
return input;
|
Input::fromURL(fmt("git+ssh://git@%s/%s/%s.git",
|
||||||
|
host_url, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo")))
|
||||||
|
.applyOverrides(input.getRef().value_or("HEAD"), input.getRev())
|
||||||
|
.clone(destDir);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
struct GitLabInputScheme : GitArchiveInputScheme
|
||||||
|
{
|
||||||
|
std::string type() override { return "gitlab"; }
|
||||||
|
|
||||||
|
Hash getRevFromRef(nix::ref<Store> store, const Input & input) const override
|
||||||
|
{
|
||||||
|
auto host_url = maybeGetStrAttr(input.attrs, "url").value_or("gitlab.com");
|
||||||
|
auto url = fmt("https://%s/api/v4/projects/%s%%2F%s/repository/commits?ref_name=%s",
|
||||||
|
host_url, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), *input.getRef());
|
||||||
|
auto json = nlohmann::json::parse(
|
||||||
|
readFile(
|
||||||
|
store->toRealPath(
|
||||||
|
downloadFile(store, url, "source", false).storePath)));
|
||||||
|
auto rev = Hash(std::string(json[0]["id"]), htSHA1);
|
||||||
|
debug("HEAD revision for '%s' is %s", url, rev.gitRev());
|
||||||
|
return rev;
|
||||||
|
}
|
||||||
|
|
||||||
|
std::string getDownloadUrl(const Input & input) const override
|
||||||
|
{
|
||||||
|
// FIXME: This endpoint has a rate limit threshold of 5 requests per minute
|
||||||
|
auto host_url = maybeGetStrAttr(input.attrs, "url").value_or("gitlab.com");
|
||||||
|
auto url = fmt("https://%s/api/v4/projects/%s%%2F%s/repository/archive.tar.gz?sha=%s",
|
||||||
|
host_url, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"),
|
||||||
|
input.getRev()->to_string(Base16, false));
|
||||||
|
|
||||||
|
/* # FIXME: add privat token auth (`curl --header "PRIVATE-TOKEN: <your_access_token>"`)
|
||||||
|
std::string accessToken = settings.githubAccessToken.get();
|
||||||
|
if (accessToken != "")
|
||||||
|
url += "?access_token=" + accessToken;*/
|
||||||
|
|
||||||
|
return url;
|
||||||
|
}
|
||||||
|
|
||||||
|
void clone(const Input & input, const Path & destDir) override
|
||||||
|
{
|
||||||
|
auto host_url = maybeGetStrAttr(input.attrs, "url").value_or("gitlab.com");
|
||||||
|
// FIXME: get username somewhere
|
||||||
|
Input::fromURL(fmt("git+ssh://git@%s/%s/%s.git",
|
||||||
|
host_url, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo")))
|
||||||
|
.applyOverrides(input.getRef().value_or("HEAD"), input.getRev())
|
||||||
|
.clone(destDir);
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
static auto r1 = OnStartup([] { registerInputScheme(std::make_unique<GitHubInputScheme>()); });
|
static auto r1 = OnStartup([] { registerInputScheme(std::make_unique<GitHubInputScheme>()); });
|
||||||
|
static auto r2 = OnStartup([] { registerInputScheme(std::make_unique<GitLabInputScheme>()); });
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
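The payoff of the `GitArchiveInputScheme` split above is that a new forge only needs `type()`, `getRevFromRef()`, `getDownloadUrl()` and `clone()`. As a hedged sketch (not part of this commit), here is what a third backend for a self-hosted Gitea instance might look like; the method signatures are copied from the patch, but the Gitea API endpoints and the `gitea.example.org` default host are illustrative assumptions:

```c++
// Hypothetical example, not in this patch: a Gitea backend reusing
// GitArchiveInputScheme. API paths below are assumptions for illustration.
struct GiteaInputScheme : GitArchiveInputScheme
{
    std::string type() override { return "gitea"; }

    Hash getRevFromRef(nix::ref<Store> store, const Input & input) const override
    {
        auto host_url = maybeGetStrAttr(input.attrs, "url").value_or("gitea.example.org");
        // Assumed endpoint: Gitea's commit-list API, limited to one entry.
        auto url = fmt("https://%s/api/v1/repos/%s/%s/commits?sha=%s&limit=1",
            host_url, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), *input.getRef());
        auto json = nlohmann::json::parse(
            readFile(store->toRealPath(downloadFile(store, url, "source", false).storePath)));
        return Hash(std::string(json[0]["sha"]), htSHA1);
    }

    std::string getDownloadUrl(const Input & input) const override
    {
        auto host_url = maybeGetStrAttr(input.attrs, "url").value_or("gitea.example.org");
        // Assumed archive URL layout.
        return fmt("https://%s/%s/%s/archive/%s.tar.gz",
            host_url, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"),
            input.getRev()->to_string(Base16, false));
    }

    void clone(const Input & input, const Path & destDir) override
    {
        auto host_url = maybeGetStrAttr(input.attrs, "url").value_or("gitea.example.org");
        // Identical shape to the GitHub/GitLab clone() methods above.
        Input::fromURL(fmt("git+ssh://git@%s/%s/%s.git",
            host_url, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo")))
            .applyOverrides(input.getRef().value_or("HEAD"), input.getRev())
            .clone(destDir);
    }
};

// Registration would follow the same OnStartup pattern as r1/r2 above.
```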
104
src/libfetchers/indirect.cc
Normal file
@@ -0,0 +1,104 @@
+#include "fetchers.hh"
+
+namespace nix::fetchers {
+
+std::regex flakeRegex("[a-zA-Z][a-zA-Z0-9_-]*", std::regex::ECMAScript);
+
+struct IndirectInputScheme : InputScheme
+{
+    std::optional<Input> inputFromURL(const ParsedURL & url) override
+    {
+        if (url.scheme != "flake") return {};
+
+        auto path = tokenizeString<std::vector<std::string>>(url.path, "/");
+
+        std::optional<Hash> rev;
+        std::optional<std::string> ref;
+
+        if (path.size() == 1) {
+        } else if (path.size() == 2) {
+            if (std::regex_match(path[1], revRegex))
+                rev = Hash(path[1], htSHA1);
+            else if (std::regex_match(path[1], refRegex))
+                ref = path[1];
+            else
+                throw BadURL("in flake URL '%s', '%s' is not a commit hash or branch/tag name", url.url, path[1]);
+        } else if (path.size() == 3) {
+            if (!std::regex_match(path[1], refRegex))
+                throw BadURL("in flake URL '%s', '%s' is not a branch/tag name", url.url, path[1]);
+            ref = path[1];
+            if (!std::regex_match(path[2], revRegex))
+                throw BadURL("in flake URL '%s', '%s' is not a commit hash", url.url, path[2]);
+            rev = Hash(path[2], htSHA1);
+        } else
+            throw BadURL("GitHub URL '%s' is invalid", url.url);
+
+        std::string id = path[0];
+        if (!std::regex_match(id, flakeRegex))
+            throw BadURL("'%s' is not a valid flake ID", id);
+
+        // FIXME: forbid query params?
+
+        Input input;
+        input.direct = false;
+        input.attrs.insert_or_assign("type", "indirect");
+        input.attrs.insert_or_assign("id", id);
+        if (rev) input.attrs.insert_or_assign("rev", rev->gitRev());
+        if (ref) input.attrs.insert_or_assign("ref", *ref);
+
+        return input;
+    }
+
+    std::optional<Input> inputFromAttrs(const Attrs & attrs) override
+    {
+        if (maybeGetStrAttr(attrs, "type") != "indirect") return {};
+
+        for (auto & [name, value] : attrs)
+            if (name != "type" && name != "id" && name != "ref" && name != "rev" && name != "narHash")
+                throw Error("unsupported indirect input attribute '%s'", name);
+
+        auto id = getStrAttr(attrs, "id");
+        if (!std::regex_match(id, flakeRegex))
+            throw BadURL("'%s' is not a valid flake ID", id);
+
+        Input input;
+        input.direct = false;
+        input.attrs = attrs;
+        return input;
+    }
+
+    ParsedURL toURL(const Input & input) override
+    {
+        ParsedURL url;
+        url.scheme = "flake";
+        url.path = getStrAttr(input.attrs, "id");
+        if (auto ref = input.getRef()) { url.path += '/'; url.path += *ref; };
+        if (auto rev = input.getRev()) { url.path += '/'; url.path += rev->gitRev(); };
+        return url;
+    }
+
+    bool hasAllInfo(const Input & input) override
+    {
+        return false;
+    }
+
+    Input applyOverrides(
+        const Input & _input,
+        std::optional<std::string> ref,
+        std::optional<Hash> rev) override
+    {
+        auto input(_input);
+        if (rev) input.attrs.insert_or_assign("rev", rev->gitRev());
+        if (ref) input.attrs.insert_or_assign("ref", *ref);
+        return input;
+    }
+
+    std::pair<Tree, Input> fetch(ref<Store> store, const Input & input) override
+    {
+        throw Error("indirect input '%s' cannot be fetched directly", input.to_string());
+    }
+};
+
+static auto r1 = OnStartup([] { registerInputScheme(std::make_unique<IndirectInputScheme>()); });
+
+}
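For reference, a short sketch of how the `flake:` scheme above decomposes a URL into attributes, using the `Input::fromURL` entry point that other parts of this patch call (flake name illustrative):

```c++
// Sketch only: exercises IndirectInputScheme::inputFromURL as defined above.
// "flake:nixpkgs/nixos-20.03" has path components {id, ref}; a third
// component would be parsed as a commit hash.
void exampleIndirect()
{
    auto input = Input::fromURL("flake:nixpkgs/nixos-20.03");
    // input.attrs now contains:
    //   type = "indirect", id = "nixpkgs", ref = "nixos-20.03"
    // and the input is marked non-direct, so fetch() throws until the
    // registry machinery (registry.cc below) resolves it to a direct input.
    assert(!input.isDirect());
}
```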
@@ -10,76 +10,124 @@ using namespace std::string_literals;

 namespace nix::fetchers {

-struct MercurialInput : Input
+struct MercurialInputScheme : InputScheme
 {
-    ParsedURL url;
-    std::optional<std::string> ref;
-    std::optional<Hash> rev;
-
-    MercurialInput(const ParsedURL & url) : url(url)
-    { }
-
-    std::string type() const override { return "hg"; }
-
-    bool operator ==(const Input & other) const override
+    std::optional<Input> inputFromURL(const ParsedURL & url) override
     {
-        auto other2 = dynamic_cast<const MercurialInput *>(&other);
-        return
-            other2
-            && url == other2->url
-            && rev == other2->rev
-            && ref == other2->ref;
+        if (url.scheme != "hg+http" &&
+            url.scheme != "hg+https" &&
+            url.scheme != "hg+ssh" &&
+            url.scheme != "hg+file") return {};
+
+        auto url2(url);
+        url2.scheme = std::string(url2.scheme, 3);
+        url2.query.clear();
+
+        Attrs attrs;
+        attrs.emplace("type", "hg");
+
+        for (auto &[name, value] : url.query) {
+            if (name == "rev" || name == "ref")
+                attrs.emplace(name, value);
+            else
+                url2.query.emplace(name, value);
         }

-    bool isImmutable() const override
-    {
-        return (bool) rev || narHash;
+        attrs.emplace("url", url2.to_string());
+
+        return inputFromAttrs(attrs);
     }

-    std::optional<std::string> getRef() const override { return ref; }
+    std::optional<Input> inputFromAttrs(const Attrs & attrs) override

-    std::optional<Hash> getRev() const override { return rev; }

-    ParsedURL toURL() const override
     {
-        ParsedURL url2(url);
-        url2.scheme = "hg+" + url2.scheme;
-        if (rev) url2.query.insert_or_assign("rev", rev->gitRev());
-        if (ref) url2.query.insert_or_assign("ref", *ref);
+        if (maybeGetStrAttr(attrs, "type") != "hg") return {};
+
+        for (auto & [name, value] : attrs)
+            if (name != "type" && name != "url" && name != "ref" && name != "rev" && name != "revCount" && name != "narHash")
+                throw Error("unsupported Mercurial input attribute '%s'", name);
+
+        parseURL(getStrAttr(attrs, "url"));
+
+        if (auto ref = maybeGetStrAttr(attrs, "ref")) {
+            if (!std::regex_match(*ref, refRegex))
+                throw BadURL("invalid Mercurial branch/tag name '%s'", *ref);
+        }
+
+        Input input;
+        input.attrs = attrs;
+        return input;
+    }
+
+    ParsedURL toURL(const Input & input) override
+    {
+        auto url = parseURL(getStrAttr(input.attrs, "url"));
+        url.scheme = "hg+" + url.scheme;
+        if (auto rev = input.getRev()) url.query.insert_or_assign("rev", rev->gitRev());
+        if (auto ref = input.getRef()) url.query.insert_or_assign("ref", *ref);
         return url;
     }

-    Attrs toAttrsInternal() const override
+    bool hasAllInfo(const Input & input) override
     {
-        Attrs attrs;
-        attrs.emplace("url", url.to_string());
-        if (ref)
-            attrs.emplace("ref", *ref);
-        if (rev)
-            attrs.emplace("rev", rev->gitRev());
-        return attrs;
+        // FIXME: ugly, need to distinguish between dirty and clean
+        // default trees.
+        return input.getRef() == "default" || maybeGetIntAttr(input.attrs, "revCount");
     }

-    std::pair<bool, std::string> getActualUrl() const
+    Input applyOverrides(
+        const Input & input,
+        std::optional<std::string> ref,
+        std::optional<Hash> rev) override
     {
+        auto res(input);
+        if (rev) res.attrs.insert_or_assign("rev", rev->gitRev());
+        if (ref) res.attrs.insert_or_assign("ref", *ref);
+        return res;
+    }
+
+    std::optional<Path> getSourcePath(const Input & input) override
+    {
+        auto url = parseURL(getStrAttr(input.attrs, "url"));
+        if (url.scheme == "file" && !input.getRef() && !input.getRev())
+            return url.path;
+        return {};
+    }
+
+    void markChangedFile(const Input & input, std::string_view file, std::optional<std::string> commitMsg) override
+    {
+        auto sourcePath = getSourcePath(input);
+        assert(sourcePath);
+
+        // FIXME: shut up if file is already tracked.
+        runProgram("hg", true,
+            { "add", *sourcePath + "/" + std::string(file) });
+
+        if (commitMsg)
+            runProgram("hg", true,
+                { "commit", *sourcePath + "/" + std::string(file), "-m", *commitMsg });
+    }
+
+    std::pair<bool, std::string> getActualUrl(const Input & input) const
+    {
+        auto url = parseURL(getStrAttr(input.attrs, "url"));
         bool isLocal = url.scheme == "file";
         return {isLocal, isLocal ? url.path : url.base};
     }

-    std::pair<Tree, std::shared_ptr<const Input>> fetchTreeInternal(nix::ref<Store> store) const override
+    std::pair<Tree, Input> fetch(ref<Store> store, const Input & _input) override
     {
         auto name = "source";

-        auto input = std::make_shared<MercurialInput>(*this);
+        Input input(_input);

-        auto [isLocal, actualUrl_] = getActualUrl();
+        auto [isLocal, actualUrl_] = getActualUrl(input);
         auto actualUrl = actualUrl_; // work around clang bug

         // FIXME: return lastModified.

         // FIXME: don't clone local repositories.

-        if (!input->ref && !input->rev && isLocal && pathExists(actualUrl + "/.hg")) {
+        if (!input.getRef() && !input.getRev() && isLocal && pathExists(actualUrl + "/.hg")) {

             bool clean = runProgram("hg", true, { "status", "-R", actualUrl, "--modified", "--added", "--removed" }) == "";

@@ -94,7 +142,7 @@ struct MercurialInput : Input
             if (settings.warnDirty)
                 warn("Mercurial tree '%s' is unclean", actualUrl);

-            input->ref = chomp(runProgram("hg", true, { "branch", "-R", actualUrl }));
+            input.attrs.insert_or_assign("ref", chomp(runProgram("hg", true, { "branch", "-R", actualUrl })));

             auto files = tokenizeString<std::set<std::string>>(
                 runProgram("hg", true, { "status", "-R", actualUrl, "--clean", "--modified", "--added", "--no-status", "--print0" }), "\0"s);
@@ -116,60 +164,54 @@ struct MercurialInput : Input

             auto storePath = store->addToStore("source", actualUrl, FileIngestionMethod::Recursive, htSHA256, filter);

-            return {Tree {
-                .actualPath = store->printStorePath(storePath),
-                .storePath = std::move(storePath),
-            }, input};
+            return {
+                Tree(store->printStorePath(storePath), std::move(storePath)),
+                input
+            };
         }
     }

-        if (!input->ref) input->ref = "default";
+        if (!input.getRef()) input.attrs.insert_or_assign("ref", "default");

         auto getImmutableAttrs = [&]()
         {
             return Attrs({
                 {"type", "hg"},
                 {"name", name},
-                {"rev", input->rev->gitRev()},
+                {"rev", input.getRev()->gitRev()},
             });
         };

         auto makeResult = [&](const Attrs & infoAttrs, StorePath && storePath)
-            -> std::pair<Tree, std::shared_ptr<const Input>>
+            -> std::pair<Tree, Input>
         {
-            assert(input->rev);
-            assert(!rev || rev == input->rev);
+            assert(input.getRev());
+            assert(!_input.getRev() || _input.getRev() == input.getRev());
+            input.attrs.insert_or_assign("revCount", getIntAttr(infoAttrs, "revCount"));
             return {
-                Tree{
-                    .actualPath = store->toRealPath(storePath),
-                    .storePath = std::move(storePath),
-                    .info = TreeInfo {
-                        .revCount = getIntAttr(infoAttrs, "revCount"),
-                    },
-                },
+                Tree(store->toRealPath(storePath), std::move(storePath)),
                 input
             };
         };

-        if (input->rev) {
+        if (input.getRev()) {
             if (auto res = getCache()->lookup(store, getImmutableAttrs()))
                 return makeResult(res->first, std::move(res->second));
         }

-        assert(input->rev || input->ref);
-        auto revOrRef = input->rev ? input->rev->gitRev() : *input->ref;
+        auto revOrRef = input.getRev() ? input.getRev()->gitRev() : *input.getRef();

         Attrs mutableAttrs({
             {"type", "hg"},
             {"name", name},
             {"url", actualUrl},
-            {"ref", *input->ref},
+            {"ref", *input.getRef()},
         });

         if (auto res = getCache()->lookup(store, mutableAttrs)) {
             auto rev2 = Hash(getStrAttr(res->first, "rev"), htSHA1);
-            if (!rev || rev == rev2) {
-                input->rev = rev2;
+            if (!input.getRev() || input.getRev() == rev2) {
+                input.attrs.insert_or_assign("rev", rev2.gitRev());
                 return makeResult(res->first, std::move(res->second));
             }
         }
@@ -178,10 +220,10 @@ struct MercurialInput : Input

         /* If this is a commit hash that we already have, we don't
            have to pull again. */
-        if (!(input->rev
+        if (!(input.getRev()
               && pathExists(cacheDir)
               && runProgram(
-                  RunOptions("hg", { "log", "-R", cacheDir, "-r", input->rev->gitRev(), "--template", "1" })
+                  RunOptions("hg", { "log", "-R", cacheDir, "-r", input.getRev()->gitRev(), "--template", "1" })
                   .killStderr(true)).second == "1"))
         {
             Activity act(*logger, lvlTalkative, actUnknown, fmt("fetching Mercurial repository '%s'", actualUrl));
@@ -210,9 +252,9 @@ struct MercurialInput : Input
             runProgram("hg", true, { "log", "-R", cacheDir, "-r", revOrRef, "--template", "{node} {rev} {branch}" }));
         assert(tokens.size() == 3);

-        input->rev = Hash(tokens[0], htSHA1);
+        input.attrs.insert_or_assign("rev", Hash(tokens[0], htSHA1).gitRev());
         auto revCount = std::stoull(tokens[1]);
-        input->ref = tokens[2];
+        input.attrs.insert_or_assign("ref", tokens[2]);

         if (auto res = getCache()->lookup(store, getImmutableAttrs()))
             return makeResult(res->first, std::move(res->second));
@@ -220,18 +262,18 @@ struct MercurialInput : Input
         Path tmpDir = createTempDir();
         AutoDelete delTmpDir(tmpDir, true);

-        runProgram("hg", true, { "archive", "-R", cacheDir, "-r", input->rev->gitRev(), tmpDir });
+        runProgram("hg", true, { "archive", "-R", cacheDir, "-r", input.getRev()->gitRev(), tmpDir });

         deletePath(tmpDir + "/.hg_archival.txt");

         auto storePath = store->addToStore(name, tmpDir);

         Attrs infoAttrs({
-            {"rev", input->rev->gitRev()},
+            {"rev", input.getRev()->gitRev()},
             {"revCount", (int64_t) revCount},
         });

-        if (!this->rev)
+        if (!_input.getRev())
             getCache()->add(
                 store,
                 mutableAttrs,
@@ -250,54 +292,6 @@ struct MercurialInput : Input
     }
 };

-struct MercurialInputScheme : InputScheme
-{
-    std::unique_ptr<Input> inputFromURL(const ParsedURL & url) override
-    {
-        if (url.scheme != "hg+http" &&
-            url.scheme != "hg+https" &&
-            url.scheme != "hg+ssh" &&
-            url.scheme != "hg+file") return nullptr;
-
-        auto url2(url);
-        url2.scheme = std::string(url2.scheme, 3);
-        url2.query.clear();
-
-        Attrs attrs;
-        attrs.emplace("type", "hg");
-
-        for (auto &[name, value] : url.query) {
-            if (name == "rev" || name == "ref")
-                attrs.emplace(name, value);
-            else
-                url2.query.emplace(name, value);
-        }
-
-        attrs.emplace("url", url2.to_string());
-
-        return inputFromAttrs(attrs);
-    }
-
-    std::unique_ptr<Input> inputFromAttrs(const Attrs & attrs) override
-    {
-        if (maybeGetStrAttr(attrs, "type") != "hg") return {};
-
-        for (auto & [name, value] : attrs)
-            if (name != "type" && name != "url" && name != "ref" && name != "rev")
-                throw Error("unsupported Mercurial input attribute '%s'", name);
-
-        auto input = std::make_unique<MercurialInput>(parseURL(getStrAttr(attrs, "url")));
-        if (auto ref = maybeGetStrAttr(attrs, "ref")) {
-            if (!std::regex_match(*ref, refRegex))
-                throw BadURL("invalid Mercurial branch/tag name '%s'", *ref);
-            input->ref = *ref;
-        }
-        if (auto rev = maybeGetStrAttr(attrs, "rev"))
-            input->rev = Hash(*rev, htSHA1);
-        return input;
-    }
-};
-
 static auto r1 = OnStartup([] { registerInputScheme(std::make_unique<MercurialInputScheme>()); });

 }
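The Mercurial `fetch()` above keeps the pre-existing two-tier cache, whose shape is easy to lose in the diff. A condensed sketch of the lookup order (attribute shapes copied from the patch; this is a fragment that assumes the surrounding `fetch()` scope, not standalone code):

```c++
// Two cache keys are in play:
//   mutable:   (type, name, url, ref)  -> may resolve a branch to a rev
//   immutable: (type, name, rev)       -> maps a pinned rev to a store path
Attrs mutableAttrs({ {"type", "hg"}, {"name", name}, {"url", actualUrl}, {"ref", *input.getRef()} });
Attrs immutableAttrs({ {"type", "hg"}, {"name", name}, {"rev", input.getRev()->gitRev()} });

// 1. With a known rev, the immutable entry short-circuits everything.
// 2. Without one, the mutable entry may supply rev2, which is only
//    accepted if it matches any rev the caller pinned.
// 3. Otherwise `hg pull` + `hg archive` run, and both entries are added.
```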
@@ -3,65 +3,86 @@

 namespace nix::fetchers {

-struct PathInput : Input
+struct PathInputScheme : InputScheme
 {
-    Path path;
-
-    /* Allow the user to pass in "fake" tree info attributes. This is
-       useful for making a pinned tree work the same as the repository
-       from which is exported
+    std::optional<Input> inputFromURL(const ParsedURL & url) override
+    {
+        if (url.scheme != "path") return {};
+
+        if (url.authority && *url.authority != "")
+            throw Error("path URL '%s' should not have an authority ('%s')", url.url, *url.authority);
+
+        Input input;
+        input.attrs.insert_or_assign("type", "path");
+        input.attrs.insert_or_assign("path", url.path);
+
+        for (auto & [name, value] : url.query)
+            if (name == "rev" || name == "narHash")
+                input.attrs.insert_or_assign(name, value);
+            else if (name == "revCount" || name == "lastModified") {
+                uint64_t n;
+                if (!string2Int(value, n))
+                    throw Error("path URL '%s' has invalid parameter '%s'", url.to_string(), name);
+                input.attrs.insert_or_assign(name, n);
+            }
+            else
+                throw Error("path URL '%s' has unsupported parameter '%s'", url.to_string(), name);
+
+        return input;
+    }
+
+    std::optional<Input> inputFromAttrs(const Attrs & attrs) override
+    {
+        if (maybeGetStrAttr(attrs, "type") != "path") return {};
+
+        getStrAttr(attrs, "path");
+
+        for (auto & [name, value] : attrs)
+            /* Allow the user to pass in "fake" tree info
+               attributes. This is useful for making a pinned tree
+               work the same as the repository from which is exported
                (e.g. path:/nix/store/...-source?lastModified=1585388205&rev=b0c285...). */
-    std::optional<Hash> rev;
-    std::optional<uint64_t> revCount;
-    std::optional<time_t> lastModified;
-
-    std::string type() const override { return "path"; }
-
-    std::optional<Hash> getRev() const override { return rev; }
-
-    bool operator ==(const Input & other) const override
-    {
-        auto other2 = dynamic_cast<const PathInput *>(&other);
-        return
-            other2
-            && path == other2->path
-            && rev == other2->rev
-            && revCount == other2->revCount
-            && lastModified == other2->lastModified;
+            if (name == "type" || name == "rev" || name == "revCount" || name == "lastModified" || name == "narHash" || name == "path")
+                // checked in Input::fromAttrs
+                ;
+            else
+                throw Error("unsupported path input attribute '%s'", name);
+
+        Input input;
+        input.attrs = attrs;
+        return input;
     }

-    bool isImmutable() const override
+    ParsedURL toURL(const Input & input) override
     {
-        return (bool) narHash;
-    }
-
-    ParsedURL toURL() const override
-    {
-        auto query = attrsToQuery(toAttrsInternal());
+        auto query = attrsToQuery(input.attrs);
         query.erase("path");
+        query.erase("type");
         return ParsedURL {
             .scheme = "path",
-            .path = path,
+            .path = getStrAttr(input.attrs, "path"),
             .query = query,
         };
     }

-    Attrs toAttrsInternal() const override
+    bool hasAllInfo(const Input & input) override
     {
-        Attrs attrs;
-        attrs.emplace("path", path);
-        if (rev)
-            attrs.emplace("rev", rev->gitRev());
-        if (revCount)
-            attrs.emplace("revCount", *revCount);
-        if (lastModified)
-            attrs.emplace("lastModified", *lastModified);
-        return attrs;
+        return true;
     }

-    std::pair<Tree, std::shared_ptr<const Input>> fetchTreeInternal(nix::ref<Store> store) const override
+    std::optional<Path> getSourcePath(const Input & input) override
     {
-        auto input = std::make_shared<PathInput>(*this);
+        return getStrAttr(input.attrs, "path");
+    }
+
+    void markChangedFile(const Input & input, std::string_view file, std::optional<std::string> commitMsg) override
+    {
+        // nothing to do
+    }
+
+    std::pair<Tree, Input> fetch(ref<Store> store, const Input & input) override
+    {
+        auto path = getStrAttr(input.attrs, "path");

         // FIXME: check whether access to 'path' is allowed.

@@ -74,73 +95,11 @@ struct PathInput : Input
             // FIXME: try to substitute storePath.
             storePath = store->addToStore("source", path);

-        return
-            {
-                Tree {
-                    .actualPath = store->toRealPath(*storePath),
-                    .storePath = std::move(*storePath),
-                    .info = TreeInfo {
-                        .revCount = revCount,
-                        .lastModified = lastModified
-                    }
-                },
+        return {
+            Tree(store->toRealPath(*storePath), std::move(*storePath)),
             input
         };
     }
-
-};
-
-struct PathInputScheme : InputScheme
-{
-    std::unique_ptr<Input> inputFromURL(const ParsedURL & url) override
-    {
-        if (url.scheme != "path") return nullptr;
-
-        auto input = std::make_unique<PathInput>();
-        input->path = url.path;
-
-        for (auto & [name, value] : url.query)
-            if (name == "rev")
-                input->rev = Hash(value, htSHA1);
-            else if (name == "revCount") {
-                uint64_t revCount;
-                if (!string2Int(value, revCount))
-                    throw Error("path URL '%s' has invalid parameter '%s'", url.to_string(), name);
-                input->revCount = revCount;
-            }
-            else if (name == "lastModified") {
-                time_t lastModified;
-                if (!string2Int(value, lastModified))
-                    throw Error("path URL '%s' has invalid parameter '%s'", url.to_string(), name);
-                input->lastModified = lastModified;
-            }
-            else
-                throw Error("path URL '%s' has unsupported parameter '%s'", url.to_string(), name);
-
-        return input;
-    }
-
-    std::unique_ptr<Input> inputFromAttrs(const Attrs & attrs) override
-    {
-        if (maybeGetStrAttr(attrs, "type") != "path") return {};
-
-        auto input = std::make_unique<PathInput>();
-        input->path = getStrAttr(attrs, "path");
-
-        for (auto & [name, value] : attrs)
-            if (name == "rev")
-                input->rev = Hash(getStrAttr(attrs, "rev"), htSHA1);
-            else if (name == "revCount")
-                input->revCount = getIntAttr(attrs, "revCount");
-            else if (name == "lastModified")
-                input->lastModified = getIntAttr(attrs, "lastModified");
-            else if (name == "type" || name == "path")
-                ;
-            else
-                throw Error("unsupported path input attribute '%s'", name);
-
-        return input;
-    }
 };

 static auto r1 = OnStartup([] { registerInputScheme(std::make_unique<PathInputScheme>()); });
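A sketch of the "fake" tree info escape hatch described in the comment above. The store path and the full-length rev here are dummies spelled out for illustration (the original comment elides both):

```c++
// Sketch: a pinned store path masquerading as the repository it was
// exported from, via rev/lastModified in the query string. The store
// path hash and the 40-char rev are made-up placeholder values.
auto input = fetchers::Input::fromURL(
    "path:/nix/store/0ckb5v8b9cs6bmcqz2b7cacaqgnqkv2k-source"
    "?lastModified=1585388205"
    "&rev=b0c2858070d6a2d1cd83e3e16c50e4a1f2b8bb2b");
// inputFromURL() integer-checks revCount/lastModified via string2Int and
// stores everything in input.attrs; hasAllInfo() is unconditionally true
// for path inputs, so no fetch round-trip is needed to evaluate them.
```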
212
src/libfetchers/registry.cc
Normal file
@@ -0,0 +1,212 @@
+#include "registry.hh"
+#include "fetchers.hh"
+#include "util.hh"
+#include "globals.hh"
+#include "store-api.hh"
+
+#include <nlohmann/json.hpp>
+
+namespace nix::fetchers {
+
+std::shared_ptr<Registry> Registry::read(
+    const Path & path, RegistryType type)
+{
+    auto registry = std::make_shared<Registry>(type);
+
+    if (!pathExists(path))
+        return std::make_shared<Registry>(type);
+
+    try {
+
+        auto json = nlohmann::json::parse(readFile(path));
+
+        auto version = json.value("version", 0);
+
+        if (version == 2) {
+            for (auto & i : json["flakes"]) {
+                auto toAttrs = jsonToAttrs(i["to"]);
+                Attrs extraAttrs;
+                auto j = toAttrs.find("dir");
+                if (j != toAttrs.end()) {
+                    extraAttrs.insert(*j);
+                    toAttrs.erase(j);
+                }
+                auto exact = i.find("exact");
+                registry->entries.push_back(
+                    Entry {
+                        .from = Input::fromAttrs(jsonToAttrs(i["from"])),
+                        .to = Input::fromAttrs(std::move(toAttrs)),
+                        .extraAttrs = extraAttrs,
+                        .exact = exact != i.end() && exact.value()
+                    });
+            }
+        }
+
+        else
+            throw Error("flake registry '%s' has unsupported version %d", path, version);
+
+    } catch (nlohmann::json::exception & e) {
+        warn("cannot parse flake registry '%s': %s", path, e.what());
+    } catch (Error & e) {
+        warn("cannot read flake registry '%s': %s", path, e.what());
+    }
+
+    return registry;
+}
+
+void Registry::write(const Path & path)
+{
+    nlohmann::json arr;
+    for (auto & entry : entries) {
+        nlohmann::json obj;
+        obj["from"] = attrsToJson(entry.from.toAttrs());
+        obj["to"] = attrsToJson(entry.to.toAttrs());
+        if (!entry.extraAttrs.empty())
+            obj["to"].update(attrsToJson(entry.extraAttrs));
+        if (entry.exact)
+            obj["exact"] = true;
+        arr.emplace_back(std::move(obj));
+    }
+
+    nlohmann::json json;
+    json["version"] = 2;
+    json["flakes"] = std::move(arr);
+
+    createDirs(dirOf(path));
+    writeFile(path, json.dump(2));
+}
+
+void Registry::add(
+    const Input & from,
+    const Input & to,
+    const Attrs & extraAttrs)
+{
+    entries.emplace_back(
+        Entry {
+            .from = from,
+            .to = to,
+            .extraAttrs = extraAttrs
+        });
+}
+
+void Registry::remove(const Input & input)
+{
+    // FIXME: use C++20 std::erase.
+    for (auto i = entries.begin(); i != entries.end(); )
+        if (i->from == input)
+            i = entries.erase(i);
+        else
+            ++i;
+}
+
+static Path getSystemRegistryPath()
+{
+    return settings.nixConfDir + "/registry.json";
+}
+
+static std::shared_ptr<Registry> getSystemRegistry()
+{
+    static auto systemRegistry =
+        Registry::read(getSystemRegistryPath(), Registry::System);
+    return systemRegistry;
+}
+
+Path getUserRegistryPath()
+{
+    return getHome() + "/.config/nix/registry.json";
+}
+
+std::shared_ptr<Registry> getUserRegistry()
+{
+    static auto userRegistry =
+        Registry::read(getUserRegistryPath(), Registry::User);
+    return userRegistry;
+}
+
+static std::shared_ptr<Registry> flagRegistry =
+    std::make_shared<Registry>(Registry::Flag);
+
+std::shared_ptr<Registry> getFlagRegistry()
+{
+    return flagRegistry;
+}
+
+void overrideRegistry(
+    const Input & from,
+    const Input & to,
+    const Attrs & extraAttrs)
+{
+    flagRegistry->add(from, to, extraAttrs);
+}
+
+static std::shared_ptr<Registry> getGlobalRegistry(ref<Store> store)
+{
+    static auto reg = [&]() {
+        auto path = settings.flakeRegistry.get();
+
+        if (!hasPrefix(path, "/")) {
+            auto storePath = downloadFile(store, path, "flake-registry.json", false).storePath;
+            if (auto store2 = store.dynamic_pointer_cast<LocalFSStore>())
+                store2->addPermRoot(storePath, getCacheDir() + "/nix/flake-registry.json", true);
+            path = store->toRealPath(storePath);
+        }
+
+        return Registry::read(path, Registry::Global);
+    }();
+
+    return reg;
+}
+
+Registries getRegistries(ref<Store> store)
+{
+    Registries registries;
+    registries.push_back(getFlagRegistry());
+    registries.push_back(getUserRegistry());
+    registries.push_back(getSystemRegistry());
+    registries.push_back(getGlobalRegistry(store));
+    return registries;
+}
+
+std::pair<Input, Attrs> lookupInRegistries(
+    ref<Store> store,
+    const Input & _input)
+{
+    Attrs extraAttrs;
+    int n = 0;
+    Input input(_input);
+
+ restart:
+
+    n++;
+    if (n > 100) throw Error("cycle detected in flake registry for '%s'", input.to_string());
+
+    for (auto & registry : getRegistries(store)) {
+        // FIXME: O(n)
+        for (auto & entry : registry->entries) {
+            if (entry.exact) {
+                if (entry.from == input) {
+                    input = entry.to;
+                    extraAttrs = entry.extraAttrs;
+                    goto restart;
+                }
+            } else {
+                if (entry.from.contains(input)) {
+                    input = entry.to.applyOverrides(
+                        !entry.from.getRef() && input.getRef() ? input.getRef() : std::optional<std::string>(),
+                        !entry.from.getRev() && input.getRev() ? input.getRev() : std::optional<Hash>());
+                    extraAttrs = entry.extraAttrs;
+                    goto restart;
+                }
+            }
+        }
+    }
+
+    if (!input.isDirect())
+        throw Error("cannot find flake '%s' in the flake registries", input.to_string());
+
+    debug("looked up '%s' -> '%s'", _input.to_string(), input.to_string());
+
+    return {input, extraAttrs};
+}
+
+}
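Since `Registry::read()` and `Registry::write()` above define the on-disk interchange format, a hand-written version of what `write()` emits makes the version-2 layout concrete (entry values illustrative; a `dir` key inside `to` would be split out into `extraAttrs` on read):

```c++
#include <nlohmann/json.hpp>

// Sketch of the version-2 registry document, built imperatively the same
// way Registry::write() does.
nlohmann::json entry;
entry["from"] = { {"type", "indirect"}, {"id", "nixpkgs"} };
entry["to"]   = { {"type", "github"}, {"owner", "NixOS"}, {"repo", "nixpkgs"} };

nlohmann::json registry;
registry["version"] = 2;
registry["flakes"] = nlohmann::json::array({ entry });
// registry.dump(2) now matches the file that Registry::read() accepts.
```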
64
src/libfetchers/registry.hh
Normal file
@@ -0,0 +1,64 @@
+#pragma once
+
+#include "types.hh"
+#include "fetchers.hh"
+
+namespace nix { class Store; }
+
+namespace nix::fetchers {
+
+struct Registry
+{
+    enum RegistryType {
+        Flag = 0,
+        User = 1,
+        System = 2,
+        Global = 3,
+    };
+
+    RegistryType type;
+
+    struct Entry
+    {
+        Input from, to;
+        Attrs extraAttrs;
+        bool exact = false;
+    };
+
+    std::vector<Entry> entries;
+
+    Registry(RegistryType type)
+        : type(type)
+    { }
+
+    static std::shared_ptr<Registry> read(
+        const Path & path, RegistryType type);
+
+    void write(const Path & path);
+
+    void add(
+        const Input & from,
+        const Input & to,
+        const Attrs & extraAttrs);
+
+    void remove(const Input & input);
+};
+
+typedef std::vector<std::shared_ptr<Registry>> Registries;
+
+std::shared_ptr<Registry> getUserRegistry();
+
+Path getUserRegistryPath();
+
+Registries getRegistries(ref<Store> store);
+
+void overrideRegistry(
+    const Input & from,
+    const Input & to,
+    const Attrs & extraAttrs);
+
+std::pair<Input, Attrs> lookupInRegistries(
+    ref<Store> store,
+    const Input & input);
+
+}
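Putting the header's API together, pinning an indirect flake in the user registry looks roughly like this (a sketch of what a `nix registry add`-style command would do; the nixpkgs mapping is an example, not from this patch):

```c++
// Sketch: resolve "flake:nixpkgs" to a concrete GitHub input and persist it.
auto reg = fetchers::getUserRegistry();
reg->add(
    fetchers::Input::fromURL("flake:nixpkgs"),
    fetchers::Input::fromURL("github:NixOS/nixpkgs/nixos-20.03"),
    {});  // no extraAttrs
reg->write(fetchers::getUserRegistryPath());
// lookupInRegistries() will now rewrite the indirect input before fetching.
```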
@@ -105,7 +105,7 @@ DownloadFileResult downloadFile(
     };
 }

-Tree downloadTarball(
+std::pair<Tree, time_t> downloadTarball(
     ref<Store> store,
     const std::string & url,
     const std::string & name,
@@ -120,12 +120,9 @@ Tree downloadTarball(
     auto cached = getCache()->lookupExpired(store, inAttrs);

     if (cached && !cached->expired)
-        return Tree {
-            .actualPath = store->toRealPath(cached->storePath),
-            .storePath = std::move(cached->storePath),
-            .info = TreeInfo {
-                .lastModified = getIntAttr(cached->infoAttrs, "lastModified"),
-            },
+        return {
+            Tree(store->toRealPath(cached->storePath), std::move(cached->storePath)),
+            getIntAttr(cached->infoAttrs, "lastModified")
         };

     auto res = downloadFile(store, url, name, immutable);
@@ -160,117 +157,72 @@ Tree downloadTarball(
         *unpackedStorePath,
         immutable);

-    return Tree {
-        .actualPath = store->toRealPath(*unpackedStorePath),
-        .storePath = std::move(*unpackedStorePath),
-        .info = TreeInfo {
-            .lastModified = lastModified,
-        },
+    return {
+        Tree(store->toRealPath(*unpackedStorePath), std::move(*unpackedStorePath)),
+        lastModified,
     };
 }

-struct TarballInput : Input
-{
-    ParsedURL url;
-    std::optional<Hash> hash;
-
-    TarballInput(const ParsedURL & url) : url(url)
-    { }
-
-    std::string type() const override { return "tarball"; }
-
-    bool operator ==(const Input & other) const override
-    {
-        auto other2 = dynamic_cast<const TarballInput *>(&other);
-        return
-            other2
-            && to_string() == other2->to_string()
-            && hash == other2->hash;
-    }
-
-    bool isImmutable() const override
-    {
-        return hash || narHash;
-    }
-
-    ParsedURL toURL() const override
-    {
-        auto url2(url);
-        // NAR hashes are preferred over file hashes since tar/zip files
-        // don't have a canonical representation.
-        if (narHash)
-            url2.query.insert_or_assign("narHash", narHash->to_string(SRI, true));
-        else if (hash)
-            url2.query.insert_or_assign("hash", hash->to_string(SRI, true));
-        return url2;
-    }
-
-    Attrs toAttrsInternal() const override
-    {
-        Attrs attrs;
-        attrs.emplace("url", url.to_string());
-        if (hash)
-            attrs.emplace("hash", hash->to_string(SRI, true));
-        return attrs;
-    }
-
-    std::pair<Tree, std::shared_ptr<const Input>> fetchTreeInternal(nix::ref<Store> store) const override
-    {
-        auto tree = downloadTarball(store, url.to_string(), "source", false);
-
-        auto input = std::make_shared<TarballInput>(*this);
-        input->narHash = store->queryPathInfo(tree.storePath)->narHash;
-
-        return {std::move(tree), input};
-    }
-};
-
 struct TarballInputScheme : InputScheme
 {
-    std::unique_ptr<Input> inputFromURL(const ParsedURL & url) override
+    std::optional<Input> inputFromURL(const ParsedURL & url) override
     {
-        if (url.scheme != "file" && url.scheme != "http" && url.scheme != "https") return nullptr;
+        if (url.scheme != "file" && url.scheme != "http" && url.scheme != "https") return {};

         if (!hasSuffix(url.path, ".zip")
             && !hasSuffix(url.path, ".tar")
             && !hasSuffix(url.path, ".tar.gz")
             && !hasSuffix(url.path, ".tar.xz")
             && !hasSuffix(url.path, ".tar.bz2"))
-            return nullptr;
-
-        auto input = std::make_unique<TarballInput>(url);
-
-        auto hash = input->url.query.find("hash");
-        if (hash != input->url.query.end()) {
-            // FIXME: require SRI hash.
-            input->hash = Hash(hash->second);
-            input->url.query.erase(hash);
-        }
-
-        auto narHash = input->url.query.find("narHash");
-        if (narHash != input->url.query.end()) {
-            // FIXME: require SRI hash.
-            input->narHash = Hash(narHash->second);
-            input->url.query.erase(narHash);
-        }
+            return {};
+
+        Input input;
+        input.attrs.insert_or_assign("type", "tarball");
+        input.attrs.insert_or_assign("url", url.to_string());
+        auto narHash = url.query.find("narHash");
+        if (narHash != url.query.end())
+            input.attrs.insert_or_assign("narHash", narHash->second);
         return input;
     }

-    std::unique_ptr<Input> inputFromAttrs(const Attrs & attrs) override
+    std::optional<Input> inputFromAttrs(const Attrs & attrs) override
     {
         if (maybeGetStrAttr(attrs, "type") != "tarball") return {};

         for (auto & [name, value] : attrs)
-            if (name != "type" && name != "url" && name != "hash")
+            if (name != "type" && name != "url" && /* name != "hash" && */ name != "narHash")
                 throw Error("unsupported tarball input attribute '%s'", name);

-        auto input = std::make_unique<TarballInput>(parseURL(getStrAttr(attrs, "url")));
-        if (auto hash = maybeGetStrAttr(attrs, "hash"))
-            input->hash = newHashAllowEmpty(*hash, {});
+        Input input;
+        input.attrs = attrs;
+        //input.immutable = (bool) maybeGetStrAttr(input.attrs, "hash");

         return input;
     }

+    ParsedURL toURL(const Input & input) override
+    {
+        auto url = parseURL(getStrAttr(input.attrs, "url"));
+        // NAR hashes are preferred over file hashes since tar/zip files
+        // don't have a canonical representation.
+        if (auto narHash = input.getNarHash())
+            url.query.insert_or_assign("narHash", narHash->to_string(SRI, true));
+        /*
+        else if (auto hash = maybeGetStrAttr(input.attrs, "hash"))
+            url.query.insert_or_assign("hash", Hash(*hash).to_string(SRI, true));
+        */
+        return url;
+    }
+
+    bool hasAllInfo(const Input & input) override
+    {
+        return true;
+    }
+
+    std::pair<Tree, Input> fetch(ref<Store> store, const Input & input) override
+    {
+        auto tree = downloadTarball(store, getStrAttr(input.attrs, "url"), "source", false).first;
+        return {std::move(tree), input};
+    }
 };

 static auto r1 = OnStartup([] { registerInputScheme(std::make_unique<TarballInputScheme>()); });
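With `TreeInfo` gone (its removal follows below), `downloadTarball()` now returns the last-modified timestamp alongside the tree instead of smuggling it through `Tree::info`. Callers destructure the pair, as `GitArchiveInputScheme::fetch()` does earlier in this diff; a minimal fragment of the new calling convention, assuming the surrounding `fetch()` scope:

```c++
// Sketch: consuming the new std::pair<Tree, time_t> return value.
auto [tree, lastModified] = downloadTarball(store, url, "source", true);
// The timestamp now travels as an ordinary input attribute rather than
// as TreeInfo metadata attached to the tree:
input.attrs.insert_or_assign("lastModified", lastModified);
```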
src/libfetchers/tree-info.cc
@@ -1,14 +0,0 @@
-#include "tree-info.hh"
-#include "store-api.hh"
-
-#include <nlohmann/json.hpp>
-
-namespace nix::fetchers {
-
-StorePath TreeInfo::computeStorePath(Store & store) const
-{
-    assert(narHash);
-    return store.makeFixedOutputPath(FileIngestionMethod::Recursive, narHash, "source");
-}
-
-}
src/libfetchers/tree-info.hh
@@ -1,29 +0,0 @@
-#pragma once
-
-#include "path.hh"
-#include "hash.hh"
-
-#include <nlohmann/json_fwd.hpp>
-
-namespace nix { class Store; }
-
-namespace nix::fetchers {
-
-struct TreeInfo
-{
-    Hash narHash;
-    std::optional<uint64_t> revCount;
-    std::optional<time_t> lastModified;
-
-    bool operator ==(const TreeInfo & other) const
-    {
-        return
-            narHash == other.narHash
-            && revCount == other.revCount
-            && lastModified == other.lastModified;
-    }
-
-    StorePath computeStorePath(Store & store) const;
-};
-
-}
@ -34,9 +34,19 @@ MixCommonArgs::MixCommonArgs(const string & programName)
|
||||||
try {
|
try {
|
||||||
globalConfig.set(name, value);
|
globalConfig.set(name, value);
|
||||||
} catch (UsageError & e) {
|
} catch (UsageError & e) {
|
||||||
|
if (!completions)
|
||||||
warn(e.what());
|
warn(e.what());
|
||||||
}
|
}
|
||||||
}},
|
}},
|
||||||
|
.completer = [](size_t index, std::string_view prefix) {
|
||||||
|
if (index == 0) {
|
||||||
|
std::map<std::string, Config::SettingInfo> settings;
|
||||||
|
globalConfig.getSettings(settings);
|
||||||
|
for (auto & s : settings)
|
||||||
|
if (hasPrefix(s.first, prefix))
|
||||||
|
completions->insert(s.first);
|
||||||
|
}
|
||||||
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
addFlag({
|
addFlag({
|
||||||
|
|
|
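The new `.completer` hook receives the index of the argument being completed and the prefix typed so far, and feeds matches into a global `completions` set. The core of that flow as a self-contained demo — the setting names here are made up; the real list comes from `globalConfig.getSettings()`:

```cpp
#include <iostream>
#include <set>
#include <string>
#include <string_view>

int main()
{
    std::set<std::string> settings{"cores", "max-jobs", "sandbox", "substituters"};
    std::string_view prefix = "s";          // what the user has typed so far
    std::set<std::string> completions;
    for (auto & s : settings)
        if (s.rfind(prefix, 0) == 0)        // hasPrefix equivalent
            completions.insert(s);
    for (auto & c : completions)
        std::cout << c << "\n";             // prints: sandbox, substituters
}
```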
@@ -1,12 +1,13 @@
 #include "loggers.hh"
 #include "progress-bar.hh"
+#include "util.hh"
 
 namespace nix {
 
 LogFormat defaultLogFormat = LogFormat::raw;
 
 LogFormat parseLogFormat(const std::string & logFormatStr) {
-    if (logFormatStr == "raw")
+    if (logFormatStr == "raw" || getEnv("NIX_GET_COMPLETIONS"))
         return LogFormat::raw;
     else if (logFormatStr == "raw-with-logs")
         return LogFormat::rawWithLogs;
@@ -36,7 +36,7 @@ void printGCWarning()
 
 void printMissing(ref<Store> store, const std::vector<StorePathWithOutputs> & paths, Verbosity lvl)
 {
-    unsigned long long downloadSize, narSize;
+    uint64_t downloadSize, narSize;
     StorePathSet willBuild, willSubstitute, unknown;
     store->queryMissing(paths, willBuild, willSubstitute, unknown, downloadSize, narSize);
     printMissing(store, willBuild, willSubstitute, unknown, downloadSize, narSize, lvl);
@@ -45,7 +45,7 @@ void printMissing(ref<Store> store, const std::vector<StorePathWithOutputs> & pa
 
 void printMissing(ref<Store> store, const StorePathSet & willBuild,
     const StorePathSet & willSubstitute, const StorePathSet & unknown,
-    unsigned long long downloadSize, unsigned long long narSize, Verbosity lvl)
+    uint64_t downloadSize, uint64_t narSize, Verbosity lvl)
 {
     if (!willBuild.empty()) {
         if (willBuild.size() == 1)
@@ -384,7 +384,7 @@ RunPager::~RunPager()
 }
 
 
-string showBytes(unsigned long long bytes)
+string showBytes(uint64_t bytes)
 {
     return (format("%.2f MiB") % (bytes / (1024.0 * 1024.0))).str();
 }
@@ -47,7 +47,7 @@ void printMissing(
 
 void printMissing(ref<Store> store, const StorePathSet & willBuild,
     const StorePathSet & willSubstitute, const StorePathSet & unknown,
-    unsigned long long downloadSize, unsigned long long narSize, Verbosity lvl = lvlInfo);
+    uint64_t downloadSize, uint64_t narSize, Verbosity lvl = lvlInfo);
 
 string getArg(const string & opt,
     Strings::iterator & i, const Strings::iterator & end);
@@ -110,7 +110,7 @@ extern volatile ::sig_atomic_t blockInt;
 
 /* GC helpers. */
 
-string showBytes(unsigned long long bytes);
+string showBytes(uint64_t bytes);
 
 struct GCResults;
 
@@ -178,7 +178,7 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource
     auto [fileHash, fileSize] = fileHashSink.finish();
     narInfo->fileHash = fileHash;
     narInfo->fileSize = fileSize;
-    narInfo->url = "nar/" + narInfo->fileHash.to_string(Base32, false) + ".nar"
+    narInfo->url = "nar/" + narInfo->fileHash->to_string(Base32, false) + ".nar"
         + (compression == "xz" ? ".xz" :
            compression == "bzip2" ? ".bz2" :
            compression == "br" ? ".br" :
@@ -372,7 +372,7 @@ StorePath BinaryCacheStore::addToStore(const string & name, const Path & srcPath
        method for very large paths, but `copyPath' is mainly used for
        small files. */
     StringSink sink;
-    Hash h;
+    std::optional<Hash> h;
     if (method == FileIngestionMethod::Recursive) {
         dumpPath(srcPath, sink, filter);
         h = hashString(hashAlgo, *sink.s);
@@ -382,7 +382,7 @@ StorePath BinaryCacheStore::addToStore(const string & name, const Path & srcPath
         h = hashString(hashAlgo, s);
     }
 
-    ValidPathInfo info(makeFixedOutputPath(method, h, name));
+    ValidPathInfo info(makeFixedOutputPath(method, *h, name));
 
     auto source = StringSource { *sink.s };
     addToStore(info, source, repair, CheckSigs);
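`Hash h;` becomes `std::optional<Hash> h;` here, which suggests `Hash` is losing its default "empty" state elsewhere in this series (the `narHash`/`fileHash` fields also turn into optionals); the optional makes the "assigned in exactly one branch before use" pattern explicit. A stand-in demo of the pattern:

```cpp
#include <cassert>
#include <optional>

// `Hash` here is a stand-in type, not the real nix::Hash.
struct Hash { int v; };

int main()
{
    std::optional<Hash> h;          // no value yet; no default Hash needed
    bool recursive = true;
    if (recursive) h = Hash{1};     // computed in one branch...
    else           h = Hash{2};     // ...or the other
    assert(h);                      // must be set before use
    return h->v;                    // dereference, like *h / h-> above
}
```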
@@ -297,7 +297,7 @@ public:
     GoalPtr makeDerivationGoal(const StorePath & drvPath, const StringSet & wantedOutputs, BuildMode buildMode = bmNormal);
     std::shared_ptr<DerivationGoal> makeBasicDerivationGoal(const StorePath & drvPath,
         const BasicDerivation & drv, BuildMode buildMode = bmNormal);
-    GoalPtr makeSubstitutionGoal(const StorePath & storePath, RepairFlag repair = NoRepair);
+    GoalPtr makeSubstitutionGoal(const StorePath & storePath, RepairFlag repair = NoRepair, std::optional<ContentAddress> ca = std::nullopt);
 
     /* Remove a dead goal. */
     void removeGoal(GoalPtr goal);
@@ -1047,7 +1047,7 @@ DerivationGoal::DerivationGoal(const StorePath & drvPath, const BasicDerivation
 {
     this->drv = std::make_unique<BasicDerivation>(BasicDerivation(drv));
     state = &DerivationGoal::haveDerivation;
-    name = fmt("building of %s", worker.store.showPaths(drv.outputPaths()));
+    name = fmt("building of %s", worker.store.showPaths(drv.outputPaths(worker.store)));
     trace("created");
 
     mcExpectedBuilds = std::make_unique<MaintainCount<uint64_t>>(worker.expectedBuilds);
@@ -1182,7 +1182,7 @@ void DerivationGoal::haveDerivation()
     retrySubstitution = false;
 
     for (auto & i : drv->outputs)
-        worker.store.addTempRoot(i.second.path);
+        worker.store.addTempRoot(i.second.path(worker.store, drv->name));
 
     /* Check what outputs paths are not already valid. */
     auto invalidOutputs = checkPathValidity(false, buildMode == bmRepair);
@@ -1206,7 +1206,7 @@ void DerivationGoal::haveDerivation()
        them. */
     if (settings.useSubstitutes && parsedDrv->substitutesAllowed())
         for (auto & i : invalidOutputs)
-            addWaitee(worker.makeSubstitutionGoal(i, buildMode == bmRepair ? Repair : NoRepair));
+            addWaitee(worker.makeSubstitutionGoal(i, buildMode == bmRepair ? Repair : NoRepair, getDerivationCA(*drv)));
 
     if (waitees.empty()) /* to prevent hang (no wake-up event) */
         outputsSubstituted();
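`getDerivationCA(*drv)` is called above but not defined in this diff. A plausible shape, inferred from the new `DerivationOutputFixed` type introduced later in this commit: for a fixed-output derivation, surface its expected content address so the substitution goal can key on content rather than on an exact store path. Treat this as an assumption, not the actual helper:

```cpp
// Hypothetical sketch of getDerivationCA (not shown in this diff): expose
// the content address of a fixed-output derivation's single output, if any.
std::optional<ContentAddress> getDerivationCASketch(const BasicDerivation & drv)
{
    if (!drv.isFixedOutput()) return std::nullopt;
    auto & out = drv.outputs.begin()->second;
    if (auto fixed = std::get_if<DerivationOutputFixed>(&out.output))
        return ContentAddress { fixed->hash };   // FixedOutputHash alternative
    return std::nullopt;
}
```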
@@ -1290,12 +1290,12 @@ void DerivationGoal::repairClosure()
     StorePathSet outputClosure;
     for (auto & i : drv->outputs) {
         if (!wantOutput(i.first, wantedOutputs)) continue;
-        worker.store.computeFSClosure(i.second.path, outputClosure);
+        worker.store.computeFSClosure(i.second.path(worker.store, drv->name), outputClosure);
     }
 
     /* Filter out our own outputs (which we have already checked). */
     for (auto & i : drv->outputs)
-        outputClosure.erase(i.second.path);
+        outputClosure.erase(i.second.path(worker.store, drv->name));
 
     /* Get all dependencies of this derivation so that we know which
        derivation is responsible for which path in the output
@@ -1307,7 +1307,7 @@ void DerivationGoal::repairClosure()
         if (i.isDerivation()) {
             Derivation drv = worker.store.derivationFromPath(i);
             for (auto & j : drv.outputs)
-                outputsToDrv.insert_or_assign(j.second.path, i);
+                outputsToDrv.insert_or_assign(j.second.path(worker.store, drv.name), i);
         }
 
     /* Check each path (slow!). */
@@ -1379,7 +1379,7 @@ void DerivationGoal::inputsRealised()
                 for (auto & j : i.second) {
                     auto k = inDrv.outputs.find(j);
                     if (k != inDrv.outputs.end())
-                        worker.store.computeFSClosure(k->second.path, inputPaths);
+                        worker.store.computeFSClosure(k->second.path(worker.store, inDrv.name), inputPaths);
                     else
                         throw Error(
                             "derivation '%s' requires non-existent output '%s' from input derivation '%s'",
@@ -1432,7 +1432,7 @@ void DerivationGoal::tryToBuild()
        goal can start a build, and if not, the main loop will sleep a
        few seconds and then retry this goal. */
     PathSet lockFiles;
-    for (auto & outPath : drv->outputPaths())
+    for (auto & outPath : drv->outputPaths(worker.store))
         lockFiles.insert(worker.store.Store::toRealPath(outPath));
 
     if (!outputLocks.lockPaths(lockFiles, "", false)) {
@@ -1460,16 +1460,16 @@ void DerivationGoal::tryToBuild()
         return;
     }
 
-    missingPaths = drv->outputPaths();
+    missingPaths = drv->outputPaths(worker.store);
     if (buildMode != bmCheck)
         for (auto & i : validPaths) missingPaths.erase(i);
 
     /* If any of the outputs already exist but are not valid, delete
        them. */
     for (auto & i : drv->outputs) {
-        if (worker.store.isValidPath(i.second.path)) continue;
-        debug("removing invalid path '%s'", worker.store.printStorePath(i.second.path));
-        deletePath(worker.store.Store::toRealPath(i.second.path));
+        if (worker.store.isValidPath(i.second.path(worker.store, drv->name))) continue;
+        debug("removing invalid path '%s'", worker.store.printStorePath(i.second.path(worker.store, drv->name)));
+        deletePath(worker.store.Store::toRealPath(i.second.path(worker.store, drv->name)));
     }
 
     /* Don't do a remote build if the derivation has the attribute
@@ -1646,13 +1646,13 @@ void DerivationGoal::buildDone()
            So instead, check if the disk is (nearly) full now. If
           so, we don't mark this build as a permanent failure. */
 #if HAVE_STATVFS
-        unsigned long long required = 8ULL * 1024 * 1024; // FIXME: make configurable
+        uint64_t required = 8ULL * 1024 * 1024; // FIXME: make configurable
         struct statvfs st;
         if (statvfs(worker.store.realStoreDir.c_str(), &st) == 0 &&
-            (unsigned long long) st.f_bavail * st.f_bsize < required)
+            (uint64_t) st.f_bavail * st.f_bsize < required)
             diskFull = true;
         if (statvfs(tmpDir.c_str(), &st) == 0 &&
-            (unsigned long long) st.f_bavail * st.f_bsize < required)
+            (uint64_t) st.f_bavail * st.f_bsize < required)
             diskFull = true;
 #endif
 
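The disk-full check above in isolation: `f_bavail` counts blocks available to unprivileged users, so free bytes are `f_bavail * f_bsize`, and the 64-bit cast prevents the multiplication overflowing on 32-bit systems. Self-contained demo:

```cpp
#include <sys/statvfs.h>
#include <cstdint>
#include <cstdio>

int main()
{
    struct statvfs st;
    uint64_t required = 8ULL * 1024 * 1024;    // same threshold as above
    if (statvfs("/tmp", &st) == 0) {
        // The cast matters: st.f_bavail * st.f_bsize can overflow 32 bits.
        uint64_t avail = (uint64_t) st.f_bavail * st.f_bsize;
        std::printf("%s\n", avail < required ? "nearly full" : "ok");
    }
}
```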
@@ -1692,7 +1692,7 @@ void DerivationGoal::buildDone()
                 fmt("running post-build-hook '%s'", settings.postBuildHook),
                 Logger::Fields{worker.store.printStorePath(drvPath)});
             PushActivity pact(act.id);
-            auto outputPaths = drv->outputPaths();
+            auto outputPaths = drv->outputPaths(worker.store);
             std::map<std::string, std::string> hookEnvironment = getEnv();
 
             hookEnvironment.emplace("DRV_PATH", worker.store.printStorePath(drvPath));
@@ -1920,7 +1920,7 @@ StorePathSet DerivationGoal::exportReferences(const StorePathSet & storePaths)
         if (j.isDerivation()) {
             Derivation drv = worker.store.derivationFromPath(j);
             for (auto & k : drv.outputs)
-                worker.store.computeFSClosure(k.second.path, paths);
+                worker.store.computeFSClosure(k.second.path(worker.store, drv.name), paths);
         }
     }
 
@@ -2015,7 +2015,7 @@ void DerivationGoal::startBuilder()
 
     /* Substitute output placeholders with the actual output paths. */
     for (auto & output : drv->outputs)
-        inputRewrites[hashPlaceholder(output.first)] = worker.store.printStorePath(output.second.path);
+        inputRewrites[hashPlaceholder(output.first)] = worker.store.printStorePath(output.second.path(worker.store, drv->name));
 
     /* Construct the environment passed to the builder. */
     initEnv();
@@ -2200,7 +2200,7 @@ void DerivationGoal::startBuilder()
            (typically the dependencies of /bin/sh). Throw them
           out. */
         for (auto & i : drv->outputs)
-            dirsInChroot.erase(worker.store.printStorePath(i.second.path));
+            dirsInChroot.erase(worker.store.printStorePath(i.second.path(worker.store, drv->name)));
 
 #elif __APPLE__
         /* We don't really have any parent prep work to do (yet?)
@@ -2613,7 +2613,7 @@ void DerivationGoal::writeStructuredAttrs()
     /* Add an "outputs" object containing the output paths. */
     nlohmann::json outputs;
     for (auto & i : drv->outputs)
-        outputs[i.first] = rewriteStrings(worker.store.printStorePath(i.second.path), inputRewrites);
+        outputs[i.first] = rewriteStrings(worker.store.printStorePath(i.second.path(worker.store, drv->name)), inputRewrites);
     json["outputs"] = outputs;
 
     /* Handle exportReferencesGraph. */
@@ -2774,7 +2774,7 @@ struct RestrictedStore : public LocalFSStore
         goal.addDependency(info.path);
     }
 
-    StorePath addToStoreFromDump(const string & dump, const string & name,
+    StorePath addToStoreFromDump(Source & dump, const string & name,
         FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair) override
     {
         auto path = next->addToStoreFromDump(dump, name, method, hashAlgo, repair);
@@ -2817,7 +2817,7 @@ struct RestrictedStore : public LocalFSStore
             auto drv = derivationFromPath(path.path);
             for (auto & output : drv.outputs)
                 if (wantOutput(output.first, path.outputs))
-                    newPaths.insert(output.second.path);
+                    newPaths.insert(output.second.path(*this, drv.name));
         } else if (!goal.isAllowed(path.path))
             throw InvalidPath("cannot build unknown path '%s' in recursive Nix", printStorePath(path.path));
     }
@@ -2851,7 +2851,7 @@ struct RestrictedStore : public LocalFSStore
 
     void queryMissing(const std::vector<StorePathWithOutputs> & targets,
         StorePathSet & willBuild, StorePathSet & willSubstitute, StorePathSet & unknown,
-        unsigned long long & downloadSize, unsigned long long & narSize) override
+        uint64_t & downloadSize, uint64_t & narSize) override
     {
         /* This is slightly impure since it leaks information to the
            client about what paths will be built/substituted or are
@@ -3579,7 +3579,7 @@ StorePathSet parseReferenceSpecifiers(Store & store, const BasicDerivation & drv
         if (store.isStorePath(i))
             result.insert(store.parseStorePath(i));
         else if (drv.outputs.count(i))
-            result.insert(drv.outputs.find(i)->second.path);
+            result.insert(drv.outputs.find(i)->second.path(store, drv.name));
         else throw BuildError("derivation contains an illegal reference specifier '%s'", i);
     }
     return result;
@@ -3617,7 +3617,7 @@ void DerivationGoal::registerOutputs()
     if (hook) {
         bool allValid = true;
         for (auto & i : drv->outputs)
-            if (!worker.store.isValidPath(i.second.path)) allValid = false;
+            if (!worker.store.isValidPath(i.second.path(worker.store, drv->name))) allValid = false;
         if (allValid) return;
     }
 
@@ -3638,23 +3638,23 @@ void DerivationGoal::registerOutputs()
        Nix calls. */
     StorePathSet referenceablePaths;
     for (auto & p : inputPaths) referenceablePaths.insert(p);
-    for (auto & i : drv->outputs) referenceablePaths.insert(i.second.path);
+    for (auto & i : drv->outputs) referenceablePaths.insert(i.second.path(worker.store, drv->name));
     for (auto & p : addedPaths) referenceablePaths.insert(p);
 
     /* Check whether the output paths were created, and grep each
        output path to determine what other paths it references. Also make all
        output paths read-only. */
     for (auto & i : drv->outputs) {
-        auto path = worker.store.printStorePath(i.second.path);
-        if (!missingPaths.count(i.second.path)) continue;
+        auto path = worker.store.printStorePath(i.second.path(worker.store, drv->name));
+        if (!missingPaths.count(i.second.path(worker.store, drv->name))) continue;
 
         Path actualPath = path;
         if (needsHashRewrite()) {
-            auto r = redirectedOutputs.find(i.second.path);
+            auto r = redirectedOutputs.find(i.second.path(worker.store, drv->name));
             if (r != redirectedOutputs.end()) {
                 auto redirected = worker.store.Store::toRealPath(r->second);
                 if (buildMode == bmRepair
-                    && redirectedBadOutputs.count(i.second.path)
+                    && redirectedBadOutputs.count(i.second.path(worker.store, drv->name))
                     && pathExists(redirected))
                     replaceValidPath(path, redirected);
                 if (buildMode == bmCheck)
@@ -3723,7 +3723,9 @@ void DerivationGoal::registerOutputs()
 
         if (fixedOutput) {
 
-            if (i.second.hash->method == FileIngestionMethod::Flat) {
+            FixedOutputHash outputHash = std::get<DerivationOutputFixed>(i.second.output).hash;
+
+            if (outputHash.method == FileIngestionMethod::Flat) {
                 /* The output path should be a regular file without execute permission. */
                 if (!S_ISREG(st.st_mode) || (st.st_mode & S_IXUSR) != 0)
                     throw BuildError(
@@ -3734,13 +3736,13 @@ void DerivationGoal::registerOutputs()
 
             /* Check the hash. In hash mode, move the path produced by
                the derivation to its content-addressed location. */
-            Hash h2 = i.second.hash->method == FileIngestionMethod::Recursive
-                ? hashPath(*i.second.hash->hash.type, actualPath).first
-                : hashFile(*i.second.hash->hash.type, actualPath);
+            Hash h2 = outputHash.method == FileIngestionMethod::Recursive
+                ? hashPath(outputHash.hash.type, actualPath).first
+                : hashFile(outputHash.hash.type, actualPath);
 
-            auto dest = worker.store.makeFixedOutputPath(i.second.hash->method, h2, i.second.path.name());
+            auto dest = worker.store.makeFixedOutputPath(outputHash.method, h2, i.second.path(worker.store, drv->name).name());
 
-            if (i.second.hash->hash != h2) {
+            if (outputHash.hash != h2) {
 
                 /* Throw an error after registering the path as
                    valid. */
@@ -3748,7 +3750,7 @@ void DerivationGoal::registerOutputs()
                 delayedException = std::make_exception_ptr(
                     BuildError("hash mismatch in fixed-output derivation '%s':\n  wanted: %s\n  got:    %s",
                         worker.store.printStorePath(dest),
-                        i.second.hash->hash.to_string(SRI, true),
+                        outputHash.hash.to_string(SRI, true),
                         h2.to_string(SRI, true)));
 
                 Path actualDest = worker.store.Store::toRealPath(dest);
@@ -3770,7 +3772,7 @@ void DerivationGoal::registerOutputs()
             assert(worker.store.parseStorePath(path) == dest);
 
             ca = FixedOutputHash {
-                .method = i.second.hash->method,
+                .method = outputHash.method,
                 .hash = h2,
             };
         }
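The fixed-output verification above, condensed: flat outputs are hashed as a single file, recursive outputs as a NAR, and a mismatch against the declared hash is an error. A sketch assuming the `hashPath`/`hashFile` helpers with the signatures used in the hunk:

```cpp
// Sketch of the check performed above, factored out for clarity; FixedOutputHash,
// hashPath and hashFile are used with the shapes shown in this hunk.
Hash verifyFixedOutput(const FixedOutputHash & expected, const Path & actualPath)
{
    Hash got = expected.method == FileIngestionMethod::Recursive
        ? hashPath(expected.hash.type, actualPath).first   // NAR hash of the tree
        : hashFile(expected.hash.type, actualPath);        // flat file hash
    if (expected.hash != got)
        throw Error("hash mismatch: wanted %s, got %s",
            expected.hash.to_string(SRI, true), got.to_string(SRI, true));
    return got;
}
```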
@@ -3785,8 +3787,10 @@ void DerivationGoal::registerOutputs()
            time. The hash is stored in the database so that we can
           verify later on whether nobody has messed with the store. */
         debug("scanning for references inside '%1%'", path);
-        HashResult hash;
-        auto references = worker.store.parseStorePathSet(scanForReferences(actualPath, worker.store.printStorePathSet(referenceablePaths), hash));
+        // HashResult hash;
+        auto pathSetAndHash = scanForReferences(actualPath, worker.store.printStorePathSet(referenceablePaths));
+        auto references = worker.store.parseStorePathSet(pathSetAndHash.first);
+        HashResult hash = pathSetAndHash.second;
 
         if (buildMode == bmCheck) {
             if (!worker.store.isValidPath(worker.store.parseStorePath(path))) continue;
@@ -3894,7 +3898,7 @@ void DerivationGoal::registerOutputs()
     /* If this is the first round of several, then move the output out of the way. */
     if (nrRounds > 1 && curRound == 1 && curRound < nrRounds && keepPreviousRound) {
         for (auto & i : drv->outputs) {
-            auto path = worker.store.printStorePath(i.second.path);
+            auto path = worker.store.printStorePath(i.second.path(worker.store, drv->name));
             Path prev = path + checkSuffix;
             deletePath(prev);
             Path dst = path + checkSuffix;
@@ -3912,7 +3916,7 @@ void DerivationGoal::registerOutputs()
        if the result was not determistic? */
     if (curRound == nrRounds) {
         for (auto & i : drv->outputs) {
-            Path prev = worker.store.printStorePath(i.second.path) + checkSuffix;
+            Path prev = worker.store.printStorePath(i.second.path(worker.store, drv->name)) + checkSuffix;
             deletePath(prev);
         }
     }
@@ -4213,9 +4217,9 @@ StorePathSet DerivationGoal::checkPathValidity(bool returnValid, bool checkHash)
     for (auto & i : drv->outputs) {
         if (!wantOutput(i.first, wantedOutputs)) continue;
         bool good =
-            worker.store.isValidPath(i.second.path) &&
-            (!checkHash || worker.pathContentsGood(i.second.path));
-        if (good == returnValid) result.insert(i.second.path);
+            worker.store.isValidPath(i.second.path(worker.store, drv->name)) &&
+            (!checkHash || worker.pathContentsGood(i.second.path(worker.store, drv->name)));
+        if (good == returnValid) result.insert(i.second.path(worker.store, drv->name));
     }
     return result;
 }
@@ -4272,6 +4276,10 @@ private:
     /* The store path that should be realised through a substitute. */
     StorePath storePath;
 
+    /* The path the substituter refers to the path as. This will be
+     * different when the stores have different names. */
+    std::optional<StorePath> subPath;
+
     /* The remaining substituters. */
     std::list<ref<Store>> subs;
 
@@ -4305,8 +4313,11 @@ private:
     typedef void (SubstitutionGoal::*GoalState)();
     GoalState state;
 
+    /* Content address for recomputing store path */
+    std::optional<ContentAddress> ca;
+
 public:
-    SubstitutionGoal(const StorePath & storePath, Worker & worker, RepairFlag repair = NoRepair);
+    SubstitutionGoal(const StorePath & storePath, Worker & worker, RepairFlag repair = NoRepair, std::optional<ContentAddress> ca = std::nullopt);
     ~SubstitutionGoal();
 
     void timedOut(Error && ex) override { abort(); };
@@ -4336,10 +4347,11 @@ public:
 };
 
 
-SubstitutionGoal::SubstitutionGoal(const StorePath & storePath, Worker & worker, RepairFlag repair)
+SubstitutionGoal::SubstitutionGoal(const StorePath & storePath, Worker & worker, RepairFlag repair, std::optional<ContentAddress> ca)
     : Goal(worker)
     , storePath(storePath)
     , repair(repair)
+    , ca(ca)
 {
     state = &SubstitutionGoal::init;
     name = fmt("substitution of '%s'", worker.store.printStorePath(this->storePath));
@@ -4414,14 +4426,18 @@ void SubstitutionGoal::tryNext()
     sub = subs.front();
     subs.pop_front();
 
-    if (sub->storeDir != worker.store.storeDir) {
+    if (ca) {
+        subPath = sub->makeFixedOutputPathFromCA(storePath.name(), *ca);
+        if (sub->storeDir == worker.store.storeDir)
+            assert(subPath == storePath);
+    } else if (sub->storeDir != worker.store.storeDir) {
         tryNext();
         return;
     }
 
     try {
         // FIXME: make async
-        info = sub->queryPathInfo(storePath);
+        info = sub->queryPathInfo(subPath ? *subPath : storePath);
     } catch (InvalidPath &) {
         tryNext();
         return;
@@ -4440,6 +4456,19 @@ void SubstitutionGoal::tryNext()
         throw;
     }
 
+    if (info->path != storePath) {
+        if (info->isContentAddressed(*sub) && info->references.empty()) {
+            auto info2 = std::make_shared<ValidPathInfo>(*info);
+            info2->path = storePath;
+            info = info2;
+        } else {
+            printError("asked '%s' for '%s' but got '%s'",
+                sub->getUri(), worker.store.printStorePath(storePath), sub->printStorePath(info->path));
+            tryNext();
+            return;
+        }
+    }
+
     /* Update the total expected download size. */
     auto narInfo = std::dynamic_pointer_cast<const NarInfo>(info);
 
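The substitution changes above handle stores with different prefixes: the same content has a different store-path digest under a different store directory, so with a content address in hand the goal recomputes the remote name, queries and copies under that name, then re-labels the result as the local path. The remapping step, condensed into a sketch using the calls from the hunk:

```cpp
// Sketch of the subPath remapping above; makeFixedOutputPathFromCA is used
// with the signature shown in the hunk.
std::optional<StorePath> remotePathFor(
    Store & localStore, Store & sub,
    const StorePath & storePath,
    const std::optional<ContentAddress> & ca)
{
    if (!ca) return std::nullopt;                         // fall back to exact match
    auto subPath = sub.makeFixedOutputPathFromCA(storePath.name(), *ca);
    if (sub.storeDir == localStore.storeDir)
        assert(subPath == storePath);                     // same prefix => same digest
    return subPath;
}
```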
@@ -4529,7 +4558,7 @@ void SubstitutionGoal::tryToRun()
         PushActivity pact(act.id);
 
         copyStorePath(ref<Store>(sub), ref<Store>(worker.store.shared_from_this()),
-            storePath, repair, sub->isTrusted ? NoCheckSigs : CheckSigs);
+            subPath ? *subPath : storePath, repair, sub->isTrusted ? NoCheckSigs : CheckSigs);
 
         promise.set_value();
     } catch (...) {
@@ -4662,11 +4691,11 @@ std::shared_ptr<DerivationGoal> Worker::makeBasicDerivationGoal(const StorePath
 }
 
 
-GoalPtr Worker::makeSubstitutionGoal(const StorePath & path, RepairFlag repair)
+GoalPtr Worker::makeSubstitutionGoal(const StorePath & path, RepairFlag repair, std::optional<ContentAddress> ca)
 {
     GoalPtr goal = substitutionGoals[path].lock(); // FIXME
     if (!goal) {
-        goal = std::make_shared<SubstitutionGoal>(path, *this, repair);
+        goal = std::make_shared<SubstitutionGoal>(path, *this, repair, ca);
         substitutionGoals.insert_or_assign(path, goal);
         wakeUp(goal);
     }
@@ -5008,7 +5037,7 @@ bool Worker::pathContentsGood(const StorePath & path)
     if (!pathExists(store.printStorePath(path)))
         res = false;
     else {
-        HashResult current = hashPath(*info->narHash.type, store.printStorePath(path));
+        HashResult current = hashPath(info->narHash->type, store.printStorePath(path));
         Hash nullHash(htSHA256);
         res = info->narHash == nullHash || info->narHash == current.first;
     }
@@ -5034,7 +5063,7 @@ void Worker::markContentsGood(const StorePath & path)
 static void primeCache(Store & store, const std::vector<StorePathWithOutputs> & paths)
 {
     StorePathSet willBuild, willSubstitute, unknown;
-    unsigned long long downloadSize, narSize;
+    uint64_t downloadSize, narSize;
     store.queryMissing(paths, willBuild, willSubstitute, unknown, downloadSize, narSize);
 
     if (!willBuild.empty() && 0 == settings.maxBuildJobs && getMachines().empty())
@@ -9,7 +9,7 @@ struct Package {
     Path path;
     bool active;
     int priority;
-    Package(Path path, bool active, int priority) : path{path}, active{active}, priority{priority} {}
+    Package(const Path & path, bool active, int priority) : path{path}, active{active}, priority{priority} {}
 };
 
 typedef std::vector<Package> Packages;
@@ -58,23 +58,6 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData)
         }
     };
 
-    /* We always have one output, and if it's a fixed-output derivation (as
-       checked below) it must be the only output */
-    auto & output = drv.outputs.begin()->second;
-
-    /* Try the hashed mirrors first. */
-    if (output.hash && output.hash->method == FileIngestionMethod::Flat)
-        for (auto hashedMirror : settings.hashedMirrors.get())
-            try {
-                if (!hasSuffix(hashedMirror, "/")) hashedMirror += '/';
-                auto & h = output.hash->hash;
-                fetch(hashedMirror + printHashType(*h.type) + "/" + h.to_string(Base16, false));
-                return;
-            } catch (Error & e) {
-                debug(e.what());
-            }
-
-    /* Otherwise try the specified URL. */
     fetch(mainUrl);
 }
 
@@ -3,7 +3,7 @@
 namespace nix {
 
 std::string FixedOutputHash::printMethodAlgo() const {
-    return makeFileIngestionPrefix(method) + printHashType(*hash.type);
+    return makeFileIngestionPrefix(method) + printHashType(hash.type);
 }
 
 std::string makeFileIngestionPrefix(const FileIngestionMethod m) {
@@ -46,7 +46,7 @@ ContentAddress parseContentAddress(std::string_view rawCa) {
     if (prefix == "text") {
         auto hashTypeAndHash = rawCa.substr(prefixSeparator+1, string::npos);
         Hash hash = Hash(string(hashTypeAndHash));
-        if (*hash.type != htSHA256) {
+        if (hash.type != htSHA256) {
             throw Error("parseContentAddress: the text hash should have type SHA256");
         }
         return TextHash { hash };
@@ -173,31 +173,6 @@ struct TunnelSource : BufferedSource
     }
 };
 
-/* If the NAR archive contains a single file at top-level, then save
-   the contents of the file to `s'. Otherwise barf. */
-struct RetrieveRegularNARSink : ParseSink
-{
-    bool regular;
-    string s;
-
-    RetrieveRegularNARSink() : regular(true) { }
-
-    void createDirectory(const Path & path)
-    {
-        regular = false;
-    }
-
-    void receiveContents(unsigned char * data, unsigned int len)
-    {
-        s.append((const char *) data, len);
-    }
-
-    void createSymlink(const Path & path, const string & target)
-    {
-        regular = false;
-    }
-};
-
 struct ClientSettings
 {
     bool keepFailed;
@@ -314,7 +289,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
         logger->startWork();
         auto hash = store->queryPathInfo(path)->narHash;
         logger->stopWork();
-        to << hash.to_string(Base16, false);
+        to << hash->to_string(Base16, false);
         break;
     }
 
@@ -375,25 +350,28 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
     }
 
     case wopAddToStore: {
-        std::string s, baseName;
+        HashType hashAlgo;
+        std::string baseName;
         FileIngestionMethod method;
         {
-            bool fixed; uint8_t recursive;
-            from >> baseName >> fixed /* obsolete */ >> recursive >> s;
+            bool fixed;
+            uint8_t recursive;
+            std::string hashAlgoRaw;
+            from >> baseName >> fixed /* obsolete */ >> recursive >> hashAlgoRaw;
             if (recursive > (uint8_t) FileIngestionMethod::Recursive)
                 throw Error("unsupported FileIngestionMethod with value of %i; you may need to upgrade nix-daemon", recursive);
             method = FileIngestionMethod { recursive };
             /* Compatibility hack. */
             if (!fixed) {
-                s = "sha256";
+                hashAlgoRaw = "sha256";
                 method = FileIngestionMethod::Recursive;
             }
+            hashAlgo = parseHashType(hashAlgoRaw);
         }
-        HashType hashAlgo = parseHashType(s);
 
-        StringSink savedNAR;
-        TeeSource savedNARSource(from, savedNAR);
-        RetrieveRegularNARSink savedRegular;
+        StringSink saved;
+        TeeSource savedNARSource(from, saved);
+        RetrieveRegularNARSink savedRegular { saved };
 
         if (method == FileIngestionMethod::Recursive) {
             /* Get the entire NAR dump from the client and save it to
@@ -407,11 +385,9 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
         logger->startWork();
         if (!savedRegular.regular) throw Error("regular file expected");
 
-        auto path = store->addToStoreFromDump(
-            method == FileIngestionMethod::Recursive ? *savedNAR.s : savedRegular.s,
-            baseName,
-            method,
-            hashAlgo);
+        // FIXME: try to stream directly from `from`.
+        StringSource dumpSource { *saved.s };
+        auto path = store->addToStoreFromDump(dumpSource, baseName, method, hashAlgo);
         logger->stopWork();
 
         to << store->printStorePath(path);
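The rewritten `wopAddToStore` path buffers the client's dump and replays it through the new `Source`-based `addToStoreFromDump`. The adapter pattern in isolation — a `StringSource` serving a byte buffer through the `Source` interface — as a fragment against the APIs shown above:

```cpp
// Sketch of the buffer-then-replay pattern used above; StringSource and
// addToStoreFromDump are used with the signatures shown in this hunk.
StorePath addBufferedDump(Store & store, const std::string & dump,
    const std::string & baseName, FileIngestionMethod method, HashType hashAlgo)
{
    StringSource dumpSource { dump };   // serves `dump` via the Source interface
    return store.addToStoreFromDump(dumpSource, baseName, method, hashAlgo);
}
```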
@@ -475,7 +451,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
     case wopBuildDerivation: {
         auto drvPath = store->parseStorePath(readString(from));
         BasicDerivation drv;
-        readDerivation(from, *store, drv);
+        readDerivation(from, *store, drv, Derivation::nameFromPath(drvPath));
         BuildMode buildMode = (BuildMode) readInt(from);
         logger->startWork();
         if (!trusted)
@@ -603,7 +579,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
         auto path = store->parseStorePath(readString(from));
         logger->startWork();
         SubstitutablePathInfos infos;
-        store->querySubstitutablePathInfos({path}, infos);
+        store->querySubstitutablePathInfos({{path, std::nullopt}}, infos);
         logger->stopWork();
         auto i = infos.find(path);
         if (i == infos.end())
@@ -619,10 +595,16 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
     }
 
     case wopQuerySubstitutablePathInfos: {
-        auto paths = readStorePaths<StorePathSet>(*store, from);
-        logger->startWork();
         SubstitutablePathInfos infos;
-        store->querySubstitutablePathInfos(paths, infos);
+        StorePathCAMap pathsMap = {};
+        if (GET_PROTOCOL_MINOR(clientVersion) < 22) {
+            auto paths = readStorePaths<StorePathSet>(*store, from);
+            for (auto & path : paths)
+                pathsMap.emplace(path, std::nullopt);
+        } else
+            pathsMap = readStorePathCAMap(*store, from);
+        logger->startWork();
+        store->querySubstitutablePathInfos(pathsMap, infos);
         logger->stopWork();
         to << infos.size();
         for (auto & i : infos) {
@@ -656,7 +638,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
         if (GET_PROTOCOL_MINOR(clientVersion) >= 17)
             to << 1;
         to << (info->deriver ? store->printStorePath(*info->deriver) : "")
-           << info->narHash.to_string(Base16, false);
+           << info->narHash->to_string(Base16, false);
         writeStorePaths(*store, to, info->references);
         to << info->registrationTime << info->narSize;
         if (GET_PROTOCOL_MINOR(clientVersion) >= 16) {
@@ -727,15 +709,73 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
         if (!trusted)
             info.ultimate = false;
 
-        std::string saved;
+        if (GET_PROTOCOL_MINOR(clientVersion) >= 23) {
+
+            struct FramedSource : Source
+            {
+                Source & from;
+                bool eof = false;
+                std::vector<unsigned char> pending;
+                size_t pos = 0;
+
+                FramedSource(Source & from) : from(from)
+                { }
+
+                ~FramedSource()
+                {
+                    if (!eof) {
+                        while (true) {
+                            auto n = readInt(from);
+                            if (!n) break;
+                            std::vector<unsigned char> data(n);
+                            from(data.data(), n);
+                        }
+                    }
+                }
+
+                size_t read(unsigned char * data, size_t len) override
+                {
+                    if (eof) throw EndOfFile("reached end of FramedSource");
+
+                    if (pos >= pending.size()) {
+                        size_t len = readInt(from);
+                        if (!len) {
+                            eof = true;
+                            return 0;
+                        }
+                        pending = std::vector<unsigned char>(len);
+                        pos = 0;
+                        from(pending.data(), len);
+                    }
+
+                    auto n = std::min(len, pending.size() - pos);
+                    memcpy(data, pending.data() + pos, n);
+                    pos += n;
+                    return n;
+                }
+            };
+
+            logger->startWork();
+
+            {
+                FramedSource source(from);
+                store->addToStore(info, source, (RepairFlag) repair,
+                    dontCheckSigs ? NoCheckSigs : CheckSigs);
+            }
+
+            logger->stopWork();
+        }
+
+        else {
             std::unique_ptr<Source> source;
             if (GET_PROTOCOL_MINOR(clientVersion) >= 21)
                 source = std::make_unique<TunnelSource>(from, to);
             else {
-                TeeParseSink tee(from);
-                parseDump(tee, tee.source);
-                saved = std::move(*tee.saved.s);
-                source = std::make_unique<StringSource>(saved);
+                StringSink saved;
+                TeeSource tee { from, saved };
+                ParseSink ether;
+                parseDump(ether, tee);
+                source = std::make_unique<StringSource>(std::move(*saved.s));
             }
 
             logger->startWork();
@@ -745,6 +785,8 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
                 dontCheckSigs ? NoCheckSigs : CheckSigs);
 
             logger->stopWork();
+        }
 
         break;
     }
 
@@ -754,7 +796,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
         targets.push_back(store->parsePathWithOutputs(s));
         logger->startWork();
         StorePathSet willBuild, willSubstitute, unknown;
-        unsigned long long downloadSize, narSize;
+        uint64_t downloadSize, narSize;
         store->queryMissing(targets, willBuild, willSubstitute, unknown, downloadSize, narSize);
         logger->stopWork();
         writeStorePaths(*store, to, willBuild);
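`FramedSource` reads a stream of `<u64 length><payload>` frames terminated by a zero-length frame, so the daemon can tell where the NAR ends without buffering it whole. The writer half of this framing protocol is not shown in this hunk; a minimal sketch, assuming the `Sink` interface and the `operator <<` serialisation helpers from libutil:

```cpp
// Hypothetical writer counterpart to FramedSource above: each chunk becomes
// one <u64 length><bytes> frame, and destruction emits the zero-length
// terminator frame that flips FramedSource's eof flag.
struct FramedSinkSketch
{
    Sink & to;
    FramedSinkSketch(Sink & to) : to(to) { }

    void send(const unsigned char * data, size_t len)
    {
        to << (uint64_t) len;      // frame header
        to(data, len);             // frame payload
    }

    ~FramedSinkSketch()
    {
        to << (uint64_t) 0;        // zero-length frame = end of stream
    }
};
```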
@@ -7,12 +7,20 @@
 
 namespace nix {
 
-const StorePath & BasicDerivation::findOutput(const string & id) const
+// FIXME Put this somewhere?
+template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
+template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;
+
+StorePath DerivationOutput::path(const Store & store, std::string_view drvName) const
 {
-    auto i = outputs.find(id);
-    if (i == outputs.end())
-        throw Error("derivation has no output '%s'", id);
-    return i->second.path;
+    return std::visit(overloaded {
+        [](DerivationOutputInputAddressed doi) {
+            return doi.path;
+        },
+        [&](DerivationOutputFixed dof) {
+            return store.makeFixedOutputPath(dof.hash.method, dof.hash.hash, drvName);
+        }
+    }, output);
 }
 
 
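The `overloaded` helper above is a stock C++17 idiom: inherit the call operators of several lambdas (with a deduction guide) so `std::visit` can dispatch on whichever alternative the variant holds. Self-contained demo with stand-in types:

```cpp
#include <iostream>
#include <variant>

template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;

// Stand-ins for DerivationOutputInputAddressed / DerivationOutputFixed.
struct InputAddressed { int path; };
struct Fixed { int hash; };

int main()
{
    std::variant<InputAddressed, Fixed> output = Fixed{42};
    int r = std::visit(overloaded {
        [](InputAddressed ia) { return ia.path; },  // picked for the first alternative
        [](Fixed f) { return f.hash; },             // picked for the second
    }, output);
    std::cout << r << "\n";  // prints 42
}
```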
@@ -107,7 +115,6 @@ static DerivationOutput parseDerivationOutput(const Store & store, std::istrings
     expect(str, ","); const auto hash = parseString(str);
     expect(str, ")");
 
-    std::optional<FixedOutputHash> fsh;
     if (hashAlgo != "") {
         auto method = FileIngestionMethod::Flat;
         if (string(hashAlgo, 0, 2) == "r:") {
@@ -115,22 +122,29 @@ static DerivationOutput parseDerivationOutput(const Store & store, std::istrings
             hashAlgo = string(hashAlgo, 2);
         }
         const HashType hashType = parseHashType(hashAlgo);
-        fsh = FixedOutputHash {
-            .method = std::move(method),
-            .hash = Hash(hash, hashType),
-        };
-    }
-
-    return DerivationOutput {
-        .path = std::move(path),
-        .hash = std::move(fsh),
-    };
+        return DerivationOutput {
+            .output = DerivationOutputFixed {
+                .hash = FixedOutputHash {
+                    .method = std::move(method),
+                    .hash = Hash(hash, hashType),
+                },
+            }
+        };
+    } else
+        return DerivationOutput {
+            .output = DerivationOutputInputAddressed {
+                .path = std::move(path),
+            }
+        };
 }
 
 
-Derivation parseDerivation(const Store & store, std::string && s)
+Derivation parseDerivation(const Store & store, std::string && s, std::string_view name)
 {
     Derivation drv;
+    drv.name = name;
+
     std::istringstream str(std::move(s));
     expect(str, "Derive([");
 
@@ -235,10 +249,14 @@ string Derivation::unparse(const Store & store, bool maskOutputs,
     for (auto & i : outputs) {
         if (first) first = false; else s += ',';
         s += '('; printUnquotedString(s, i.first);
-        s += ','; printUnquotedString(s, maskOutputs ? "" : store.printStorePath(i.second.path));
-        s += ','; printUnquotedString(s, i.second.hash ? i.second.hash->printMethodAlgo() : "");
-        s += ','; printUnquotedString(s,
-            i.second.hash ? i.second.hash->hash.to_string(Base16, false) : "");
+        s += ','; printUnquotedString(s, maskOutputs ? "" : store.printStorePath(i.second.path(store, name)));
+        if (auto hash = std::get_if<DerivationOutputFixed>(&i.second.output)) {
+            s += ','; printUnquotedString(s, hash->hash.printMethodAlgo());
+            s += ','; printUnquotedString(s, hash->hash.hash.to_string(Base16, false));
+        } else {
+            s += ','; printUnquotedString(s, "");
+            s += ','; printUnquotedString(s, "");
+        }
         s += ')';
     }
 
@@ -294,7 +312,7 @@ bool BasicDerivation::isFixedOutput() const
 {
     return outputs.size() == 1 &&
         outputs.begin()->first == "out" &&
-        outputs.begin()->second.hash;
+        std::holds_alternative<DerivationOutputFixed>(outputs.begin()->second.output);
 }
 
 
@@ -326,10 +344,11 @@ Hash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOutput
     /* Return a fixed hash for fixed-output derivations. */
     if (drv.isFixedOutput()) {
         DerivationOutputs::const_iterator i = drv.outputs.begin();
+        auto hash = std::get<DerivationOutputFixed>(i->second.output);
         return hashString(htSHA256, "fixed:out:"
-            + i->second.hash->printMethodAlgo() + ":"
-            + i->second.hash->hash.to_string(Base16, false) + ":"
-            + store.printStorePath(i->second.path));
+            + hash.hash.printMethodAlgo() + ":"
+            + hash.hash.hash.to_string(Base16, false) + ":"
+            + store.printStorePath(i->second.path(store, drv.name)));
     }
 
     /* For other derivations, replace the inputs paths with recursive
@@ -363,11 +382,11 @@ bool wantOutput(const string & output, const std::set<string> & wanted)
 }
 
 
-StorePathSet BasicDerivation::outputPaths() const
+StorePathSet BasicDerivation::outputPaths(const Store & store) const
 {
     StorePathSet paths;
     for (auto & i : outputs)
-        paths.insert(i.second.path);
+        paths.insert(i.second.path(store, name));
     return paths;
 }
 
@@ -377,7 +396,6 @@ static DerivationOutput readDerivationOutput(Source & in, const Store & store)
     auto hashAlgo = readString(in);
     auto hash = readString(in);

-    std::optional<FixedOutputHash> fsh;
     if (hashAlgo != "") {
         auto method = FileIngestionMethod::Flat;
         if (string(hashAlgo, 0, 2) == "r:") {

@@ -385,15 +403,19 @@ static DerivationOutput readDerivationOutput(Source & in, const Store & store)
             hashAlgo = string(hashAlgo, 2);
         }
         auto hashType = parseHashType(hashAlgo);
-        fsh = FixedOutputHash {
-            .method = std::move(method),
-            .hash = Hash(hash, hashType),
-        };
-    }
-
-    return DerivationOutput {
-        .path = std::move(path),
-        .hash = std::move(fsh),
-    };
+        return DerivationOutput {
+            .output = DerivationOutputFixed {
+                .hash = FixedOutputHash {
+                    .method = std::move(method),
+                    .hash = Hash(hash, hashType),
+                },
+            }
+        };
+    } else
+        return DerivationOutput {
+            .output = DerivationOutputInputAddressed {
+                .path = std::move(path),
+            }
+        };
 }
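
Note: `DerivationOutput` is no longer a plain struct with an always-present `path` and an optional hash; parsers like the one above now return one of two variant alternatives. A minimal sketch of how downstream code branches on the result (illustrative only; `describeOutput` is not part of this patch):

```cpp
#include <string>
#include <variant>

// Assumes the patched derivations.hh below; illustrative helper only.
std::string describeOutput(const nix::Store & store, std::string_view drvName,
    const nix::DerivationOutput & out)
{
    using namespace nix;
    if (auto fixed = std::get_if<DerivationOutputFixed>(&out.output))
        // Fixed outputs are defined by their expected content hash.
        return "fixed output (" + fixed->hash.printMethodAlgo() + ")";
    // Input-addressed outputs carry their store path directly.
    return "input-addressed: " + store.printStorePath(out.path(store, drvName));
}
```
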
@@ -406,8 +428,19 @@ StringSet BasicDerivation::outputNames() const
 }


-Source & readDerivation(Source & in, const Store & store, BasicDerivation & drv)
+std::string_view BasicDerivation::nameFromPath(const StorePath & drvPath) {
+    auto nameWithSuffix = drvPath.name();
+    constexpr std::string_view extension = ".drv";
+    assert(hasSuffix(nameWithSuffix, extension));
+    nameWithSuffix.remove_suffix(extension.size());
+    return nameWithSuffix;
+}
+
+
+Source & readDerivation(Source & in, const Store & store, BasicDerivation & drv, std::string_view name)
 {
+    drv.name = name;
+
     drv.outputs.clear();
     auto nr = readNum<size_t>(in);
     for (size_t n = 0; n < nr; n++) {
@@ -436,10 +469,10 @@ void writeDerivation(Sink & out, const Store & store, const BasicDerivation & drv)
     out << drv.outputs.size();
     for (auto & i : drv.outputs) {
         out << i.first
-            << store.printStorePath(i.second.path);
-        if (i.second.hash) {
-            out << i.second.hash->printMethodAlgo()
-                << i.second.hash->hash.to_string(Base16, false);
+            << store.printStorePath(i.second.path(store, drv.name));
+        if (auto hash = std::get_if<DerivationOutputFixed>(&i.second.output)) {
+            out << hash->hash.printMethodAlgo()
+                << hash->hash.hash.to_string(Base16, false);
         } else {
             out << "" << "";
         }
@@ -13,10 +13,20 @@ namespace nix {

 /* Abstract syntax of derivations. */

-struct DerivationOutput
+struct DerivationOutputInputAddressed
 {
     StorePath path;
-    std::optional<FixedOutputHash> hash; /* hash used for expected hash computation */
+};
+
+struct DerivationOutputFixed
+{
+    FixedOutputHash hash; /* hash used for expected hash computation */
+};
+
+struct DerivationOutput
+{
+    std::variant<DerivationOutputInputAddressed, DerivationOutputFixed> output;
+    StorePath path(const Store & store, std::string_view drvName) const;
 };

 typedef std::map<string, DerivationOutput> DerivationOutputs;
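
Note: the definition of the new `path()` accessor is not among the hunks shown here. A sketch consistent with the declaration above (an assumption, not the verbatim implementation) would dispatch on the variant and, for fixed outputs, recompute the path from the hash and the derivation name:

```cpp
// Sketch under the assumption that path() is defined in derivations.cc.
StorePath DerivationOutput::path(const Store & store, std::string_view drvName) const
{
    if (auto input = std::get_if<DerivationOutputInputAddressed>(&output))
        return input->path;                  // stored verbatim in the drv
    auto & fixed = std::get<DerivationOutputFixed>(output);
    // Fixed-output paths are a pure function of method, hash and name,
    // which is what lets another store recompute them under its own root.
    return store.makeFixedOutputPath(fixed.hash.method, fixed.hash.hash, drvName);
}
```
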
@@ -35,24 +45,23 @@ struct BasicDerivation
     Path builder;
     Strings args;
     StringPairs env;
+    std::string name;

     BasicDerivation() { }
     virtual ~BasicDerivation() { };

-    /* Return the path corresponding to the output identifier `id' in
-       the given derivation. */
-    const StorePath & findOutput(const std::string & id) const;
-
     bool isBuiltin() const;

     /* Return true iff this is a fixed-output derivation. */
     bool isFixedOutput() const;

     /* Return the output paths of a derivation. */
-    StorePathSet outputPaths() const;
+    StorePathSet outputPaths(const Store & store) const;

     /* Return the output names of a derivation. */
     StringSet outputNames() const;

+    static std::string_view nameFromPath(const StorePath & storePath);
 };

 struct Derivation : BasicDerivation

@@ -76,7 +85,7 @@ StorePath writeDerivation(ref<Store> store,
     const Derivation & drv, std::string_view name, RepairFlag repair = NoRepair);

 /* Read a derivation from a file. */
-Derivation parseDerivation(const Store & store, std::string && s);
+Derivation parseDerivation(const Store & store, std::string && s, std::string_view name);

 // FIXME: remove
 bool isDerivation(const string & fileName);

@@ -93,7 +102,7 @@ bool wantOutput(const string & output, const std::set<string> & wanted);
 struct Source;
 struct Sink;

-Source & readDerivation(Source & in, const Store & store, BasicDerivation & drv);
+Source & readDerivation(Source & in, const Store & store, BasicDerivation & drv, std::string_view name);
 void writeDerivation(Sink & out, const Store & store, const BasicDerivation & drv);

 std::string hashPlaceholder(const std::string & outputName);
@@ -38,9 +38,9 @@ void Store::exportPath(const StorePath & path, Sink & sink)
        filesystem corruption from spreading to other machines.
        Don't complain if the stored hash is zero (unknown). */
     Hash hash = hashSink.currentHash().first;
-    if (hash != info->narHash && info->narHash != Hash(*info->narHash.type))
+    if (hash != info->narHash && info->narHash != Hash(info->narHash->type))
         throw Error("hash of path '%s' has changed from '%s' to '%s'!",
-            printStorePath(path), info->narHash.to_string(Base32, true), hash.to_string(Base32, true));
+            printStorePath(path), info->narHash->to_string(Base32, true), hash.to_string(Base32, true));

     teeSink
         << exportMagic

@@ -60,8 +60,10 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs)
         if (n != 1) throw Error("input doesn't look like something created by 'nix-store --export'");

         /* Extract the NAR from the source. */
-        TeeParseSink tee(source);
-        parseDump(tee, tee.source);
+        StringSink saved;
+        TeeSource tee { source, saved };
+        ParseSink ether;
+        parseDump(ether, tee);

         uint32_t magic = readInt(source);
         if (magic != exportMagic)

@@ -77,15 +79,15 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs)
         if (deriver != "")
             info.deriver = parseStorePath(deriver);

-        info.narHash = hashString(htSHA256, *tee.saved.s);
-        info.narSize = tee.saved.s->size();
+        info.narHash = hashString(htSHA256, *saved.s);
+        info.narSize = saved.s->size();

         // Ignore optional legacy signature.
         if (readInt(source) == 1)
             readString(source);

         // Can't use underlying source, which would have been exhausted
-        auto source = StringSource { *tee.saved.s };
+        auto source = StringSource { *saved.s };
         addToStore(info, source, NoRepair, checkSigs);

         res.push_back(info.path);
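
Note: the old `TeeParseSink` bundled "parse the NAR" and "keep a copy of the bytes" into one class; the new code composes three primitives instead. The same pattern in isolation (illustrative sketch built from the components used above):

```cpp
// Assumes nix's serialise.hh and archive.hh; mirrors the importPaths() change.
std::string slurpNarWhileParsing(nix::Source & in)
{
    nix::StringSink saved;              // accumulates every byte that is read
    nix::TeeSource tee { in, saved };   // copies whatever is read into `saved`
    nix::ParseSink ether;               // discards content, validates NAR syntax
    parseDump(ether, tee);
    return *saved.s;                    // raw NAR, reusable for hashing/replay
}
```
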
@@ -22,6 +22,7 @@
 #include <queue>
 #include <random>
 #include <thread>
+#include <regex>

 using namespace std::string_literals;

@@ -56,7 +57,7 @@ struct curlFileTransfer : public FileTransfer
         Callback<FileTransferResult> callback;
         CURL * req = 0;
         bool active = false; // whether the handle has been added to the multi object
-        std::string status;
+        std::string statusMsg;

         unsigned int attempt = 0;

@@ -123,7 +124,7 @@ struct curlFileTransfer : public FileTransfer
             if (requestHeaders) curl_slist_free_all(requestHeaders);
             try {
                 if (!done)
-                    fail(FileTransferError(Interrupted, "download of '%s' was interrupted", request.uri));
+                    fail(FileTransferError(Interrupted, nullptr, "download of '%s' was interrupted", request.uri));
             } catch (...) {
                 ignoreException();
             }
@@ -144,6 +145,7 @@ struct curlFileTransfer : public FileTransfer

         LambdaSink finalSink;
         std::shared_ptr<CompressionSink> decompressionSink;
+        std::optional<StringSink> errorSink;

         std::exception_ptr writeException;

@@ -153,9 +155,19 @@ struct curlFileTransfer : public FileTransfer
             size_t realSize = size * nmemb;
             result.bodySize += realSize;

-            if (!decompressionSink)
+            if (!decompressionSink) {
                 decompressionSink = makeDecompressionSink(encoding, finalSink);
+                if (! successfulStatuses.count(getHTTPStatus())) {
+                    // In this case we want to construct a TeeSink, to keep
+                    // the response around (which we figure won't be big
+                    // like an actual download should be) to improve error
+                    // messages.
+                    errorSink = StringSink { };
+                }
+            }
+
+            if (errorSink)
+                (*errorSink)((unsigned char *) contents, realSize);
             (*decompressionSink)((unsigned char *) contents, realSize);

             return realSize;
@@ -175,12 +187,13 @@ struct curlFileTransfer : public FileTransfer
             size_t realSize = size * nmemb;
             std::string line((char *) contents, realSize);
             printMsg(lvlVomit, format("got header for '%s': %s") % request.uri % trim(line));
-            if (line.compare(0, 5, "HTTP/") == 0) { // new response starts
+            static std::regex statusLine("HTTP/[^ ]+ +[0-9]+(.*)", std::regex::extended | std::regex::icase);
+            std::smatch match;
+            if (std::regex_match(line, match, statusLine)) {
                 result.etag = "";
-                auto ss = tokenizeString<vector<string>>(line, " ");
-                status = ss.size() >= 2 ? ss[1] : "";
                 result.data = std::make_shared<std::string>();
                 result.bodySize = 0;
+                statusMsg = trim(match[1]);
                 acceptRanges = false;
                 encoding = "";
             } else {
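
Note: the status line is now recognised with a regex instead of `tokenizeString`, and the reason phrase after the numeric code is kept for error messages. What the pattern captures, as a standalone check (illustrative):

```cpp
#include <cassert>
#include <regex>
#include <string>

int main()
{
    std::regex statusLine("HTTP/[^ ]+ +[0-9]+(.*)",
        std::regex::extended | std::regex::icase);

    std::smatch match;
    std::string line = "HTTP/1.1 404 Not Found";
    assert(std::regex_match(line, match, statusLine));
    assert(match[1] == " Not Found"); // trim() in the patch strips the space

    // Ordinary header fields don't match, so they take the `else` branch.
    std::string etag = "ETag: \"abc\"";
    assert(!std::regex_match(etag, match, statusLine));
}
```
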
@@ -194,7 +207,9 @@ struct curlFileTransfer : public FileTransfer
                    the expected ETag on a 200 response, then shut
                    down the connection because we already have the
                    data. */
-                if (result.etag == request.expectedETag && status == "200") {
+                long httpStatus = 0;
+                curl_easy_getinfo(req, CURLINFO_RESPONSE_CODE, &httpStatus);
+                if (result.etag == request.expectedETag && httpStatus == 200) {
                     debug(format("shutting down on 200 HTTP response with expected ETag"));
                     return 0;
                 }
@@ -408,16 +423,21 @@ struct curlFileTransfer : public FileTransfer

                 attempt++;

+                std::shared_ptr<std::string> response;
+                if (errorSink)
+                    response = errorSink->s;
                 auto exc =
                     code == CURLE_ABORTED_BY_CALLBACK && _isInterrupted
-                    ? FileTransferError(Interrupted, fmt("%s of '%s' was interrupted", request.verb(), request.uri))
+                    ? FileTransferError(Interrupted, response, "%s of '%s' was interrupted", request.verb(), request.uri)
                     : httpStatus != 0
                     ? FileTransferError(err,
-                        fmt("unable to %s '%s': HTTP error %d",
-                            request.verb(), request.uri, httpStatus)
+                        response,
+                        fmt("unable to %s '%s': HTTP error %d ('%s')",
+                            request.verb(), request.uri, httpStatus, statusMsg)
                         + (code == CURLE_OK ? "" : fmt(" (curl error: %s)", curl_easy_strerror(code)))
                         )
                     : FileTransferError(err,
+                        response,
                         fmt("unable to %s '%s': %s (%d)",
                             request.verb(), request.uri, curl_easy_strerror(code), code));

@@ -675,7 +695,7 @@ struct curlFileTransfer : public FileTransfer
                 auto s3Res = s3Helper.getObject(bucketName, key);
                 FileTransferResult res;
                 if (!s3Res.data)
-                    throw FileTransferError(NotFound, fmt("S3 object '%s' does not exist", request.uri));
+                    throw FileTransferError(NotFound, nullptr, "S3 object '%s' does not exist", request.uri);
                 res.data = s3Res.data;
                 callback(std::move(res));
 #else
@@ -820,6 +840,21 @@ void FileTransfer::download(FileTransferRequest && request, Sink & sink)
     }
 }

+template<typename... Args>
+FileTransferError::FileTransferError(FileTransfer::Error error, std::shared_ptr<string> response, const Args & ... args)
+    : Error(args...), error(error), response(response)
+{
+    const auto hf = hintfmt(args...);
+    // FIXME: Due to https://github.com/NixOS/nix/issues/3841 we don't know how
+    // to print different messages for different verbosity levels. For now
+    // we add some heuristics for detecting when we want to show the response.
+    if (response && (response->size() < 1024 || response->find("<html>") != string::npos)) {
+        err.hint = hintfmt("%1%\n\nresponse body:\n\n%2%", normaltxt(hf.str()), *response);
+    } else {
+        err.hint = hf;
+    }
+}
+
 bool isUri(const string & s)
 {
     if (s.compare(0, 8, "channel:") == 0) return true;
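
Note: the constructor only attaches the captured body when it is short or looks like an HTML error page; large bodies are assumed to be the (failed) payload itself rather than a diagnostic. The heuristic restated on its own (illustrative):

```cpp
#include <memory>
#include <string>

// Restates the condition in the constructor above.
bool shouldShowResponse(const std::shared_ptr<std::string> & response)
{
    if (!response) return false;  // nothing was captured (e.g. interrupted early)
    return response->size() < 1024                          // terse diagnostic
        || response->find("<html>") != std::string::npos;   // HTML error page
}
```
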
@@ -103,10 +103,12 @@ class FileTransferError : public Error
 {
 public:
     FileTransfer::Error error;
+    std::shared_ptr<string> response; // intentionally optional

     template<typename... Args>
-    FileTransferError(FileTransfer::Error error, const Args & ... args)
-        : Error(args...), error(error)
-    { }
+    FileTransferError(FileTransfer::Error error, std::shared_ptr<string> response, const Args & ... args);
+
+    virtual const char* sname() const override { return "FileTransferError"; }
 };

 bool isUri(const string & s);
@@ -500,7 +500,7 @@ struct LocalStore::GCState
     StorePathSet alive;
     bool gcKeepOutputs;
     bool gcKeepDerivations;
-    unsigned long long bytesInvalidated;
+    uint64_t bytesInvalidated;
     bool moveToTrash = true;
     bool shouldDelete;
     GCState(const GCOptions & options, GCResults & results)

@@ -518,7 +518,7 @@ bool LocalStore::isActiveTempFile(const GCState & state,

 void LocalStore::deleteGarbage(GCState & state, const Path & path)
 {
-    unsigned long long bytesFreed;
+    uint64_t bytesFreed;
     deletePath(path, bytesFreed);
     state.results.bytesFreed += bytesFreed;
 }

@@ -528,7 +528,7 @@ void LocalStore::deletePathRecursive(GCState & state, const Path & path)
 {
     checkInterrupt();

-    unsigned long long size = 0;
+    uint64_t size = 0;

     auto storePath = maybeParseStorePath(path);
     if (storePath && isValidPath(*storePath)) {

@@ -687,7 +687,7 @@ void LocalStore::removeUnusedLinks(const GCState & state)
     AutoCloseDir dir(opendir(linksDir.c_str()));
     if (!dir) throw SysError("opening directory '%1%'", linksDir);

-    long long actualSize = 0, unsharedSize = 0;
+    int64_t actualSize = 0, unsharedSize = 0;

     struct dirent * dirent;
     while (errno = 0, dirent = readdir(dir.get())) {

@@ -717,10 +717,10 @@ void LocalStore::removeUnusedLinks(const GCState & state)
     struct stat st;
     if (stat(linksDir.c_str(), &st) == -1)
         throw SysError("statting '%1%'", linksDir);
-    long long overhead = st.st_blocks * 512ULL;
+    auto overhead = st.st_blocks * 512ULL;

-    printInfo(format("note: currently hard linking saves %.2f MiB")
-        % ((unsharedSize - actualSize - overhead) / (1024.0 * 1024.0)));
+    printInfo("note: currently hard linking saves %.2f MiB",
+        ((unsharedSize - actualSize - overhead) / (1024.0 * 1024.0)));
 }
@@ -335,9 +335,6 @@ public:
         "setuid/setgid bits or with file capabilities."};
 #endif

-    Setting<Strings> hashedMirrors{this, {"http://tarballs.nixos.org/"}, "hashed-mirrors",
-        "A list of servers used by builtins.fetchurl to fetch files by hash."};
-
     Setting<uint64_t> minFree{this, 0, "min-free",
         "Automatically run the garbage collector when free disk space drops below the specified amount."};

@@ -368,6 +365,9 @@ public:

     Setting<size_t> narBufferSize{this, 32 * 1024 * 1024, "nar-buffer-size",
         "Maximum size of NARs before spilling them to disk."};
+
+    Setting<std::string> flakeRegistry{this, "https://github.com/NixOS/flake-registry/raw/master/flake-registry.json", "flake-registry",
+        "Path or URI of the global flake registry."};
 };
@@ -113,7 +113,7 @@ struct LegacySSHStore : public Store

         if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 4) {
             auto s = readString(conn->from);
-            info->narHash = s.empty() ? Hash() : Hash(s);
+            info->narHash = s.empty() ? std::optional<Hash>{} : Hash{s};
             info->ca = parseContentAddressOpt(readString(conn->from));
             info->sigs = readStrings<StringSet>(conn->from);
         }

@@ -138,7 +138,7 @@ struct LegacySSHStore : public Store
                 << cmdAddToStoreNar
                 << printStorePath(info.path)
                 << (info.deriver ? printStorePath(*info.deriver) : "")
-                << info.narHash.to_string(Base16, false);
+                << info.narHash->to_string(Base16, false);
             writeStorePaths(*this, conn->to, info.references);
             conn->to
                 << info.registrationTime
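
Note: `narHash` becomes `std::optional<Hash>` throughout this commit (an empty string on the wire used to become an all-zero `Hash()` meaning "unknown"). Every reader now checks or asserts before dereferencing; the recurring idiom, sketched (illustrative helper, not part of the patch):

```cpp
#include <optional>

// Assumes the patched ValidPathInfo with std::optional<Hash> narHash.
std::string narHashOrEmpty(const nix::ValidPathInfo & info)
{
    if (!info.narHash) return "";                        // hash unknown
    return info.narHash->to_string(nix::Base16, false);  // safe: checked above
}
```
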
@@ -560,19 +560,12 @@ void LocalStore::checkDerivationOutputs(const StorePath & drvPath, const Derivation & drv)
         DerivationOutputs::const_iterator out = drv.outputs.find("out");
         if (out == drv.outputs.end())
             throw Error("derivation '%s' does not have an output named 'out'", printStorePath(drvPath));
-
-        check(
-            makeFixedOutputPath(
-                out->second.hash->method,
-                out->second.hash->hash,
-                drvName),
-            out->second.path, "out");
     }

     else {
         Hash h = hashDerivationModulo(*this, drv, true);
         for (auto & i : drv.outputs)
-            check(makeOutputPath(i.first, h, drvName), i.second.path, i.first);
+            check(makeOutputPath(i.first, h, drvName), i.second.path(*this, drv.name), i.first);
     }
 }

@@ -586,7 +579,7 @@ uint64_t LocalStore::addValidPath(State & state,

     state.stmtRegisterValidPath.use()
         (printStorePath(info.path))
-        (info.narHash.to_string(Base16, true))
+        (info.narHash->to_string(Base16, true))
         (info.registrationTime == 0 ? time(0) : info.registrationTime)
         (info.deriver ? printStorePath(*info.deriver) : "", (bool) info.deriver)
         (info.narSize, info.narSize != 0)

@@ -594,7 +587,7 @@ uint64_t LocalStore::addValidPath(State & state,
         (concatStringsSep(" ", info.sigs), !info.sigs.empty())
         (renderContentAddress(info.ca), (bool) info.ca)
         .exec();
-    uint64_t id = sqlite3_last_insert_rowid(state.db);
+    uint64_t id = state.db.getLastInsertedRowId();

     /* If this is a derivation, then store the derivation outputs in
        the database. This is useful for the garbage collector: it can

@@ -614,7 +607,7 @@ uint64_t LocalStore::addValidPath(State & state,
             state.stmtAddDerivationOutput.use()
                 (id)
                 (i.first)
-                (printStorePath(i.second.path))
+                (printStorePath(i.second.path(*this, drv.name)))
                 .exec();
         }
     }

@@ -686,7 +679,7 @@ void LocalStore::updatePathInfo(State & state, const ValidPathInfo & info)
 {
     state.stmtUpdatePathInfo.use()
         (info.narSize, info.narSize != 0)
-        (info.narHash.to_string(Base16, true))
+        (info.narHash->to_string(Base16, true))
         (info.ultimate ? 1 : 0, info.ultimate)
         (concatStringsSep(" ", info.sigs), !info.sigs.empty())
         (renderContentAddress(info.ca), (bool) info.ca)
@@ -846,20 +839,32 @@ StorePathSet LocalStore::querySubstitutablePaths(const StorePathSet & paths)
 }


-void LocalStore::querySubstitutablePathInfos(const StorePathSet & paths,
-    SubstitutablePathInfos & infos)
+void LocalStore::querySubstitutablePathInfos(const StorePathCAMap & paths, SubstitutablePathInfos & infos)
 {
     if (!settings.useSubstitutes) return;
     for (auto & sub : getDefaultSubstituters()) {
-        if (sub->storeDir != storeDir) continue;
         for (auto & path : paths) {
-            if (infos.count(path)) continue;
-            debug("checking substituter '%s' for path '%s'", sub->getUri(), printStorePath(path));
+            auto subPath(path.first);
+
+            // recompute store path so that we can use a different store root
+            if (path.second) {
+                subPath = makeFixedOutputPathFromCA(path.first.name(), *path.second);
+                if (sub->storeDir == storeDir)
+                    assert(subPath == path.first);
+                if (subPath != path.first)
+                    debug("replaced path '%s' with '%s' for substituter '%s'", printStorePath(path.first), sub->printStorePath(subPath), sub->getUri());
+            } else if (sub->storeDir != storeDir) continue;
+
+            debug("checking substituter '%s' for path '%s'", sub->getUri(), sub->printStorePath(subPath));
             try {
-                auto info = sub->queryPathInfo(path);
+                auto info = sub->queryPathInfo(subPath);
+
+                if (sub->storeDir != storeDir && !(info->isContentAddressed(*sub) && info->references.empty()))
+                    continue;
+
                 auto narInfo = std::dynamic_pointer_cast<const NarInfo>(
                     std::shared_ptr<const ValidPathInfo>(info));
-                infos.insert_or_assign(path, SubstitutablePathInfo{
+                infos.insert_or_assign(path.first, SubstitutablePathInfo{
                     info->deriver,
                     info->references,
                     narInfo ? narInfo->fileSize : 0,
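
Note: the query now carries an optional content address per path, so a substituter with a different store root can recompute what the same content would be called on its side. Building such a query, sketched (illustrative; mirrors the `queryMissing()` call sites later in this commit):

```cpp
#include <optional>

// Assumes the patched Store API with StorePathCAMap.
void sketchQuery(nix::Store & store, const nix::StorePath & plain,
    const nix::StorePath & fixedOut, const nix::ContentAddress & ca)
{
    nix::SubstitutablePathInfos infos;
    store.querySubstitutablePathInfos({
        { plain, std::nullopt },  // "exactly this path", as before
        { fixedOut, ca },         // "any path with this content address"
    }, infos);
}
```
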
@@ -900,7 +905,7 @@ void LocalStore::registerValidPaths(const ValidPathInfos & infos)
     StorePathSet paths;

     for (auto & i : infos) {
-        assert(i.narHash.type == htSHA256);
+        assert(i.narHash && i.narHash->type == htSHA256);
         if (isValidPath_(*state, i.path))
             updatePathInfo(*state, i);
         else

@@ -1013,7 +1018,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,

     if (hashResult.first != info.narHash)
         throw Error("hash mismatch importing path '%s';\n  wanted: %s\n  got:    %s",
-            printStorePath(info.path), info.narHash.to_string(Base32, true), hashResult.first.to_string(Base32, true));
+            printStorePath(info.path), info.narHash->to_string(Base32, true), hashResult.first.to_string(Base32, true));

     if (hashResult.second != info.narSize)
         throw Error("size mismatch importing path '%s';\n  wanted: %s\n  got:   %s",
@@ -1033,82 +1038,26 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
 }


-StorePath LocalStore::addToStoreFromDump(const string & dump, const string & name,
-    FileIngestionMethod method, HashType hashAlgo, RepairFlag repair)
-{
-    Hash h = hashString(hashAlgo, dump);
-
-    auto dstPath = makeFixedOutputPath(method, h, name);
-
-    addTempRoot(dstPath);
-
-    if (repair || !isValidPath(dstPath)) {
-
-        /* The first check above is an optimisation to prevent
-           unnecessary lock acquisition. */
-
-        auto realPath = Store::toRealPath(dstPath);
-
-        PathLocks outputLock({realPath});
-
-        if (repair || !isValidPath(dstPath)) {
-
-            deletePath(realPath);
-
-            autoGC();
-
-            if (method == FileIngestionMethod::Recursive) {
-                StringSource source(dump);
-                restorePath(realPath, source);
-            } else
-                writeFile(realPath, dump);
-
-            canonicalisePathMetaData(realPath, -1);
-
-            /* Register the SHA-256 hash of the NAR serialisation of
-               the path in the database.  We may just have computed it
-               above (if called with recursive == true and hashAlgo ==
-               sha256); otherwise, compute it here. */
-            HashResult hash;
-            if (method == FileIngestionMethod::Recursive) {
-                hash.first = hashAlgo == htSHA256 ? h : hashString(htSHA256, dump);
-                hash.second = dump.size();
-            } else
-                hash = hashPath(htSHA256, realPath);
-
-            optimisePath(realPath); // FIXME: combine with hashPath()
-
-            ValidPathInfo info(dstPath);
-            info.narHash = hash.first;
-            info.narSize = hash.second;
-            info.ca = FixedOutputHash { .method = method, .hash = h };
-            registerValidPath(info);
-        }
-
-        outputLock.setDeletion(true);
-    }
-
-    return dstPath;
-}
-
-
 StorePath LocalStore::addToStore(const string & name, const Path & _srcPath,
     FileIngestionMethod method, HashType hashAlgo, PathFilter & filter, RepairFlag repair)
 {
     Path srcPath(absPath(_srcPath));
-
-    if (method != FileIngestionMethod::Recursive)
-        return addToStoreFromDump(readFile(srcPath), name, method, hashAlgo, repair);
-
-    /* For computing the NAR hash. */
-    auto sha256Sink = std::make_unique<HashSink>(htSHA256);
-
-    /* For computing the store path. In recursive SHA-256 mode, this
-       is the same as the NAR hash, so no need to do it again. */
-    std::unique_ptr<HashSink> hashSink =
-        hashAlgo == htSHA256
-        ? nullptr
-        : std::make_unique<HashSink>(hashAlgo);
+    auto source = sinkToSource([&](Sink & sink) {
+        if (method == FileIngestionMethod::Recursive)
+            dumpPath(srcPath, sink, filter);
+        else
+            readFile(srcPath, sink);
+    });
+    return addToStoreFromDump(*source, name, method, hashAlgo, repair);
+}
+
+
+StorePath LocalStore::addToStoreFromDump(Source & source0, const string & name,
+    FileIngestionMethod method, HashType hashAlgo, RepairFlag repair)
+{
+    /* For computing the store path. */
+    auto hashSink = std::make_unique<HashSink>(hashAlgo);
+    TeeSource source { source0, *hashSink };

     /* Read the source path into memory, but only if it's up to
        narBufferSize bytes. If it's larger, write it to a temporary
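
Note: `addToStore()` no longer materialises the whole file as a string; it adapts the push-style `dumpPath()` into a pull-style `Source` and delegates. The adapter pattern on its own (illustrative sketch; the path literal is a placeholder):

```cpp
// sinkToSource() runs the lambda in a coroutine, suspending it until the
// consumer asks for more bytes; this is what keeps memory use bounded.
auto source = nix::sinkToSource([&](nix::Sink & sink) {
    nix::dumpPath("/some/path", sink);       // producer pushes into `sink`
});
unsigned char buf[4096];
size_t n = source->read(buf, sizeof(buf));  // consumer pulls on demand
```
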
@@ -1116,55 +1065,49 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, const string & name,
        destination store path is already valid, we just delete the
        temporary path. Otherwise, we move it to the destination store
        path. */
-    bool inMemory = true;
-    std::string nar;
-
-    auto source = sinkToSource([&](Sink & sink) {
-
-        LambdaSink sink2([&](const unsigned char * buf, size_t len) {
-            (*sha256Sink)(buf, len);
-            if (hashSink) (*hashSink)(buf, len);
-
-            if (inMemory) {
-                if (nar.size() + len > settings.narBufferSize) {
-                    inMemory = false;
-                    sink << 1;
-                    sink((const unsigned char *) nar.data(), nar.size());
-                    nar.clear();
-                } else {
-                    nar.append((const char *) buf, len);
-                }
-            }
-
-            if (!inMemory) sink(buf, len);
-        });
-
-        dumpPath(srcPath, sink2, filter);
-    });
+    bool inMemory = false;
+
+    std::string dump;
+
+    /* Fill out buffer, and decide whether we are working strictly in
+       memory based on whether we break out because the buffer is full
+       or the original source is empty */
+    while (dump.size() < settings.narBufferSize) {
+        auto oldSize = dump.size();
+        constexpr size_t chunkSize = 65536;
+        auto want = std::min(chunkSize, settings.narBufferSize - oldSize);
+        dump.resize(oldSize + want);
+        auto got = 0;
+        try {
+            got = source.read((uint8_t *) dump.data() + oldSize, want);
+        } catch (EndOfFile &) {
+            inMemory = true;
+            break;
+        }
+        dump.resize(oldSize + got);
+    }

     std::unique_ptr<AutoDelete> delTempDir;
     Path tempPath;

-    try {
-        /* Wait for the source coroutine to give us some dummy
-           data. This is so that we don't create the temporary
-           directory if the NAR fits in memory. */
-        readInt(*source);
+    if (!inMemory) {
+        /* Drain what we pulled so far, and then keep on pulling */
+        StringSource dumpSource { dump };
+        ChainSource bothSource { dumpSource, source };

         auto tempDir = createTempDir(realStoreDir, "add");
         delTempDir = std::make_unique<AutoDelete>(tempDir);
         tempPath = tempDir + "/x";

-        restorePath(tempPath, *source);
+        if (method == FileIngestionMethod::Recursive)
+            restorePath(tempPath, bothSource);
+        else
+            writeFile(tempPath, bothSource);

-    } catch (EndOfFile &) {
-        if (!inMemory) throw;
-        /* The NAR fits in memory, so we didn't do restorePath(). */
+        dump.clear();
     }

-    auto sha256 = sha256Sink->finish();
-
-    Hash hash = hashSink ? hashSink->finish().first : sha256.first;
+    auto [hash, size] = hashSink->finish();

     auto dstPath = makeFixedOutputPath(method, hash, name);
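
Note: by the time the loop above decides to spill to disk, up to `nar-buffer-size` bytes have already been consumed from the source; `ChainSource` is what stitches the stream back together (assuming, as its use here implies, that it concatenates two `Source`s). The names below are placeholders:

```cpp
// Illustrative: replaying the buffered prefix before the live remainder,
// so restorePath()/writeFile() see one uninterrupted stream.
nix::StringSource prefix { alreadyBuffered };    // bytes pulled so far
nix::ChainSource whole { prefix, liveSource };   // prefix ++ remainder
// whole.read(...) first yields `alreadyBuffered`, then `liveSource` bytes.
```
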
@@ -1186,22 +1129,34 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, const string & name,
             autoGC();

             if (inMemory) {
+                StringSource dumpSource { dump };
                 /* Restore from the NAR in memory. */
-                StringSource source(nar);
-                restorePath(realPath, source);
+                if (method == FileIngestionMethod::Recursive)
+                    restorePath(realPath, dumpSource);
+                else
+                    writeFile(realPath, dumpSource);
             } else {
                 /* Move the temporary path we restored above. */
                 if (rename(tempPath.c_str(), realPath.c_str()))
                     throw Error("renaming '%s' to '%s'", tempPath, realPath);
             }

+            /* For computing the nar hash. In recursive SHA-256 mode, this
+               is the same as the store hash, so no need to do it again. */
+            auto narHash = std::pair { hash, size };
+            if (method != FileIngestionMethod::Recursive || hashAlgo != htSHA256) {
+                HashSink narSink { htSHA256 };
+                dumpPath(realPath, narSink);
+                narHash = narSink.finish();
+            }
+
             canonicalisePathMetaData(realPath, -1); // FIXME: merge into restorePath

             optimisePath(realPath);

             ValidPathInfo info(dstPath);
-            info.narHash = sha256.first;
-            info.narSize = sha256.second;
+            info.narHash = narHash.first;
+            info.narSize = narHash.second;
             info.ca = FixedOutputHash { .method = method, .hash = hash };
             registerValidPath(info);
         }
@@ -1359,9 +1314,9 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)

                 std::unique_ptr<AbstractHashSink> hashSink;
                 if (!info->ca || !info->references.count(info->path))
-                    hashSink = std::make_unique<HashSink>(*info->narHash.type);
+                    hashSink = std::make_unique<HashSink>(info->narHash->type);
                 else
-                    hashSink = std::make_unique<HashModuloSink>(*info->narHash.type, std::string(info->path.hashPart()));
+                    hashSink = std::make_unique<HashModuloSink>(info->narHash->type, std::string(info->path.hashPart()));

                 dumpPath(Store::toRealPath(i), *hashSink);
                 auto current = hashSink->finish();

@@ -1370,7 +1325,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
                     logError({
                         .name = "Invalid hash - path modified",
                         .hint = hintfmt("path '%s' was modified! expected hash '%s', got '%s'",
-                            printStorePath(i), info->narHash.to_string(Base32, true), current.first.to_string(Base32, true))
+                            printStorePath(i), info->narHash->to_string(Base32, true), current.first.to_string(Base32, true))
                     });
                     if (repair) repairPath(i); else errors = true;
                 } else {
@@ -26,8 +26,8 @@ const int nixSchemaVersion = 10;
 struct OptimiseStats
 {
     unsigned long filesLinked = 0;
-    unsigned long long bytesFreed = 0;
-    unsigned long long blocksFreed = 0;
+    uint64_t bytesFreed = 0;
+    uint64_t blocksFreed = 0;
 };

@@ -136,7 +136,7 @@ public:

     StorePathSet querySubstitutablePaths(const StorePathSet & paths) override;

-    void querySubstitutablePathInfos(const StorePathSet & paths,
+    void querySubstitutablePathInfos(const StorePathCAMap & paths,
         SubstitutablePathInfos & infos) override;

     void addToStore(const ValidPathInfo & info, Source & source,

@@ -150,7 +150,7 @@ public:
        in `dump', which is either a NAR serialisation (if recursive ==
        true) or simply the contents of a regular file (if recursive ==
        false). */
-    StorePath addToStoreFromDump(const string & dump, const string & name,
+    StorePath addToStoreFromDump(Source & dump, const string & name,
         FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair) override;

     StorePath addTextToStore(const string & name, const string & s,
@@ -61,3 +61,6 @@ $(d)/build.cc:
 clean-files += $(d)/schema.sql.gen.hh

 $(eval $(call install-file-in, $(d)/nix-store.pc, $(prefix)/lib/pkgconfig, 0644))
+
+$(foreach i, $(wildcard src/libstore/builtins/*.hh), \
+  $(eval $(call install-file-in, $(i), $(includedir)/nix/builtins, 0644)))
@@ -108,9 +108,19 @@ void Store::computeFSClosure(const StorePath & startPath,
 }


+std::optional<ContentAddress> getDerivationCA(const BasicDerivation & drv)
+{
+    auto out = drv.outputs.find("out");
+    if (out != drv.outputs.end()) {
+        if (auto v = std::get_if<DerivationOutputFixed>(&out->second.output))
+            return v->hash;
+    }
+    return std::nullopt;
+}
+
 void Store::queryMissing(const std::vector<StorePathWithOutputs> & targets,
     StorePathSet & willBuild_, StorePathSet & willSubstitute_, StorePathSet & unknown_,
-    unsigned long long & downloadSize_, unsigned long long & narSize_)
+    uint64_t & downloadSize_, uint64_t & narSize_)
 {
     Activity act(*logger, lvlDebug, actUnknown, "querying info about missing paths");

@@ -122,8 +132,8 @@ void Store::queryMissing(const std::vector<StorePathWithOutputs> & targets,
     {
         std::unordered_set<std::string> done;
         StorePathSet & unknown, & willSubstitute, & willBuild;
-        unsigned long long & downloadSize;
-        unsigned long long & narSize;
+        uint64_t & downloadSize;
+        uint64_t & narSize;
     };

     struct DrvState

@@ -157,7 +167,7 @@ void Store::queryMissing(const std::vector<StorePathWithOutputs> & targets,
                 auto outPath = parseStorePath(outPathS);

                 SubstitutablePathInfos infos;
-                querySubstitutablePathInfos({outPath}, infos);
+                querySubstitutablePathInfos({{outPath, getDerivationCA(*drv)}}, infos);

                 if (infos.empty()) {
                     drvState_->lock()->done = true;

@@ -198,8 +208,8 @@ void Store::queryMissing(const std::vector<StorePathWithOutputs> & targets,
             PathSet invalid;
             for (auto & j : drv->outputs)
                 if (wantOutput(j.first, path.outputs)
-                    && !isValidPath(j.second.path))
-                    invalid.insert(printStorePath(j.second.path));
+                    && !isValidPath(j.second.path(*this, drv->name)))
+                    invalid.insert(printStorePath(j.second.path(*this, drv->name)));
             if (invalid.empty()) return;

             if (settings.useSubstitutes && parsedDrv.substitutesAllowed()) {

@@ -214,7 +224,7 @@ void Store::queryMissing(const std::vector<StorePathWithOutputs> & targets,
             if (isValidPath(path.path)) return;

             SubstitutablePathInfos infos;
-            querySubstitutablePathInfos({path.path}, infos);
+            querySubstitutablePathInfos({{path.path, std::nullopt}}, infos);

             if (infos.empty()) {
                 auto state(state_.lock());
@@ -79,14 +79,14 @@ struct NarAccessor : public FSAccessor
             parents.top()->isExecutable = true;
         }

-        void preallocateContents(unsigned long long size) override
+        void preallocateContents(uint64_t size) override
         {
             assert(size <= std::numeric_limits<uint64_t>::max());
             parents.top()->size = (uint64_t) size;
             parents.top()->start = pos;
         }

-        void receiveContents(unsigned char * data, unsigned int len) override
+        void receiveContents(unsigned char * data, size_t len) override
         { }

         void createSymlink(const Path & path, const string & target) override
@@ -230,9 +230,9 @@ public:
                 (std::string(info->path.name()))
                 (narInfo ? narInfo->url : "", narInfo != 0)
                 (narInfo ? narInfo->compression : "", narInfo != 0)
-                (narInfo && narInfo->fileHash ? narInfo->fileHash.to_string(Base32, true) : "", narInfo && narInfo->fileHash)
+                (narInfo && narInfo->fileHash ? narInfo->fileHash->to_string(Base32, true) : "", narInfo && narInfo->fileHash)
                 (narInfo ? narInfo->fileSize : 0, narInfo != 0 && narInfo->fileSize)
-                (info->narHash.to_string(Base32, true))
+                (info->narHash->to_string(Base32, true))
                 (info->narSize)
                 (concatStringsSep(" ", info->shortRefs()))
                 (info->deriver ? std::string(info->deriver->to_string()) : "", (bool) info->deriver)
@@ -7,15 +7,14 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string & whence)
     : ValidPathInfo(StorePath(StorePath::dummy)) // FIXME: hack
 {
     auto corrupt = [&]() {
-        throw Error("NAR info file '%1%' is corrupt", whence);
+        return Error("NAR info file '%1%' is corrupt", whence);
     };

     auto parseHashField = [&](const string & s) {
         try {
             return Hash(s);
         } catch (BadHash &) {
-            corrupt();
-            return Hash(); // never reached
+            throw corrupt();
         }
     };

@@ -25,12 +24,12 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string & whence)
     while (pos < s.size()) {

         size_t colon = s.find(':', pos);
-        if (colon == std::string::npos) corrupt();
+        if (colon == std::string::npos) throw corrupt();

         std::string name(s, pos, colon - pos);

         size_t eol = s.find('\n', colon + 2);
-        if (eol == std::string::npos) corrupt();
+        if (eol == std::string::npos) throw corrupt();

         std::string value(s, colon + 2, eol - colon - 2);

@@ -45,16 +44,16 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string & whence)
         else if (name == "FileHash")
             fileHash = parseHashField(value);
         else if (name == "FileSize") {
-            if (!string2Int(value, fileSize)) corrupt();
+            if (!string2Int(value, fileSize)) throw corrupt();
         }
         else if (name == "NarHash")
             narHash = parseHashField(value);
         else if (name == "NarSize") {
-            if (!string2Int(value, narSize)) corrupt();
+            if (!string2Int(value, narSize)) throw corrupt();
         }
         else if (name == "References") {
             auto refs = tokenizeString<Strings>(value, " ");
-            if (!references.empty()) corrupt();
+            if (!references.empty()) throw corrupt();
             for (auto & r : refs)
                 references.insert(StorePath(r));
         }

@@ -67,7 +66,7 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string & whence)
         else if (name == "Sig")
             sigs.insert(value);
         else if (name == "CA") {
-            if (ca) corrupt();
+            if (ca) throw corrupt();
             // FIXME: allow blank ca or require skipping field?
             ca = parseContentAddressOpt(value);
         }

@@ -77,7 +76,7 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string & whence)

     if (compression == "") compression = "bzip2";

-    if (!havePath || url.empty() || narSize == 0 || !narHash) corrupt();
+    if (!havePath || url.empty() || narSize == 0 || !narHash) throw corrupt();
 }

@@ -87,11 +86,11 @@ std::string NarInfo::to_string(const Store & store) const
     res += "URL: " + url + "\n";
     assert(compression != "");
     res += "Compression: " + compression + "\n";
-    assert(fileHash.type == htSHA256);
-    res += "FileHash: " + fileHash.to_string(Base32, true) + "\n";
+    assert(fileHash && fileHash->type == htSHA256);
+    res += "FileHash: " + fileHash->to_string(Base32, true) + "\n";
     res += "FileSize: " + std::to_string(fileSize) + "\n";
-    assert(narHash.type == htSHA256);
-    res += "NarHash: " + narHash.to_string(Base32, true) + "\n";
+    assert(narHash && narHash->type == htSHA256);
+    res += "NarHash: " + narHash->to_string(Base32, true) + "\n";
     res += "NarSize: " + std::to_string(narSize) + "\n";

     res += "References: " + concatStringsSep(" ", shortRefs()) + "\n";

@@ -10,7 +10,7 @@ struct NarInfo : ValidPathInfo
 {
     std::string url;
     std::string compression;
-    Hash fileHash;
+    std::optional<Hash> fileHash;
     uint64_t fileSize = 0;
     std::string system;
@@ -282,7 +282,7 @@ void LocalStore::optimiseStore(OptimiseStats & stats)
     }
 }

-static string showBytes(unsigned long long bytes)
+static string showBytes(uint64_t bytes)
 {
     return (format("%.2f MiB") % (bytes / (1024.0 * 1024.0))).str();
 }
@@ -64,6 +64,8 @@ typedef std::set<StorePath> StorePathSet;
 typedef std::vector<StorePath> StorePaths;
 typedef std::map<string, StorePath> OutputPathMap;

+typedef std::map<StorePath, std::optional<ContentAddress>> StorePathCAMap;
+
 /* Extension of derivations in the Nix store. */
 const std::string drvExtension = ".drv";
@@ -12,30 +12,24 @@
 namespace nix {


-static bool cmpGensByNumber(const Generation & a, const Generation & b)
-{
-    return a.number < b.number;
-}
-
-
 /* Parse a generation name of the format
    `<profilename>-<number>-link'. */
-static int parseName(const string & profileName, const string & name)
+static std::optional<GenerationNumber> parseName(const string & profileName, const string & name)
 {
-    if (string(name, 0, profileName.size() + 1) != profileName + "-") return -1;
+    if (string(name, 0, profileName.size() + 1) != profileName + "-") return {};
     string s = string(name, profileName.size() + 1);
     string::size_type p = s.find("-link");
-    if (p == string::npos) return -1;
-    int n;
+    if (p == string::npos) return {};
+    unsigned int n;
     if (string2Int(string(s, 0, p), n) && n >= 0)
         return n;
     else
-        return -1;
+        return {};
 }


-Generations findGenerations(Path profile, int & curGen)
+std::pair<Generations, std::optional<GenerationNumber>> findGenerations(Path profile)
 {
     Generations gens;
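With the `-1` sentinel gone, callers test the returned `std::optional<GenerationNumber>` directly. A minimal sketch of the new contract, with hypothetical inputs and assuming the declarations above:

```cpp
// Hypothetical checks against the new parseName() contract: a value is
// returned only for names of the form "<profile>-<number>-link".
if (auto n = parseName("profile", "profile-42-link"))
    assert(*n == 42);                            // number extracted
assert(!parseName("profile", "profile-x-link")); // not a number -> nullopt
assert(!parseName("other", "profile-42-link"));  // wrong profile -> nullopt
```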
@@ -43,30 +37,34 @@ Generations findGenerations(Path profile, int & curGen)
     auto profileName = std::string(baseNameOf(profile));

     for (auto & i : readDirectory(profileDir)) {
-        int n;
-        if ((n = parseName(profileName, i.name)) != -1) {
-            Generation gen;
-            gen.path = profileDir + "/" + i.name;
-            gen.number = n;
+        if (auto n = parseName(profileName, i.name)) {
+            auto path = profileDir + "/" + i.name;
             struct stat st;
-            if (lstat(gen.path.c_str(), &st) != 0)
-                throw SysError("statting '%1%'", gen.path);
-            gen.creationTime = st.st_mtime;
-            gens.push_back(gen);
+            if (lstat(path.c_str(), &st) != 0)
+                throw SysError("statting '%1%'", path);
+            gens.push_back({
+                .number = *n,
+                .path = path,
+                .creationTime = st.st_mtime
+            });
         }
     }

-    gens.sort(cmpGensByNumber);
+    gens.sort([](const Generation & a, const Generation & b)
+    {
+        return a.number < b.number;
+    });

-    curGen = pathExists(profile)
-        ? parseName(profileName, readLink(profile))
-        : -1;
-
-    return gens;
+    return {
+        gens,
+        pathExists(profile)
+            ? parseName(profileName, readLink(profile))
+            : std::nullopt
+    };
 }


-static void makeName(const Path & profile, unsigned int num,
+static void makeName(const Path & profile, GenerationNumber num,
     Path & outLink)
 {
     Path prefix = (format("%1%-%2%") % profile % num).str();

@@ -78,10 +76,9 @@ Path createGeneration(ref<LocalFSStore> store, Path profile, Path outPath)
 {
     /* The new generation number should be higher than old the
        previous ones. */
-    int dummy;
-    Generations gens = findGenerations(profile, dummy);
+    auto [gens, dummy] = findGenerations(profile);

-    unsigned int num;
+    GenerationNumber num;
     if (gens.size() > 0) {
         Generation last = gens.back();

@@ -121,7 +118,7 @@ static void removeFile(const Path & path)
 }


-void deleteGeneration(const Path & profile, unsigned int gen)
+void deleteGeneration(const Path & profile, GenerationNumber gen)
 {
     Path generation;
     makeName(profile, gen, generation);

@@ -129,7 +126,7 @@ void deleteGeneration(const Path & profile, unsigned int gen)
 }


-static void deleteGeneration2(const Path & profile, unsigned int gen, bool dryRun)
+static void deleteGeneration2(const Path & profile, GenerationNumber gen, bool dryRun)
 {
     if (dryRun)
         printInfo(format("would remove generation %1%") % gen);

@@ -140,31 +137,29 @@ static void deleteGeneration2(const Path & profile, unsigned int gen, bool dryRu
 }


-void deleteGenerations(const Path & profile, const std::set<unsigned int> & gensToDelete, bool dryRun)
+void deleteGenerations(const Path & profile, const std::set<GenerationNumber> & gensToDelete, bool dryRun)
 {
     PathLocks lock;
     lockProfile(lock, profile);

-    int curGen;
-    Generations gens = findGenerations(profile, curGen);
+    auto [gens, curGen] = findGenerations(profile);

-    if (gensToDelete.find(curGen) != gensToDelete.end())
+    if (gensToDelete.count(*curGen))
         throw Error("cannot delete current generation of profile %1%'", profile);

     for (auto & i : gens) {
-        if (gensToDelete.find(i.number) == gensToDelete.end()) continue;
+        if (!gensToDelete.count(i.number)) continue;
         deleteGeneration2(profile, i.number, dryRun);
     }
 }

-void deleteGenerationsGreaterThan(const Path & profile, int max, bool dryRun)
+void deleteGenerationsGreaterThan(const Path & profile, GenerationNumber max, bool dryRun)
 {
     PathLocks lock;
     lockProfile(lock, profile);

-    int curGen;
     bool fromCurGen = false;
-    Generations gens = findGenerations(profile, curGen);
+    auto [gens, curGen] = findGenerations(profile);
     for (auto i = gens.rbegin(); i != gens.rend(); ++i) {
         if (i->number == curGen) {
             fromCurGen = true;

@@ -186,8 +181,7 @@ void deleteOldGenerations(const Path & profile, bool dryRun)
     PathLocks lock;
     lockProfile(lock, profile);

-    int curGen;
-    Generations gens = findGenerations(profile, curGen);
+    auto [gens, curGen] = findGenerations(profile);

     for (auto & i : gens)
         if (i.number != curGen)

@@ -200,8 +194,7 @@ void deleteGenerationsOlderThan(const Path & profile, time_t t, bool dryRun)
     PathLocks lock;
     lockProfile(lock, profile);

-    int curGen;
-    Generations gens = findGenerations(profile, curGen);
+    auto [gens, curGen] = findGenerations(profile);

     bool canDelete = false;
     for (auto i = gens.rbegin(); i != gens.rend(); ++i)

@@ -9,37 +9,32 @@
 namespace nix {


+typedef unsigned int GenerationNumber;
+
 struct Generation
 {
-    int number;
+    GenerationNumber number;
     Path path;
     time_t creationTime;
-    Generation()
-    {
-        number = -1;
-    }
-    operator bool() const
-    {
-        return number != -1;
-    }
 };

-typedef list<Generation> Generations;
+typedef std::list<Generation> Generations;


 /* Returns the list of currently present generations for the specified
-   profile, sorted by generation number. */
-Generations findGenerations(Path profile, int & curGen);
+   profile, sorted by generation number. Also returns the number of
+   the current generation. */
+std::pair<Generations, std::optional<GenerationNumber>> findGenerations(Path profile);

 class LocalFSStore;

 Path createGeneration(ref<LocalFSStore> store, Path profile, Path outPath);

-void deleteGeneration(const Path & profile, unsigned int gen);
+void deleteGeneration(const Path & profile, GenerationNumber gen);

-void deleteGenerations(const Path & profile, const std::set<unsigned int> & gensToDelete, bool dryRun);
+void deleteGenerations(const Path & profile, const std::set<GenerationNumber> & gensToDelete, bool dryRun);

-void deleteGenerationsGreaterThan(const Path & profile, const int max, bool dryRun);
+void deleteGenerationsGreaterThan(const Path & profile, GenerationNumber max, bool dryRun);

 void deleteOldGenerations(const Path & profile, bool dryRun);
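Taken together, the profiles API now hands back the generation list and the current generation in one call. A minimal caller sketch (hypothetical, assuming a `profile` path and the declarations above):

```cpp
// Hypothetical migration sketch: the int& out-parameter (-1 = none) is
// replaced by a structured binding over the returned pair.
auto [gens, curGen] = findGenerations(profile);
for (auto & gen : gens)
    printInfo("generation %d%s", gen.number,
        curGen && gen.number == *curGen ? " (current)" : "");
```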
@@ -48,13 +48,12 @@ static void search(const unsigned char * s, size_t len,

 struct RefScanSink : Sink
 {
-    HashSink hashSink;
     StringSet hashes;
     StringSet seen;

     string tail;

-    RefScanSink() : hashSink(htSHA256) { }
+    RefScanSink() { }

     void operator () (const unsigned char * data, size_t len);
 };

@@ -62,8 +61,6 @@ struct RefScanSink : Sink

 void RefScanSink::operator () (const unsigned char * data, size_t len)
 {
-    hashSink(data, len);
-
     /* It's possible that a reference spans the previous and current
        fragment, so search in the concatenation of the tail of the
        previous fragment and the start of the current fragment. */

@@ -79,10 +76,12 @@ void RefScanSink::operator () (const unsigned char * data, size_t len)
 }


-PathSet scanForReferences(const string & path,
-    const PathSet & refs, HashResult & hash)
+std::pair<PathSet, HashResult> scanForReferences(const string & path,
+    const PathSet & refs)
 {
-    RefScanSink sink;
+    RefScanSink refsSink;
+    HashSink hashSink { htSHA256 };
+    TeeSink sink { refsSink, hashSink };
     std::map<string, Path> backMap;

     /* For efficiency (and a higher hit rate), just search for the

@@ -97,7 +96,7 @@ PathSet scanForReferences(const string & path,
         assert(s.size() == refLength);
         assert(backMap.find(s) == backMap.end());
         // parseHash(htSHA256, s);
-        sink.hashes.insert(s);
+        refsSink.hashes.insert(s);
         backMap[s] = i;
     }

@@ -106,15 +105,15 @@ PathSet scanForReferences(const string & path,

     /* Map the hashes found back to their store paths. */
     PathSet found;
-    for (auto & i : sink.seen) {
+    for (auto & i : refsSink.seen) {
         std::map<string, Path>::iterator j;
         if ((j = backMap.find(i)) == backMap.end()) abort();
         found.insert(j->second);
     }

-    hash = sink.hashSink.finish();
+    auto hash = hashSink.finish();

-    return found;
+    return std::pair<PathSet, HashResult>(found, hash);
 }

@@ -5,8 +5,7 @@

 namespace nix {

-PathSet scanForReferences(const Path & path, const PathSet & refs,
-    HashResult & hash);
+std::pair<PathSet, HashResult> scanForReferences(const Path & path, const PathSet & refs);

 struct RewritingSink : Sink
 {
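Callers of `scanForReferences` likewise migrate from the `HashResult &` out-parameter to the returned pair; a hypothetical caller sketch, assuming `actualPath` and `candidateRefs` exist:

```cpp
// Hypothetical caller sketch: HashResult is a (Hash, size) pair, so the
// NAR hash and size now arrive together with the scanned references.
auto [references, hashResult] = scanForReferences(actualPath, candidateRefs);
Hash narHash = hashResult.first;      // SHA-256 over the scanned data
auto narSize = hashResult.second;     // bytes fed through the TeeSink
```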
@@ -39,6 +39,24 @@ void writeStorePaths(const Store & store, Sink & out, const StorePathSet & paths
         out << store.printStorePath(i);
 }

+StorePathCAMap readStorePathCAMap(const Store & store, Source & from)
+{
+    StorePathCAMap paths;
+    auto count = readNum<size_t>(from);
+    while (count--)
+        paths.insert_or_assign(store.parseStorePath(readString(from)), parseContentAddressOpt(readString(from)));
+    return paths;
+}
+
+void writeStorePathCAMap(const Store & store, Sink & out, const StorePathCAMap & paths)
+{
+    out << paths.size();
+    for (auto & i : paths) {
+        out << store.printStorePath(i.first);
+        out << renderContentAddress(i.second);
+    }
+}
+
 std::map<string, StorePath> readOutputPathMap(const Store & store, Source & from)
 {
     std::map<string, StorePath> pathMap;
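On the wire, a `StorePathCAMap` is a count followed by `(path, rendered content address)` pairs; an absent CA is rendered as the empty string, which `parseContentAddressOpt` maps back to `std::nullopt`. A hypothetical round-trip sketch, assuming `store` and a valid `somePath` obtained elsewhere, and the `StringSink`/`StringSource` helpers of this codebase:

```cpp
// Hypothetical round-trip through the serializers defined above.
StorePathCAMap m;
m.insert_or_assign(somePath, std::nullopt);  // encoded as "" on the wire

StringSink sink;
writeStorePathCAMap(store, sink, m);

StringSource source { *sink.s };
auto m2 = readStorePathCAMap(store, source);
assert(m2.at(somePath) == std::nullopt);
```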
@@ -332,18 +350,17 @@ StorePathSet RemoteStore::querySubstitutablePaths(const StorePathSet & paths)
 }


-void RemoteStore::querySubstitutablePathInfos(const StorePathSet & paths,
-    SubstitutablePathInfos & infos)
+void RemoteStore::querySubstitutablePathInfos(const StorePathCAMap & pathsMap, SubstitutablePathInfos & infos)
 {
-    if (paths.empty()) return;
+    if (pathsMap.empty()) return;

     auto conn(getConnection());

     if (GET_PROTOCOL_MINOR(conn->daemonVersion) < 12) {

-        for (auto & i : paths) {
+        for (auto & i : pathsMap) {
             SubstitutablePathInfo info;
-            conn->to << wopQuerySubstitutablePathInfo << printStorePath(i);
+            conn->to << wopQuerySubstitutablePathInfo << printStorePath(i.first);
             conn.processStderr();
             unsigned int reply = readInt(conn->from);
             if (reply == 0) continue;

@@ -353,13 +370,19 @@ void RemoteStore::querySubstitutablePathInfos(const StorePathSet & paths,
             info.references = readStorePaths<StorePathSet>(*this, conn->from);
             info.downloadSize = readLongLong(conn->from);
             info.narSize = readLongLong(conn->from);
-            infos.insert_or_assign(i, std::move(info));
+            infos.insert_or_assign(i.first, std::move(info));
         }

     } else {

         conn->to << wopQuerySubstitutablePathInfos;
-        writeStorePaths(*this, conn->to, paths);
+        if (GET_PROTOCOL_MINOR(conn->daemonVersion) < 22) {
+            StorePathSet paths;
+            for (auto & path : pathsMap)
+                paths.insert(path.first);
+            writeStorePaths(*this, conn->to, paths);
+        } else
+            writeStorePathCAMap(*this, conn->to, pathsMap);
         conn.processStderr();
         size_t count = readNum<size_t>(conn->from);
         for (size_t n = 0; n < count; n++) {
@@ -498,14 +521,89 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source,
         conn->to << wopAddToStoreNar
                  << printStorePath(info.path)
                  << (info.deriver ? printStorePath(*info.deriver) : "")
-                 << info.narHash.to_string(Base16, false);
+                 << info.narHash->to_string(Base16, false);
         writeStorePaths(*this, conn->to, info.references);
         conn->to << info.registrationTime << info.narSize
                  << info.ultimate << info.sigs << renderContentAddress(info.ca)
                  << repair << !checkSigs;
-        bool tunnel = GET_PROTOCOL_MINOR(conn->daemonVersion) >= 21;
-        if (!tunnel) copyNAR(source, conn->to);
-        conn.processStderr(0, tunnel ? &source : nullptr);
+
+        if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 23) {
+
+            std::exception_ptr ex;
+
+            struct FramedSink : BufferedSink
+            {
+                ConnectionHandle & conn;
+                std::exception_ptr & ex;
+
+                FramedSink(ConnectionHandle & conn, std::exception_ptr & ex) : conn(conn), ex(ex)
+                { }
+
+                ~FramedSink()
+                {
+                    try {
+                        conn->to << 0;
+                        conn->to.flush();
+                    } catch (...) {
+                        ignoreException();
+                    }
+                }
+
+                void write(const unsigned char * data, size_t len) override
+                {
+                    /* Don't send more data if the remote has
+                       encountered an error. */
+                    if (ex) {
+                        auto ex2 = ex;
+                        ex = nullptr;
+                        std::rethrow_exception(ex2);
+                    }
+                    conn->to << len;
+                    conn->to(data, len);
+                };
+            };
+
+            /* Handle log messages / exceptions from the remote on a
+               separate thread. */
+            std::thread stderrThread([&]()
+            {
+                try {
+                    conn.processStderr();
+                } catch (...) {
+                    ex = std::current_exception();
+                }
+            });
+
+            Finally joinStderrThread([&]()
+            {
+                if (stderrThread.joinable()) {
+                    stderrThread.join();
+                    if (ex) {
+                        try {
+                            std::rethrow_exception(ex);
+                        } catch (...) {
+                            ignoreException();
+                        }
+                    }
+                }
+            });
+
+            {
+                FramedSink sink(conn, ex);
+                copyNAR(source, sink);
+                sink.flush();
+            }
+
+            stderrThread.join();
+            if (ex)
+                std::rethrow_exception(ex);
+
+        } else if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 21) {
+            conn.processStderr(0, &source);
+        } else {
+            copyNAR(source, conn->to);
+            conn.processStderr(0, nullptr);
+        }
     }
 }
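The new protocol ≥ 1.23 path wraps the NAR in a simple framed stream: each chunk is preceded by its length, and a zero-length frame terminates the stream, so the daemon can report errors mid-transfer without desynchronizing the connection. A standalone decoder sketch of that framing (hypothetical; Nix's real wire integers are 64-bit little-endian words, and error handling is elided):

```cpp
#include <cstdint>
#include <istream>
#include <string>
#include <vector>

// Hypothetical reader for a FramedSink-style stream: a u64 length word,
// then that many payload bytes; a length of 0 marks end-of-stream.
std::string readFramed(std::istream & in)
{
    std::string out;
    for (;;) {
        uint64_t len = 0;
        in.read(reinterpret_cast<char *>(&len), sizeof len); // assumes LE host
        if (!in || len == 0) break;
        std::vector<char> buf(len);
        in.read(buf.data(), len);
        out.append(buf.data(), in.gcount());
    }
    return out;
}
```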
@@ -707,7 +805,7 @@ void RemoteStore::addSignatures(const StorePath & storePath, const StringSet & s

 void RemoteStore::queryMissing(const std::vector<StorePathWithOutputs> & targets,
     StorePathSet & willBuild, StorePathSet & willSubstitute, StorePathSet & unknown,
-    unsigned long long & downloadSize, unsigned long long & narSize)
+    uint64_t & downloadSize, uint64_t & narSize)
 {
     {
         auto conn(getConnection());

@@ -56,7 +56,7 @@ public:

     StorePathSet querySubstitutablePaths(const StorePathSet & paths) override;

-    void querySubstitutablePathInfos(const StorePathSet & paths,
+    void querySubstitutablePathInfos(const StorePathCAMap & paths,
         SubstitutablePathInfos & infos) override;

     void addToStore(const ValidPathInfo & info, Source & nar,

@@ -94,7 +94,7 @@ public:

     void queryMissing(const std::vector<StorePathWithOutputs> & targets,
         StorePathSet & willBuild, StorePathSet & willSubstitute, StorePathSet & unknown,
-        unsigned long long & downloadSize, unsigned long long & narSize) override;
+        uint64_t & downloadSize, uint64_t & narSize) override;

     void connect() override;
@@ -343,13 +343,10 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore
             std::chrono::duration_cast<std::chrono::milliseconds>(now2 - now1)
                 .count();

-        auto size = istream->tellg();
-
-        printInfo("uploaded 's3://%s/%s' (%d bytes) in %d ms",
-            bucketName, path, size, duration);
+        printInfo("uploaded 's3://%s/%s' in %d ms",
+            bucketName, path, duration);

         stats.putTimeMs += duration;
-        stats.putBytes += size;
         stats.put++;
     }

@@ -19,7 +19,6 @@ public:
     struct Stats
     {
         std::atomic<uint64_t> put{0};
-        std::atomic<uint64_t> putBytes{0};
         std::atomic<uint64_t> putTimeMs{0};
         std::atomic<uint64_t> get{0};
         std::atomic<uint64_t> getBytes{0};
@@ -61,6 +61,11 @@ void SQLite::exec(const std::string & stmt)
     });
 }

+uint64_t SQLite::getLastInsertedRowId()
+{
+    return sqlite3_last_insert_rowid(db);
+}
+
 void SQLiteStmt::create(sqlite3 * db, const string & sql)
 {
     checkInterrupt();

@@ -95,10 +100,10 @@ SQLiteStmt::Use::~Use()
     sqlite3_reset(stmt);
 }

-SQLiteStmt::Use & SQLiteStmt::Use::operator () (const std::string & value, bool notNull)
+SQLiteStmt::Use & SQLiteStmt::Use::operator () (std::string_view value, bool notNull)
 {
     if (notNull) {
-        if (sqlite3_bind_text(stmt, curArg++, value.c_str(), -1, SQLITE_TRANSIENT) != SQLITE_OK)
+        if (sqlite3_bind_text(stmt, curArg++, value.data(), -1, SQLITE_TRANSIENT) != SQLITE_OK)
             throwSQLiteError(stmt.db, "binding argument");
     } else
         bind();

@@ -26,6 +26,8 @@ struct SQLite
     void isCache();

     void exec(const std::string & stmt);
+
+    uint64_t getLastInsertedRowId();
 };

 /* RAII wrapper to create and destroy SQLite prepared statements. */
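`getLastInsertedRowId` is a thin wrapper over SQLite's `sqlite3_last_insert_rowid`, saving callers a separate `SELECT last_insert_rowid()` round-trip after an INSERT. A hypothetical usage sketch, assuming `db` is an open handle of this wrapper type:

```cpp
// Hypothetical caller: fetch the rowid of the row just inserted.
db.exec("insert into Foo (bar) values (1)");
uint64_t rowId = db.getLastInsertedRowId();
```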
@@ -54,7 +56,7 @@ struct SQLiteStmt
         ~Use();

         /* Bind the next parameter. */
-        Use & operator () (const std::string & value, bool notNull = true);
+        Use & operator () (std::string_view value, bool notNull = true);
         Use & operator () (const unsigned char * data, size_t len, bool notNull = true);
         Use & operator () (int64_t value, bool notNull = true);
         Use & bind(); // null
@@ -193,6 +193,23 @@ StorePath Store::makeFixedOutputPath(
     }
 }

+// FIXME Put this somewhere?
+template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
+template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;
+
+StorePath Store::makeFixedOutputPathFromCA(std::string_view name, ContentAddress ca,
+    const StorePathSet & references, bool hasSelfReference) const
+{
+    // New template
+    return std::visit(overloaded {
+        [&](TextHash th) {
+            return makeTextPath(name, th.hash, references);
+        },
+        [&](FixedOutputHash fsh) {
+            return makeFixedOutputPath(fsh.method, fsh.hash, name, references, hasSelfReference);
+        }
+    }, ca);
+}
+
 StorePath Store::makeTextPath(std::string_view name, const Hash & hash,
     const StorePathSet & references) const
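The `overloaded` helper is the standard C++17 trick for visiting a `std::variant` with a set of lambdas, which is how `makeFixedOutputPathFromCA` dispatches on the `ContentAddress` alternatives. A self-contained illustration of the idiom (not from the diff):

```cpp
#include <iostream>
#include <string>
#include <variant>

template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;

int main()
{
    std::variant<int, std::string> v = std::string("nar");
    // std::visit picks the lambda matching the active alternative.
    std::visit(overloaded {
        [](int i) { std::cout << "int: " << i << "\n"; },
        [](const std::string & s) { std::cout << "string: " << s << "\n"; }
    }, v);
}
```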
@@ -222,20 +239,73 @@ StorePath Store::computeStorePathForText(const string & name, const string & s,
 }


+/*
+The aim of this function is to compute in one pass the correct ValidPathInfo for
+the files that we are trying to add to the store. To accomplish that in one
+pass, given the different kind of inputs that we can take (normal nar archives,
+nar archives with non SHA-256 hashes, and flat files), we set up a net of sinks
+and aliases. Also, since the dataflow is obfuscated by this, we include here a
+graphviz diagram:
+
+digraph graphname {
+    node [shape=box]
+    fileSource -> narSink
+    narSink [style=dashed]
+    narSink -> unsualHashTee [style = dashed, label = "Recursive && !SHA-256"]
+    narSink -> narHashSink [style = dashed, label = "else"]
+    unsualHashTee -> narHashSink
+    unsualHashTee -> caHashSink
+    fileSource -> parseSink
+    parseSink [style=dashed]
+    parseSink-> fileSink [style = dashed, label = "Flat"]
+    parseSink -> blank [style = dashed, label = "Recursive"]
+    fileSink -> caHashSink
+}
+*/
 ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
     FileIngestionMethod method, HashType hashAlgo,
     std::optional<Hash> expectedCAHash)
 {
-    /* FIXME: inefficient: we're reading/hashing 'tmpFile' three
-       times. */
+    HashSink narHashSink { htSHA256 };
+    HashSink caHashSink { hashAlgo };

-    auto [narHash, narSize] = hashPath(htSHA256, srcPath);
+    /* Note that fileSink and unusualHashTee must be mutually exclusive, since
+       they both write to caHashSink. Note that that requisite is currently true
+       because the former is only used in the flat case. */
+    RetrieveRegularNARSink fileSink { caHashSink };
+    TeeSink unusualHashTee { narHashSink, caHashSink };

-    auto hash = method == FileIngestionMethod::Recursive
-        ? hashAlgo == htSHA256
-            ? narHash
-            : hashPath(hashAlgo, srcPath).first
-        : hashFile(hashAlgo, srcPath);
+    auto & narSink = method == FileIngestionMethod::Recursive && hashAlgo != htSHA256
+        ? static_cast<Sink &>(unusualHashTee)
+        : narHashSink;
+
+    /* Functionally, this means that fileSource will yield the content of
+       srcPath. The fact that we use scratchpadSink as a temporary buffer here
+       is an implementation detail. */
+    auto fileSource = sinkToSource([&](Sink & scratchpadSink) {
+        dumpPath(srcPath, scratchpadSink);
+    });
+
+    /* tapped provides the same data as fileSource, but we also write all the
+       information to narSink. */
+    TeeSource tapped { *fileSource, narSink };
+
+    ParseSink blank;
+    auto & parseSink = method == FileIngestionMethod::Flat
+        ? fileSink
+        : blank;
+
+    /* The information that flows from tapped (besides being replicated in
+       narSink), is now put in parseSink. */
+    parseDump(parseSink, tapped);
+
+    /* We extract the result of the computation from the sink by calling
+       finish. */
+    auto [narHash, narSize] = narHashSink.finish();
+
+    auto hash = method == FileIngestionMethod::Recursive && hashAlgo == htSHA256
+        ? narHash
+        : caHashSink.finish().first;

     if (expectedCAHash && expectedCAHash != hash)
         throw Error("hash mismatch for '%s'", srcPath);

@@ -246,8 +316,8 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
     info.ca = FixedOutputHash { .method = method, .hash = hash };

     if (!isValidPath(info.path)) {
-        auto source = sinkToSource([&](Sink & sink) {
-            dumpPath(srcPath, sink);
+        auto source = sinkToSource([&](Sink & scratchpadSink) {
+            dumpPath(srcPath, scratchpadSink);
         });
         addToStore(info, *source);
     }
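The single-pass version trades the old triple `hashPath`/`hashFile` traversal for one stream fanned out through the sinks described in the diagram. A hypothetical call site:

```cpp
// Hypothetical call: ingest ./result recursively, hashing its NAR
// serialization with SHA-256 in one streaming pass.
auto info = store->addToStoreSlow("example", "./result",
    FileIngestionMethod::Recursive, htSHA256);
printInfo("added %s", store->printStorePath(info.path));
```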
@@ -485,7 +555,7 @@ string Store::makeValidityRegistration(const StorePathSet & paths,
         auto info = queryPathInfo(i);

         if (showHash) {
-            s += info->narHash.to_string(Base16, false) + "\n";
+            s += info->narHash->to_string(Base16, false) + "\n";
             s += (format("%1%\n") % info->narSize).str();
         }

@@ -517,7 +587,7 @@ void Store::pathInfoToJSON(JSONPlaceholder & jsonOut, const StorePathSet & store
             auto info = queryPathInfo(storePath);

             jsonPath
-                .attr("narHash", info->narHash.to_string(hashBase, true))
+                .attr("narHash", info->narHash->to_string(hashBase, true))
                 .attr("narSize", info->narSize);

             {

@@ -560,7 +630,7 @@ void Store::pathInfoToJSON(JSONPlaceholder & jsonOut, const StorePathSet & store
                 if (!narInfo->url.empty())
                     jsonPath.attr("url", narInfo->url);
                 if (narInfo->fileHash)
-                    jsonPath.attr("downloadHash", narInfo->fileHash.to_string(hashBase, true));
+                    jsonPath.attr("downloadHash", narInfo->fileHash->to_string(hashBase, true));
                 if (narInfo->fileSize)
                     jsonPath.attr("downloadSize", narInfo->fileSize);
                 if (showClosureSize)

@@ -636,6 +706,15 @@ void copyStorePath(ref<Store> srcStore, ref<Store> dstStore,

     uint64_t total = 0;

+    // recompute store path on the chance dstStore does it differently
+    if (info->ca && info->references.empty()) {
+        auto info2 = make_ref<ValidPathInfo>(*info);
+        info2->path = dstStore->makeFixedOutputPathFromCA(info->path.name(), *info->ca);
+        if (dstStore->storeDir == srcStore->storeDir)
+            assert(info->path == info2->path);
+        info = info2;
+    }
+
     if (!info->narHash) {
         StringSink sink;
         srcStore->narFromPath({storePath}, sink);
@@ -671,16 +750,20 @@ void copyStorePath(ref<Store> srcStore, ref<Store> dstStore,
 }


-void copyPaths(ref<Store> srcStore, ref<Store> dstStore, const StorePathSet & storePaths,
+std::map<StorePath, StorePath> copyPaths(ref<Store> srcStore, ref<Store> dstStore, const StorePathSet & storePaths,
     RepairFlag repair, CheckSigsFlag checkSigs, SubstituteFlag substitute)
 {
     auto valid = dstStore->queryValidPaths(storePaths, substitute);

-    PathSet missing;
+    StorePathSet missing;
     for (auto & path : storePaths)
-        if (!valid.count(path)) missing.insert(srcStore->printStorePath(path));
+        if (!valid.count(path)) missing.insert(path);

-    if (missing.empty()) return;
+    std::map<StorePath, StorePath> pathsMap;
+    for (auto & path : storePaths)
+        pathsMap.insert_or_assign(path, path);
+
+    if (missing.empty()) return pathsMap;

     Activity act(*logger, lvlInfo, actCopyPaths, fmt("copying %d paths", missing.size()));

@@ -695,30 +778,49 @@ void copyPaths(ref<Store> srcStore, ref<Store> dstStore, const StorePathSet & st

     ThreadPool pool;

-    processGraph<Path>(pool,
-        PathSet(missing.begin(), missing.end()),
+    processGraph<StorePath>(pool,
+        StorePathSet(missing.begin(), missing.end()),

-        [&](const Path & storePath) {
-            if (dstStore->isValidPath(dstStore->parseStorePath(storePath))) {
+        [&](const StorePath & storePath) {
+            auto info = srcStore->queryPathInfo(storePath);
+            auto storePathForDst = storePath;
+            if (info->ca && info->references.empty()) {
+                storePathForDst = dstStore->makeFixedOutputPathFromCA(storePath.name(), *info->ca);
+                if (dstStore->storeDir == srcStore->storeDir)
+                    assert(storePathForDst == storePath);
+                if (storePathForDst != storePath)
+                    debug("replaced path '%s' to '%s' for substituter '%s'", srcStore->printStorePath(storePath), dstStore->printStorePath(storePathForDst), dstStore->getUri());
+            }
+            pathsMap.insert_or_assign(storePath, storePathForDst);
+
+            if (dstStore->isValidPath(storePath)) {
                 nrDone++;
                 showProgress();
-                return PathSet();
+                return StorePathSet();
             }

-            auto info = srcStore->queryPathInfo(srcStore->parseStorePath(storePath));
-
             bytesExpected += info->narSize;
             act.setExpected(actCopyPath, bytesExpected);

-            return srcStore->printStorePathSet(info->references);
+            return info->references;
         },

-        [&](const Path & storePathS) {
+        [&](const StorePath & storePath) {
             checkInterrupt();

-            auto storePath = dstStore->parseStorePath(storePathS);
+            auto info = srcStore->queryPathInfo(storePath);

-            if (!dstStore->isValidPath(storePath)) {
+            auto storePathForDst = storePath;
+            if (info->ca && info->references.empty()) {
+                storePathForDst = dstStore->makeFixedOutputPathFromCA(storePath.name(), *info->ca);
+                if (dstStore->storeDir == srcStore->storeDir)
+                    assert(storePathForDst == storePath);
+                if (storePathForDst != storePath)
+                    debug("replaced path '%s' to '%s' for substituter '%s'", srcStore->printStorePath(storePath), dstStore->printStorePath(storePathForDst), dstStore->getUri());
+            }
+            pathsMap.insert_or_assign(storePath, storePathForDst);
+
+            if (!dstStore->isValidPath(storePathForDst)) {
                 MaintainCount<decltype(nrRunning)> mc(nrRunning);
                 showProgress();
                 try {

@@ -727,7 +829,7 @@ void copyPaths(ref<Store> srcStore, ref<Store> dstStore, const StorePathSet & st
                     nrFailed++;
                     if (!settings.keepGoing)
                         throw e;
-                    logger->log(lvlError, fmt("could not copy %s: %s", storePathS, e.what()));
+                    logger->log(lvlError, fmt("could not copy %s: %s", dstStore->printStorePath(storePath), e.what()));
                     showProgress();
                     return;
                 }

@@ -736,6 +838,8 @@ void copyPaths(ref<Store> srcStore, ref<Store> dstStore, const StorePathSet & st
             nrDone++;
             showProgress();
         });
+
+    return pathsMap;
 }
@@ -801,7 +905,7 @@ std::string ValidPathInfo::fingerprint(const Store & store) const
             store.printStorePath(path));
     return
         "1;" + store.printStorePath(path) + ";"
-        + narHash.to_string(Base32, true) + ";"
+        + narHash->to_string(Base32, true) + ";"
         + std::to_string(narSize) + ";"
         + concatStringsSep(",", store.printStorePathSet(references));
 }

@@ -812,10 +916,6 @@ void ValidPathInfo::sign(const Store & store, const SecretKey & secretKey)
     sigs.insert(secretKey.signDetached(fingerprint(store)));
 }

-// FIXME Put this somewhere?
-template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
-template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;
-
 bool ValidPathInfo::isContentAddressed(const Store & store) const
 {
     if (! ca) return false;
@@ -882,7 +982,9 @@ Derivation Store::readDerivation(const StorePath & drvPath)
 {
     auto accessor = getFSAccessor();
     try {
-        return parseDerivation(*this, accessor->readFile(printStorePath(drvPath)));
+        return parseDerivation(*this,
+            accessor->readFile(printStorePath(drvPath)),
+            Derivation::nameFromPath(drvPath));
     } catch (FormatError & e) {
         throw Error("error parsing derivation '%s': %s", printStorePath(drvPath), e.msg());
     }
@@ -932,12 +1034,20 @@ ref<Store> openStore(const std::string & uri_,
     throw Error("don't know how to open Nix store '%s'", uri);
 }

+static bool isNonUriPath(const std::string & spec) {
+    return
+        // is not a URL
+        spec.find("://") == std::string::npos
+        // Has at least one path separator, and so isn't a single word that
+        // might be special like "auto"
+        && spec.find("/") != std::string::npos;
+}
+
 StoreType getStoreType(const std::string & uri, const std::string & stateDir)
 {
     if (uri == "daemon") {
         return tDaemon;
-    } else if (uri == "local" || hasPrefix(uri, "/")) {
+    } else if (uri == "local" || isNonUriPath(uri)) {
         return tLocal;
     } else if (uri == "" || uri == "auto") {
         if (access(stateDir.c_str(), R_OK | W_OK) == 0)

@@ -961,8 +1071,9 @@ static RegisterStoreImplementation regStore([](
             return std::shared_ptr<Store>(std::make_shared<UDSRemoteStore>(params));
         case tLocal: {
             Store::Params params2 = params;
-            if (hasPrefix(uri, "/"))
-                params2["root"] = uri;
+            if (isNonUriPath(uri)) {
+                params2["root"] = absPath(uri);
+            }
             return std::shared_ptr<Store>(std::make_shared<LocalStore>(params2));
         }
         default:
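`isNonUriPath` widens what counts as a local store root: anything without `://` that contains at least one `/`, so relative paths are now accepted and canonicalized with `absPath`. Hypothetical classifications against the function as defined above:

```cpp
// Hypothetical checks against isNonUriPath().
assert(isNonUriPath("/nix/store"));       // absolute path -> local store
assert(isNonUriPath("./local/store"));    // relative path now accepted
assert(!isNonUriPath("auto"));            // single word, no '/'
assert(!isNonUriPath("ssh://example"));   // URL-like, contains "://"
```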
@@ -85,7 +85,7 @@ struct GCOptions
     StorePathSet pathsToDelete;

     /* Stop after at least `maxFreed' bytes have been freed. */
-    unsigned long long maxFreed{std::numeric_limits<unsigned long long>::max()};
+    uint64_t maxFreed{std::numeric_limits<uint64_t>::max()};
 };


@@ -97,7 +97,7 @@ struct GCResults

     /* For `gcReturnDead', `gcDeleteDead' and `gcDeleteSpecific', the
        number of bytes that would be or was freed. */
-    unsigned long long bytesFreed = 0;
+    uint64_t bytesFreed = 0;
 };


@@ -105,8 +105,8 @@ struct SubstitutablePathInfo
 {
     std::optional<StorePath> deriver;
     StorePathSet references;
-    unsigned long long downloadSize; /* 0 = unknown or inapplicable */
-    unsigned long long narSize; /* 0 = unknown */
+    uint64_t downloadSize; /* 0 = unknown or inapplicable */
+    uint64_t narSize; /* 0 = unknown */
 };

 typedef std::map<StorePath, SubstitutablePathInfo> SubstitutablePathInfos;

@@ -115,7 +115,8 @@ struct ValidPathInfo
 {
     StorePath path;
     std::optional<StorePath> deriver;
-    Hash narHash;
+    // TODO document this
+    std::optional<Hash> narHash;
     StorePathSet references;
     time_t registrationTime = 0;
     uint64_t narSize = 0; // 0 = unknown
@@ -343,7 +344,11 @@ public:
         bool hasSelfReference = false) const;

     StorePath makeTextPath(std::string_view name, const Hash & hash,
-        const StorePathSet & references) const;
+        const StorePathSet & references = {}) const;
+
+    StorePath makeFixedOutputPathFromCA(std::string_view name, ContentAddress ca,
+        const StorePathSet & references = {},
+        bool hasSelfReference = false) const;

     /* This is the preparatory part of addToStore(); it computes the
        store path to which srcPath is to be copied. Returns the store

@@ -435,9 +440,10 @@ public:
     virtual StorePathSet querySubstitutablePaths(const StorePathSet & paths) { return {}; };

     /* Query substitute info (i.e. references, derivers and download
-       sizes) of a set of paths. If a path does not have substitute
-       info, it's omitted from the resulting ‘infos’ map. */
-    virtual void querySubstitutablePathInfos(const StorePathSet & paths,
+       sizes) of a map of paths to their optional ca values. If a path
+       does not have substitute info, it's omitted from the resulting
+       ‘infos’ map. */
+    virtual void querySubstitutablePathInfos(const StorePathCAMap & paths,
         SubstitutablePathInfos & infos) { return; };

     /* Import a path into the store. */

@@ -460,7 +466,7 @@ public:
         std::optional<Hash> expectedCAHash = {});

     // FIXME: remove?
-    virtual StorePath addToStoreFromDump(const string & dump, const string & name,
+    virtual StorePath addToStoreFromDump(Source & dump, const string & name,
         FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair)
     {
         throw Error("addToStoreFromDump() is not supported by this store");

@@ -609,7 +615,7 @@ public:
        that will be substituted. */
     virtual void queryMissing(const std::vector<StorePathWithOutputs> & targets,
         StorePathSet & willBuild, StorePathSet & willSubstitute, StorePathSet & unknown,
-        unsigned long long & downloadSize, unsigned long long & narSize);
+        uint64_t & downloadSize, uint64_t & narSize);

     /* Sort a set of paths topologically under the references
        relation. If p refers to q, then p precedes q in this list. */

@@ -739,11 +745,13 @@ void copyStorePath(ref<Store> srcStore, ref<Store> dstStore,


 /* Copy store paths from one store to another. The paths may be copied
-   in parallel. They are copied in a topologically sorted order
-   (i.e. if A is a reference of B, then A is copied before B), but
-   the set of store paths is not automatically closed; use
-   copyClosure() for that. */
-void copyPaths(ref<Store> srcStore, ref<Store> dstStore, const StorePathSet & storePaths,
+   in parallel. They are copied in a topologically sorted order (i.e.
+   if A is a reference of B, then A is copied before B), but the set
+   of store paths is not automatically closed; use copyClosure() for
+   that. Returns a map of what each path was copied to the dstStore
+   as. */
+std::map<StorePath, StorePath> copyPaths(ref<Store> srcStore, ref<Store> dstStore,
+    const StorePathSet & storePaths,
     RepairFlag repair = NoRepair,
     CheckSigsFlag checkSigs = CheckSigs,
     SubstituteFlag substitute = NoSubstitute);
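A hypothetical consumer of the new return value, mapping each source path to where it actually landed in the destination store:

```cpp
// Hypothetical: content-addressed paths with no references may be
// re-addressed by dstStore, so the destination path can differ.
auto pathsMap = copyPaths(srcStore, dstStore, storePaths);
for (auto & [src, dst] : pathsMap)
    debug("copied '%s' as '%s'",
        srcStore->printStorePath(src), dstStore->printStorePath(dst));
```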
@@ -842,4 +850,6 @@ std::optional<ValidPathInfo> decodeValidPathInfo(
 /* Split URI into protocol+hierarchy part and its parameter set. */
 std::pair<std::string, Store::Params> splitUriAndParams(const std::string & uri);

+std::optional<ContentAddress> getDerivationCA(const BasicDerivation & drv);
+
 }
@@ -6,7 +6,7 @@ namespace nix {
 #define WORKER_MAGIC_1 0x6e697863
 #define WORKER_MAGIC_2 0x6478696f

-#define PROTOCOL_VERSION 0x116
+#define PROTOCOL_VERSION 0x117
 #define GET_PROTOCOL_MAJOR(x) ((x) & 0xff00)
 #define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff)
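The bump from 0x116 to 0x117 raises the daemon protocol's minor version from 22 to 23, the value that the `GET_PROTOCOL_MINOR(conn->daemonVersion) >= 23` and `< 22` checks earlier in this diff key on. A quick sanity check of that arithmetic:

```cpp
static_assert((0x117 & 0x00ff) == 23, "new minor version");
static_assert((0x116 & 0x00ff) == 22, "old minor version");
static_assert((0x117 & 0xff00) == (0x116 & 0xff00), "major unchanged");
```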
@@ -70,6 +70,10 @@ template<class T> T readStorePaths(const Store & store, Source & from);

 void writeStorePaths(const Store & store, Sink & out, const StorePathSet & paths);

+StorePathCAMap readStorePathCAMap(const Store & store, Source & from);
+
+void writeStorePathCAMap(const Store & store, Sink & out, const StorePathCAMap & paths);
+
 void writeOutputPathMap(const Store & store, Sink & out, const OutputPathMap & paths);

 }
@@ -150,17 +150,17 @@ static void skipGeneric(Source & source)

 static void parseContents(ParseSink & sink, Source & source, const Path & path)
 {
-    unsigned long long size = readLongLong(source);
+    uint64_t size = readLongLong(source);

     sink.preallocateContents(size);

-    unsigned long long left = size;
+    uint64_t left = size;
     std::vector<unsigned char> buf(65536);

     while (left) {
         checkInterrupt();
         auto n = buf.size();
-        if ((unsigned long long)n > left) n = left;
+        if ((uint64_t)n > left) n = left;
         source(buf.data(), n);
         sink.receiveContents(buf.data(), n);
         left -= n;

@@ -323,7 +323,7 @@ struct RestoreSink : ParseSink
             throw SysError("fchmod");
     }

-    void preallocateContents(unsigned long long len)
+    void preallocateContents(uint64_t len)
     {
 #if HAVE_POSIX_FALLOCATE
         if (len) {

@@ -338,7 +338,7 @@ struct RestoreSink : ParseSink
 #endif
     }

-    void receiveContents(unsigned char * data, unsigned int len)
+    void receiveContents(unsigned char * data, size_t len)
     {
         writeFull(fd.get(), data, len);
     }