forked from lix-project/lix
Merge remote-tracking branch 'upstream/master' into auto-uid-allocation
commit dc92b01885

.github/PULL_REQUEST_TEMPLATE/pull_request_template.md (new file, +7 lines)
@@ -0,0 +1,7 @@
+**Release Notes**
+Please include relevant [release notes](https://github.com/NixOS/nix/blob/master/doc/manual/src/release-notes/rl-next.md) as needed.
+
+
+**Testing**
+
+If this issue is a regression or something that should block release, please consider including a test either in the [testsuite](https://github.com/NixOS/nix/tree/master/tests) or as a [hydraJob](https://github.com/NixOS/nix/blob/master/flake.nix#L396) so that it can be part of the [automatic checks](https://hydra.nixos.org/jobset/nix/master).

.github/workflows/backport.yml (new file, +26 lines)
@@ -0,0 +1,26 @@
+name: Backport
+on:
+  pull_request_target:
+    types: [closed, labeled]
+jobs:
+  backport:
+    name: Backport Pull Request
+    if: github.repository_owner == 'NixOS' && github.event.pull_request.merged == true && (github.event_name != 'labeled' || startsWith('backport', github.event.label.name))
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+        with:
+          ref: ${{ github.event.pull_request.head.sha }}
+          # required to find all branches
+          fetch-depth: 0
+      - name: Create backport PRs
+        # should be kept in sync with `version`
+        uses: zeebe-io/backport-action@v0.0.7
+        with:
+          # Config README: https://github.com/zeebe-io/backport-action#backport-action
+          github_token: ${{ secrets.GITHUB_TOKEN }}
+          github_workspace: ${{ github.workspace }}
+          pull_description: |-
+            Bot-based backport to `${target_branch}`, triggered by a label in #${pull_number}.
+          # should be kept in sync with `uses`
+          version: v0.0.5

@@ -1,20 +1,23 @@
-name: "Test"
+name: "CI"

 on:
   pull_request:
   push:

 jobs:

   tests:
     needs: [check_cachix]
     strategy:
       matrix:
         os: [ubuntu-latest, macos-latest]
     runs-on: ${{ matrix.os }}
+    timeout-minutes: 60
     steps:
-    - uses: actions/checkout@v2.3.4
+    - uses: actions/checkout@v2.4.0
       with:
         fetch-depth: 0
-    - uses: cachix/install-nix-action@v14
+    - uses: cachix/install-nix-action@v16
     - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
     - uses: cachix/cachix-action@v10
       if: needs.check_cachix.outputs.secret == 'true'
@@ -22,7 +25,8 @@ jobs:
         name: '${{ env.CACHIX_NAME }}'
         signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}'
         authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'
-    - run: nix-build -A checks.$(nix-instantiate --eval -E '(builtins.currentSystem)')
+    - run: nix --experimental-features 'nix-command flakes' flake check -L

   check_cachix:
     name: Cachix secret present for installer tests
     runs-on: ubuntu-latest
@@ -34,6 +38,7 @@ jobs:
       env:
         _CACHIX_SECRETS: ${{ secrets.CACHIX_SIGNING_KEY }}${{ secrets.CACHIX_AUTH_TOKEN }}
       run: echo "::set-output name=secret::${{ env._CACHIX_SECRETS != '' }}"

   installer:
     needs: [tests, check_cachix]
     if: github.event_name == 'push' && needs.check_cachix.outputs.secret == 'true'
@@ -41,11 +46,11 @@ jobs:
     outputs:
       installerURL: ${{ steps.prepare-installer.outputs.installerURL }}
     steps:
-    - uses: actions/checkout@v2.3.4
+    - uses: actions/checkout@v2.4.0
       with:
         fetch-depth: 0
     - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
-    - uses: cachix/install-nix-action@v14
+    - uses: cachix/install-nix-action@v16
     - uses: cachix/cachix-action@v10
       with:
         name: '${{ env.CACHIX_NAME }}'
@@ -53,6 +58,7 @@ jobs:
         authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'
     - id: prepare-installer
       run: scripts/prepare-installer-for-github-actions

   installer_test:
     needs: [installer, check_cachix]
     if: github.event_name == 'push' && needs.check_cachix.outputs.secret == 'true'
@@ -61,10 +67,42 @@ jobs:
         os: [ubuntu-latest, macos-latest]
     runs-on: ${{ matrix.os }}
     steps:
-    - uses: actions/checkout@v2.3.4
+    - uses: actions/checkout@v2.4.0
     - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
-    - uses: cachix/install-nix-action@v14
+    - uses: cachix/install-nix-action@v16
       with:
         install_url: '${{needs.installer.outputs.installerURL}}'
         install_options: "--tarball-url-prefix https://${{ env.CACHIX_NAME }}.cachix.org/serve"
     - run: nix-instantiate -E 'builtins.currentTime' --eval

+  docker_push_image:
+    needs: [check_cachix, tests]
+    if: >-
+      github.event_name == 'push' &&
+      github.ref_name == 'master' &&
+      needs.check_cachix.outputs.secret == 'true'
+    runs-on: ubuntu-latest
+    steps:
+    - uses: actions/checkout@v2.4.0
+      with:
+        fetch-depth: 0
+    - uses: cachix/install-nix-action@v16
+    - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
+    - run: echo NIX_VERSION="$(nix-instantiate --eval -E '(import ./default.nix).defaultPackage.${builtins.currentSystem}.version' | tr -d \")" >> $GITHUB_ENV
+    - uses: cachix/cachix-action@v10
+      if: needs.check_cachix.outputs.secret == 'true'
+      with:
+        name: '${{ env.CACHIX_NAME }}'
+        signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}'
+        authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'
+    - run: nix --experimental-features 'nix-command flakes' build .#dockerImage -L
+    - run: docker load -i ./result/image.tar.gz
+    - run: docker tag nix:$NIX_VERSION nixos/nix:$NIX_VERSION
+    - run: docker tag nix:$NIX_VERSION nixos/nix:master
+    - name: Login to Docker Hub
+      uses: docker/login-action@v1
+      with:
+        username: ${{ secrets.DOCKERHUB_USERNAME }}
+        password: ${{ secrets.DOCKERHUB_TOKEN }}
+    - run: docker push nixos/nix:$NIX_VERSION
+    - run: docker push nixos/nix:master

.github/workflows/hydra_status.yml (new file, +16 lines)
@@ -0,0 +1,16 @@
+name: Hydra status
+on:
+  schedule:
+    - cron: "12,42 * * * *"
+  workflow_dispatch:
+jobs:
+  check_hydra_status:
+    name: Check Hydra status
+    if: github.repository_owner == 'NixOS'
+    runs-on: ubuntu-latest
+    steps:
+    - uses: actions/checkout@v2.4.0
+      with:
+        fetch-depth: 0
+    - run: bash scripts/check-hydra-status.sh

.gitignore (7 changed lines)
@@ -26,8 +26,6 @@ perl/Makefile.config

 # /scripts/
 /scripts/nix-profile.sh
-/scripts/nix-reduce-build
-/scripts/nix-http-export.cgi
 /scripts/nix-profile-daemon.sh

 # /src/libexpr/
@@ -40,6 +38,7 @@ perl/Makefile.config

 # /src/libstore/
 *.gen.*
+/src/libstore/tests/libstore-tests

 # /src/libutil/
 /src/libutil/tests/libutil-tests
@@ -121,3 +120,7 @@ GTAGS
 compile_commands.json

 nix-rust/target
+
+result
+
+.vscode/

Makefile (1 changed line)
@@ -4,6 +4,7 @@ makefiles = \
   src/libutil/local.mk \
   src/libutil/tests/local.mk \
   src/libstore/local.mk \
+  src/libstore/tests/local.mk \
   src/libfetchers/local.mk \
   src/libmain/local.mk \
   src/libexpr/local.mk \

@@ -16,6 +16,7 @@ LDFLAGS = @LDFLAGS@
 LIBARCHIVE_LIBS = @LIBARCHIVE_LIBS@
 LIBBROTLI_LIBS = @LIBBROTLI_LIBS@
 LIBCURL_LIBS = @LIBCURL_LIBS@
+LOWDOWN_LIBS = @LOWDOWN_LIBS@
 OPENSSL_LIBS = @OPENSSL_LIBS@
 LIBSECCOMP_LIBS = @LIBSECCOMP_LIBS@
 PACKAGE_NAME = @PACKAGE_NAME@

@@ -1,8 +1,8 @@
 diff --git a/pthread_stop_world.c b/pthread_stop_world.c
-index 1cee6a0b..46c3acd9 100644
+index 4b2c429..1fb4c52 100644
 --- a/pthread_stop_world.c
 +++ b/pthread_stop_world.c
-@@ -674,6 +674,8 @@ GC_INNER void GC_push_all_stacks(void)
+@@ -673,6 +673,8 @@ GC_INNER void GC_push_all_stacks(void)
      struct GC_traced_stack_sect_s *traced_stack_sect;
      pthread_t self = pthread_self();
      word total_size = 0;
@@ -11,7 +11,7 @@ index 1cee6a0b..46c3acd9 100644
 
    if (!EXPECT(GC_thr_initialized, TRUE))
      GC_thr_init();
-@@ -723,6 +725,28 @@ GC_INNER void GC_push_all_stacks(void)
+@@ -722,6 +724,31 @@ GC_INNER void GC_push_all_stacks(void)
            hi = p->altstack + p->altstack_size;
          /* FIXME: Need to scan the normal stack too, but how ? */
          /* FIXME: Assume stack grows down */
@@ -22,6 +22,9 @@ index 1cee6a0b..46c3acd9 100644
 +      if (pthread_attr_getstacksize(&pattr, &stack_limit)) {
 +        ABORT("GC_push_all_stacks: pthread_attr_getstacksize failed!");
 +      }
++      if (pthread_attr_destroy(&pattr)) {
++        ABORT("GC_push_all_stacks: pthread_attr_destroy failed!");
++      }
 +      // When a thread goes into a coroutine, we lose its original sp until
 +      // control flow returns to the thread.
 +      // While in the coroutine, the sp points outside the thread stack,

configure.ac (21 changed lines)
@@ -188,17 +188,24 @@ PKG_CHECK_MODULES([EDITLINE], [libeditline], [CXXFLAGS="$EDITLINE_CFLAGS $CXXFLA
   [AC_MSG_ERROR([Nix requires libeditline; it was not found via pkg-config, but via its header, but required functions do not work. Maybe it is too old? >= 1.14 is required.])])
 ])

-# Look for libsodium, an optional dependency.
+# Look for libsodium.
 PKG_CHECK_MODULES([SODIUM], [libsodium], [CXXFLAGS="$SODIUM_CFLAGS $CXXFLAGS"])

 # Look for libbrotli{enc,dec}.
 PKG_CHECK_MODULES([LIBBROTLI], [libbrotlienc libbrotlidec], [CXXFLAGS="$LIBBROTLI_CFLAGS $CXXFLAGS"])

 # Look for libcpuid.
+have_libcpuid=
 if test "$machine_name" = "x86_64"; then
-  PKG_CHECK_MODULES([LIBCPUID], [libcpuid], [CXXFLAGS="$LIBCPUID_CFLAGS $CXXFLAGS"])
-  have_libcpuid=1
-  AC_DEFINE([HAVE_LIBCPUID], [1], [Use libcpuid])
+  AC_ARG_ENABLE([cpuid],
+    AS_HELP_STRING([--disable-cpuid], [Do not determine microarchitecture levels with libcpuid (relevant to x86_64 only)]))
+  if test "x$enable_cpuid" != "xno"; then
+    PKG_CHECK_MODULES([LIBCPUID], [libcpuid],
+      [CXXFLAGS="$LIBCPUID_CFLAGS $CXXFLAGS"
+       have_libcpuid=1
+       AC_DEFINE([HAVE_LIBCPUID], [1], [Use libcpuid])]
+    )
+  fi
 fi
 AC_SUBST(HAVE_LIBCPUID, [$have_libcpuid])

@@ -255,13 +262,17 @@ fi
 PKG_CHECK_MODULES([GTEST], [gtest_main])


+# Look for nlohmann/json.
+PKG_CHECK_MODULES([NLOHMANN_JSON], [nlohmann_json >= 3.9])
+
+
 # documentation generation switch
 AC_ARG_ENABLE(doc-gen, AS_HELP_STRING([--disable-doc-gen],[disable documentation generation]),
   doc_generate=$enableval, doc_generate=yes)
 AC_SUBST(doc_generate)

 # Look for lowdown library.
-PKG_CHECK_MODULES([LOWDOWN], [lowdown >= 0.8.0], [CXXFLAGS="$LOWDOWN_CFLAGS $CXXFLAGS"])
+PKG_CHECK_MODULES([LOWDOWN], [lowdown >= 0.9.0], [CXXFLAGS="$LOWDOWN_CFLAGS $CXXFLAGS"])

 # Setuid installations.
 AC_CHECK_FUNCS([setresuid setreuid lchown])
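
As an aside, a sketch of how the `--disable-cpuid` switch added in the hunk above might be used when configuring a source build; the surrounding bootstrap and make steps mirror the source-build instructions touched later in this commit, and the exact invocation is illustrative rather than prescribed:

```console
$ ./bootstrap.sh
$ ./configure --disable-cpuid   # skip the libcpuid probe (x86_64 only) made optional above
$ make
```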

@@ -1,3 +1,3 @@
-(import (fetchTarball https://github.com/edolstra/flake-compat/archive/master.tar.gz) {
+(import (fetchTarball "https://github.com/edolstra/flake-compat/archive/master.tar.gz") {
   src = ./.;
 }).defaultNix

@@ -6,9 +6,9 @@ builtins:
 concatStrings (map
   (name:
     let builtin = builtins.${name}; in
-    "<dt><code>${name} "
+    "<dt id=\"builtins-${name}\"><a href=\"#builtins-${name}\"><code>${name} "
     + concatStringsSep " " (map (s: "<var>${s}</var>") builtin.args)
-    + "</code></dt>"
+    + "</code></a></dt>"
     + "<dd>\n\n"
     + builtin.doc
     + "\n\n</dd>"

@@ -1,4 +1,4 @@
-command:
+{ command, renderLinks ? false }:

 with builtins;
 with import ./utils.nix;
@@ -20,7 +20,11 @@ let
   categories = sort (x: y: x.id < y.id) (unique (map (cmd: cmd.category) (attrValues def.commands)));
   listCommands = cmds:
     concatStrings (map (name:
-      "* [`${command} ${name}`](./${appendName filename name}.md) - ${cmds.${name}.description}\n")
+      "* "
+      + (if renderLinks
+         then "[`${command} ${name}`](./${appendName filename name}.md)"
+         else "`${command} ${name}`")
+      + " - ${cmds.${name}.description}\n")
       (attrNames cmds));
 in
 "where *subcommand* is one of the following:\n\n"

@@ -8,17 +8,19 @@ concatStrings (map
   let option = options.${name}; in
   "  - `${name}`  \n\n"
   + concatStrings (map (s: "    ${s}\n") (splitLines option.description)) + "\n\n"
-  + " **Default:** " + (
-    if option.value == "" || option.value == []
-    then "*empty*"
-    else if isBool option.value
-    then (if option.value then "`true`" else "`false`")
-    else
-      # n.b. a StringMap value type is specified as a string, but
-      # this shows the value type. The empty stringmap is "null" in
-      # JSON, but that converts to "{ }" here.
-      (if isAttrs option.value then "`\"\"`"
-      else "`" + toString option.value + "`")) + "\n\n"
+  + (if option.documentDefault
+     then " **Default:** " + (
+       if option.value == "" || option.value == []
+       then "*empty*"
+       else if isBool option.value
+       then (if option.value then "`true`" else "`false`")
+       else
+         # n.b. a StringMap value type is specified as a string, but
+         # this shows the value type. The empty stringmap is "null" in
+         # JSON, but that converts to "{ }" here.
+         (if isAttrs option.value then "`\"\"`"
+         else "`" + toString option.value + "`")) + "\n\n"
+     else " **Default:** *machine-specific*\n")
   + (if option.aliases != []
     then " **Deprecated alias:** " + (concatStringsSep ", " (map (s: "`${s}`") option.aliases)) + "\n\n"
     else "")

@@ -12,11 +12,13 @@ man-pages := $(foreach n, \
 clean-files += $(d)/*.1 $(d)/*.5 $(d)/*.8

 # Provide a dummy environment for nix, so that it will not access files outside the macOS sandbox.
+# Set cores to 0 because otherwise nix show-config resolves the cores based on the current machine
 dummy-env = env -i \
 	HOME=/dummy \
 	NIX_CONF_DIR=/dummy \
 	NIX_SSL_CERT_FILE=/dummy/no-ca-bundle.crt \
-	NIX_STATE_DIR=/dummy
+	NIX_STATE_DIR=/dummy \
+	NIX_CONFIG='cores = 0'

 nix-eval = $(dummy-env) $(bindir)/nix eval --experimental-features nix-command -I nix/corepkgs=corepkgs --store dummy:// --impure --raw

@@ -44,7 +46,7 @@ $(d)/src/SUMMARY.md: $(d)/src/SUMMARY.md.in $(d)/src/command-ref/new-cli

 $(d)/src/command-ref/new-cli: $(d)/nix.json $(d)/generate-manpage.nix $(bindir)/nix
 	@rm -rf $@
-	$(trace-gen) $(nix-eval) --write-to $@ --expr 'import doc/manual/generate-manpage.nix (builtins.readFile $<)'
+	$(trace-gen) $(nix-eval) --write-to $@ --expr 'import doc/manual/generate-manpage.nix { command = builtins.readFile $<; renderLinks = true; }'

 $(d)/src/command-ref/conf-file.md: $(d)/conf-file.json $(d)/generate-options.nix $(d)/src/command-ref/conf-file-prefix.md $(bindir)/nix
 	@cat doc/manual/src/command-ref/conf-file-prefix.md > $@.tmp
@@ -70,6 +72,7 @@ $(d)/builtins.json: $(bindir)/nix
 	@mv $@.tmp $@

 # Generate the HTML manual.
+html: $(docdir)/manual/index.html
 install: $(docdir)/manual/index.html

 # Generate 'nix' manpages.
@@ -92,7 +95,7 @@ doc/manual/generated/man1/nix3-manpages: $(d)/src/command-ref/new-cli
 		lowdown -sT man -M section=1 $$tmpFile -o $(DESTDIR)$$(dirname $@)/$$name.1; \
 		rm $$tmpFile; \
 	done
-	touch $@
+	@touch $@

 $(docdir)/manual/index.html: $(MANUAL_SRCS) $(d)/book.toml $(d)/custom.css $(d)/src/SUMMARY.md $(d)/src/command-ref/new-cli $(d)/src/command-ref/conf-file.md $(d)/src/expressions/builtins.md $(call rwildcard, $(d)/src, *.md)
 	$(trace-gen) RUST_LOG=warn mdbook build doc/manual -d $(DESTDIR)$(docdir)/manual

@@ -9,6 +9,7 @@
   - [Prerequisites](installation/prerequisites-source.md)
   - [Obtaining a Source Distribution](installation/obtaining-source.md)
   - [Building Nix from Source](installation/building-source.md)
+  - [Using Nix within Docker](installation/installing-docker.md)
 - [Security](installation/nix-security.md)
   - [Single-User Mode](installation/single-user.md)
   - [Multi-User Mode](installation/multi-user.md)
@@ -70,7 +71,10 @@
 - [Hacking](contributing/hacking.md)
   - [CLI guideline](contributing/cli-guideline.md)
 - [Release Notes](release-notes/release-notes.md)
-  - [Release 2.4 (2021-XX-XX)](release-notes/rl-2.4.md)
+  - [Release X.Y (202?-??-??)](release-notes/rl-next.md)
+  - [Release 2.6 (2022-01-24)](release-notes/rl-2.6.md)
+  - [Release 2.5 (2021-12-13)](release-notes/rl-2.5.md)
+  - [Release 2.4 (2021-11-01)](release-notes/rl-2.4.md)
   - [Release 2.3 (2019-09-04)](release-notes/rl-2.3.md)
   - [Release 2.2 (2019-01-11)](release-notes/rl-2.2.md)
   - [Release 2.1 (2018-09-02)](release-notes/rl-2.1.md)

@@ -53,8 +53,8 @@ example, the following command allows you to build a derivation for
 $ uname
 Linux

-$ nix build \
-  '(with import <nixpkgs> { system = "x86_64-darwin"; }; runCommand "foo" {} "uname > $out")' \
+$ nix build --impure \
+  --expr '(with import <nixpkgs> { system = "x86_64-darwin"; }; runCommand "foo" {} "uname > $out")' \
   --builders 'ssh://mac x86_64-darwin'
 [1/0/1 built, 0.0 MiB DL] building foo on ssh://mac

@@ -16,8 +16,9 @@ By default Nix reads settings from the following places:
   will be loaded in reverse order.

   Otherwise it will look for `nix/nix.conf` files in `XDG_CONFIG_DIRS`
-  and `XDG_CONFIG_HOME`. If these are unset, it will look in
-  `$HOME/.config/nix.conf`.
+  and `XDG_CONFIG_HOME`. If unset, `XDG_CONFIG_DIRS` defaults to
+  `/etc/xdg`, and `XDG_CONFIG_HOME` defaults to `$HOME/.config`
+  as per [XDG Base Directory Specification](https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html).

 - If `NIX_CONFIG` is set, its contents is treated as the contents of
   a configuration file.

@@ -238,7 +238,16 @@ a number of possible ways:

 ## Examples

-To install a specific version of `gcc` from the active Nix expression:
+To install a package using a specific attribute path from the active Nix expression:
+
+```console
+$ nix-env -iA gcc40mips
+installing `gcc-4.0.2'
+$ nix-env -iA xorg.xorgserver
+installing `xorg-server-1.2.0'
+```
+
+To install a specific version of `gcc` using the derivation name:

 ```console
 $ nix-env --install gcc-3.3.2
@@ -246,6 +255,9 @@ installing `gcc-3.3.2'
 uninstalling `gcc-3.1'
 ```

+Using attribute path for selecting a package is preferred,
+as it is much faster and there will not be multiple matches.
+
 Note the previously installed version is removed, since
 `--preserve-installed` was not specified.

@@ -256,13 +268,6 @@ $ nix-env --install gcc
 installing `gcc-3.3.2'
 ```

-To install using a specific attribute:
-
-```console
-$ nix-env -i -A gcc40mips
-$ nix-env -i -A xorg.xorgserver
-```
-
 To install all derivations in the Nix expression `foo.nix`:

 ```console
@@ -374,22 +379,29 @@ For the other flags, see `--install`.
 ## Examples

 ```console
-$ nix-env --upgrade gcc
+$ nix-env --upgrade -A nixpkgs.gcc
 upgrading `gcc-3.3.1' to `gcc-3.4'
 ```

+When there are no updates available, nothing will happen:
+
 ```console
-$ nix-env -u gcc-3.3.2 --always (switch to a specific version)
+$ nix-env --upgrade -A nixpkgs.pan
+```
+
+Using `-A` is preferred when possible, as it is faster and unambiguous but
+it is also possible to upgrade to a specific version by matching the derivation name:
+
+```console
+$ nix-env -u gcc-3.3.2 --always
 upgrading `gcc-3.4' to `gcc-3.3.2'
 ```

-```console
-$ nix-env --upgrade pan
-(no upgrades available, so nothing happens)
-```
-
+To try to upgrade everything
+(matching packages based on the part of the derivation name without version):
+
 ```console
-$ nix-env -u (try to upgrade everything)
+$ nix-env -u
 upgrading `hello-2.1.2' to `hello-2.1.3'
 upgrading `mozilla-1.2' to `mozilla-1.4'
 ```
@@ -401,7 +413,7 @@ of a derivation `x` by looking at their respective `name` attributes.
 The names (e.g., `gcc-3.3.1` are split into two parts: the package name
 (`gcc`), and the version (`3.3.1`). The version part starts after the
 first dash not followed by a letter. `x` is considered an upgrade of `y`
-if their package names match, and the version of `y` is higher that that
+if their package names match, and the version of `y` is higher than that
 of `x`.

 The versions are compared by splitting them into contiguous components

@@ -11,8 +11,8 @@
   [`--command` *cmd*]
   [`--run` *cmd*]
   [`--exclude` *regexp*]
-  [--pure]
-  [--keep *name*]
+  [`--pure`]
+  [`--keep` *name*]
   {{`--packages` | `-p`} {*packages* | *expressions*} … | [*path*]}

 # Description
@@ -101,7 +101,8 @@ The following common options are supported:

   - `NIX_BUILD_SHELL`\
     Shell used to start the interactive environment. Defaults to the
-    `bash` found in `PATH`.
+    `bash` found in `<nixpkgs>`, falling back to the `bash` found in
+    `PATH` if not found.

 # Examples

@@ -110,13 +111,19 @@ shell in which to build it:

 ```console
 $ nix-shell '<nixpkgs>' -A pan
-[nix-shell]$ unpackPhase
+[nix-shell]$ eval ${unpackPhase:-unpackPhase}
 [nix-shell]$ cd pan-*
-[nix-shell]$ configurePhase
-[nix-shell]$ buildPhase
+[nix-shell]$ eval ${configurePhase:-configurePhase}
+[nix-shell]$ eval ${buildPhase:-buildPhase}
 [nix-shell]$ ./pan/gui/pan
 ```

+The reason we use form `eval ${configurePhase:-configurePhase}` here is because
+those packages that override these phases do so by exporting the overridden
+values in the environment variable of the same name.
+Here bash is being told to either evaluate the contents of 'configurePhase',
+if it exists as a variable, otherwise evaluate the configurePhase function.
+
 To clear the environment first, and do some additional automatic
 initialisation of the interactive shell:
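
As an aside, a minimal sketch of the shell behaviour behind the `eval ${configurePhase:-configurePhase}` form documented above; the override value and the output shown here are invented for illustration:

```console
[nix-shell]$ eval ${configurePhase:-configurePhase}      # variable unset: the stdenv configurePhase function runs
[nix-shell]$ configurePhase='echo overridden configure'  # a package that overrides the phase exports it as a variable
[nix-shell]$ eval ${configurePhase:-configurePhase}      # variable set: its contents run instead
overridden configure
```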

@@ -125,7 +125,7 @@ Special exit codes:

   - `104`\
     Not deterministic, the build succeeded in check mode but the
-    resulting output is not binary reproducable.
+    resulting output is not binary reproducible.

   With the `--keep-going` flag it's possible for multiple failures to
   occur, in this case the 1xx status codes are or combined using binary
@@ -321,8 +321,8 @@ symlink.
 This query has one option:

   - `--include-outputs`
-    Also include the output path of store derivations, and their
-    closures.
+    Also include the existing output paths of store derivations,
+    and their closures.

 This query can be used to implement various kinds of deployment. A
 *source deployment* is obtained by distributing the closure of a

@@ -162,11 +162,11 @@ Most Nix commands accept the following command-line options:
     }: ...
     ```

-    So if you call this Nix expression (e.g., when you do `nix-env -i
+    So if you call this Nix expression (e.g., when you do `nix-env -iA
     pkgname`), the function will be called automatically using the
     value [`builtins.currentSystem`](../expressions/builtins.md) for
     the `system` argument. You can override this using `--arg`, e.g.,
-    `nix-env -i pkgname --arg system \"i686-freebsd\"`. (Note that
+    `nix-env -iA pkgname --arg system \"i686-freebsd\"`. (Note that
     since the argument is a Nix string literal, you have to escape the
     quotes.)

@@ -3,7 +3,7 @@
 ## Goals

 Purpose of this document is to provide a clear direction to **help design
-delightful command line** experience. This document contain guidelines to
+delightful command line** experience. This document contains guidelines to
 follow to ensure a consistent and approachable user experience.

 ## Overview
@@ -103,7 +103,7 @@ impacted the most by bad user experience.
 # Help is essential

 Help should be built into your command line so that new users can gradually
 discover new features when they need them.

 ## Looking for help

@@ -115,7 +115,7 @@ The rules are:

 - Help is shown by using `--help` or `help` command (eg `nix` `--``help` or
   `nix help`).
 - For non-COMMANDs (eg. `nix` `--``help` and `nix store` `--``help`) we **show
   a summary** of most common use cases. Summary is presented on the STDOUT
   without any use of PAGER.
 - For COMMANDs (eg. `nix init` `--``help` or `nix help init`) we display the
@@ -176,7 +176,7 @@ $ nix init --template=template#pyton
 ------------------------------------------------------------------------
   Initializing Nix project at `/path/to/here`.
       Select a template for you new project:
-          |> template#pyton
+          |> template#python
              template#python-pip
              template#python-poetry
 ```
@@ -230,17 +230,17 @@ Now **Learn** part of the output is where you educate users. You should only
 show it when you know that a build will take some time and not annoy users of
 the builds that take only few seconds.

-Every feature like this should go though a intensive review and testing to
-collect as much a feedback as possible and to fine tune every little detail. If
+Every feature like this should go through an intensive review and testing to
+collect as much feedback as possible and to fine tune every little detail. If
 done right this can be an awesome features beginners and advance users will
 love, but if not done perfectly it will annoy users and leave bad impression.

 # Input

 Input to a command is provided via `ARGUMENTS` and `OPTIONS`.

 `ARGUMENTS` represent a required input for a function. When choosing to use
-`ARGUMENT` over function please be aware of the downsides that come with it:
+`ARGUMENTS` over `OPTIONS` please be aware of the downsides that come with it:

 - User will need to remember the order of `ARGUMENTS`. This is not a problem if
   there is only one `ARGUMENT`.
@@ -253,7 +253,7 @@ developer consider the downsides and choose wisely.

 ## Naming the `OPTIONS`

-Then only naming convention - apart from the ones mentioned in Naming the
+The only naming convention - apart from the ones mentioned in Naming the
 `COMMANDS` section is how flags are named.

 Flags are a type of `OPTION` that represent an option that can be turned ON of
@@ -271,12 +271,12 @@ to improve the discoverability of possible input. A new user will most likely
 not know which `ARGUMENTS` and `OPTIONS` are required or which values are
 possible for those options.

-In cases, the user might not provide the input or they provide wrong input,
-rather then show the error, prompt a user with an option to find and select
+In case the user does not provide the input or they provide wrong input,
+rather than show the error, prompt a user with an option to find and select
 correct input (see examples).

 Prompting is of course not required when TTY is not attached to STDIN. This
-would mean that scripts wont need to handle prompt, but rather handle errors.
+would mean that scripts won't need to handle prompt, but rather handle errors.

 A place to use prompt and provide user with interactive select

@@ -300,9 +300,9 @@ going to happen.
 ```shell
 $ nix build --option substitutors https://cache.example.org
 ------------------------------------------------------------------------
-  Warning! A security related question need to be answered.
+  Warning! A security related question needs to be answered.
 ------------------------------------------------------------------------
   The following substitutors will be used to in `my-project`:
     - https://cache.example.org

   Do you allow `my-project` to use above mentioned substitutors?
@@ -311,14 +311,14 @@ $ nix build --option substitutors https://cache.example.org

 # Output

-Terminal output can be quite limiting in many ways. Which should forces us to
+Terminal output can be quite limiting in many ways. Which should force us to
 think about the experience even more. As with every design the output is a
 compromise between being terse and being verbose, between showing help to
 beginners and annoying advance users. For this it is important that we know
 what are the priorities.

 Nix command line should be first and foremost written with beginners in mind.
-But users wont stay beginners for long and what was once useful might quickly
+But users won't stay beginners for long and what was once useful might quickly
 become annoying. There is no golden rule that we can give in this guideline
 that would make it easier how to draw a line and find best compromise.

@@ -342,7 +342,7 @@ also allowing them to redirect content to a file. For example:
 ```shell
 $ nix build > build.txt
 ------------------------------------------------------------------------
-  Error! Atrribute `bin` missing at (1:94) from string.
+  Error! Attribute `bin` missing at (1:94) from string.
 ------------------------------------------------------------------------

   1| with import <nixpkgs> { }; (pkgs.runCommandCC or pkgs.runCommand) "shell" { buildInputs = [ (surge.bin) ]; } ""
@@ -408,7 +408,7 @@ Above command clearly states that command successfully completed. And in case
 of `nix build`, which is a command that might take some time to complete, it is
 equally important to also show that a command started.

 ## Text alignment

 Text alignment is the number one design element that will present all of the
 Nix commands as a family and not as separate tools glued together.
@@ -419,7 +419,7 @@ The format we should follow is:

 $ nix COMMAND
-   VERB_1 NOUN and other words
+   VERB__1 NOUN and other words
      |> Some details
 ```

 Few rules that we can extract from above example:
@@ -444,13 +444,13 @@ is not even notable, therefore relying on it wouldn’t make much sense.

 **The bright text is much better supported** across terminals and color
 schemes. Most of the time the difference is perceived as if the bright text
 would be bold.

 ## Colors

 Humans are already conditioned by society to attach certain meaning to certain
 colors. While the meaning is not universal, a simple collection of colors is
 used to represent basic emotions.

 Colors that can be used in output

@@ -508,7 +508,7 @@ can, with a few key strokes, be changed into and advance introspection tool.

 ### Progress

-For longer running commands we should provide and overview of the progress.
+For longer running commands we should provide and overview the progress.
 This is shown best in `nix build` example:

 ```shell
@@ -553,9 +553,9 @@ going to happen.
 ```shell
 $ nix build --option substitutors https://cache.example.org
 ------------------------------------------------------------------------
-  Warning! A security related question need to be answered.
+  Warning! A security related question needs to be answered.
 ------------------------------------------------------------------------
   The following substitutors will be used to in `my-project`:
     - https://cache.example.org

   Do you allow `my-project` to use above mentioned substitutors?
@@ -566,7 +566,7 @@ $ nix build --option substitutors https://cache.example.org

 There are many ways that you can control verbosity.

 Verbosity levels are:

 - `ERROR` (level 0)
 - `WARN` (level 1)
@@ -586,4 +586,4 @@ There are also two shortcuts, `--debug` to run in `DEBUG` verbosity level and

 # Appendix 1: Commands naming exceptions

 `nix init` and `nix repl` are well established

@@ -35,6 +35,25 @@ variables are set up so that those dependencies can be found:
 $ nix-shell
 ```

+or if you have a flake-enabled nix:
+
+```console
+$ nix develop
+```
+
+To get a shell with a different compilation environment (e.g. stdenv,
+gccStdenv, clangStdenv, clang11Stdenv):
+
+```console
+$ nix-shell -A devShells.x86_64-linux.clang11StdenvPackages
+```
+
+or if you have a flake-enabled nix:
+
+```console
+$ nix develop .#clang11StdenvPackages
+```
+
 To build Nix itself in this shell:

 ```console

@@ -237,7 +237,7 @@ Derivations can declare some infrequently used optional attributes.
   - `preferLocalBuild`\
     If this attribute is set to `true` and [distributed building is
     enabled](../advanced-topics/distributed-builds.md), then, if
-    possible, the derivaton will be built locally instead of forwarded
+    possible, the derivation will be built locally instead of forwarded
     to a remote machine. This is appropriate for trivial builders
     where the cost of doing a download or remote build would exceed
     the cost of building locally.

@@ -12,5 +12,5 @@ For instance, `derivation` is also available as `builtins.derivation`.
 <dl>
   <dt><code>derivation <var>attrs</var></code>;
     <code>builtins.derivation <var>attrs</var></code></dt>
-  <dd><p><var>derivation</var> in described in
+  <dd><p><var>derivation</var> is described in
     <a href="derivations.md">its own section</a>.</p></dd>

@@ -26,7 +26,7 @@ elements (referenced from the figure by number):
     called with three arguments: `stdenv`, `fetchurl`, and `perl`. They
     are needed to build Hello, but we don't know how to build them here;
     that's why they are function arguments. `stdenv` is a package that
-    is used by almost all Nix Packages packages; it provides a
+    is used by almost all Nix Packages; it provides a
     “standard” environment consisting of the things you would expect
     in a basic Unix environment: a C/C++ compiler (GCC, to be precise),
     the Bash shell, fundamental Unix tools such as `cp`, `grep`, `tar`,

@@ -284,6 +284,10 @@ The points of interest are:
    function is called with the `localServer` argument set to `true` but
    the `db4` argument set to `null`, then the evaluation fails.

+   Note that `->` is the [logical
+   implication](https://en.wikipedia.org/wiki/Truth_table#Logical_implication)
+   Boolean operation.
+
 2. This is a more subtle condition: if Subversion is built with Apache
    (`httpServer`) support, then the Expat library (an XML library) used
    by Subversion should be same as the one used by Apache. This is
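
As an aside, the implication operator described in the note added above can be checked directly with `nix-instantiate` (a tool used elsewhere in this commit); a small sketch:

```console
$ nix-instantiate --eval --expr 'false -> false'
true
$ nix-instantiate --eval --expr 'true -> false'
false
```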

@@ -17,12 +17,12 @@ order of precedence (from strongest to weakest binding).
 | String Concatenation | *string1* `+` *string2* | left | String concatenation. | 7 |
 | Not | `!` *e* | none | Boolean negation. | 8 |
 | Update | *e1* `//` *e2* | right | Return a set consisting of the attributes in *e1* and *e2* (with the latter taking precedence over the former in case of equally named attributes). | 9 |
-| Less Than | *e1* `<` *e2*, | none | Arithmetic comparison. | 10 |
-| Less Than or Equal To | *e1* `<=` *e2* | none | Arithmetic comparison. | 10 |
-| Greater Than | *e1* `>` *e2* | none | Arithmetic comparison. | 10 |
-| Greater Than or Equal To | *e1* `>=` *e2* | none | Arithmetic comparison. | 10 |
+| Less Than | *e1* `<` *e2*, | none | Arithmetic/lexicographic comparison. | 10 |
+| Less Than or Equal To | *e1* `<=` *e2* | none | Arithmetic/lexicographic comparison. | 10 |
+| Greater Than | *e1* `>` *e2* | none | Arithmetic/lexicographic comparison. | 10 |
+| Greater Than or Equal To | *e1* `>=` *e2* | none | Arithmetic/lexicographic comparison. | 10 |
 | Equality | *e1* `==` *e2* | none | Equality. | 11 |
 | Inequality | *e1* `!=` *e2* | none | Inequality. | 11 |
 | Logical AND | *e1* `&&` *e2* | left | Logical AND. | 12 |
-| Logical OR | *e1* `\|\|` *e2* | left | Logical OR. | 13 |
-| Logical Implication | *e1* `->` *e2* | none | Logical implication (equivalent to `!e1 \|\| e2`). | 14 |
+| Logical OR | *e1* <code>||</code> *e2* | left | Logical OR. | 13 |
+| Logical Implication | *e1* `->` *e2* | none | Logical implication (equivalent to <code>!e1 || e2</code>). | 14 |

@@ -64,7 +64,7 @@ Nix has the following basic data types:
     the start of each line. To be precise, it strips from each line a
     number of spaces equal to the minimal indentation of the string as a
     whole (disregarding the indentation of empty lines). For instance,
-    the first and second line are indented two space, while the third
+    the first and second line are indented two spaces, while the third
     line is indented four spaces. Thus, two spaces are stripped from
     each line, so the resulting string is
@ -1,6 +1,6 @@
|
||||||
# Building and Testing
|
# Building and Testing
|
||||||
|
|
||||||
You can now try to build Hello. Of course, you could do `nix-env -i
|
You can now try to build Hello. Of course, you could do `nix-env -f . -iA
|
||||||
hello`, but you may not want to install a possibly broken package just
|
hello`, but you may not want to install a possibly broken package just
|
||||||
yet. The best way to test the package is by using the command
|
yet. The best way to test the package is by using the command
|
||||||
`nix-build`, which builds a Nix expression and creates a symlink named
|
`nix-build`, which builds a Nix expression and creates a symlink named
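
A minimal sketch of such a test build (the `hello` attribute name and the program's output are assumed here, not taken from this page):

```console
$ nix-build -A hello
$ ./result/bin/hello
Hello, world!
```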
|
||||||
|
|
|
@ -47,7 +47,7 @@
|
||||||
the store object at `P` contains the path `Q` somewhere. The
|
the store object at `P` contains the path `Q` somewhere. The
|
||||||
*references* of a store path are the set of store paths to which it
|
*references* of a store path are the set of store paths to which it
|
||||||
has a reference.
|
has a reference.
|
||||||
|
|
||||||
A derivation can reference other derivations and sources (but not
|
A derivation can reference other derivations and sources (but not
|
||||||
output paths), whereas an output path only references other output
|
output paths), whereas an output path only references other output
|
||||||
paths.
|
paths.
|
||||||
|
@ -66,7 +66,7 @@
|
||||||
is necessary to deploy whole closures, since otherwise at runtime
|
is necessary to deploy whole closures, since otherwise at runtime
|
||||||
files could be missing. The command `nix-store -qR` prints out
|
files could be missing. The command `nix-store -qR` prints out
|
||||||
closures of store paths.
|
closures of store paths.
|
||||||
|
|
||||||
As an example, if the store object at path `P` contains a reference
|
As an example, if the store object at path `P` contains a reference
|
||||||
to path `Q`, then `Q` is in the closure of `P`. Further, if `Q`
|
to path `Q`, then `Q` is in the closure of `P`. Further, if `Q`
|
||||||
references `R` then `R` is also in the closure of `P`.
|
references `R` then `R` is also in the closure of `P`.
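
As an illustrative sketch (the store paths are abbreviated and invented):

```console
$ nix-store -qR /nix/store/…-subversion-1.1.4
/nix/store/…-glibc-2.33
/nix/store/…-openssl-0.9.7d
/nix/store/…-subversion-1.1.4
```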
|
||||||
|
@ -98,3 +98,7 @@
|
||||||
store. It can contain regular files, directories and symbolic
|
store. It can contain regular files, directories and symbolic
|
||||||
links. NARs are generated and unpacked using `nix-store --dump`
|
links. NARs are generated and unpacked using `nix-store --dump`
|
||||||
and `nix-store --restore` (see the sketch after this list).
|
and `nix-store --restore` (see the sketch after this list).
|
||||||
|
- `∅` \
|
||||||
|
The empty set symbol. In the context of profile history, this denotes a package is not present in a particular version of the profile.
|
||||||
|
- `ε` \
|
||||||
|
The epsilon symbol. In the context of a package, this means the version is empty. More precisely, the derivation does not have a version attribute.
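
Following up on `nix-store --dump` and `nix-store --restore` above, a minimal usage sketch (the file and directory names are invented):

```console
$ nix-store --dump ./some-directory > some-directory.nar
$ nix-store --restore ./copy-of-directory < some-directory.nar
```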
|
||||||
|
|
|
@ -1,9 +1,9 @@
|
||||||
# Building Nix from Source
|
# Building Nix from Source
|
||||||
|
|
||||||
After unpacking or checking out the Nix sources, issue the following
|
After cloning Nix's Git repository, issue the following commands:
|
||||||
commands:
|
|
||||||
|
|
||||||
```console
|
```console
|
||||||
|
$ ./bootstrap.sh
|
||||||
$ ./configure options...
|
$ ./configure options...
|
||||||
$ make
|
$ make
|
||||||
$ make install
|
$ make install
|
||||||
|
@ -11,13 +11,6 @@ $ make install
|
||||||
|
|
||||||
Nix requires GNU Make so you may need to invoke `gmake` instead.
|
Nix requires GNU Make so you may need to invoke `gmake` instead.
|
||||||
|
|
||||||
When building from the Git repository, these should be preceded by the
|
|
||||||
command:
|
|
||||||
|
|
||||||
```console
|
|
||||||
$ ./bootstrap.sh
|
|
||||||
```
|
|
||||||
|
|
||||||
The installation path can be specified by passing the `--prefix=prefix` flag
|
The installation path can be specified by passing the `--prefix=prefix` flag
|
||||||
to `configure`. The default installation directory is `/usr/local`. You
|
to `configure`. The default installation directory is `/usr/local`. You
|
||||||
can change this to any location you like. You must have write permission
|
can change this to any location you like. You must have write permission
|
||||||
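
For example, to install under a custom prefix (the path is chosen arbitrarily):

```console
$ ./configure --prefix=/opt/nix
$ make
$ make install
```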
|
|
|
@ -40,7 +40,7 @@ export NIX_SSL_CERT_FILE=/etc/ssl/my-certificate-bundle.crt
|
||||||
> **Note**
|
> **Note**
|
||||||
>
|
>
|
||||||
> You must not add the export and then do the install, as the Nix
|
> You must not add the export and then do the install, as the Nix
|
||||||
> installer will detect the presense of Nix configuration, and abort.
|
> installer will detect the presence of Nix configuration, and abort.
|
||||||
|
|
||||||
## `NIX_SSL_CERT_FILE` with macOS and the Nix daemon
|
## `NIX_SSL_CERT_FILE` with macOS and the Nix daemon
|
||||||
|
|
||||||
|
|
|
@ -119,6 +119,30 @@ this to run the installer, but it may help if you run into trouble:
|
||||||
- update `/etc/synthetic.conf` to direct macOS to create a "synthetic"
|
- update `/etc/synthetic.conf` to direct macOS to create a "synthetic"
|
||||||
empty root directory to mount your volume
|
empty root directory to mount your volume
|
||||||
- specify mount options for the volume in `/etc/fstab`
|
- specify mount options for the volume in `/etc/fstab`
|
||||||
|
- `rw`: read-write
|
||||||
|
- `noauto`: prevent the system from auto-mounting the volume (so the
|
||||||
|
LaunchDaemon mentioned below can control mounting it, and to avoid
|
||||||
|
masking problems with that mounting service).
|
||||||
|
- `nobrowse`: prevent the Nix Store volume from showing up on your
|
||||||
|
desktop; also keeps Spotlight from spending resources to index
|
||||||
|
this volume
|
||||||
|
<!-- TODO:
|
||||||
|
- `suid`: honor setuid? surely not? ...
|
||||||
|
- `owners`: honor file ownership on the volume
|
||||||
|
|
||||||
|
For now I'll avoid pretending to understand suid/owners more
|
||||||
|
than I do. There've been some vague reports of file-ownership
|
||||||
|
and permission issues, particularly in cloud/VM/headless setups.
|
||||||
|
My pet theory is that this has something to do with these setups
|
||||||
|
not having a token that gets delegated to initial/admin accounts
|
||||||
|
on macOS. See scripts/create-darwin-volume.sh for a little more.
|
||||||
|
|
||||||
|
In any case, by Dec 4 2021, it _seems_ like some combination of
|
||||||
|
suid, owners, and calling diskutil enableOwnership have stopped
|
||||||
|
new reports from coming in. But I hesitate to celebrate because we
|
||||||
|
haven't really named and catalogued the behavior, understood what
|
||||||
|
we're fixing, and validated that all 3 components are essential.
|
||||||
|
-->
|
||||||
- if you have FileVault enabled
|
- if you have FileVault enabled
|
||||||
- generate an encryption password
|
- generate an encryption password
|
||||||
- put it in your system Keychain
|
- put it in your system Keychain
|
||||||
|
|
59
doc/manual/src/installation/installing-docker.md
Normal file
59
doc/manual/src/installation/installing-docker.md
Normal file
|
@ -0,0 +1,59 @@
|
||||||
|
# Using Nix within Docker
|
||||||
|
|
||||||
|
To run the latest stable release of Nix with Docker, run the following command:
|
||||||
|
|
||||||
|
```console
|
||||||
|
$ docker run -ti nixos/nix
|
||||||
|
Unable to find image 'nixos/nix:latest' locally
|
||||||
|
latest: Pulling from nixos/nix
|
||||||
|
5843afab3874: Pull complete
|
||||||
|
b52bf13f109c: Pull complete
|
||||||
|
1e2415612aa3: Pull complete
|
||||||
|
Digest: sha256:27f6e7f60227e959ee7ece361f75d4844a40e1cc6878b6868fe30140420031ff
|
||||||
|
Status: Downloaded newer image for nixos/nix:latest
|
||||||
|
35ca4ada6e96:/# nix --version
|
||||||
|
nix (Nix) 2.3.12
|
||||||
|
35ca4ada6e96:/# exit
|
||||||
|
```
|
||||||
|
|
||||||
|
# What is included in Nix's Docker image?
|
||||||
|
|
||||||
|
The official Docker image is created using `pkgs.dockerTools.buildLayeredImage`
|
||||||
|
(and not with a `Dockerfile`, as is usual with Docker images). You can still
|
||||||
|
base your custom Docker image on it as you would do with any other Docker
|
||||||
|
image.
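
As a rough, hypothetical sketch of what a `pkgs.dockerTools.buildLayeredImage` call looks like (this is not the expression used for the official image; see `docker.nix` in the Nix repository for that):

```nix
{ pkgs ? import <nixpkgs> { } }:

pkgs.dockerTools.buildLayeredImage {
  name = "my-nix-image";   # hypothetical image name
  tag = "latest";
  contents = [ pkgs.nix pkgs.bashInteractive pkgs.coreutils-full ];
  config.Cmd = [ "/bin/bash" ];
}
```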
|
||||||
|
|
||||||
|
The Docker image is also not based on any other image and includes a minimal set
|
||||||
|
of runtime dependencies that are required to use Nix:
|
||||||
|
|
||||||
|
- pkgs.nix
|
||||||
|
- pkgs.bashInteractive
|
||||||
|
- pkgs.coreutils-full
|
||||||
|
- pkgs.gnutar
|
||||||
|
- pkgs.gzip
|
||||||
|
- pkgs.gnugrep
|
||||||
|
- pkgs.which
|
||||||
|
- pkgs.curl
|
||||||
|
- pkgs.less
|
||||||
|
- pkgs.wget
|
||||||
|
- pkgs.man
|
||||||
|
- pkgs.cacert.out
|
||||||
|
- pkgs.findutils
|
||||||
|
|
||||||
|
# Docker image with the latest development version of Nix
|
||||||
|
|
||||||
|
To get the latest image that was built by [Hydra](https://hydra.nixos.org) run
|
||||||
|
the following command:
|
||||||
|
|
||||||
|
```console
|
||||||
|
$ curl -L https://hydra.nixos.org/job/nix/master/dockerImage.x86_64-linux/latest/download/1 | docker load
|
||||||
|
$ docker run -ti nix:2.5pre20211105
|
||||||
|
```
|
||||||
|
|
||||||
|
You can also build a Docker image from source yourself:
|
||||||
|
|
||||||
|
```console
|
||||||
|
$ nix build ./\#hydraJobs.dockerImage.x86_64-linux
|
||||||
|
$ docker load -i ./result/image.tar.gz
|
||||||
|
$ docker run -ti nix:2.5pre20211105
|
||||||
|
```
|
|
@ -1,4 +1,4 @@
|
||||||
# Installing Nix from Source
|
# Installing Nix from Source
|
||||||
|
|
||||||
If no binary package is available, you can download and compile a source
|
If no binary package is available or if you want to hack on Nix, you
|
||||||
distribution.
|
can build Nix from its Git repository.
|
||||||
|
|
|
@ -1,14 +1,9 @@
|
||||||
# Obtaining a Source Distribution
|
# Obtaining the Source
|
||||||
|
|
||||||
The source tarball of the most recent stable release can be downloaded
|
The most recent sources of Nix can be obtained from its [Git
|
||||||
from the [Nix homepage](http://nixos.org/nix/download.html). You can
|
repository](https://github.com/NixOS/nix). For example, the following
|
||||||
also grab the [most recent development
|
command will check out the latest revision into a directory called
|
||||||
release](http://hydra.nixos.org/job/nix/master/release/latest-finished#tabs-constituents).
|
`nix`:
|
||||||
|
|
||||||
Alternatively, the most recent sources of Nix can be obtained from its
|
|
||||||
[Git repository](https://github.com/NixOS/nix). For example, the
|
|
||||||
following command will check out the latest revision into a directory
|
|
||||||
called `nix`:
|
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ git clone https://github.com/NixOS/nix
|
$ git clone https://github.com/NixOS/nix
|
||||||
|
|
|
@ -2,9 +2,8 @@
|
||||||
|
|
||||||
- GNU Autoconf (<https://www.gnu.org/software/autoconf/>) and the
|
- GNU Autoconf (<https://www.gnu.org/software/autoconf/>) and the
|
||||||
autoconf-archive macro collection
|
autoconf-archive macro collection
|
||||||
(<https://www.gnu.org/software/autoconf-archive/>). These are only
|
(<https://www.gnu.org/software/autoconf-archive/>). These are
|
||||||
needed to run the bootstrap script, and are not necessary if your
|
needed to run the bootstrap script.
|
||||||
source distribution came with a pre-built `./configure` script.
|
|
||||||
|
|
||||||
- GNU Make.
|
- GNU Make.
|
||||||
|
|
||||||
|
@ -45,6 +44,11 @@
|
||||||
obtained from its repository
|
obtained from its repository
|
||||||
<https://github.com/troglobit/editline>.
|
<https://github.com/troglobit/editline>.
|
||||||
|
|
||||||
|
- The `libsodium` library for verifying cryptographic signatures
|
||||||
|
of contents fetched from binary caches.
|
||||||
|
It can be obtained from the official web site
|
||||||
|
<https://libsodium.org>.
|
||||||
|
|
||||||
- Recent versions of Bison and Flex to build the parser. (This is
|
- Recent versions of Bison and Flex to build the parser. (This is
|
||||||
because Nix needs GLR support in Bison and reentrancy support in
|
because Nix needs GLR support in Bison and reentrancy support in
|
||||||
Flex.) For Bison, you need version 2.6, which can be obtained from
|
Flex.) For Bison, you need version 2.6, which can be obtained from
|
||||||
|
@ -52,11 +56,18 @@
|
||||||
you need version 2.5.35, which is available on
|
you need version 2.5.35, which is available on
|
||||||
[SourceForge](http://lex.sourceforge.net/). Slightly older versions
|
[SourceForge](http://lex.sourceforge.net/). Slightly older versions
|
||||||
may also work, but ancient versions like the ubiquitous 2.5.4a
|
may also work, but ancient versions like the ubiquitous 2.5.4a
|
||||||
won't. Note that these are only required if you modify the parser or
|
won't.
|
||||||
when you are building from the Git repository.
|
|
||||||
|
|
||||||
- The `libseccomp` library is used to provide syscall filtering on Linux. This
|
- The `libseccomp` library is used to provide syscall filtering on Linux. This
|
||||||
is an optional dependency and can be disabled by passing a
|
is an optional dependency and can be disabled by passing a
|
||||||
`--disable-seccomp-sandboxing` option to the `configure` script (not
|
`--disable-seccomp-sandboxing` option to the `configure` script (not
|
||||||
recommended unless your system doesn't support `libseccomp`). To get
|
recommended unless your system doesn't support `libseccomp`). To get
|
||||||
the library, visit <https://github.com/seccomp/libseccomp>.
|
the library, visit <https://github.com/seccomp/libseccomp>.
|
||||||
|
|
||||||
|
- On 64-bit x86 machines only, the `libcpuid` library
|
||||||
|
is used to determine which microarchitecture levels are supported
|
||||||
|
(e.g., whether to have `x86_64-v2-linux` among the additional system types).
|
||||||
|
The library is available from its homepage
|
||||||
|
<http://libcpuid.sourceforge.net>.
|
||||||
|
This is an optional dependency and can be disabled
|
||||||
|
by passing `--disable-cpuid` to the `configure` script.
|
||||||
|
|
|
@ -4,4 +4,4 @@ Nix is currently supported on the following platforms:
|
||||||
|
|
||||||
- Linux (i686, x86\_64, aarch64).
|
- Linux (i686, x86\_64, aarch64).
|
||||||
|
|
||||||
- macOS (x86\_64).
|
- macOS (x86\_64, aarch64).
|
||||||
|
|
|
@ -76,7 +76,7 @@ there after an upgrade. This means that you can _roll back_ to the
|
||||||
old version:
|
old version:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-env --upgrade some-packages
|
$ nix-env --upgrade -A nixpkgs.some-package
|
||||||
$ nix-env --rollback
|
$ nix-env --rollback
|
||||||
```
|
```
|
||||||
|
|
||||||
|
@ -122,12 +122,12 @@ Nix expressions generally describe how to build a package from
|
||||||
source, so an installation action like
|
source, so an installation action like
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-env --install firefox
|
$ nix-env --install -A nixpkgs.firefox
|
||||||
```
|
```
|
||||||
|
|
||||||
_could_ cause quite a bit of build activity, as not only Firefox but
|
_could_ cause quite a bit of build activity, as not only Firefox but
|
||||||
also all its dependencies (all the way up to the C library and the
|
also all its dependencies (all the way up to the C library and the
|
||||||
compiler) would have to built, at least if they are not already in the
|
compiler) would have to be built, at least if they are not already in the
|
||||||
Nix store. This is a _source deployment model_. For most users,
|
Nix store. This is a _source deployment model_. For most users,
|
||||||
building from source is not very pleasant as it takes far too long.
|
building from source is not very pleasant as it takes far too long.
|
||||||
However, Nix can automatically skip building from source and instead
|
However, Nix can automatically skip building from source and instead
|
||||||
|
|
|
@ -24,7 +24,7 @@ collection; you could write your own Nix expressions based on Nixpkgs,
|
||||||
or completely new ones.)
|
or completely new ones.)
|
||||||
|
|
||||||
You can manually download the latest version of Nixpkgs from
|
You can manually download the latest version of Nixpkgs from
|
||||||
<http://nixos.org/nixpkgs/download.html>. However, it’s much more
|
<https://github.com/NixOS/nixpkgs>. However, it’s much more
|
||||||
convenient to use the Nixpkgs [*channel*](channels.md), since it makes
|
convenient to use the Nixpkgs [*channel*](channels.md), since it makes
|
||||||
it easy to stay up to date with new versions of Nixpkgs. Nixpkgs is
|
it easy to stay up to date with new versions of Nixpkgs. Nixpkgs is
|
||||||
automatically added to your list of “subscribed” channels when you
|
automatically added to your list of “subscribed” channels when you
|
||||||
|
@ -40,48 +40,52 @@ $ nix-channel --update
|
||||||
>
|
>
|
||||||
> On NixOS, you’re automatically subscribed to a NixOS channel
|
> On NixOS, you’re automatically subscribed to a NixOS channel
|
||||||
> corresponding to your NixOS major release (e.g.
|
> corresponding to your NixOS major release (e.g.
|
||||||
> <http://nixos.org/channels/nixos-14.12>). A NixOS channel is identical
|
> <http://nixos.org/channels/nixos-21.11>). A NixOS channel is identical
|
||||||
> to the Nixpkgs channel, except that it contains only Linux binaries
|
> to the Nixpkgs channel, except that it contains only Linux binaries
|
||||||
> and is updated only if a set of regression tests succeed.
|
> and is updated only if a set of regression tests succeed.
|
||||||
|
|
||||||
You can view the set of available packages in Nixpkgs:
|
You can view the set of available packages in Nixpkgs:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-env -qa
|
$ nix-env -qaP
|
||||||
aterm-2.2
|
nixpkgs.aterm aterm-2.2
|
||||||
bash-3.0
|
nixpkgs.bash bash-3.0
|
||||||
binutils-2.15
|
nixpkgs.binutils binutils-2.15
|
||||||
bison-1.875d
|
nixpkgs.bison bison-1.875d
|
||||||
blackdown-1.4.2
|
nixpkgs.blackdown blackdown-1.4.2
|
||||||
bzip2-1.0.2
|
nixpkgs.bzip2 bzip2-1.0.2
|
||||||
…
|
…
|
||||||
```
|
```
|
||||||
|
|
||||||
The flag `-q` specifies a query operation, and `-a` means that you want
|
The flag `-q` specifies a query operation, `-a` means that you want
|
||||||
to show the “available” (i.e., installable) packages, as opposed to the
|
to show the “available” (i.e., installable) packages, as opposed to the
|
||||||
installed packages. If you downloaded Nixpkgs yourself, or if you
|
installed packages, and `-P` prints the attribute paths that can be used
|
||||||
checked it out from GitHub, then you need to pass the path to your
|
to unambiguously select a package for installation (listed in the first column).
|
||||||
Nixpkgs tree using the `-f` flag:
|
If you downloaded Nixpkgs yourself, or if you checked it out from GitHub,
|
||||||
|
then you need to pass the path to your Nixpkgs tree using the `-f` flag:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-env -qaf /path/to/nixpkgs
|
$ nix-env -qaPf /path/to/nixpkgs
|
||||||
|
aterm aterm-2.2
|
||||||
|
bash bash-3.0
|
||||||
|
…
|
||||||
```
|
```
|
||||||
|
|
||||||
where */path/to/nixpkgs* is where you’ve unpacked or checked out
|
where */path/to/nixpkgs* is where you’ve unpacked or checked out
|
||||||
Nixpkgs.
|
Nixpkgs.
|
||||||
|
|
||||||
You can select specific packages by name:
|
You can filter the packages by name:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-env -qa firefox
|
$ nix-env -qaP firefox
|
||||||
firefox-34.0.5
|
nixpkgs.firefox-esr firefox-91.3.0esr
|
||||||
firefox-with-plugins-34.0.5
|
nixpkgs.firefox firefox-94.0.1
|
||||||
```
|
```
|
||||||
|
|
||||||
and using regular expressions:
|
and using regular expressions:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-env -qa 'firefox.*'
|
$ nix-env -qaP 'firefox.*'
|
||||||
```
|
```
|
||||||
|
|
||||||
It is also possible to see the *status* of available packages, i.e.,
|
It is also possible to see the *status* of available packages, i.e.,
|
||||||
|
@ -89,11 +93,11 @@ whether they are installed into the user environment and/or present in
|
||||||
the system:
|
the system:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-env -qas
|
$ nix-env -qaPs
|
||||||
…
|
…
|
||||||
-PS bash-3.0
|
-PS nixpkgs.bash bash-3.0
|
||||||
--S binutils-2.15
|
--S nixpkgs.binutils binutils-2.15
|
||||||
IPS bison-1.875d
|
IPS nixpkgs.bison bison-1.875d
|
||||||
…
|
…
|
||||||
```
|
```
|
||||||
|
|
||||||
|
@ -106,13 +110,13 @@ which is Nix’s mechanism for doing binary deployment. It just means that
|
||||||
Nix knows that it can fetch a pre-built package from somewhere
|
Nix knows that it can fetch a pre-built package from somewhere
|
||||||
(typically a network server) instead of building it locally.
|
(typically a network server) instead of building it locally.
|
||||||
|
|
||||||
You can install a package using `nix-env -i`. For instance,
|
You can install a package using `nix-env -iA`. For instance,
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-env -i subversion
|
$ nix-env -iA nixpkgs.subversion
|
||||||
```
|
```
|
||||||
|
|
||||||
will install the package called `subversion` (which is, of course, the
|
will install the package called `subversion` from the `nixpkgs` channel (which is, of course, the
|
||||||
[Subversion version management system](http://subversion.tigris.org/)).
|
[Subversion version management system](http://subversion.tigris.org/)).
|
||||||
|
|
||||||
> **Note**
|
> **Note**
|
||||||
|
@ -122,7 +126,7 @@ will install the package called `subversion` (which is, of course, the
|
||||||
> binary cache <https://cache.nixos.org>; it contains binaries for most
|
> binary cache <https://cache.nixos.org>; it contains binaries for most
|
||||||
> packages in Nixpkgs. Only if no binary is available in the binary
|
> packages in Nixpkgs. Only if no binary is available in the binary
|
||||||
> cache, Nix will build the package from source. So if `nix-env
|
> cache, Nix will build the package from source. So if `nix-env
|
||||||
> -i subversion` results in Nix building stuff from source, then either
|
> -iA nixpkgs.subversion` results in Nix building stuff from source, then either
|
||||||
> the package is not built for your platform by the Nixpkgs build
|
> the package is not built for your platform by the Nixpkgs build
|
||||||
> servers, or your version of Nixpkgs is too old or too new. For
|
> servers, or your version of Nixpkgs is too old or too new. For
|
||||||
> instance, if you have a very recent checkout of Nixpkgs, then the
|
> instance, if you have a very recent checkout of Nixpkgs, then the
|
||||||
|
@ -133,7 +137,10 @@ will install the package called `subversion` (which is, of course, the
|
||||||
> using a Git checkout of the Nixpkgs tree), you will get binaries for
|
> using a Git checkout of the Nixpkgs tree), you will get binaries for
|
||||||
> most packages.
|
> most packages.
|
||||||
|
|
||||||
Naturally, packages can also be uninstalled:
|
Naturally, packages can also be uninstalled. Unlike when installing, you will
|
||||||
|
need to use the derivation name (though the version part can be omitted),
|
||||||
|
instead of the attribute path, as `nix-env` does not record which attribute
|
||||||
|
was used for installing:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-env -e subversion
|
$ nix-env -e subversion
|
||||||
|
@ -143,7 +150,7 @@ Upgrading to a new version is just as easy. If you have a new release of
|
||||||
Nix Packages, you can do:
|
Nix Packages, you can do:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-env -u subversion
|
$ nix-env -uA nixpkgs.subversion
|
||||||
```
|
```
|
||||||
|
|
||||||
This will *only* upgrade Subversion if there is a “newer” version in the
|
This will *only* upgrade Subversion if there is a “newer” version in the
|
||||||
|
|
|
@ -9,7 +9,7 @@ The daemon that handles binary cache requests via HTTP, `nix-serve`, is
|
||||||
not part of the Nix distribution, but you can install it from Nixpkgs:
|
not part of the Nix distribution, but you can install it from Nixpkgs:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-env -i nix-serve
|
$ nix-env -iA nixpkgs.nix-serve
|
||||||
```
|
```
|
||||||
|
|
||||||
You can then start the server, listening for HTTP connections on
|
You can then start the server, listening for HTTP connections on
|
||||||
|
@ -35,7 +35,7 @@ On the client side, you can tell Nix to use your binary cache using
|
||||||
`--option extra-binary-caches`, e.g.:
|
`--option extra-binary-caches`, e.g.:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-env -i firefox --option extra-binary-caches http://avalon:8080/
|
$ nix-env -iA nixpkgs.firefox --option extra-binary-caches http://avalon:8080/
|
||||||
```
|
```
|
||||||
|
|
||||||
The option `extra-binary-caches` tells Nix to use this binary cache in
|
The option `extra-binary-caches` tells Nix to use this binary cache in
|
||||||
|
|
|
@ -44,7 +44,7 @@ collector as follows:
|
||||||
$ nix-store --gc
|
$ nix-store --gc
|
||||||
```
|
```
|
||||||
|
|
||||||
The behaviour of the gargage collector is affected by the
|
The behaviour of the garbage collector is affected by the
|
||||||
`keep-derivations` (default: true) and `keep-outputs` (default: false)
|
`keep-derivations` (default: true) and `keep-outputs` (default: false)
|
||||||
options in the Nix configuration file. The defaults will ensure that all
|
options in the Nix configuration file. The defaults will ensure that all
|
||||||
derivations that are build-time dependencies of garbage collector roots
|
derivations that are build-time dependencies of garbage collector roots
|
||||||
|
|
|
@ -39,7 +39,7 @@ just Subversion 1.1.2 (arrows in the figure indicate symlinks). This
|
||||||
would be what we would obtain if we had done
|
would be what we would obtain if we had done
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-env -i subversion
|
$ nix-env -iA nixpkgs.subversion
|
||||||
```
|
```
|
||||||
|
|
||||||
on a set of Nix expressions that contained Subversion 1.1.2.
|
on a set of Nix expressions that contained Subversion 1.1.2.
|
||||||
|
@ -54,7 +54,7 @@ environment is generated based on the current one. For instance,
|
||||||
generation 43 was created from generation 42 when we did
|
generation 43 was created from generation 42 when we did
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-env -i subversion firefox
|
$ nix-env -iA nixpkgs.subversion nixpkgs.firefox
|
||||||
```
|
```
|
||||||
|
|
||||||
on a set of Nix expressions that contained Firefox and a new version of
|
on a set of Nix expressions that contained Firefox and a new version of
|
||||||
|
@ -127,7 +127,7 @@ All `nix-env` operations work on the profile pointed to by
|
||||||
(abbreviation `-p`):
|
(abbreviation `-p`):
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-env -p /nix/var/nix/profiles/other-profile -i subversion
|
$ nix-env -p /nix/var/nix/profiles/other-profile -iA nixpkgs.subversion
|
||||||
```
|
```
|
||||||
|
|
||||||
This will *not* change the `~/.nix-profile` symlink.
|
This will *not* change the `~/.nix-profile` symlink.
|
||||||
|
|
|
@ -6,7 +6,7 @@ automatically fetching any store paths in Firefox’s closure if they are
|
||||||
available on the server `avalon`:
|
available on the server `avalon`:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-env -i firefox --substituters ssh://alice@avalon
|
$ nix-env -iA nixpkgs.firefox --substituters ssh://alice@avalon
|
||||||
```
|
```
|
||||||
|
|
||||||
This works similarly to the binary cache substituter that Nix usually
|
This works similarly to the binary cache substituter that Nix usually
|
||||||
|
|
|
@ -19,19 +19,19 @@ to subsequent chapters.
|
||||||
channel:
|
channel:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-env -qa
|
$ nix-env -qaP
|
||||||
docbook-xml-4.3
|
nixpkgs.docbook_xml_dtd_43 docbook-xml-4.3
|
||||||
docbook-xml-4.5
|
nixpkgs.docbook_xml_dtd_45 docbook-xml-4.5
|
||||||
firefox-33.0.2
|
nixpkgs.firefox firefox-33.0.2
|
||||||
hello-2.9
|
nixpkgs.hello hello-2.9
|
||||||
libxslt-1.1.28
|
nixpkgs.libxslt libxslt-1.1.28
|
||||||
…
|
…
|
||||||
```
|
```
|
||||||
|
|
||||||
1. Install some packages from the channel:
|
1. Install some packages from the channel:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-env -i hello
|
$ nix-env -iA nixpkgs.hello
|
||||||
```
|
```
|
||||||
|
|
||||||
This should download pre-built packages; it should not build them
|
This should download pre-built packages; it should not build them
|
||||||
|
|
|
@ -1,8 +1,542 @@
|
||||||
# Release 2.4 (202X-XX-XX)
|
# Release 2.4 (2021-11-01)
|
||||||
|
|
||||||
- It is now an error to modify the `plugin-files` setting via a
|
This is the first release in more than two years and is the result of
|
||||||
command-line flag that appears after the first non-flag argument
|
more than 2800 commits from 195 contributors since release 2.3.
|
||||||
to any command, including a subcommand to `nix`. For example,
|
|
||||||
`nix-instantiate default.nix --plugin-files ""` must now become
|
## Highlights
|
||||||
`nix-instantiate --plugin-files "" default.nix`.
|
|
||||||
- Plugins that add new `nix` subcommands are now actually respected.
|
* Nix's **error messages** have been improved a lot. For instance,
|
||||||
|
evaluation errors now point out the location of the error:
|
||||||
|
|
||||||
|
```
|
||||||
|
$ nix build
|
||||||
|
error: undefined variable 'bzip3'
|
||||||
|
|
||||||
|
at /nix/store/449lv242z0zsgwv95a8124xi11sp419f-source/flake.nix:88:13:
|
||||||
|
|
||||||
|
87| [ curl
|
||||||
|
88| bzip3 xz brotli editline
|
||||||
|
| ^
|
||||||
|
89| openssl sqlite
|
||||||
|
```
|
||||||
|
|
||||||
|
* The **`nix` command** has seen a lot of work and is now almost at
|
||||||
|
feature parity with the old command-line interface (the `nix-*`
|
||||||
|
commands). It aims to be [more modern, consistent and pleasant to
|
||||||
|
use](../contributing/cli-guideline.md) than the old CLI. It is still
|
||||||
|
marked as experimental but its interface should not change much
|
||||||
|
anymore in future releases.
|
||||||
|
|
||||||
|
* **Flakes** are a new format to package Nix-based projects in a more
|
||||||
|
discoverable, composable, consistent and reproducible way. A flake
|
||||||
|
is just a repository or tarball containing a file named `flake.nix`
|
||||||
|
that specifies dependencies on other flakes and returns any Nix
|
||||||
|
assets such as packages, Nixpkgs overlays, NixOS modules or CI
|
||||||
|
tests. The new `nix` CLI is primarily based around flakes; for
|
||||||
|
example, a command like `nix run nixpkgs#hello` runs the `hello`
|
||||||
|
application from the `nixpkgs` flake.
|
||||||
|
|
||||||
|
Flakes are currently marked as experimental. For an introduction,
|
||||||
|
see [this blog
|
||||||
|
post](https://www.tweag.io/blog/2020-05-25-flakes/). For detailed
|
||||||
|
information about flake syntax and semantics, see the [`nix flake`
|
||||||
|
manual page](../command-ref/new-cli/nix3-flake.md).
|
||||||
|
|
||||||
|
* Nix's store can now be **content-addressed**, meaning that the hash
|
||||||
|
component of a store path is the hash of the path's
|
||||||
|
contents. Previously Nix could only build **input-addressed** store
|
||||||
|
paths, where the hash is computed from the derivation dependency
|
||||||
|
graph. Content-addressing allows deduplication, early cutoff in
|
||||||
|
build systems, and unprivileged closure copying. This is still [an
|
||||||
|
experimental
|
||||||
|
feature](https://discourse.nixos.org/t/content-addressed-nix-call-for-testers/12881).
|
||||||
|
|
||||||
|
* The Nix manual has been converted into Markdown, making it easier to
|
||||||
|
contribute. In addition, every `nix` subcommand now has a manual
|
||||||
|
page, documenting every option.
|
||||||
|
|
||||||
|
* A new setting that allows **experimental features** to be enabled
|
||||||
|
selectively. This allows us to merge unstable features into Nix more
|
||||||
|
quickly and do more frequent releases.
|
||||||
|
|
||||||
|
## Other features
|
||||||
|
|
||||||
|
* There are many new `nix` subcommands:
|
||||||
|
|
||||||
|
- `nix develop` is intended to replace `nix-shell`. It has a number
|
||||||
|
of new features:
|
||||||
|
|
||||||
|
* It automatically sets the output environment variables (such as
|
||||||
|
`$out`) to writable locations (such as `./outputs/out`).
|
||||||
|
|
||||||
|
* It can store the environment in a profile. This is useful for
|
||||||
|
offline work.
|
||||||
|
|
||||||
|
* It can run specific phases directly. For instance, `nix develop
|
||||||
|
--build` runs `buildPhase`.
|
||||||
|
|
||||||
|
- It allows dependencies in the Nix store to be "redirected" to
|
||||||
|
arbitrary directories using the `--redirect` flag. This is
|
||||||
|
useful if you want to hack on a package *and* some of its
|
||||||
|
dependencies at the same time.
|
||||||
|
|
||||||
|
- `nix print-dev-env` prints the environment variables and bash
|
||||||
|
functions defined by a derivation. This is useful for users of
|
||||||
|
other shells than bash (especially with `--json`).
|
||||||
|
|
||||||
|
- `nix shell` was previously named `nix run` and is intended to
|
||||||
|
replace `nix-shell -p`, but without the `stdenv` overhead. It
|
||||||
|
simply starts a shell where some packages have been added to
|
||||||
|
`$PATH`.
|
||||||
|
|
||||||
|
- `nix run` (not to be confused with the old subcommand that has
|
||||||
|
been renamed to `nix shell`) runs an "app", a flake output that
|
||||||
|
specifies a command to run, or an eponymous program from a
|
||||||
|
package. For example, `nix run nixpkgs#hello` runs the `hello`
|
||||||
|
program from the `hello` package in `nixpkgs`.
|
||||||
|
|
||||||
|
- `nix flake` is the container for flake-related operations, such as
|
||||||
|
creating a new flake, querying the contents of a flake or updating
|
||||||
|
flake lock files.
|
||||||
|
|
||||||
|
- `nix registry` allows you to query and update the flake registry,
|
||||||
|
which maps identifiers such as `nixpkgs` to concrete flake URLs.
|
||||||
|
|
||||||
|
- `nix profile` is intended to replace `nix-env`. Its main advantage
|
||||||
|
is that it keeps track of the provenance of installed packages
|
||||||
|
(e.g. exactly which flake version a package came from). It also
|
||||||
|
has some helpful subcommands:
|
||||||
|
|
||||||
|
* `nix profile history` shows what packages were added, upgraded
|
||||||
|
or removed between each version of a profile.
|
||||||
|
|
||||||
|
* `nix profile diff-closures` shows the changes between the
|
||||||
|
closures of each version of a profile. This allows you to
|
||||||
|
discover the addition or removal of dependencies or size
|
||||||
|
changes.
|
||||||
|
|
||||||
|
**Warning**: after a profile has been updated using `nix profile`,
|
||||||
|
it is no longer usable with `nix-env`.
|
||||||
|
|
||||||
|
- `nix store diff-closures` shows the differences between the
|
||||||
|
closures of two store paths in terms of the versions and sizes of
|
||||||
|
dependencies in the closures.
|
||||||
|
|
||||||
|
- `nix store make-content-addressable` rewrites an arbitrary closure
|
||||||
|
to make it content-addressed. Such paths can be copied into other
|
||||||
|
stores without requiring signatures.
|
||||||
|
|
||||||
|
- `nix bundle` uses the [`nix-bundle`
|
||||||
|
program](https://github.com/matthewbauer/nix-bundle) to convert a
|
||||||
|
closure into a self-extracting executable.
|
||||||
|
|
||||||
|
- Various other replacements for the old CLI, e.g. `nix store gc`,
|
||||||
|
`nix store delete`, `nix store repair`, `nix nar dump-path`, `nix
|
||||||
|
store prefetch-file`, `nix store prefetch-tarball`, `nix key` and
|
||||||
|
`nix daemon`.
|
||||||
|
|
||||||
|
* Nix now has an **evaluation cache** for flake outputs. For example,
|
||||||
|
a second invocation of the command `nix run nixpkgs#firefox` will
|
||||||
|
not need to evaluate the `firefox` attribute because it's already in
|
||||||
|
the evaluation cache. This is made possible by the hermetic
|
||||||
|
evaluation model of flakes.
|
||||||
|
|
||||||
|
* The new `--offline` flag disables substituters and causes all
|
||||||
|
locally cached tarballs and repositories to be considered
|
||||||
|
up-to-date.
|
||||||
|
|
||||||
|
* The new `--refresh` flag causes all locally cached tarballs and
|
||||||
|
repositories to be considered out-of-date.
|
||||||
|
|
||||||
|
* Many `nix` subcommands now have a `--json` option to produce
|
||||||
|
machine-readable output.
|
||||||
|
|
||||||
|
* `nix repl` has a new `:doc` command to show documentation about
|
||||||
|
builtin functions (e.g. `:doc builtins.map`).
|
||||||
|
|
||||||
|
* Binary cache stores now have an option `index-debug-info` to create
|
||||||
|
an index of DWARF debuginfo files for use by
|
||||||
|
[`dwarffs`](https://github.com/edolstra/dwarffs).
|
||||||
|
|
||||||
|
* To support flakes, Nix now has an extensible mechanism for fetching
|
||||||
|
source trees. Currently it has the following backends:
|
||||||
|
|
||||||
|
* Git repositories
|
||||||
|
|
||||||
|
* Mercurial repositories
|
||||||
|
|
||||||
|
* GitHub and GitLab repositories (an optimisation for faster
|
||||||
|
fetching than Git)
|
||||||
|
|
||||||
|
* Tarballs
|
||||||
|
|
||||||
|
* Arbitrary directories
|
||||||
|
|
||||||
|
The fetcher infrastructure is exposed via flake input specifications
|
||||||
|
and via the `fetchTree` built-in.
|
||||||
|
|
||||||
|
* **Languages changes**: the only new language feature is that you can
|
||||||
|
now have antiquotations in paths, e.g. `./${foo}` instead of `./. +
|
||||||
|
foo`.
|
||||||
|
|
||||||
|
* **New built-in functions**:
|
||||||
|
|
||||||
|
- `builtins.fetchTree` allows fetching a source tree using any
|
||||||
|
backends supported by the fetcher infrastructure. It subsumes the
|
||||||
|
functionality of existing built-ins like `fetchGit`,
|
||||||
|
`fetchMercurial` and `fetchTarball`.
|
||||||
|
|
||||||
|
- `builtins.getFlake` fetches a flake and returns its output
|
||||||
|
attributes. This function should not be used inside flakes! Use
|
||||||
|
flake inputs instead.
|
||||||
|
|
||||||
|
- `builtins.floor` and `builtins.ceil` round a floating-point number
|
||||||
|
down and up, respectively.
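
For illustration (the values are chosen arbitrarily):

```nix
[ (builtins.floor 3.7) (builtins.ceil 3.2) ]   # evaluates to [ 3 4 ]
```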
|
||||||
|
|
||||||
|
* Experimental support for recursive Nix. This means that Nix
|
||||||
|
derivations can now call Nix to build other derivations. This is not
|
||||||
|
in a stable state yet and not well
|
||||||
|
[documented](https://github.com/NixOS/nix/commit/c4d7c76b641d82b2696fef73ce0ac160043c18da).
|
||||||
|
|
||||||
|
* The new experimental feature `no-url-literals` disables URL
|
||||||
|
literals. This helps to implement [RFC
|
||||||
|
45](https://github.com/NixOS/rfcs/pull/45).
|
||||||
|
|
||||||
|
* Nix now uses `libarchive` to decompress and unpack tarballs and zip
|
||||||
|
files, so `tar` is no longer required.
|
||||||
|
|
||||||
|
* The priority of substituters can now be overridden using the
|
||||||
|
`priority` substituter setting (e.g. `--substituters
|
||||||
|
'http://cache.nixos.org?priority=100 daemon?priority=10'`).
|
||||||
|
|
||||||
|
* `nix edit` now supports non-derivation attributes, e.g. `nix edit
|
||||||
|
.#nixosConfigurations.bla`.
|
||||||
|
|
||||||
|
* The `nix` command now provides command line completion for `bash`,
|
||||||
|
`zsh` and `fish`. Since the support for getting completions is built
|
||||||
|
into `nix`, it's easy to add support for other shells.
|
||||||
|
|
||||||
|
* The new `--log-format` flag selects what Nix's output looks like. It
|
||||||
|
defaults to a terse progress indicator. There is a new
|
||||||
|
`internal-json` output format for use by other programs.
|
||||||
|
|
||||||
|
* `nix eval` has a new `--apply` flag that applies a function to the
|
||||||
|
evaluation result.
|
||||||
|
|
||||||
|
* `nix eval` has a new `--write-to` flag that allows it to write a
|
||||||
|
nested attribute set of string leaves to a corresponding directory
|
||||||
|
tree.
|
||||||
|
|
||||||
|
* Memory improvements: many operations that add paths to the store or
|
||||||
|
copy paths between stores now run in constant memory.
|
||||||
|
|
||||||
|
* Many `nix` commands now support the flag `--derivation` to operate
|
||||||
|
on a `.drv` file itself instead of its outputs.
|
||||||
|
|
||||||
|
* There is a new store called `dummy://` that does not support
|
||||||
|
building or adding paths. This is useful if you want to use the Nix
|
||||||
|
evaluator but don't have a Nix store.
|
||||||
|
|
||||||
|
* The `ssh-ng://` store now allows substituting paths on the remote,
|
||||||
|
as `ssh://` already did.
|
||||||
|
|
||||||
|
* When auto-calling a function with an ellipsis, all arguments are now
|
||||||
|
passed.
|
||||||
|
|
||||||
|
* New `nix-shell` features:
|
||||||
|
|
||||||
|
- It preserves the `PS1` environment variable if
|
||||||
|
`NIX_SHELL_PRESERVE_PROMPT` is set.
|
||||||
|
|
||||||
|
- With `-p`, it passes any `--arg`s as Nixpkgs arguments.
|
||||||
|
|
||||||
|
- Support for structured attributes.
|
||||||
|
|
||||||
|
* `nix-prefetch-url` has a new `--executable` flag.
|
||||||
|
|
||||||
|
* On `x86_64` systems, [`x86_64` microarchitecture
|
||||||
|
levels](https://lwn.net/Articles/844831/) are mapped to additional
|
||||||
|
system types (e.g. `x86_64-v1-linux`).
|
||||||
|
|
||||||
|
* The new `--eval-store` flag allows you to use a different store for
|
||||||
|
evaluation than for building or storing the build result. This is
|
||||||
|
primarily useful when you want to query whether something exists in
|
||||||
|
a read-only store, such as a binary cache:
|
||||||
|
|
||||||
|
```
|
||||||
|
# nix path-info --json --store https://cache.nixos.org \
|
||||||
|
--eval-store auto nixpkgs#hello
|
||||||
|
```
|
||||||
|
|
||||||
|
(Here `auto` indicates the local store.)
|
||||||
|
|
||||||
|
* The Nix daemon has a new low-latency mechanism for copying
|
||||||
|
closures. This is useful when building on remote stores such as
|
||||||
|
`ssh-ng://`.
|
||||||
|
|
||||||
|
* Plugins can now register `nix` subcommands.
|
||||||
|
|
||||||
|
* The `--indirect` flag to `nix-store --add-root` has become a no-op.
|
||||||
|
`--add-root` will always generate indirect GC roots from now on.
|
||||||
|
|
||||||
|
## Incompatible changes
|
||||||
|
|
||||||
|
* The `nix` command is now marked as an experimental feature. This
|
||||||
|
means that you need to add
|
||||||
|
|
||||||
|
```
|
||||||
|
experimental-features = nix-command
|
||||||
|
```
|
||||||
|
|
||||||
|
to your `nix.conf` if you want to use it, or pass
|
||||||
|
`--extra-experimental-features nix-command` on the command line.
|
||||||
|
|
||||||
|
* The `nix` command no longer has a syntax for referring to packages
|
||||||
|
in a channel. This means that the following no longer works:
|
||||||
|
|
||||||
|
```console
|
||||||
|
nix build nixpkgs.hello # Nix 2.3
|
||||||
|
```
|
||||||
|
|
||||||
|
Instead, you can either use the `#` syntax to select a package from
|
||||||
|
a flake, e.g.
|
||||||
|
|
||||||
|
```console
|
||||||
|
nix build nixpkgs#hello
|
||||||
|
```
|
||||||
|
|
||||||
|
Or, if you want to use the `nixpkgs` channel in the `NIX_PATH`
|
||||||
|
environment variable:
|
||||||
|
|
||||||
|
```console
|
||||||
|
nix build -f '<nixpkgs>' hello
|
||||||
|
```
|
||||||
|
|
||||||
|
* The old `nix run` has been renamed to `nix shell`, while there is a
|
||||||
|
new `nix run` that runs a default command. So instead of
|
||||||
|
|
||||||
|
```console
|
||||||
|
nix run nixpkgs.hello -c hello # Nix 2.3
|
||||||
|
```
|
||||||
|
|
||||||
|
you should use
|
||||||
|
|
||||||
|
```console
|
||||||
|
nix shell nixpkgs#hello -c hello
|
||||||
|
```
|
||||||
|
|
||||||
|
or just
|
||||||
|
|
||||||
|
```console
|
||||||
|
nix run nixpkgs#hello
|
||||||
|
```
|
||||||
|
|
||||||
|
if the command you want to run has the same name as the package.
|
||||||
|
|
||||||
|
* It is now an error to modify the `plugin-files` setting via a
|
||||||
|
command-line flag that appears after the first non-flag argument to
|
||||||
|
any command, including a subcommand to `nix`. For example,
|
||||||
|
`nix-instantiate default.nix --plugin-files ""` must now become
|
||||||
|
`nix-instantiate --plugin-files "" default.nix`.
|
||||||
|
|
||||||
|
* We no longer release source tarballs. If you want to build from
|
||||||
|
source, please build from the tags in the Git repository.
|
||||||
|
|
||||||
|
## Contributors
|
||||||
|
|
||||||
|
This release has contributions from
|
||||||
|
Adam Höse,
|
||||||
|
Albert Safin,
|
||||||
|
Alex Kovar,
|
||||||
|
Alex Zero,
|
||||||
|
Alexander Bantyev,
|
||||||
|
Alexandre Esteves,
|
||||||
|
Alyssa Ross,
|
||||||
|
Anatole Lucet,
|
||||||
|
Anders Kaseorg,
|
||||||
|
Andreas Rammhold,
|
||||||
|
Antoine Eiche,
|
||||||
|
Antoine Martin,
|
||||||
|
Arnout Engelen,
|
||||||
|
Arthur Gautier,
|
||||||
|
aszlig,
|
||||||
|
Ben Burdette,
|
||||||
|
Benjamin Hipple,
|
||||||
|
Bernardo Meurer,
|
||||||
|
Björn Gohla,
|
||||||
|
Bjørn Forsman,
|
||||||
|
Bob van der Linden,
|
||||||
|
Brian Leung,
|
||||||
|
Brian McKenna,
|
||||||
|
Brian Wignall,
|
||||||
|
Bruce Toll,
|
||||||
|
Bryan Richter,
|
||||||
|
Calle Rosenquist,
|
||||||
|
Calvin Loncaric,
|
||||||
|
Carlo Nucera,
|
||||||
|
Carlos D'Agostino,
|
||||||
|
Chaz Schlarp,
|
||||||
|
Christian Höppner,
|
||||||
|
Christian Kampka,
|
||||||
|
Chua Hou,
|
||||||
|
Chuck,
|
||||||
|
Cole Helbling,
|
||||||
|
Daiderd Jordan,
|
||||||
|
Dan Callahan,
|
||||||
|
Dani,
|
||||||
|
Daniel Fitzpatrick,
|
||||||
|
Danila Fedorin,
|
||||||
|
Daniël de Kok,
|
||||||
|
Danny Bautista,
|
||||||
|
DavHau,
|
||||||
|
David McFarland,
|
||||||
|
Dima,
|
||||||
|
Domen Kožar,
|
||||||
|
Dominik Schrempf,
|
||||||
|
Dominique Martinet,
|
||||||
|
dramforever,
|
||||||
|
Dustin DeWeese,
|
||||||
|
edef,
|
||||||
|
Eelco Dolstra,
|
||||||
|
Ellie Hermaszewska,
|
||||||
|
Emilio Karakey,
|
||||||
|
Emily,
|
||||||
|
Eric Culp,
|
||||||
|
Ersin Akinci,
|
||||||
|
Fabian Möller,
|
||||||
|
Farid Zakaria,
|
||||||
|
Federico Pellegrin,
|
||||||
|
Finn Behrens,
|
||||||
|
Florian Franzen,
|
||||||
|
Félix Baylac-Jacqué,
|
||||||
|
Gabriella Gonzalez,
|
||||||
|
Geoff Reedy,
|
||||||
|
Georges Dubus,
|
||||||
|
Graham Christensen,
|
||||||
|
Greg Hale,
|
||||||
|
Greg Price,
|
||||||
|
Gregor Kleen,
|
||||||
|
Gregory Hale,
|
||||||
|
Griffin Smith,
|
||||||
|
Guillaume Bouchard,
|
||||||
|
Harald van Dijk,
|
||||||
|
illustris,
|
||||||
|
Ivan Zvonimir Horvat,
|
||||||
|
Jade,
|
||||||
|
Jake Waksbaum,
|
||||||
|
jakobrs,
|
||||||
|
James Ottaway,
|
||||||
|
Jan Tojnar,
|
||||||
|
Janne Heß,
|
||||||
|
Jaroslavas Pocepko,
|
||||||
|
Jarrett Keifer,
|
||||||
|
Jeremy Schlatter,
|
||||||
|
Joachim Breitner,
|
||||||
|
Joe Pea,
|
||||||
|
John Ericson,
|
||||||
|
Jonathan Ringer,
|
||||||
|
Josef Kemetmüller,
|
||||||
|
Joseph Lucas,
|
||||||
|
Jude Taylor,
|
||||||
|
Julian Stecklina,
|
||||||
|
Julien Tanguy,
|
||||||
|
Jörg Thalheim,
|
||||||
|
Kai Wohlfahrt,
|
||||||
|
keke,
|
||||||
|
Keshav Kini,
|
||||||
|
Kevin Quick,
|
||||||
|
Kevin Stock,
|
||||||
|
Kjetil Orbekk,
|
||||||
|
Krzysztof Gogolewski,
|
||||||
|
kvtb,
|
||||||
|
Lars Mühmel,
|
||||||
|
Leonhard Markert,
|
||||||
|
Lily Ballard,
|
||||||
|
Linus Heckemann,
|
||||||
|
Lorenzo Manacorda,
|
||||||
|
Lucas Desgouilles,
|
||||||
|
Lucas Franceschino,
|
||||||
|
Lucas Hoffmann,
|
||||||
|
Luke Granger-Brown,
|
||||||
|
Madeline Haraj,
|
||||||
|
Marwan Aljubeh,
|
||||||
|
Mat Marini,
|
||||||
|
Mateusz Piotrowski,
|
||||||
|
Matthew Bauer,
|
||||||
|
Matthew Kenigsberg,
|
||||||
|
Mauricio Scheffer,
|
||||||
|
Maximilian Bosch,
|
||||||
|
Michael Adler,
|
||||||
|
Michael Bishop,
|
||||||
|
Michael Fellinger,
|
||||||
|
Michael Forney,
|
||||||
|
Michael Reilly,
|
||||||
|
mlatus,
|
||||||
|
Mykola Orliuk,
|
||||||
|
Nathan van Doorn,
|
||||||
|
Naïm Favier,
|
||||||
|
ng0,
|
||||||
|
Nick Van den Broeck,
|
||||||
|
Nicolas Stig124 Formichella,
|
||||||
|
Niels Egberts,
|
||||||
|
Niklas Hambüchen,
|
||||||
|
Nikola Knezevic,
|
||||||
|
oxalica,
|
||||||
|
p01arst0rm,
|
||||||
|
Pamplemousse,
|
||||||
|
Patrick Hilhorst,
|
||||||
|
Paul Opiyo,
|
||||||
|
Pavol Rusnak,
|
||||||
|
Peter Kolloch,
|
||||||
|
Philipp Bartsch,
|
||||||
|
Philipp Middendorf,
|
||||||
|
Piotr Szubiakowski,
|
||||||
|
Profpatsch,
|
||||||
|
Puck Meerburg,
|
||||||
|
Ricardo M. Correia,
|
||||||
|
Rickard Nilsson,
|
||||||
|
Robert Hensing,
|
||||||
|
Robin Gloster,
|
||||||
|
Rodrigo,
|
||||||
|
Rok Garbas,
|
||||||
|
Ronnie Ebrin,
|
||||||
|
Rovanion Luckey,
|
||||||
|
Ryan Burns,
|
||||||
|
Ryan Mulligan,
|
||||||
|
Ryne Everett,
|
||||||
|
Sam Doshi,
|
||||||
|
Sam Lidder,
|
||||||
|
Samir Talwar,
|
||||||
|
Samuel Dionne-Riel,
|
||||||
|
Sebastian Ullrich,
|
||||||
|
Sergei Trofimovich,
|
||||||
|
Sevan Janiyan,
|
||||||
|
Shao Cheng,
|
||||||
|
Shea Levy,
|
||||||
|
Silvan Mosberger,
|
||||||
|
Stefan Frijters,
|
||||||
|
Stefan Jaax,
|
||||||
|
sternenseemann,
|
||||||
|
Steven Shaw,
|
||||||
|
Stéphan Kochen,
|
||||||
|
SuperSandro2000,
|
||||||
|
Suraj Barkale,
|
||||||
|
Taeer Bar-Yam,
|
||||||
|
Thomas Churchman,
|
||||||
|
Théophane Hufschmitt,
|
||||||
|
Timothy DeHerrera,
|
||||||
|
Timothy Klim,
|
||||||
|
Tobias Möst,
|
||||||
|
Tobias Pflug,
|
||||||
|
Tom Bereknyei,
|
||||||
|
Travis A. Everett,
|
||||||
|
Ujjwal Jain,
|
||||||
|
Vladimír Čunát,
|
||||||
|
Wil Taylor,
|
||||||
|
Will Dietz,
|
||||||
|
Yaroslav Bolyukin,
|
||||||
|
Yestin L. Harrison,
|
||||||
|
YI,
|
||||||
|
Yorick van Pelt,
|
||||||
|
Yuriy Taraday and
|
||||||
|
zimbatm.
|
||||||
|
|
16
doc/manual/src/release-notes/rl-2.5.md
Normal file
16
doc/manual/src/release-notes/rl-2.5.md
Normal file
|
@ -0,0 +1,16 @@
|
||||||
|
# Release 2.5 (2021-12-13)
|
||||||
|
|
||||||
|
* The garbage collector no longer blocks new builds, so the message
|
||||||
|
`waiting for the big garbage collector lock...` is a thing of the
|
||||||
|
past.
|
||||||
|
|
||||||
|
* Binary cache stores now have a setting `compression-level`.
|
||||||
|
|
||||||
|
* `nix develop` now has a flag `--unpack` to run `unpackPhase`.
|
||||||
|
|
||||||
|
* Lists can now be compared lexicographically using the `<` operator.
|
||||||
|
|
||||||
|
* New built-in function: `builtins.groupBy`, with the same functionality as
|
||||||
|
Nixpkgs' `lib.groupBy`, but faster (see the sketch after this list).
|
||||||
|
|
||||||
|
* `nix repl` now has a `:log` command.
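
A small sketch of the two language additions above, lexicographic list comparison and `builtins.groupBy` (example values invented):

```nix
{
  listLess = [ 1 2 ] < [ 1 3 ];   # true
  grouped  = builtins.groupBy (builtins.substring 0 1) [ "foo" "bar" "baz" ];
  # grouped evaluates to { b = [ "bar" "baz" ]; f = [ "foo" ]; }
}
```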
|
21
doc/manual/src/release-notes/rl-2.6.md
Normal file
21
doc/manual/src/release-notes/rl-2.6.md
Normal file
|
@ -0,0 +1,21 @@
|
||||||
|
# Release 2.6 (2022-01-24)
|
||||||
|
|
||||||
|
* The Nix CLI now searches for a `flake.nix` up until the root of the current
|
||||||
|
Git repository or a filesystem boundary rather than just in the current
|
||||||
|
directory.
|
||||||
|
* The TOML parser used by `builtins.fromTOML` has been replaced by [a
|
||||||
|
more compliant one](https://github.com/ToruNiina/toml11).
|
||||||
|
* Added `:st`/`:show-trace` commands to `nix repl`, which are used to
|
||||||
|
set or toggle display of error traces.
|
||||||
|
* New builtin function `builtins.zipAttrsWith` with the same
|
||||||
|
functionality as `lib.zipAttrsWith` from Nixpkgs, but much more
|
||||||
|
efficient (see the sketch after this list).
|
||||||
|
* New command `nix store copy-log` to copy build logs from one store
|
||||||
|
to another.
|
||||||
|
* The `commit-lockfile-summary` option can be set to a non-empty
|
||||||
|
string to override the commit summary used when committing an updated
|
||||||
|
lockfile. This may be used in conjunction with the `nixConfig`
|
||||||
|
attribute in `flake.nix` to better conform to repository
|
||||||
|
conventions.
|
||||||
|
* `docker run -ti nixos/nix:master` will place you in the Docker
|
||||||
|
container with the latest version of Nix from the `master` branch.
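
A small sketch of `builtins.zipAttrsWith` (example values invented):

```nix
builtins.zipAttrsWith (name: values: values) [ { a = 1; } { a = 2; b = 3; } ]
# evaluates to { a = [ 1 2 ]; b = [ 3 ]; }
```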
|
28
doc/manual/src/release-notes/rl-next.md
Normal file
28
doc/manual/src/release-notes/rl-next.md
Normal file
|
@ -0,0 +1,28 @@
|
||||||
|
# Release X.Y (202?-??-??)
|
||||||
|
|
||||||
|
* A number of "default" flake output attributes have been
|
||||||
|
renamed. These are:
|
||||||
|
|
||||||
|
* `defaultPackage.<system>` → `packages.<system>.default`
|
||||||
|
* `defaultApps.<system>` → `apps.<system>.default`
|
||||||
|
* `defaultTemplate` → `templates.default`
|
||||||
|
* `defaultBundler.<system>` → `bundlers.<system>.default`
|
||||||
|
* `overlay` → `overlays.default`
|
||||||
|
* `devShell.<system>` → `devShells.<system>.default`
|
||||||
|
|
||||||
|
The old flake output attributes still work, but `nix flake check`
|
||||||
|
will warn about them.
|
||||||
|
|
||||||
|
* In a breaking API change, `nix bundle` now supports bundlers of the form
|
||||||
|
`bundler.<system>.<name>= derivation: another-derivation;`. This supports
|
||||||
|
additional functionality to inspect evaluation information during bundling. A
|
||||||
|
new [repository](https://github.com/NixOS/bundlers) has various bundlers
|
||||||
|
implemented.
|
||||||
|
|
||||||
|
* `nix store ping` now reports the version of the remote Nix daemon.
|
||||||
|
|
||||||
|
* `nix flake {init,new}` now display information about which files have been
|
||||||
|
created.
|
||||||
|
|
||||||
|
* Templates can now define a `welcomeText` attribute, which is printed out by
|
||||||
|
`nix flake {init,new} --template <template>`.
|
264
docker.nix
Normal file
264
docker.nix
Normal file
|
@ -0,0 +1,264 @@
|
||||||
|
{ pkgs ? import <nixpkgs> { }
|
||||||
|
, lib ? pkgs.lib
|
||||||
|
, name ? "nix"
|
||||||
|
, tag ? "latest"
|
||||||
|
, channelName ? "nixpkgs"
|
||||||
|
, channelURL ? "https://nixos.org/channels/nixpkgs-unstable"
|
||||||
|
}:
|
||||||
|
let
|
||||||
|
defaultPkgs = with pkgs; [
|
||||||
|
nix
|
||||||
|
bashInteractive
|
||||||
|
coreutils-full
|
||||||
|
gnutar
|
||||||
|
gzip
|
||||||
|
gnugrep
|
||||||
|
which
|
||||||
|
curl
|
||||||
|
less
|
||||||
|
wget
|
||||||
|
man
|
||||||
|
cacert.out
|
||||||
|
findutils
|
||||||
|
iana-etc
|
||||||
|
git
|
||||||
|
];
|
||||||
|
|
||||||
|
users = {
|
||||||
|
|
||||||
|
root = {
|
||||||
|
uid = 0;
|
||||||
|
shell = "/bin/bash";
|
||||||
|
home = "/root";
|
||||||
|
gid = 0;
|
||||||
|
};
|
||||||
|
|
||||||
|
} // lib.listToAttrs (
|
||||||
|
map
|
||||||
|
(
|
||||||
|
n: {
|
||||||
|
name = "nixbld${toString n}";
|
||||||
|
value = {
|
||||||
|
uid = 30000 + n;
|
||||||
|
gid = 30000;
|
||||||
|
groups = [ "nixbld" ];
|
||||||
|
description = "Nix build user ${toString n}";
|
||||||
|
};
|
||||||
|
}
|
||||||
|
)
|
||||||
|
(lib.lists.range 1 32)
|
||||||
|
);
|
||||||
|
|
||||||
|
groups = {
|
||||||
|
root.gid = 0;
|
||||||
|
nixbld.gid = 30000;
|
||||||
|
};
|
||||||
|
|
||||||
|
userToPasswd = (
|
||||||
|
k:
|
||||||
|
{ uid
|
||||||
|
, gid ? 65534
|
||||||
|
, home ? "/var/empty"
|
||||||
|
, description ? ""
|
||||||
|
, shell ? "/bin/false"
|
||||||
|
, groups ? [ ]
|
||||||
|
}: "${k}:x:${toString uid}:${toString gid}:${description}:${home}:${shell}"
|
||||||
|
);
|
||||||
|
passwdContents = (
|
||||||
|
lib.concatStringsSep "\n"
|
||||||
|
(lib.attrValues (lib.mapAttrs userToPasswd users))
|
||||||
|
);
|
||||||
|
|
||||||
|
userToShadow = k: { ... }: "${k}:!:1::::::";
|
||||||
|
shadowContents = (
|
||||||
|
lib.concatStringsSep "\n"
|
||||||
|
(lib.attrValues (lib.mapAttrs userToShadow users))
|
||||||
|
);
|
||||||
|
|
||||||
|
# Map groups to members
|
||||||
|
# {
|
||||||
|
# group = [ "user1" "user2" ];
|
||||||
|
# }
|
||||||
|
groupMemberMap = (
|
||||||
|
let
|
||||||
|
# Create a flat list of user/group mappings
|
||||||
|
mappings = (
|
||||||
|
builtins.foldl'
|
||||||
|
(
|
||||||
|
acc: user:
|
||||||
|
let
|
||||||
|
groups = users.${user}.groups or [ ];
|
||||||
|
in
|
||||||
|
acc ++ map
|
||||||
|
(group: {
|
||||||
|
inherit user group;
|
||||||
|
})
|
||||||
|
groups
|
||||||
|
)
|
||||||
|
[ ]
|
||||||
|
(lib.attrNames users)
|
||||||
|
);
|
||||||
|
in
|
||||||
|
(
|
||||||
|
builtins.foldl'
|
||||||
|
(
|
||||||
|
acc: v: acc // {
|
||||||
|
${v.group} = acc.${v.group} or [ ] ++ [ v.user ];
|
||||||
|
}
|
||||||
|
)
|
||||||
|
{ }
|
||||||
|
mappings)
|
||||||
|
);
|
||||||
|
|
||||||
|
groupToGroup = k: { gid }:
|
||||||
|
let
|
||||||
|
members = groupMemberMap.${k} or [ ];
|
||||||
|
in
|
||||||
|
"${k}:x:${toString gid}:${lib.concatStringsSep "," members}";
|
||||||
|
groupContents = (
|
||||||
|
lib.concatStringsSep "\n"
|
||||||
|
(lib.attrValues (lib.mapAttrs groupToGroup groups))
|
||||||
|
);
|
||||||
|
|
||||||
|
nixConf = {
|
||||||
|
sandbox = "false";
|
||||||
|
build-users-group = "nixbld";
|
||||||
|
trusted-public-keys = "cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY=";
|
||||||
|
};
|
||||||
|
nixConfContents = (lib.concatStringsSep "\n" (lib.mapAttrsFlatten (n: v: "${n} = ${v}") nixConf)) + "\n";
|
||||||
|
|
||||||
|
baseSystem =
|
||||||
|
let
|
||||||
|
nixpkgs = pkgs.path;
|
||||||
|
channel = pkgs.runCommand "channel-nixos" { } ''
|
||||||
|
mkdir $out
|
||||||
|
ln -s ${nixpkgs} $out/nixpkgs
|
||||||
|
echo "[]" > $out/manifest.nix
|
||||||
|
'';
|
||||||
|
rootEnv = pkgs.buildPackages.buildEnv {
|
||||||
|
name = "root-profile-env";
|
||||||
|
paths = defaultPkgs;
|
||||||
|
};
|
||||||
|
manifest = pkgs.buildPackages.runCommand "manifest.nix" { } ''
|
||||||
|
cat > $out <<EOF
|
||||||
|
[
|
||||||
|
${lib.concatStringsSep "\n" (builtins.map (drv: let
|
||||||
|
outputs = drv.outputsToInstall or [ "out" ];
|
||||||
|
in ''
|
||||||
|
{
|
||||||
|
${lib.concatStringsSep "\n" (builtins.map (output: ''
|
||||||
|
${output} = { outPath = "${lib.getOutput output drv}"; };
|
||||||
|
'') outputs)}
|
||||||
|
outputs = [ ${lib.concatStringsSep " " (builtins.map (x: "\"${x}\"") outputs)} ];
|
||||||
|
name = "${drv.name}";
|
||||||
|
outPath = "${drv}";
|
||||||
|
system = "${drv.system}";
|
||||||
|
type = "derivation";
|
||||||
|
meta = { };
|
||||||
|
}
|
||||||
|
'') defaultPkgs)}
|
||||||
|
]
|
||||||
|
EOF
|
||||||
|
'';
|
||||||
|
profile = pkgs.buildPackages.runCommand "user-environment" { } ''
|
||||||
|
mkdir $out
|
||||||
|
cp -a ${rootEnv}/* $out/
|
||||||
|
ln -s ${manifest} $out/manifest.nix
|
||||||
|
'';
|
||||||
|
in
|
||||||
|
pkgs.runCommand "base-system"
|
||||||
|
{
|
||||||
|
inherit passwdContents groupContents shadowContents nixConfContents;
|
||||||
|
passAsFile = [
|
||||||
|
"passwdContents"
|
||||||
|
"groupContents"
|
||||||
|
"shadowContents"
|
||||||
|
"nixConfContents"
|
||||||
|
];
|
||||||
|
allowSubstitutes = false;
|
||||||
|
preferLocalBuild = true;
|
||||||
|
} ''
|
||||||
|
env
|
||||||
|
set -x
|
||||||
|
mkdir -p $out/etc
|
||||||
|
|
||||||
|
mkdir -p $out/etc/ssl/certs
|
||||||
|
ln -s /nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt $out/etc/ssl/certs
|
||||||
|
|
||||||
|
cat $passwdContentsPath > $out/etc/passwd
|
||||||
|
echo "" >> $out/etc/passwd
|
||||||
|
|
||||||
|
cat $groupContentsPath > $out/etc/group
|
||||||
|
echo "" >> $out/etc/group
|
||||||
|
|
||||||
|
cat $shadowContentsPath > $out/etc/shadow
|
||||||
|
echo "" >> $out/etc/shadow
|
||||||
|
|
||||||
|
mkdir -p $out/usr
|
||||||
|
ln -s /nix/var/nix/profiles/share $out/usr/
|
||||||
|
|
||||||
|
mkdir -p $out/nix/var/nix/gcroots
|
||||||
|
|
||||||
|
mkdir $out/tmp
|
||||||
|
|
||||||
|
mkdir -p $out/var/tmp
|
||||||
|
|
||||||
|
mkdir -p $out/etc/nix
|
||||||
|
cat $nixConfContentsPath > $out/etc/nix/nix.conf
|
||||||
|
|
||||||
|
mkdir -p $out/root
|
||||||
|
mkdir -p $out/nix/var/nix/profiles/per-user/root
|
||||||
|
|
||||||
|
ln -s ${profile} $out/nix/var/nix/profiles/default-1-link
|
||||||
|
ln -s $out/nix/var/nix/profiles/default-1-link $out/nix/var/nix/profiles/default
|
||||||
|
ln -s /nix/var/nix/profiles/default $out/root/.nix-profile
|
||||||
|
|
||||||
|
ln -s ${channel} $out/nix/var/nix/profiles/per-user/root/channels-1-link
|
||||||
|
ln -s $out/nix/var/nix/profiles/per-user/root/channels-1-link $out/nix/var/nix/profiles/per-user/root/channels
|
||||||
|
|
||||||
|
mkdir -p $out/root/.nix-defexpr
|
||||||
|
ln -s $out/nix/var/nix/profiles/per-user/root/channels $out/root/.nix-defexpr/channels
|
||||||
|
echo "${channelURL} ${channelName}" > $out/root/.nix-channels
|
||||||
|
|
||||||
|
mkdir -p $out/bin $out/usr/bin
|
||||||
|
ln -s ${pkgs.coreutils}/bin/env $out/usr/bin/env
|
||||||
|
ln -s ${pkgs.bashInteractive}/bin/bash $out/bin/sh
|
||||||
|
'';
|
||||||
|
|
||||||
|
in
|
||||||
|
pkgs.dockerTools.buildLayeredImageWithNixDb {
|
||||||
|
|
||||||
|
inherit name tag;
|
||||||
|
|
||||||
|
contents = [ baseSystem ];
|
||||||
|
|
||||||
|
extraCommands = ''
|
||||||
|
rm -rf nix-support
|
||||||
|
ln -s /nix/var/nix/profiles nix/var/nix/gcroots/profiles
|
||||||
|
'';
|
||||||
|
fakeRootCommands = ''
|
||||||
|
chmod 1777 tmp
|
||||||
|
chmod 1777 var/tmp
|
||||||
|
'';
|
||||||
|
|
||||||
|
config = {
|
||||||
|
Cmd = [ "/root/.nix-profile/bin/bash" ];
|
||||||
|
Env = [
|
||||||
|
"USER=root"
|
||||||
|
"PATH=${lib.concatStringsSep ":" [
|
||||||
|
"/root/.nix-profile/bin"
|
||||||
|
"/nix/var/nix/profiles/default/bin"
|
||||||
|
"/nix/var/nix/profiles/default/sbin"
|
||||||
|
]}"
|
||||||
|
"MANPATH=${lib.concatStringsSep ":" [
|
||||||
|
"/root/.nix-profile/share/man"
|
||||||
|
"/nix/var/nix/profiles/default/share/man"
|
||||||
|
]}"
|
||||||
|
"SSL_CERT_FILE=/nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt"
|
||||||
|
"GIT_SSL_CAINFO=/nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt"
|
||||||
|
"NIX_SSL_CERT_FILE=/nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt"
|
||||||
|
"NIX_PATH=/nix/var/nix/profiles/per-user/root/channels:/root/.nix-defexpr/channels"
|
||||||
|
];
|
||||||
|
};
|
||||||
|
|
||||||
|
}
|
31
flake.lock
31
flake.lock
|
@ -3,27 +3,26 @@
|
||||||
"lowdown-src": {
|
"lowdown-src": {
|
||||||
"flake": false,
|
"flake": false,
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1628247802,
|
"lastModified": 1633514407,
|
||||||
"narHash": "sha256-4XSXGYvKqogR7bubyqYNwBHYCtrIn6XRGXj6+u+BXNs=",
|
"narHash": "sha256-Dw32tiMjdK9t3ETl5fzGrutQTzh2rufgZV4A/BbxuD4=",
|
||||||
"owner": "kristapsdz",
|
"owner": "kristapsdz",
|
||||||
"repo": "lowdown",
|
"repo": "lowdown",
|
||||||
"rev": "b4483d0ef85990f54b864158ab786b4a5b3904fa",
|
"rev": "d2c2b44ff6c27b936ec27358a2653caaef8f73b8",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
"owner": "kristapsdz",
|
"owner": "kristapsdz",
|
||||||
"ref": "VERSION_0_8_6",
|
|
||||||
"repo": "lowdown",
|
"repo": "lowdown",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"nixpkgs": {
|
"nixpkgs": {
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1628689438,
|
"lastModified": 1632864508,
|
||||||
"narHash": "sha256-YMINW6YmubHZVdliGsAJpnnMYXRrvppv59LgwtnyYhs=",
|
"narHash": "sha256-d127FIvGR41XbVRDPVvozUPQ/uRHbHwvfyKHwEt5xFM=",
|
||||||
"owner": "NixOS",
|
"owner": "NixOS",
|
||||||
"repo": "nixpkgs",
|
"repo": "nixpkgs",
|
||||||
"rev": "f6551e1efa261568c82b76c3a582b2c2ceb1f53f",
|
"rev": "82891b5e2c2359d7e58d08849e4c89511ab94234",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
|
@ -32,10 +31,26 @@
|
||||||
"type": "indirect"
|
"type": "indirect"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"nixpkgs-regression": {
|
||||||
|
"locked": {
|
||||||
|
"lastModified": 1643052045,
|
||||||
|
"narHash": "sha256-uGJ0VXIhWKGXxkeNnq4TvV3CIOkUJ3PAoLZ3HMzNVMw=",
|
||||||
|
"owner": "NixOS",
|
||||||
|
"repo": "nixpkgs",
|
||||||
|
"rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2",
|
||||||
|
"type": "github"
|
||||||
|
},
|
||||||
|
"original": {
|
||||||
|
"id": "nixpkgs",
|
||||||
|
"rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2",
|
||||||
|
"type": "indirect"
|
||||||
|
}
|
||||||
|
},
|
||||||
"root": {
|
"root": {
|
||||||
"inputs": {
|
"inputs": {
|
||||||
"lowdown-src": "lowdown-src",
|
"lowdown-src": "lowdown-src",
|
||||||
"nixpkgs": "nixpkgs"
|
"nixpkgs": "nixpkgs",
|
||||||
|
"nixpkgs-regression": "nixpkgs-regression"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
|
544
flake.nix
544
flake.nix
|
@ -2,9 +2,10 @@
|
||||||
description = "The purely functional package manager";
|
description = "The purely functional package manager";
|
||||||
|
|
||||||
inputs.nixpkgs.url = "nixpkgs/nixos-21.05-small";
|
inputs.nixpkgs.url = "nixpkgs/nixos-21.05-small";
|
||||||
inputs.lowdown-src = { url = "github:kristapsdz/lowdown/VERSION_0_8_6"; flake = false; };
|
inputs.nixpkgs-regression.url = "nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2";
|
||||||
|
inputs.lowdown-src = { url = "github:kristapsdz/lowdown"; flake = false; };
|
||||||
|
|
||||||
outputs = { self, nixpkgs, lowdown-src }:
|
outputs = { self, nixpkgs, nixpkgs-regression, lowdown-src }:
|
||||||
|
|
||||||
let
|
let
|
||||||
|
|
||||||
|
@ -22,15 +23,36 @@
|
||||||
|
|
||||||
crossSystems = [ "armv6l-linux" "armv7l-linux" ];
|
crossSystems = [ "armv6l-linux" "armv7l-linux" ];
|
||||||
|
|
||||||
|
stdenvs = [ "gccStdenv" "clangStdenv" "clang11Stdenv" "stdenv" ];
|
||||||
|
|
||||||
forAllSystems = f: nixpkgs.lib.genAttrs systems (system: f system);
|
forAllSystems = f: nixpkgs.lib.genAttrs systems (system: f system);
|
||||||
|
forAllSystemsAndStdenvs = f: forAllSystems (system:
|
||||||
|
nixpkgs.lib.listToAttrs
|
||||||
|
(map
|
||||||
|
(n:
|
||||||
|
nixpkgs.lib.nameValuePair "${n}Packages" (
|
||||||
|
f system n
|
||||||
|
)) stdenvs
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
forAllStdenvs = stdenvs: f: nixpkgs.lib.genAttrs stdenvs (stdenv: f stdenv);
|
||||||
|
|
||||||
# Memoize nixpkgs for different platforms for efficiency.
|
# Memoize nixpkgs for different platforms for efficiency.
|
||||||
nixpkgsFor = forAllSystems (system:
|
nixpkgsFor =
|
||||||
import nixpkgs {
|
let stdenvsPackages = forAllSystemsAndStdenvs
|
||||||
inherit system;
|
(system: stdenv:
|
||||||
overlays = [ self.overlay ];
|
import nixpkgs {
|
||||||
}
|
inherit system;
|
||||||
);
|
overlays = [
|
||||||
|
(overlayFor (p: p.${stdenv}))
|
||||||
|
];
|
||||||
|
}
|
||||||
|
);
|
||||||
|
in
|
||||||
|
# Add the `stdenvPackages` at toplevel, both because these are the ones
|
||||||
|
# we want most of the time and for backwards compatibility
|
||||||
|
forAllSystems (system: stdenvsPackages.${system} // stdenvsPackages.${system}.stdenvPackages);
|
||||||
|
|
||||||
commonDeps = pkgs: with pkgs; rec {
|
commonDeps = pkgs: with pkgs; rec {
|
||||||
# Use "busybox-sandbox-shell" if present,
|
# Use "busybox-sandbox-shell" if present,
|
||||||
|
@ -61,6 +83,7 @@
|
||||||
|
|
||||||
configureFlags =
|
configureFlags =
|
||||||
lib.optionals stdenv.isLinux [
|
lib.optionals stdenv.isLinux [
|
||||||
|
"--with-boost=${boost}/lib"
|
||||||
"--with-sandbox-shell=${sh}/bin/busybox"
|
"--with-sandbox-shell=${sh}/bin/busybox"
|
||||||
"LDFLAGS=-fuse-ld=gold"
|
"LDFLAGS=-fuse-ld=gold"
|
||||||
];
|
];
|
||||||
|
@ -70,15 +93,15 @@
|
||||||
[
|
[
|
||||||
buildPackages.bison
|
buildPackages.bison
|
||||||
buildPackages.flex
|
buildPackages.flex
|
||||||
(lib.getBin buildPackages.lowdown)
|
(lib.getBin buildPackages.lowdown-nix)
|
||||||
buildPackages.mdbook
|
buildPackages.mdbook
|
||||||
buildPackages.autoconf-archive
|
buildPackages.autoconf-archive
|
||||||
buildPackages.autoreconfHook
|
buildPackages.autoreconfHook
|
||||||
buildPackages.pkgconfig
|
buildPackages.pkg-config
|
||||||
|
|
||||||
# Tests
|
# Tests
|
||||||
buildPackages.git
|
buildPackages.git
|
||||||
buildPackages.mercurial
|
buildPackages.mercurial # FIXME: remove? only needed for tests
|
||||||
buildPackages.jq
|
buildPackages.jq
|
||||||
]
|
]
|
||||||
++ lib.optionals stdenv.hostPlatform.isLinux [(buildPackages.util-linuxMinimal or buildPackages.utillinuxMinimal)];
|
++ lib.optionals stdenv.hostPlatform.isLinux [(buildPackages.util-linuxMinimal or buildPackages.utillinuxMinimal)];
|
||||||
|
@ -89,8 +112,8 @@
|
||||||
openssl sqlite
|
openssl sqlite
|
||||||
libarchive
|
libarchive
|
||||||
boost
|
boost
|
||||||
lowdown
|
lowdown-nix
|
||||||
gmock
|
gtest
|
||||||
]
|
]
|
||||||
++ lib.optionals stdenv.isLinux [libseccomp]
|
++ lib.optionals stdenv.isLinux [libseccomp]
|
||||||
++ lib.optional (stdenv.isLinux || stdenv.isDarwin) libsodium
|
++ lib.optional (stdenv.isLinux || stdenv.isDarwin) libsodium
|
||||||
|
@ -110,6 +133,7 @@
|
||||||
./boehmgc-coroutine-sp-fallback.diff
|
./boehmgc-coroutine-sp-fallback.diff
|
||||||
];
|
];
|
||||||
}))
|
}))
|
||||||
|
nlohmann_json
|
||||||
];
|
];
|
||||||
|
|
||||||
perlDeps =
|
perlDeps =
|
||||||
|
@ -118,16 +142,15 @@
|
||||||
];
|
];
|
||||||
};
|
};
|
||||||
|
|
||||||
installScriptFor = systems:
|
installScriptFor = systems:
|
||||||
with nixpkgsFor.x86_64-linux;
|
with nixpkgsFor.x86_64-linux;
|
||||||
runCommand "installer-script"
|
runCommand "installer-script"
|
||||||
{ buildInputs = [ nix ];
|
{ buildInputs = [ nix ];
|
||||||
}
|
}
|
||||||
''
|
''
|
||||||
mkdir -p $out/nix-support
|
mkdir -p $out/nix-support
|
||||||
|
|
||||||
# Converts /nix/store/50p3qk8kka9dl6wyq40vydq945k0j3kv-nix-2.4pre20201102_550e11f/bin/nix
|
# Converts /nix/store/50p3qk8k...-nix-2.4pre20201102_550e11f/bin/nix to 50p3qk8k.../bin/nix.
|
||||||
# To 50p3qk8kka9dl6wyq40vydq945k0j3kv/bin/nix
|
|
||||||
tarballPath() {
|
tarballPath() {
|
||||||
# Remove the store prefix
|
# Remove the store prefix
|
||||||
local path=''${1#${builtins.storeDir}/}
|
local path=''${1#${builtins.storeDir}/}
|
||||||
|
@ -153,13 +176,15 @@
|
||||||
echo "file installer $out/install" >> $out/nix-support/hydra-build-products
|
echo "file installer $out/install" >> $out/nix-support/hydra-build-products
|
||||||
'';
|
'';
|
||||||
|
|
||||||
testNixVersions = pkgs: client: daemon: with commonDeps pkgs; pkgs.stdenv.mkDerivation {
|
testNixVersions = pkgs: client: daemon: with commonDeps pkgs; with pkgs.lib; pkgs.stdenv.mkDerivation {
|
||||||
NIX_DAEMON_PACKAGE = daemon;
|
NIX_DAEMON_PACKAGE = daemon;
|
||||||
NIX_CLIENT_PACKAGE = client;
|
NIX_CLIENT_PACKAGE = client;
|
||||||
# Must keep this name short as OSX has a rather strict limit on the
|
name =
|
||||||
# socket path length, and this name appears in the path of the
|
"nix-tests"
|
||||||
# nix-daemon socket used in the tests
|
+ optionalString
|
||||||
name = "nix-tests";
|
(versionAtLeast daemon.version "2.4pre20211005" &&
|
||||||
|
versionAtLeast client.version "2.4pre20211005")
|
||||||
|
"-${client.version}-against-${daemon.version}";
|
||||||
inherit version;
|
inherit version;
|
||||||
|
|
||||||
src = self;
|
src = self;
|
||||||
|
@ -182,198 +207,209 @@
|
||||||
installCheckPhase = "make installcheck -j$NIX_BUILD_CORES -l$NIX_BUILD_CORES";
|
installCheckPhase = "make installcheck -j$NIX_BUILD_CORES -l$NIX_BUILD_CORES";
|
||||||
};
|
};
|
||||||
|
|
||||||
binaryTarball = buildPackages: nix: pkgs: let
|
binaryTarball = buildPackages: nix: pkgs:
|
||||||
inherit (pkgs) cacert;
|
let
|
||||||
installerClosureInfo = buildPackages.closureInfo { rootPaths = [ nix cacert ]; };
|
inherit (pkgs) cacert;
|
||||||
in
|
installerClosureInfo = buildPackages.closureInfo { rootPaths = [ nix cacert ]; };
|
||||||
|
in
|
||||||
|
|
||||||
buildPackages.runCommand "nix-binary-tarball-${version}"
|
buildPackages.runCommand "nix-binary-tarball-${version}"
|
||||||
{ #nativeBuildInputs = lib.optional (system != "aarch64-linux") shellcheck;
|
{ #nativeBuildInputs = lib.optional (system != "aarch64-linux") shellcheck;
|
||||||
meta.description = "Distribution-independent Nix bootstrap binaries for ${pkgs.system}";
|
meta.description = "Distribution-independent Nix bootstrap binaries for ${pkgs.system}";
|
||||||
}
|
}
|
||||||
''
|
''
|
||||||
cp ${installerClosureInfo}/registration $TMPDIR/reginfo
|
cp ${installerClosureInfo}/registration $TMPDIR/reginfo
|
||||||
cp ${./scripts/create-darwin-volume.sh} $TMPDIR/create-darwin-volume.sh
|
cp ${./scripts/create-darwin-volume.sh} $TMPDIR/create-darwin-volume.sh
|
||||||
substitute ${./scripts/install-nix-from-closure.sh} $TMPDIR/install \
|
substitute ${./scripts/install-nix-from-closure.sh} $TMPDIR/install \
|
||||||
--subst-var-by nix ${nix} \
|
--subst-var-by nix ${nix} \
|
||||||
--subst-var-by cacert ${cacert}
|
--subst-var-by cacert ${cacert}
|
||||||
|
|
||||||
substitute ${./scripts/install-darwin-multi-user.sh} $TMPDIR/install-darwin-multi-user.sh \
|
substitute ${./scripts/install-darwin-multi-user.sh} $TMPDIR/install-darwin-multi-user.sh \
|
||||||
--subst-var-by nix ${nix} \
|
--subst-var-by nix ${nix} \
|
||||||
--subst-var-by cacert ${cacert}
|
--subst-var-by cacert ${cacert}
|
||||||
substitute ${./scripts/install-systemd-multi-user.sh} $TMPDIR/install-systemd-multi-user.sh \
|
substitute ${./scripts/install-systemd-multi-user.sh} $TMPDIR/install-systemd-multi-user.sh \
|
||||||
--subst-var-by nix ${nix} \
|
--subst-var-by nix ${nix} \
|
||||||
--subst-var-by cacert ${cacert}
|
--subst-var-by cacert ${cacert}
|
||||||
substitute ${./scripts/install-multi-user.sh} $TMPDIR/install-multi-user \
|
substitute ${./scripts/install-multi-user.sh} $TMPDIR/install-multi-user \
|
||||||
--subst-var-by nix ${nix} \
|
--subst-var-by nix ${nix} \
|
||||||
--subst-var-by cacert ${cacert}
|
--subst-var-by cacert ${cacert}
|
||||||
|
|
||||||
if type -p shellcheck; then
|
if type -p shellcheck; then
|
||||||
# SC1090: Don't worry about not being able to find
|
# SC1090: Don't worry about not being able to find
|
||||||
# $nix/etc/profile.d/nix.sh
|
# $nix/etc/profile.d/nix.sh
|
||||||
shellcheck --exclude SC1090 $TMPDIR/install
|
shellcheck --exclude SC1090 $TMPDIR/install
|
||||||
shellcheck $TMPDIR/create-darwin-volume.sh
|
shellcheck $TMPDIR/create-darwin-volume.sh
|
||||||
shellcheck $TMPDIR/install-darwin-multi-user.sh
|
shellcheck $TMPDIR/install-darwin-multi-user.sh
|
||||||
shellcheck $TMPDIR/install-systemd-multi-user.sh
|
shellcheck $TMPDIR/install-systemd-multi-user.sh
|
||||||
|
|
||||||
# SC1091: Don't panic about not being able to source
|
# SC1091: Don't panic about not being able to source
|
||||||
# /etc/profile
|
# /etc/profile
|
||||||
# SC2002: Ignore "useless cat" "error", when loading
|
# SC2002: Ignore "useless cat" "error", when loading
|
||||||
# .reginfo, as the cat is a much cleaner
|
# .reginfo, as the cat is a much cleaner
|
||||||
# implementation, even though it is "useless"
|
# implementation, even though it is "useless"
|
||||||
# SC2116: Allow ROOT_HOME=$(echo ~root) for resolving
|
# SC2116: Allow ROOT_HOME=$(echo ~root) for resolving
|
||||||
# root's home directory
|
# root's home directory
|
||||||
shellcheck --external-sources \
|
shellcheck --external-sources \
|
||||||
--exclude SC1091,SC2002,SC2116 $TMPDIR/install-multi-user
|
--exclude SC1091,SC2002,SC2116 $TMPDIR/install-multi-user
|
||||||
fi
|
fi
|
||||||
|
|
||||||
chmod +x $TMPDIR/install
|
chmod +x $TMPDIR/install
|
||||||
chmod +x $TMPDIR/create-darwin-volume.sh
|
chmod +x $TMPDIR/create-darwin-volume.sh
|
||||||
chmod +x $TMPDIR/install-darwin-multi-user.sh
|
chmod +x $TMPDIR/install-darwin-multi-user.sh
|
||||||
chmod +x $TMPDIR/install-systemd-multi-user.sh
|
chmod +x $TMPDIR/install-systemd-multi-user.sh
|
||||||
chmod +x $TMPDIR/install-multi-user
|
chmod +x $TMPDIR/install-multi-user
|
||||||
dir=nix-${version}-${pkgs.system}
|
dir=nix-${version}-${pkgs.system}
|
||||||
fn=$out/$dir.tar.xz
|
fn=$out/$dir.tar.xz
|
||||||
mkdir -p $out/nix-support
|
mkdir -p $out/nix-support
|
||||||
echo "file binary-dist $fn" >> $out/nix-support/hydra-build-products
|
echo "file binary-dist $fn" >> $out/nix-support/hydra-build-products
|
||||||
tar cvfJ $fn \
|
tar cvfJ $fn \
|
||||||
--owner=0 --group=0 --mode=u+rw,uga+r \
|
--owner=0 --group=0 --mode=u+rw,uga+r \
|
||||||
--absolute-names \
|
--absolute-names \
|
||||||
--hard-dereference \
|
--hard-dereference \
|
||||||
--transform "s,$TMPDIR/install,$dir/install," \
|
--transform "s,$TMPDIR/install,$dir/install," \
|
||||||
--transform "s,$TMPDIR/create-darwin-volume.sh,$dir/create-darwin-volume.sh," \
|
--transform "s,$TMPDIR/create-darwin-volume.sh,$dir/create-darwin-volume.sh," \
|
||||||
--transform "s,$TMPDIR/reginfo,$dir/.reginfo," \
|
--transform "s,$TMPDIR/reginfo,$dir/.reginfo," \
|
||||||
--transform "s,$NIX_STORE,$dir/store,S" \
|
--transform "s,$NIX_STORE,$dir/store,S" \
|
||||||
$TMPDIR/install \
|
$TMPDIR/install \
|
||||||
$TMPDIR/create-darwin-volume.sh \
|
$TMPDIR/create-darwin-volume.sh \
|
||||||
$TMPDIR/install-darwin-multi-user.sh \
|
$TMPDIR/install-darwin-multi-user.sh \
|
||||||
$TMPDIR/install-systemd-multi-user.sh \
|
$TMPDIR/install-systemd-multi-user.sh \
|
||||||
$TMPDIR/install-multi-user \
|
$TMPDIR/install-multi-user \
|
||||||
$TMPDIR/reginfo \
|
$TMPDIR/reginfo \
|
||||||
$(cat ${installerClosureInfo}/store-paths)
|
$(cat ${installerClosureInfo}/store-paths)
|
||||||
'';
|
'';
|
||||||
|
|
||||||
|
overlayFor = getStdenv: final: prev:
|
||||||
|
let currentStdenv = getStdenv final; in
|
||||||
|
{
|
||||||
|
nixStable = prev.nix;
|
||||||
|
|
||||||
|
# Forward from the previous stage as we don’t want it to pick the lowdown override
|
||||||
|
nixUnstable = prev.nixUnstable;
|
||||||
|
|
||||||
|
nix = with final; with commonDeps pkgs; currentStdenv.mkDerivation {
|
||||||
|
name = "nix-${version}";
|
||||||
|
inherit version;
|
||||||
|
|
||||||
|
src = self;
|
||||||
|
|
||||||
|
VERSION_SUFFIX = versionSuffix;
|
||||||
|
|
||||||
|
outputs = [ "out" "dev" "doc" ];
|
||||||
|
|
||||||
|
nativeBuildInputs = nativeBuildDeps;
|
||||||
|
buildInputs = buildDeps ++ awsDeps;
|
||||||
|
|
||||||
|
propagatedBuildInputs = propagatedDeps;
|
||||||
|
|
||||||
|
disallowedReferences = [ boost ];
|
||||||
|
|
||||||
|
preConfigure =
|
||||||
|
''
|
||||||
|
# Copy libboost_context so we don't get all of Boost in our closure.
|
||||||
|
# https://github.com/NixOS/nixpkgs/issues/45462
|
||||||
|
mkdir -p $out/lib
|
||||||
|
cp -pd ${boost}/lib/{libboost_context*,libboost_thread*,libboost_system*} $out/lib
|
||||||
|
rm -f $out/lib/*.a
|
||||||
|
${lib.optionalString currentStdenv.isLinux ''
|
||||||
|
chmod u+w $out/lib/*.so.*
|
||||||
|
patchelf --set-rpath $out/lib:${currentStdenv.cc.cc.lib}/lib $out/lib/libboost_thread.so.*
|
||||||
|
''}
|
||||||
|
${lib.optionalString currentStdenv.isDarwin ''
|
||||||
|
for LIB in $out/lib/*.dylib; do
|
||||||
|
chmod u+w $LIB
|
||||||
|
install_name_tool -id $LIB $LIB
|
||||||
|
done
|
||||||
|
install_name_tool -change ${boost}/lib/libboost_system.dylib $out/lib/libboost_system.dylib $out/lib/libboost_thread.dylib
|
||||||
|
''}
|
||||||
|
'';
|
||||||
|
|
||||||
|
configureFlags = configureFlags ++
|
||||||
|
[ "--sysconfdir=/etc" ];
|
||||||
|
|
||||||
|
enableParallelBuilding = true;
|
||||||
|
|
||||||
|
makeFlags = "profiledir=$(out)/etc/profile.d PRECOMPILE_HEADERS=1";
|
||||||
|
|
||||||
|
doCheck = true;
|
||||||
|
|
||||||
|
installFlags = "sysconfdir=$(out)/etc";
|
||||||
|
|
||||||
|
postInstall = ''
|
||||||
|
mkdir -p $doc/nix-support
|
||||||
|
echo "doc manual $doc/share/doc/nix/manual" >> $doc/nix-support/hydra-build-products
|
||||||
|
${lib.optionalString currentStdenv.isDarwin ''
|
||||||
|
install_name_tool \
|
||||||
|
-change ${boost}/lib/libboost_context.dylib \
|
||||||
|
$out/lib/libboost_context.dylib \
|
||||||
|
$out/lib/libnixutil.dylib
|
||||||
|
''}
|
||||||
|
'';
|
||||||
|
|
||||||
|
doInstallCheck = true;
|
||||||
|
installCheckFlags = "sysconfdir=$(out)/etc";
|
||||||
|
|
||||||
|
separateDebugInfo = true;
|
||||||
|
|
||||||
|
strictDeps = true;
|
||||||
|
|
||||||
|
passthru.perl-bindings = with final; currentStdenv.mkDerivation {
|
||||||
|
name = "nix-perl-${version}";
|
||||||
|
|
||||||
|
src = self;
|
||||||
|
|
||||||
|
nativeBuildInputs =
|
||||||
|
[ buildPackages.autoconf-archive
|
||||||
|
buildPackages.autoreconfHook
|
||||||
|
buildPackages.pkg-config
|
||||||
|
];
|
||||||
|
|
||||||
|
buildInputs =
|
||||||
|
[ nix
|
||||||
|
curl
|
||||||
|
bzip2
|
||||||
|
xz
|
||||||
|
pkgs.perl
|
||||||
|
boost
|
||||||
|
]
|
||||||
|
++ lib.optional (currentStdenv.isLinux || currentStdenv.isDarwin) libsodium
|
||||||
|
++ lib.optional currentStdenv.isDarwin darwin.apple_sdk.frameworks.Security;
|
||||||
|
|
||||||
|
configureFlags = ''
|
||||||
|
--with-dbi=${perlPackages.DBI}/${pkgs.perl.libPrefix}
|
||||||
|
--with-dbd-sqlite=${perlPackages.DBDSQLite}/${pkgs.perl.libPrefix}
|
||||||
|
'';
|
||||||
|
|
||||||
|
enableParallelBuilding = true;
|
||||||
|
|
||||||
|
postUnpack = "sourceRoot=$sourceRoot/perl";
|
||||||
|
};
|
||||||
|
|
||||||
|
};
|
||||||
|
|
||||||
|
lowdown-nix = with final; currentStdenv.mkDerivation rec {
|
||||||
|
name = "lowdown-0.9.0";
|
||||||
|
|
||||||
|
src = lowdown-src;
|
||||||
|
|
||||||
|
outputs = [ "out" "bin" "dev" ];
|
||||||
|
|
||||||
|
nativeBuildInputs = [ buildPackages.which ];
|
||||||
|
|
||||||
|
configurePhase = ''
|
||||||
|
${if (currentStdenv.isDarwin && currentStdenv.isAarch64) then "echo \"HAVE_SANDBOX_INIT=false\" > configure.local" else ""}
|
||||||
|
./configure \
|
||||||
|
PREFIX=${placeholder "dev"} \
|
||||||
|
BINDIR=${placeholder "bin"}/bin
|
||||||
|
'';
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
in {
|
in {
|
||||||
|
|
||||||
# A Nixpkgs overlay that overrides the 'nix' and
|
# A Nixpkgs overlay that overrides the 'nix' and
|
||||||
# 'nix.perl-bindings' packages.
|
# 'nix.perl-bindings' packages.
|
||||||
overlay = final: prev: {
|
overlay = overlayFor (p: p.stdenv);
|
||||||
|
|
||||||
# An older version of Nix to test against when using the daemon.
|
|
||||||
# Currently using `nixUnstable` as the stable one doesn't respect
|
|
||||||
# `NIX_DAEMON_SOCKET_PATH` which is needed for the tests.
|
|
||||||
nixStable = prev.nix;
|
|
||||||
|
|
||||||
nix = with final; with commonDeps pkgs; stdenv.mkDerivation {
|
|
||||||
name = "nix-${version}";
|
|
||||||
inherit version;
|
|
||||||
|
|
||||||
src = self;
|
|
||||||
|
|
||||||
VERSION_SUFFIX = versionSuffix;
|
|
||||||
|
|
||||||
outputs = [ "out" "dev" "doc" ];
|
|
||||||
|
|
||||||
nativeBuildInputs = nativeBuildDeps;
|
|
||||||
buildInputs = buildDeps ++ awsDeps;
|
|
||||||
|
|
||||||
propagatedBuildInputs = propagatedDeps;
|
|
||||||
|
|
||||||
preConfigure =
|
|
||||||
''
|
|
||||||
# Copy libboost_context so we don't get all of Boost in our closure.
|
|
||||||
# https://github.com/NixOS/nixpkgs/issues/45462
|
|
||||||
mkdir -p $out/lib
|
|
||||||
cp -pd ${boost}/lib/{libboost_context*,libboost_thread*,libboost_system*} $out/lib
|
|
||||||
rm -f $out/lib/*.a
|
|
||||||
${lib.optionalString stdenv.isLinux ''
|
|
||||||
chmod u+w $out/lib/*.so.*
|
|
||||||
patchelf --set-rpath $out/lib:${stdenv.cc.cc.lib}/lib $out/lib/libboost_thread.so.*
|
|
||||||
''}
|
|
||||||
'';
|
|
||||||
|
|
||||||
configureFlags = configureFlags ++
|
|
||||||
[ "--sysconfdir=/etc" ];
|
|
||||||
|
|
||||||
enableParallelBuilding = true;
|
|
||||||
|
|
||||||
makeFlags = "profiledir=$(out)/etc/profile.d PRECOMPILE_HEADERS=1";
|
|
||||||
|
|
||||||
doCheck = true;
|
|
||||||
|
|
||||||
installFlags = "sysconfdir=$(out)/etc";
|
|
||||||
|
|
||||||
postInstall = ''
|
|
||||||
mkdir -p $doc/nix-support
|
|
||||||
echo "doc manual $doc/share/doc/nix/manual" >> $doc/nix-support/hydra-build-products
|
|
||||||
'';
|
|
||||||
|
|
||||||
doInstallCheck = true;
|
|
||||||
installCheckFlags = "sysconfdir=$(out)/etc";
|
|
||||||
|
|
||||||
separateDebugInfo = true;
|
|
||||||
|
|
||||||
strictDeps = true;
|
|
||||||
|
|
||||||
passthru.perl-bindings = with final; stdenv.mkDerivation {
|
|
||||||
name = "nix-perl-${version}";
|
|
||||||
|
|
||||||
src = self;
|
|
||||||
|
|
||||||
nativeBuildInputs =
|
|
||||||
[ buildPackages.autoconf-archive
|
|
||||||
buildPackages.autoreconfHook
|
|
||||||
buildPackages.pkgconfig
|
|
||||||
];
|
|
||||||
|
|
||||||
buildInputs =
|
|
||||||
[ nix
|
|
||||||
curl
|
|
||||||
bzip2
|
|
||||||
xz
|
|
||||||
pkgs.perl
|
|
||||||
boost
|
|
||||||
]
|
|
||||||
++ lib.optional (stdenv.isLinux || stdenv.isDarwin) libsodium
|
|
||||||
++ lib.optional stdenv.isDarwin darwin.apple_sdk.frameworks.Security;
|
|
||||||
|
|
||||||
configureFlags = ''
|
|
||||||
--with-dbi=${perlPackages.DBI}/${pkgs.perl.libPrefix}
|
|
||||||
--with-dbd-sqlite=${perlPackages.DBDSQLite}/${pkgs.perl.libPrefix}
|
|
||||||
'';
|
|
||||||
|
|
||||||
enableParallelBuilding = true;
|
|
||||||
|
|
||||||
postUnpack = "sourceRoot=$sourceRoot/perl";
|
|
||||||
};
|
|
||||||
|
|
||||||
};
|
|
||||||
|
|
||||||
lowdown = with final; stdenv.mkDerivation rec {
|
|
||||||
name = "lowdown-0.8.6";
|
|
||||||
|
|
||||||
/*
|
|
||||||
src = fetchurl {
|
|
||||||
url = "https://kristaps.bsd.lv/lowdown/snapshots/${name}.tar.gz";
|
|
||||||
hash = "sha512-U9WeGoInT9vrawwa57t6u9dEdRge4/P+0wLxmQyOL9nhzOEUU2FRz2Be9H0dCjYE7p2v3vCXIYk40M+jjULATw==";
|
|
||||||
};
|
|
||||||
*/
|
|
||||||
|
|
||||||
src = lowdown-src;
|
|
||||||
|
|
||||||
outputs = [ "out" "bin" "dev" ];
|
|
||||||
|
|
||||||
nativeBuildInputs = [ buildPackages.which ];
|
|
||||||
|
|
||||||
configurePhase = ''
|
|
||||||
${if (stdenv.isDarwin && stdenv.isAarch64) then "echo \"HAVE_SANDBOX_INIT=false\" > configure.local" else ""}
|
|
||||||
./configure \
|
|
||||||
PREFIX=${placeholder "dev"} \
|
|
||||||
BINDIR=${placeholder "bin"}/bin
|
|
||||||
'';
|
|
||||||
};
|
|
||||||
|
|
||||||
};
|
|
||||||
|
|
||||||
hydraJobs = {
|
hydraJobs = {
|
||||||
|
|
||||||
|
@ -410,6 +446,9 @@
|
||||||
installerScript = installScriptFor [ "x86_64-linux" "i686-linux" "aarch64-linux" "x86_64-darwin" "aarch64-darwin" "armv6l-linux" "armv7l-linux" ];
|
installerScript = installScriptFor [ "x86_64-linux" "i686-linux" "aarch64-linux" "x86_64-darwin" "aarch64-darwin" "armv6l-linux" "armv7l-linux" ];
|
||||||
installerScriptForGHA = installScriptFor [ "x86_64-linux" "x86_64-darwin" "armv6l-linux" "armv7l-linux"];
|
installerScriptForGHA = installScriptFor [ "x86_64-linux" "x86_64-darwin" "armv6l-linux" "armv7l-linux"];
|
||||||
|
|
||||||
|
# docker image with Nix inside
|
||||||
|
dockerImage = nixpkgs.lib.genAttrs linux64BitSystems (system: self.packages.${system}.dockerImage);
|
||||||
|
|
||||||
# Line coverage analysis.
|
# Line coverage analysis.
|
||||||
coverage =
|
coverage =
|
||||||
with nixpkgsFor.x86_64-linux;
|
with nixpkgsFor.x86_64-linux;
|
||||||
|
@ -450,12 +489,24 @@
|
||||||
inherit (self) overlay;
|
inherit (self) overlay;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
tests.nssPreload = (import ./tests/nss-preload.nix rec {
|
||||||
|
system = "x86_64-linux";
|
||||||
|
inherit nixpkgs;
|
||||||
|
inherit (self) overlay;
|
||||||
|
});
|
||||||
|
|
||||||
tests.githubFlakes = (import ./tests/github-flakes.nix rec {
|
tests.githubFlakes = (import ./tests/github-flakes.nix rec {
|
||||||
system = "x86_64-linux";
|
system = "x86_64-linux";
|
||||||
inherit nixpkgs;
|
inherit nixpkgs;
|
||||||
inherit (self) overlay;
|
inherit (self) overlay;
|
||||||
});
|
});
|
||||||
|
|
||||||
|
tests.sourcehutFlakes = (import ./tests/sourcehut-flakes.nix rec {
|
||||||
|
system = "x86_64-linux";
|
||||||
|
inherit nixpkgs;
|
||||||
|
inherit (self) overlay;
|
||||||
|
});
|
||||||
|
|
||||||
tests.setuid = nixpkgs.lib.genAttrs
|
tests.setuid = nixpkgs.lib.genAttrs
|
||||||
["i686-linux" "x86_64-linux"]
|
["i686-linux" "x86_64-linux"]
|
||||||
(system:
|
(system:
|
||||||
|
@ -464,44 +515,46 @@
|
||||||
inherit (self) overlay;
|
inherit (self) overlay;
|
||||||
});
|
});
|
||||||
|
|
||||||
/*
|
# Make sure that nix-env still produces the exact same result
|
||||||
# Check whether we can still evaluate all of Nixpkgs.
|
# on a particular version of Nixpkgs.
|
||||||
tests.evalNixpkgs =
|
tests.evalNixpkgs =
|
||||||
import (nixpkgs + "/pkgs/top-level/make-tarball.nix") {
|
|
||||||
# FIXME: fix pkgs/top-level/make-tarball.nix in NixOS to not require a revCount.
|
|
||||||
inherit nixpkgs;
|
|
||||||
pkgs = nixpkgsFor.x86_64-linux;
|
|
||||||
officialRelease = false;
|
|
||||||
};
|
|
||||||
|
|
||||||
# Check whether we can still evaluate NixOS.
|
|
||||||
tests.evalNixOS =
|
|
||||||
with nixpkgsFor.x86_64-linux;
|
with nixpkgsFor.x86_64-linux;
|
||||||
runCommand "eval-nixos" { buildInputs = [ nix ]; }
|
runCommand "eval-nixos" { buildInputs = [ nix ]; }
|
||||||
''
|
''
|
||||||
export NIX_STATE_DIR=$TMPDIR
|
type -p nix-env
|
||||||
|
# Note: we're filtering out nixos-install-tools because https://github.com/NixOS/nixpkgs/pull/153594#issuecomment-1020530593.
|
||||||
nix-instantiate ${nixpkgs}/nixos/release-combined.nix -A tested --dry-run \
|
time nix-env --store dummy:// -f ${nixpkgs-regression} -qaP --drv-path | sort | grep -v nixos-install-tools > packages
|
||||||
--arg nixpkgs '{ outPath = ${nixpkgs}; revCount = 123; shortRev = "abcdefgh"; }'
|
[[ $(sha1sum < packages | cut -c1-40) = ff451c521e61e4fe72bdbe2d0ca5d1809affa733 ]]
|
||||||
|
mkdir $out
|
||||||
touch $out
|
|
||||||
'';
|
'';
|
||||||
*/
|
|
||||||
|
metrics.nixpkgs = import "${nixpkgs-regression}/pkgs/top-level/metrics.nix" {
|
||||||
|
pkgs = nixpkgsFor.x86_64-linux;
|
||||||
|
nixpkgs = nixpkgs-regression;
|
||||||
|
};
|
||||||
|
|
||||||
|
installTests = forAllSystems (system:
|
||||||
|
let pkgs = nixpkgsFor.${system}; in
|
||||||
|
pkgs.runCommand "install-tests" {
|
||||||
|
againstSelf = testNixVersions pkgs pkgs.nix pkgs.pkgs.nix;
|
||||||
|
againstCurrentUnstable =
|
||||||
|
# FIXME: temporarily disable this on macOS because of #3605.
|
||||||
|
if system == "x86_64-linux"
|
||||||
|
then testNixVersions pkgs pkgs.nix pkgs.nixUnstable
|
||||||
|
else null;
|
||||||
|
# Disabled because the latest stable version doesn't handle
|
||||||
|
# `NIX_DAEMON_SOCKET_PATH` which is required for the tests to work
|
||||||
|
# againstLatestStable = testNixVersions pkgs pkgs.nix pkgs.nixStable;
|
||||||
|
} "touch $out");
|
||||||
|
|
||||||
};
|
};
|
||||||
|
|
||||||
checks = forAllSystems (system: {
|
checks = forAllSystems (system: {
|
||||||
binaryTarball = self.hydraJobs.binaryTarball.${system};
|
binaryTarball = self.hydraJobs.binaryTarball.${system};
|
||||||
perlBindings = self.hydraJobs.perlBindings.${system};
|
perlBindings = self.hydraJobs.perlBindings.${system};
|
||||||
installTests =
|
installTests = self.hydraJobs.installTests.${system};
|
||||||
let pkgs = nixpkgsFor.${system}; in
|
} // (nixpkgs.lib.optionalAttrs (builtins.elem system linux64BitSystems)) {
|
||||||
pkgs.runCommand "install-tests" {
|
dockerImage = self.hydraJobs.dockerImage.${system};
|
||||||
againstSelf = testNixVersions pkgs pkgs.nix pkgs.pkgs.nix;
|
|
||||||
againstCurrentUnstable = testNixVersions pkgs pkgs.nix pkgs.nixUnstable;
|
|
||||||
# Disabled because the latest stable version doesn't handle
|
|
||||||
# `NIX_DAEMON_SOCKET_PATH` which is required for the tests to work
|
|
||||||
# againstLatestStable = testNixVersions pkgs pkgs.nix pkgs.nixStable;
|
|
||||||
} "touch $out";
|
|
||||||
});
|
});
|
||||||
|
|
||||||
packages = forAllSystems (system: {
|
packages = forAllSystems (system: {
|
||||||
|
@ -547,6 +600,20 @@
|
||||||
|
|
||||||
hardeningDisable = [ "pie" ];
|
hardeningDisable = [ "pie" ];
|
||||||
};
|
};
|
||||||
|
dockerImage =
|
||||||
|
let
|
||||||
|
pkgs = nixpkgsFor.${system};
|
||||||
|
image = import ./docker.nix { inherit pkgs; tag = version; };
|
||||||
|
in
|
||||||
|
pkgs.runCommand
|
||||||
|
"docker-image-tarball-${version}"
|
||||||
|
{ meta.description = "Docker image with Nix for ${system}"; }
|
||||||
|
''
|
||||||
|
mkdir -p $out/nix-support
|
||||||
|
image=$out/image.tar.gz
|
||||||
|
ln -s ${image} $image
|
||||||
|
echo "file binary-dist $image" >> $out/nix-support/hydra-build-products
|
||||||
|
'';
|
||||||
} // builtins.listToAttrs (map (crossSystem: {
|
} // builtins.listToAttrs (map (crossSystem: {
|
||||||
name = "nix-${crossSystem}";
|
name = "nix-${crossSystem}";
|
||||||
value = let
|
value = let
|
||||||
|
@ -586,15 +653,21 @@
|
||||||
doInstallCheck = true;
|
doInstallCheck = true;
|
||||||
installCheckFlags = "sysconfdir=$(out)/etc";
|
installCheckFlags = "sysconfdir=$(out)/etc";
|
||||||
};
|
};
|
||||||
}) crossSystems)));
|
}) crossSystems)) // (builtins.listToAttrs (map (stdenvName:
|
||||||
|
nixpkgsFor.${system}.lib.nameValuePair
|
||||||
|
"nix-${stdenvName}"
|
||||||
|
nixpkgsFor.${system}."${stdenvName}Packages".nix
|
||||||
|
) stdenvs)));
|
||||||
|
|
||||||
defaultPackage = forAllSystems (system: self.packages.${system}.nix);
|
defaultPackage = forAllSystems (system: self.packages.${system}.nix);
|
||||||
|
|
||||||
devShell = forAllSystems (system:
|
devShell = forAllSystems (system: self.devShells.${system}.stdenvPackages);
|
||||||
|
|
||||||
|
devShells = forAllSystemsAndStdenvs (system: stdenv:
|
||||||
with nixpkgsFor.${system};
|
with nixpkgsFor.${system};
|
||||||
with commonDeps pkgs;
|
with commonDeps pkgs;
|
||||||
|
|
||||||
stdenv.mkDerivation {
|
nixpkgsFor.${system}.${stdenv}.mkDerivation {
|
||||||
name = "nix";
|
name = "nix";
|
||||||
|
|
||||||
outputs = [ "out" "dev" "doc" ];
|
outputs = [ "out" "dev" "doc" ];
|
||||||
|
@ -613,6 +686,9 @@
|
||||||
PATH=$prefix/bin:$PATH
|
PATH=$prefix/bin:$PATH
|
||||||
unset PYTHONPATH
|
unset PYTHONPATH
|
||||||
export MANPATH=$out/share/man:$MANPATH
|
export MANPATH=$out/share/man:$MANPATH
|
||||||
|
|
||||||
|
# Make bash completion work.
|
||||||
|
XDG_DATA_DIRS+=:$out/share
|
||||||
'';
|
'';
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|
|
@ -19,6 +19,8 @@ my $nixpkgsDir = "/home/eelco/Dev/nixpkgs-pristine";
|
||||||
|
|
||||||
my $TMPDIR = $ENV{'TMPDIR'} // "/tmp";
|
my $TMPDIR = $ENV{'TMPDIR'} // "/tmp";
|
||||||
|
|
||||||
|
my $isLatest = ($ENV{'IS_LATEST'} // "") eq "1";
|
||||||
|
|
||||||
# FIXME: cut&paste from nixos-channel-scripts.
|
# FIXME: cut&paste from nixos-channel-scripts.
|
||||||
sub fetch {
|
sub fetch {
|
||||||
my ($url, $type) = @_;
|
my ($url, $type) = @_;
|
||||||
|
@ -35,22 +37,29 @@ sub fetch {
|
||||||
my $evalUrl = "https://hydra.nixos.org/eval/$evalId";
|
my $evalUrl = "https://hydra.nixos.org/eval/$evalId";
|
||||||
my $evalInfo = decode_json(fetch($evalUrl, 'application/json'));
|
my $evalInfo = decode_json(fetch($evalUrl, 'application/json'));
|
||||||
#print Dumper($evalInfo);
|
#print Dumper($evalInfo);
|
||||||
|
my $flakeUrl = $evalInfo->{flake} or die;
|
||||||
|
my $flakeInfo = decode_json(`nix flake metadata --json "$flakeUrl"` or die);
|
||||||
|
my $nixRev = $flakeInfo->{revision} or die;
|
||||||
|
|
||||||
my $nixRev = $evalInfo->{jobsetevalinputs}->{nix}->{revision} or die;
|
my $buildInfo = decode_json(fetch("$evalUrl/job/build.x86_64-linux", 'application/json'));
|
||||||
|
#print Dumper($buildInfo);
|
||||||
|
|
||||||
my $tarballInfo = decode_json(fetch("$evalUrl/job/tarball", 'application/json'));
|
my $releaseName = $buildInfo->{nixname};
|
||||||
|
|
||||||
my $releaseName = $tarballInfo->{releasename};
|
|
||||||
$releaseName =~ /nix-(.*)$/ or die;
|
$releaseName =~ /nix-(.*)$/ or die;
|
||||||
my $version = $1;
|
my $version = $1;
|
||||||
|
|
||||||
print STDERR "Nix revision is $nixRev, version is $version\n";
|
print STDERR "Flake URL is $flakeUrl, Nix revision is $nixRev, version is $version\n";
|
||||||
|
|
||||||
my $releaseDir = "nix/$releaseName";
|
my $releaseDir = "nix/$releaseName";
|
||||||
|
|
||||||
my $tmpDir = "$TMPDIR/nix-release/$releaseName";
|
my $tmpDir = "$TMPDIR/nix-release/$releaseName";
|
||||||
File::Path::make_path($tmpDir);
|
File::Path::make_path($tmpDir);
|
||||||
|
|
||||||
|
my $narCache = "$TMPDIR/nar-cache";
|
||||||
|
File::Path::make_path($narCache);
|
||||||
|
|
||||||
|
my $binaryCache = "https://cache.nixos.org/?local-nar-cache=$narCache";
|
||||||
|
|
||||||
# S3 setup.
|
# S3 setup.
|
||||||
my $aws_access_key_id = $ENV{'AWS_ACCESS_KEY_ID'} or die "No AWS_ACCESS_KEY_ID given.";
|
my $aws_access_key_id = $ENV{'AWS_ACCESS_KEY_ID'} or die "No AWS_ACCESS_KEY_ID given.";
|
||||||
my $aws_secret_access_key = $ENV{'AWS_SECRET_ACCESS_KEY'} or die "No AWS_SECRET_ACCESS_KEY given.";
|
my $aws_secret_access_key = $ENV{'AWS_SECRET_ACCESS_KEY'} or die "No AWS_SECRET_ACCESS_KEY given.";
|
||||||
|
@ -76,6 +85,7 @@ sub downloadFile {
|
||||||
my ($jobName, $productNr, $dstName) = @_;
|
my ($jobName, $productNr, $dstName) = @_;
|
||||||
|
|
||||||
my $buildInfo = decode_json(fetch("$evalUrl/job/$jobName", 'application/json'));
|
my $buildInfo = decode_json(fetch("$evalUrl/job/$jobName", 'application/json'));
|
||||||
|
#print STDERR "$jobName: ", Dumper($buildInfo), "\n";
|
||||||
|
|
||||||
my $srcFile = $buildInfo->{buildproducts}->{$productNr}->{path} or die "job '$jobName' lacks product $productNr\n";
|
my $srcFile = $buildInfo->{buildproducts}->{$productNr}->{path} or die "job '$jobName' lacks product $productNr\n";
|
||||||
$dstName //= basename($srcFile);
|
$dstName //= basename($srcFile);
|
||||||
|
@ -83,19 +93,27 @@ sub downloadFile {
|
||||||
|
|
||||||
if (!-e $tmpFile) {
|
if (!-e $tmpFile) {
|
||||||
print STDERR "downloading $srcFile to $tmpFile...\n";
|
print STDERR "downloading $srcFile to $tmpFile...\n";
|
||||||
system("NIX_REMOTE=https://cache.nixos.org/ nix store cat '$srcFile' > '$tmpFile'") == 0
|
|
||||||
|
my $fileInfo = decode_json(`NIX_REMOTE=$binaryCache nix store ls --json '$srcFile'`);
|
||||||
|
|
||||||
|
$srcFile = $fileInfo->{target} if $fileInfo->{type} eq 'symlink';
|
||||||
|
|
||||||
|
#print STDERR $srcFile, " ", Dumper($fileInfo), "\n";
|
||||||
|
|
||||||
|
system("NIX_REMOTE=$binaryCache nix store cat '$srcFile' > '$tmpFile'.tmp") == 0
|
||||||
or die "unable to fetch $srcFile\n";
|
or die "unable to fetch $srcFile\n";
|
||||||
|
rename("$tmpFile.tmp", $tmpFile) or die;
|
||||||
}
|
}
|
||||||
|
|
||||||
my $sha256_expected = $buildInfo->{buildproducts}->{$productNr}->{sha256hash} or die;
|
my $sha256_expected = $buildInfo->{buildproducts}->{$productNr}->{sha256hash};
|
||||||
my $sha256_actual = `nix hash file --base16 --type sha256 '$tmpFile'`;
|
my $sha256_actual = `nix hash file --base16 --type sha256 '$tmpFile'`;
|
||||||
chomp $sha256_actual;
|
chomp $sha256_actual;
|
||||||
if ($sha256_expected ne $sha256_actual) {
|
if (defined($sha256_expected) && $sha256_expected ne $sha256_actual) {
|
||||||
print STDERR "file $tmpFile is corrupt, got $sha256_actual, expected $sha256_expected\n";
|
print STDERR "file $tmpFile is corrupt, got $sha256_actual, expected $sha256_expected\n";
|
||||||
exit 1;
|
exit 1;
|
||||||
}
|
}
|
||||||
|
|
||||||
write_file("$tmpFile.sha256", $sha256_expected);
|
write_file("$tmpFile.sha256", $sha256_actual);
|
||||||
|
|
||||||
if (! -e "$tmpFile.asc") {
|
if (! -e "$tmpFile.asc") {
|
||||||
system("gpg2 --detach-sign --armor $tmpFile") == 0 or die "unable to sign $tmpFile\n";
|
system("gpg2 --detach-sign --armor $tmpFile") == 0 or die "unable to sign $tmpFile\n";
|
||||||
|
@ -104,8 +122,6 @@ sub downloadFile {
|
||||||
return $sha256_expected;
|
return $sha256_expected;
|
||||||
}
|
}
|
||||||
|
|
||||||
downloadFile("tarball", "2"); # .tar.bz2
|
|
||||||
my $tarballHash = downloadFile("tarball", "3"); # .tar.xz
|
|
||||||
downloadFile("binaryTarball.i686-linux", "1");
|
downloadFile("binaryTarball.i686-linux", "1");
|
||||||
downloadFile("binaryTarball.x86_64-linux", "1");
|
downloadFile("binaryTarball.x86_64-linux", "1");
|
||||||
downloadFile("binaryTarball.aarch64-linux", "1");
|
downloadFile("binaryTarball.aarch64-linux", "1");
|
||||||
|
@ -115,6 +131,60 @@ downloadFile("binaryTarballCross.x86_64-linux.armv6l-linux", "1");
|
||||||
downloadFile("binaryTarballCross.x86_64-linux.armv7l-linux", "1");
|
downloadFile("binaryTarballCross.x86_64-linux.armv7l-linux", "1");
|
||||||
downloadFile("installerScript", "1");
|
downloadFile("installerScript", "1");
|
||||||
|
|
||||||
|
# Upload docker images to dockerhub.
|
||||||
|
my $dockerManifest = "";
|
||||||
|
my $dockerManifestLatest = "";
|
||||||
|
|
||||||
|
for my $platforms (["x86_64-linux", "amd64"], ["aarch64-linux", "arm64"]) {
|
||||||
|
my $system = $platforms->[0];
|
||||||
|
my $dockerPlatform = $platforms->[1];
|
||||||
|
my $fn = "nix-$version-docker-image-$dockerPlatform.tar.gz";
|
||||||
|
downloadFile("dockerImage.$system", "1", $fn);
|
||||||
|
|
||||||
|
print STDERR "loading docker image for $dockerPlatform...\n";
|
||||||
|
system("docker load -i $tmpDir/$fn") == 0 or die;
|
||||||
|
|
||||||
|
my $tag = "nixos/nix:$version-$dockerPlatform";
|
||||||
|
my $latestTag = "nixos/nix:latest-$dockerPlatform";
|
||||||
|
|
||||||
|
print STDERR "tagging $version docker image for $dockerPlatform...\n";
|
||||||
|
system("docker tag nix:$version $tag") == 0 or die;
|
||||||
|
|
||||||
|
if ($isLatest) {
|
||||||
|
print STDERR "tagging latest docker image for $dockerPlatform...\n";
|
||||||
|
system("docker tag nix:$version $latestTag") == 0 or die;
|
||||||
|
}
|
||||||
|
|
||||||
|
print STDERR "pushing $version docker image for $dockerPlatform...\n";
|
||||||
|
system("docker push -q $tag") == 0 or die;
|
||||||
|
|
||||||
|
if ($isLatest) {
|
||||||
|
print STDERR "pushing latest docker image for $dockerPlatform...\n";
|
||||||
|
system("docker push -q $latestTag") == 0 or die;
|
||||||
|
}
|
||||||
|
|
||||||
|
$dockerManifest .= " --amend $tag";
|
||||||
|
$dockerManifestLatest .= " --amend $latestTag"
|
||||||
|
}
|
||||||
|
|
||||||
|
print STDERR "creating multi-platform docker manifest...\n";
|
||||||
|
system("docker manifest rm nixos/nix:$version");
|
||||||
|
system("docker manifest create nixos/nix:$version $dockerManifest") == 0 or die;
|
||||||
|
if ($isLatest) {
|
||||||
|
print STDERR "creating latest multi-platform docker manifest...\n";
|
||||||
|
system("docker manifest rm nixos/nix:latest");
|
||||||
|
system("docker manifest create nixos/nix:latest $dockerManifestLatest") == 0 or die;
|
||||||
|
}
|
||||||
|
|
||||||
|
print STDERR "pushing multi-platform docker manifest...\n";
|
||||||
|
system("docker manifest push nixos/nix:$version") == 0 or die;
|
||||||
|
|
||||||
|
if ($isLatest) {
|
||||||
|
print STDERR "pushing latest multi-platform docker manifest...\n";
|
||||||
|
system("docker manifest push nixos/nix:latest") == 0 or die;
|
||||||
|
}
|
||||||
|
|
||||||
|
# Upload release files to S3.
|
||||||
for my $fn (glob "$tmpDir/*") {
|
for my $fn (glob "$tmpDir/*") {
|
||||||
my $name = basename($fn);
|
my $name = basename($fn);
|
||||||
my $dstKey = "$releaseDir/" . $name;
|
my $dstKey = "$releaseDir/" . $name;
|
||||||
|
@ -134,42 +204,38 @@ for my $fn (glob "$tmpDir/*") {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
exit if $version =~ /pre/;
|
|
||||||
|
|
||||||
# Update nix-fallback-paths.nix.
|
# Update nix-fallback-paths.nix.
|
||||||
system("cd $nixpkgsDir && git pull") == 0 or die;
|
if ($isLatest) {
|
||||||
|
system("cd $nixpkgsDir && git pull") == 0 or die;
|
||||||
|
|
||||||
sub getStorePath {
|
sub getStorePath {
|
||||||
my ($jobName) = @_;
|
my ($jobName) = @_;
|
||||||
my $buildInfo = decode_json(fetch("$evalUrl/job/$jobName", 'application/json'));
|
my $buildInfo = decode_json(fetch("$evalUrl/job/$jobName", 'application/json'));
|
||||||
for my $product (values %{$buildInfo->{buildproducts}}) {
|
return $buildInfo->{buildoutputs}->{out}->{path} or die "cannot get store path for '$jobName'";
|
||||||
next unless $product->{type} eq "nix-build";
|
|
||||||
next if $product->{path} =~ /[a-z]+$/;
|
|
||||||
return $product->{path};
|
|
||||||
}
|
}
|
||||||
die;
|
|
||||||
|
write_file("$nixpkgsDir/nixos/modules/installer/tools/nix-fallback-paths.nix",
|
||||||
|
"{\n" .
|
||||||
|
" x86_64-linux = \"" . getStorePath("build.x86_64-linux") . "\";\n" .
|
||||||
|
" i686-linux = \"" . getStorePath("build.i686-linux") . "\";\n" .
|
||||||
|
" aarch64-linux = \"" . getStorePath("build.aarch64-linux") . "\";\n" .
|
||||||
|
" x86_64-darwin = \"" . getStorePath("build.x86_64-darwin") . "\";\n" .
|
||||||
|
" aarch64-darwin = \"" . getStorePath("build.aarch64-darwin") . "\";\n" .
|
||||||
|
"}\n");
|
||||||
|
|
||||||
|
system("cd $nixpkgsDir && git commit -a -m 'nix-fallback-paths.nix: Update to $version'") == 0 or die;
|
||||||
}
|
}
|
||||||
|
|
||||||
write_file("$nixpkgsDir/nixos/modules/installer/tools/nix-fallback-paths.nix",
|
|
||||||
"{\n" .
|
|
||||||
" x86_64-linux = \"" . getStorePath("build.x86_64-linux") . "\";\n" .
|
|
||||||
" i686-linux = \"" . getStorePath("build.i686-linux") . "\";\n" .
|
|
||||||
" aarch64-linux = \"" . getStorePath("build.aarch64-linux") . "\";\n" .
|
|
||||||
" x86_64-darwin = \"" . getStorePath("build.x86_64-darwin") . "\";\n" .
|
|
||||||
" aarch64-darwin = \"" . getStorePath("build.aarch64-darwin") . "\";\n" .
|
|
||||||
"}\n");
|
|
||||||
|
|
||||||
system("cd $nixpkgsDir && git commit -a -m 'nix-fallback-paths.nix: Update to $version'") == 0 or die;
|
|
||||||
|
|
||||||
# Update the "latest" symlink.
|
# Update the "latest" symlink.
|
||||||
$channelsBucket->add_key(
|
$channelsBucket->add_key(
|
||||||
"nix-latest/install", "",
|
"nix-latest/install", "",
|
||||||
{ "x-amz-website-redirect-location" => "https://releases.nixos.org/$releaseDir/install" })
|
{ "x-amz-website-redirect-location" => "https://releases.nixos.org/$releaseDir/install" })
|
||||||
or die $channelsBucket->err . ": " . $channelsBucket->errstr;
|
or die $channelsBucket->err . ": " . $channelsBucket->errstr
|
||||||
|
if $isLatest;
|
||||||
|
|
||||||
# Tag the release in Git.
|
# Tag the release in Git.
|
||||||
chdir("/home/eelco/Dev/nix-pristine") or die;
|
chdir("/home/eelco/Dev/nix-pristine") or die;
|
||||||
system("git remote update origin") == 0 or die;
|
system("git remote update origin") == 0 or die;
|
||||||
system("git tag --force --sign $version $nixRev -m 'Tagging release $version'") == 0 or die;
|
system("git tag --force --sign $version $nixRev -m 'Tagging release $version'") == 0 or die;
|
||||||
system("git push --tags") == 0 or die;
|
system("git push --tags") == 0 or die;
|
||||||
system("git push --force-with-lease origin $nixRev:refs/heads/latest-release") == 0 or die;
|
system("git push --force-with-lease origin $nixRev:refs/heads/latest-release") == 0 or die if $isLatest;
|
||||||
|
|
|
@ -7,13 +7,15 @@ function _complete_nix {
|
||||||
local completion=${line%% *}
|
local completion=${line%% *}
|
||||||
if [[ -z $have_type ]]; then
|
if [[ -z $have_type ]]; then
|
||||||
have_type=1
|
have_type=1
|
||||||
if [[ $completion = filenames ]]; then
|
if [[ $completion == filenames ]]; then
|
||||||
compopt -o filenames
|
compopt -o filenames
|
||||||
|
elif [[ $completion == attrs ]]; then
|
||||||
|
compopt -o nospace
|
||||||
fi
|
fi
|
||||||
else
|
else
|
||||||
COMPREPLY+=("$completion")
|
COMPREPLY+=("$completion")
|
||||||
fi
|
fi
|
||||||
done < <(NIX_GET_COMPLETIONS=$cword "${words[@]}")
|
done < <(NIX_GET_COMPLETIONS=$cword "${words[@]/#\~/$HOME}" 2>/dev/null)
|
||||||
__ltrim_colon_completions "$cur"
|
__ltrim_colon_completions "$cur"
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -19,7 +19,6 @@ end
|
||||||
|
|
||||||
function _nix_accepts_files
|
function _nix_accepts_files
|
||||||
set -l response (_nix_complete)
|
set -l response (_nix_complete)
|
||||||
# First line is either filenames or no-filenames.
|
|
||||||
test $response[1] = 'filenames'
|
test $response[1] = 'filenames'
|
||||||
end
|
end
|
||||||
|
|
||||||
|
|
|
@ -25,5 +25,10 @@
|
||||||
<string>/var/log/nix-daemon.log</string>
|
<string>/var/log/nix-daemon.log</string>
|
||||||
<key>StandardOutPath</key>
|
<key>StandardOutPath</key>
|
||||||
<string>/dev/null</string>
|
<string>/dev/null</string>
|
||||||
|
<key>SoftResourceLimits</key>
|
||||||
|
<dict>
|
||||||
|
<key>NumberOfFiles</key>
|
||||||
|
<integer>4096</integer>
|
||||||
|
</dict>
|
||||||
</dict>
|
</dict>
|
||||||
</plist>
|
</plist>
|
||||||
|
|
|
@ -4,7 +4,7 @@ function _nix() {
|
||||||
local ifs_bk="$IFS"
|
local ifs_bk="$IFS"
|
||||||
local input=("${(Q)words[@]}")
|
local input=("${(Q)words[@]}")
|
||||||
IFS=$'\n'
|
IFS=$'\n'
|
||||||
local res=($(NIX_GET_COMPLETIONS=$((CURRENT - 1)) "$input[@]"))
|
local res=($(NIX_GET_COMPLETIONS=$((CURRENT - 1)) "$input[@]" 2>/dev/null))
|
||||||
IFS="$ifs_bk"
|
IFS="$ifs_bk"
|
||||||
local tpe="${${res[1]}%%> *}"
|
local tpe="${${res[1]}%%> *}"
|
||||||
local -a suggestions
|
local -a suggestions
|
||||||
|
|
|
@ -91,7 +91,7 @@ define build-library
|
||||||
$(1)_PATH := $$(_d)/$$($(1)_NAME).$(SO_EXT)
|
$(1)_PATH := $$(_d)/$$($(1)_NAME).$(SO_EXT)
|
||||||
|
|
||||||
$$($(1)_PATH): $$($(1)_OBJS) $$(_libs) | $$(_d)/
|
$$($(1)_PATH): $$($(1)_OBJS) $$(_libs) | $$(_d)/
|
||||||
$$(trace-ld) $(CXX) -o $$(abspath $$@) -shared $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE)) $$($(1)_LDFLAGS_UNINSTALLED) $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$($(1)_LDFLAGS_PROPAGATED)
|
$$(trace-ld) $(CXX) -o $$(abspath $$@) -shared $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$($(1)_LDFLAGS_PROPAGATED) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE)) $$($(1)_LDFLAGS_UNINSTALLED)
|
||||||
|
|
||||||
ifndef HOST_DARWIN
|
ifndef HOST_DARWIN
|
||||||
$(1)_LDFLAGS_USE += -Wl,-rpath,$$(abspath $$(_d))
|
$(1)_LDFLAGS_USE += -Wl,-rpath,$$(abspath $$(_d))
|
||||||
|
@ -105,7 +105,7 @@ define build-library
|
||||||
$$(eval $$(call create-dir, $$($(1)_INSTALL_DIR)))
|
$$(eval $$(call create-dir, $$($(1)_INSTALL_DIR)))
|
||||||
|
|
||||||
$$($(1)_INSTALL_PATH): $$($(1)_OBJS) $$(_libs_final) | $(DESTDIR)$$($(1)_INSTALL_DIR)/
|
$$($(1)_INSTALL_PATH): $$($(1)_OBJS) $$(_libs_final) | $(DESTDIR)$$($(1)_INSTALL_DIR)/
|
||||||
$$(trace-ld) $(CXX) -o $$@ -shared $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE_INSTALLED)) $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$($(1)_LDFLAGS_PROPAGATED)
|
$$(trace-ld) $(CXX) -o $$@ -shared $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$($(1)_LDFLAGS_PROPAGATED) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE_INSTALLED))
|
||||||
|
|
||||||
$(1)_LDFLAGS_USE_INSTALLED += -L$$(DESTDIR)$$($(1)_INSTALL_DIR) -l$$(patsubst lib%,%,$$(strip $$($(1)_NAME)))
|
$(1)_LDFLAGS_USE_INSTALLED += -L$$(DESTDIR)$$($(1)_INSTALL_DIR) -l$$(patsubst lib%,%,$$(strip $$($(1)_NAME)))
|
||||||
ifndef HOST_DARWIN
|
ifndef HOST_DARWIN
|
||||||
|
@ -125,8 +125,8 @@ define build-library
|
||||||
$(1)_PATH := $$(_d)/$$($(1)_NAME).a
|
$(1)_PATH := $$(_d)/$$($(1)_NAME).a
|
||||||
|
|
||||||
$$($(1)_PATH): $$($(1)_OBJS) | $$(_d)/
|
$$($(1)_PATH): $$($(1)_OBJS) | $$(_d)/
|
||||||
$(trace-ld) $(LD) -Ur -o $$(_d)/$$($(1)_NAME).o $$?
|
$$(trace-ld) $(LD) -Ur -o $$(_d)/$$($(1)_NAME).o $$?
|
||||||
$(trace-ar) $(AR) crs $$@ $$(_d)/$$($(1)_NAME).o
|
$$(trace-ar) $(AR) crs $$@ $$(_d)/$$($(1)_NAME).o
|
||||||
|
|
||||||
$(1)_LDFLAGS_USE += $$($(1)_PATH) $$($(1)_LDFLAGS)
|
$(1)_LDFLAGS_USE += $$($(1)_PATH) $$($(1)_LDFLAGS)
|
||||||
|
|
||||||
|
|
|
@ -32,7 +32,7 @@ define build-program
|
||||||
$$(eval $$(call create-dir, $$(_d)))
|
$$(eval $$(call create-dir, $$(_d)))
|
||||||
|
|
||||||
$$($(1)_PATH): $$($(1)_OBJS) $$(_libs) | $$(_d)/
|
$$($(1)_PATH): $$($(1)_OBJS) $$(_libs) | $$(_d)/
|
||||||
$$(trace-ld) $(CXX) -o $$@ $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE)) $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS)
|
$$(trace-ld) $(CXX) -o $$@ $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE))
|
||||||
|
|
||||||
$(1)_INSTALL_DIR ?= $$(bindir)
|
$(1)_INSTALL_DIR ?= $$(bindir)
|
||||||
|
|
||||||
|
@ -49,7 +49,7 @@ define build-program
|
||||||
_libs_final := $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_INSTALL_PATH))
|
_libs_final := $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_INSTALL_PATH))
|
||||||
|
|
||||||
$(DESTDIR)$$($(1)_INSTALL_PATH): $$($(1)_OBJS) $$(_libs_final) | $(DESTDIR)$$($(1)_INSTALL_DIR)/
|
$(DESTDIR)$$($(1)_INSTALL_PATH): $$($(1)_OBJS) $$(_libs_final) | $(DESTDIR)$$($(1)_INSTALL_DIR)/
|
||||||
$$(trace-ld) $(CXX) -o $$@ $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE_INSTALLED)) $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS)
|
$$(trace-ld) $(CXX) -o $$@ $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE_INSTALLED))
|
||||||
|
|
||||||
else
|
else
|
||||||
|
|
||||||
|
|
|
@ -13,3 +13,7 @@ define run-install-test
|
||||||
endef
|
endef
|
||||||
|
|
||||||
.PHONY: check installcheck
|
.PHONY: check installcheck
|
||||||
|
|
||||||
|
print-top-help += \
|
||||||
|
echo " check: Run unit tests"; \
|
||||||
|
echo " installcheck: Run functional tests";
|
||||||
|
|
399	nix-rust/Cargo.lock	generated
@ -1,399 +0,0 @@
# (deleted generated lockfile: the [[package]] and [metadata] checksum entries for the nix-rust crate's dependency tree — assert_matches, autocfg, bit-set, bit-vec, bitflags, byteorder, c2-chacha, cfg-if, cloudabi, fnv, fuchsia-cprng, getrandom, hex, lazy_static, libc, nix-rust, num-traits, ppv-lite86, proptest, quick-error, rand, rand_chacha, rand_core, rand_hc, rand_isaac, rand_jitter, rand_os, rand_pcg, rand_xorshift, rdrand, redox_syscall, regex-syntax, remove_dir_all, rusty-fork, tempfile, wait-timeout, wasi, winapi, winapi-i686-pc-windows-gnu, winapi-x86_64-pc-windows-gnu)
@ -1,23 +0,0 @@
[package]
name = "nix-rust"
version = "0.1.0"
authors = ["Eelco Dolstra <edolstra@gmail.com>"]
edition = "2018"

[lib]
name = "nixrust"
crate-type = ["cdylib"]

[dependencies]
libc = "0.2"
#futures-preview = { version = "=0.3.0-alpha.19" }
#hyper = "0.13.0-alpha.4"
#http = "0.1"
#tokio = { version = "0.2.0-alpha.6", default-features = false, features = ["rt-full"] }
lazy_static = "1.4"
#byteorder = "1.3"

[dev-dependencies]
hex = "0.3"
assert_matches = "1.3"
proptest = "0.9"
@ -1,48 +0,0 @@
ifeq ($(OPTIMIZE), 1)
  RUST_MODE = --release
  RUST_DIR = release
else
  RUST_MODE =
  RUST_DIR = debug
endif

libnixrust_PATH := $(d)/target/$(RUST_DIR)/libnixrust.$(SO_EXT)
libnixrust_INSTALL_PATH := $(libdir)/libnixrust.$(SO_EXT)
libnixrust_LDFLAGS_USE := -L$(d)/target/$(RUST_DIR) -lnixrust
libnixrust_LDFLAGS_USE_INSTALLED := -L$(libdir) -lnixrust

ifdef HOST_LINUX
libnixrust_LDFLAGS_USE += -ldl
libnixrust_LDFLAGS_USE_INSTALLED += -ldl
endif

ifdef HOST_DARWIN
libnixrust_BUILD_FLAGS = NIX_LDFLAGS="-undefined dynamic_lookup"
else
libnixrust_LDFLAGS_USE += -Wl,-rpath,$(abspath $(d)/target/$(RUST_DIR))
libnixrust_LDFLAGS_USE_INSTALLED += -Wl,-rpath,$(libdir)
endif

$(libnixrust_PATH): $(call rwildcard, $(d)/src, *.rs) $(d)/Cargo.toml
	$(trace-gen) cd nix-rust && CARGO_HOME=$$(if [[ -d vendor ]]; then echo vendor; fi) \
	  $(libnixrust_BUILD_FLAGS) \
	  cargo build $(RUST_MODE) $$(if [[ -d vendor ]]; then echo --offline; fi) \
	  && touch target/$(RUST_DIR)/libnixrust.$(SO_EXT)

$(libnixrust_INSTALL_PATH): $(libnixrust_PATH)
	$(target-gen) cp $^ $@
ifdef HOST_DARWIN
	install_name_tool -id $@ $@
endif

clean: clean-rust

clean-rust:
	$(suppress) rm -rfv nix-rust/target

ifndef HOST_DARWIN
check: rust-tests

rust-tests:
	$(trace-test) cd nix-rust && CARGO_HOME=$$(if [[ -d vendor ]]; then echo vendor; fi) cargo test --release $$(if [[ -d vendor ]]; then echo --offline; fi)
endif
@ -1,77 +0,0 @@
use super::{error, store::path, store::StorePath, util};

#[no_mangle]
pub unsafe extern "C" fn ffi_String_new(s: &str, out: *mut String) {
    // FIXME: check whether 's' is valid UTF-8?
    out.write(s.to_string())
}

#[no_mangle]
pub unsafe extern "C" fn ffi_String_drop(self_: *mut String) {
    std::ptr::drop_in_place(self_);
}

#[no_mangle]
pub extern "C" fn ffi_StorePath_new(
    path: &str,
    store_dir: &str,
) -> Result<StorePath, error::CppException> {
    StorePath::new(std::path::Path::new(path), std::path::Path::new(store_dir))
        .map_err(|err| err.into())
}

#[no_mangle]
pub extern "C" fn ffi_StorePath_new2(
    hash: &[u8; crate::store::path::STORE_PATH_HASH_BYTES],
    name: &str,
) -> Result<StorePath, error::CppException> {
    StorePath::from_parts(*hash, name).map_err(|err| err.into())
}

#[no_mangle]
pub extern "C" fn ffi_StorePath_fromBaseName(
    base_name: &str,
) -> Result<StorePath, error::CppException> {
    StorePath::new_from_base_name(base_name).map_err(|err| err.into())
}

#[no_mangle]
pub unsafe extern "C" fn ffi_StorePath_drop(self_: *mut StorePath) {
    std::ptr::drop_in_place(self_);
}

#[no_mangle]
pub extern "C" fn ffi_StorePath_to_string(self_: &StorePath) -> Vec<u8> {
    let mut buf = vec![0; path::STORE_PATH_HASH_CHARS + 1 + self_.name.name().len()];
    util::base32::encode_into(self_.hash.hash(), &mut buf[0..path::STORE_PATH_HASH_CHARS]);
    buf[path::STORE_PATH_HASH_CHARS] = b'-';
    buf[path::STORE_PATH_HASH_CHARS + 1..].clone_from_slice(self_.name.name().as_bytes());
    buf
}

#[no_mangle]
pub extern "C" fn ffi_StorePath_less_than(a: &StorePath, b: &StorePath) -> bool {
    a < b
}

#[no_mangle]
pub extern "C" fn ffi_StorePath_eq(a: &StorePath, b: &StorePath) -> bool {
    a == b
}

#[no_mangle]
pub extern "C" fn ffi_StorePath_clone(self_: &StorePath) -> StorePath {
    self_.clone()
}

#[no_mangle]
pub extern "C" fn ffi_StorePath_name(self_: &StorePath) -> &str {
    self_.name.name()
}

#[no_mangle]
pub extern "C" fn ffi_StorePath_hash_data(
    self_: &StorePath,
) -> &[u8; crate::store::path::STORE_PATH_HASH_BYTES] {
    self_.hash.hash()
}
@ -1,118 +0,0 @@
use std::fmt;

#[derive(Debug)]
pub enum Error {
    InvalidPath(crate::store::StorePath),
    BadStorePath(std::path::PathBuf),
    NotInStore(std::path::PathBuf),
    BadNarInfo,
    BadBase32,
    StorePathNameEmpty,
    StorePathNameTooLong,
    BadStorePathName,
    NarSizeFieldTooBig,
    BadNarString,
    BadNarPadding,
    BadNarVersionMagic,
    MissingNarOpenTag,
    MissingNarCloseTag,
    MissingNarField,
    BadNarField(String),
    BadExecutableField,
    IOError(std::io::Error),
    #[cfg(unused)]
    HttpError(hyper::error::Error),
    Misc(String),
    #[cfg(not(test))]
    Foreign(CppException),
    BadTarFileMemberName(String),
}

impl From<std::io::Error> for Error {
    fn from(err: std::io::Error) -> Self {
        Error::IOError(err)
    }
}

#[cfg(unused)]
impl From<hyper::error::Error> for Error {
    fn from(err: hyper::error::Error) -> Self {
        Error::HttpError(err)
    }
}

impl fmt::Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Error::InvalidPath(_) => write!(f, "invalid path"),
            Error::BadNarInfo => write!(f, ".narinfo file is corrupt"),
            Error::BadStorePath(path) => write!(f, "path '{}' is not a store path", path.display()),
            Error::NotInStore(path) => {
                write!(f, "path '{}' is not in the Nix store", path.display())
            }
            Error::BadBase32 => write!(f, "invalid base32 string"),
            Error::StorePathNameEmpty => write!(f, "store path name is empty"),
            Error::StorePathNameTooLong => {
                write!(f, "store path name is longer than 211 characters")
            }
            Error::BadStorePathName => write!(f, "store path name contains forbidden character"),
            Error::NarSizeFieldTooBig => write!(f, "size field in NAR is too big"),
            Error::BadNarString => write!(f, "NAR string is not valid UTF-8"),
            Error::BadNarPadding => write!(f, "NAR padding is not zero"),
            Error::BadNarVersionMagic => write!(f, "unsupported NAR version"),
            Error::MissingNarOpenTag => write!(f, "NAR open tag is missing"),
            Error::MissingNarCloseTag => write!(f, "NAR close tag is missing"),
            Error::MissingNarField => write!(f, "expected NAR field is missing"),
            Error::BadNarField(s) => write!(f, "unrecognized NAR field '{}'", s),
            Error::BadExecutableField => write!(f, "bad 'executable' field in NAR"),
            Error::IOError(err) => write!(f, "I/O error: {}", err),
            #[cfg(unused)]
            Error::HttpError(err) => write!(f, "HTTP error: {}", err),
            #[cfg(not(test))]
            Error::Foreign(_) => write!(f, "<C++ exception>"), // FIXME
            Error::Misc(s) => write!(f, "{}", s),
            Error::BadTarFileMemberName(s) => {
                write!(f, "tar archive contains illegal file name '{}'", s)
            }
        }
    }
}

#[cfg(not(test))]
impl From<Error> for CppException {
    fn from(err: Error) -> Self {
        match err {
            Error::Foreign(ex) => ex,
            _ => CppException::new(&err.to_string()),
        }
    }
}

#[cfg(not(test))]
#[repr(C)]
#[derive(Debug)]
pub struct CppException(*const libc::c_void); // == std::exception_ptr*

#[cfg(not(test))]
impl CppException {
    fn new(s: &str) -> Self {
        Self(unsafe { make_error(s) })
    }
}

#[cfg(not(test))]
impl Drop for CppException {
    fn drop(&mut self) {
        unsafe {
            destroy_error(self.0);
        }
    }
}

#[cfg(not(test))]
extern "C" {
    #[allow(improper_ctypes)] // YOLO
    fn make_error(s: &str) -> *const libc::c_void;

    fn destroy_error(exc: *const libc::c_void);
}
@ -1,10 +0,0 @@
#[allow(improper_ctypes_definitions)]
#[cfg(not(test))]
mod c;
mod error;
#[cfg(unused)]
mod nar;
mod store;
mod util;

pub use error::Error;
@ -1,126 +0,0 @@
use crate::Error;
use byteorder::{LittleEndian, ReadBytesExt};
use std::convert::TryFrom;
use std::io::Read;

pub fn parse<R: Read>(input: &mut R) -> Result<(), Error> {
    if String::read(input)? != NAR_VERSION_MAGIC {
        return Err(Error::BadNarVersionMagic);
    }

    parse_file(input)
}

const NAR_VERSION_MAGIC: &str = "nix-archive-1";

fn parse_file<R: Read>(input: &mut R) -> Result<(), Error> {
    if String::read(input)? != "(" {
        return Err(Error::MissingNarOpenTag);
    }

    if String::read(input)? != "type" {
        return Err(Error::MissingNarField);
    }

    match String::read(input)?.as_ref() {
        "regular" => {
            let mut _executable = false;
            let mut tag = String::read(input)?;
            if tag == "executable" {
                _executable = true;
                if String::read(input)? != "" {
                    return Err(Error::BadExecutableField);
                }
                tag = String::read(input)?;
            }
            if tag != "contents" {
                return Err(Error::MissingNarField);
            }
            let _contents = Vec::<u8>::read(input)?;
            if String::read(input)? != ")" {
                return Err(Error::MissingNarCloseTag);
            }
        }
        "directory" => loop {
            match String::read(input)?.as_ref() {
                "entry" => {
                    if String::read(input)? != "(" {
                        return Err(Error::MissingNarOpenTag);
                    }
                    if String::read(input)? != "name" {
                        return Err(Error::MissingNarField);
                    }
                    let _name = String::read(input)?;
                    if String::read(input)? != "node" {
                        return Err(Error::MissingNarField);
                    }
                    parse_file(input)?;
                    let tag = String::read(input)?;
                    if tag != ")" {
                        return Err(Error::MissingNarCloseTag);
                    }
                }
                ")" => break,
                s => return Err(Error::BadNarField(s.into())),
            }
        },
        "symlink" => {
            if String::read(input)? != "target" {
                return Err(Error::MissingNarField);
            }
            let _target = String::read(input)?;
            if String::read(input)? != ")" {
                return Err(Error::MissingNarCloseTag);
            }
        }
        s => return Err(Error::BadNarField(s.into())),
    }

    Ok(())
}

trait Deserialize: Sized {
    fn read<R: Read>(input: &mut R) -> Result<Self, Error>;
}

impl Deserialize for String {
    fn read<R: Read>(input: &mut R) -> Result<Self, Error> {
        let buf = Deserialize::read(input)?;
        Ok(String::from_utf8(buf).map_err(|_| Error::BadNarString)?)
    }
}

impl Deserialize for Vec<u8> {
    fn read<R: Read>(input: &mut R) -> Result<Self, Error> {
        let n: usize = Deserialize::read(input)?;
        let mut buf = vec![0; n];
        input.read_exact(&mut buf)?;
        skip_padding(input, n)?;
        Ok(buf)
    }
}

fn skip_padding<R: Read>(input: &mut R, len: usize) -> Result<(), Error> {
    if len % 8 != 0 {
        let mut buf = [0; 8];
        let buf = &mut buf[0..8 - (len % 8)];
        input.read_exact(buf)?;
        if !buf.iter().all(|b| *b == 0) {
            return Err(Error::BadNarPadding);
        }
    }
    Ok(())
}

impl Deserialize for u64 {
    fn read<R: Read>(input: &mut R) -> Result<Self, Error> {
        Ok(input.read_u64::<LittleEndian>()?)
    }
}

impl Deserialize for usize {
    fn read<R: Read>(input: &mut R) -> Result<Self, Error> {
        let n: u64 = Deserialize::read(input)?;
        Ok(usize::try_from(n).map_err(|_| Error::NarSizeFieldTooBig)?)
    }
}
@ -1,48 +0,0 @@
use super::{PathInfo, Store, StorePath};
use crate::Error;
use hyper::client::Client;

pub struct BinaryCacheStore {
    base_uri: String,
    client: Client<hyper::client::HttpConnector, hyper::Body>,
}

impl BinaryCacheStore {
    pub fn new(base_uri: String) -> Self {
        Self {
            base_uri,
            client: Client::new(),
        }
    }
}

impl Store for BinaryCacheStore {
    fn query_path_info(
        &self,
        path: &StorePath,
    ) -> std::pin::Pin<Box<dyn std::future::Future<Output = Result<PathInfo, Error>> + Send>> {
        let uri = format!("{}/{}.narinfo", self.base_uri.clone(), path.hash);
        let path = path.clone();
        let client = self.client.clone();
        let store_dir = self.store_dir().to_string();

        Box::pin(async move {
            let response = client.get(uri.parse::<hyper::Uri>().unwrap()).await?;

            if response.status() == hyper::StatusCode::NOT_FOUND
                || response.status() == hyper::StatusCode::FORBIDDEN
            {
                return Err(Error::InvalidPath(path));
            }

            let mut body = response.into_body();

            let mut bytes = Vec::new();
            while let Some(next) = body.next().await {
                bytes.extend(next?);
            }

            PathInfo::parse_nar_info(std::str::from_utf8(&bytes).unwrap(), &store_dir)
        })
    }
}
@ -1,17 +0,0 @@
pub mod path;

#[cfg(unused)]
mod binary_cache_store;
#[cfg(unused)]
mod path_info;
#[cfg(unused)]
mod store;

pub use path::{StorePath, StorePathHash, StorePathName};

#[cfg(unused)]
pub use binary_cache_store::BinaryCacheStore;
#[cfg(unused)]
pub use path_info::PathInfo;
#[cfg(unused)]
pub use store::Store;
@ -1,224 +0,0 @@
use crate::error::Error;
use crate::util::base32;
use std::fmt;
use std::path::Path;

#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Debug)]
pub struct StorePath {
    pub hash: StorePathHash,
    pub name: StorePathName,
}

pub const STORE_PATH_HASH_BYTES: usize = 20;
pub const STORE_PATH_HASH_CHARS: usize = 32;

impl StorePath {
    pub fn new(path: &Path, store_dir: &Path) -> Result<Self, Error> {
        if path.parent() != Some(store_dir) {
            return Err(Error::NotInStore(path.into()));
        }
        Self::new_from_base_name(
            path.file_name()
                .ok_or_else(|| Error::BadStorePath(path.into()))?
                .to_str()
                .ok_or_else(|| Error::BadStorePath(path.into()))?,
        )
    }

    pub fn from_parts(hash: [u8; STORE_PATH_HASH_BYTES], name: &str) -> Result<Self, Error> {
        Ok(StorePath {
            hash: StorePathHash(hash),
            name: StorePathName::new(name)?,
        })
    }

    pub fn new_from_base_name(base_name: &str) -> Result<Self, Error> {
        if base_name.len() < STORE_PATH_HASH_CHARS + 1
            || base_name.as_bytes()[STORE_PATH_HASH_CHARS] != b'-'
        {
            return Err(Error::BadStorePath(base_name.into()));
        }

        Ok(StorePath {
            hash: StorePathHash::new(&base_name[0..STORE_PATH_HASH_CHARS])?,
            name: StorePathName::new(&base_name[STORE_PATH_HASH_CHARS + 1..])?,
        })
    }
}

impl fmt::Display for StorePath {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}-{}", self.hash, self.name)
    }
}

#[derive(Clone, PartialEq, Eq, Debug)]
pub struct StorePathHash([u8; STORE_PATH_HASH_BYTES]);

impl StorePathHash {
    pub fn new(s: &str) -> Result<Self, Error> {
        assert_eq!(s.len(), STORE_PATH_HASH_CHARS);
        let v = base32::decode(s)?;
        assert_eq!(v.len(), STORE_PATH_HASH_BYTES);
        let mut bytes: [u8; 20] = Default::default();
        bytes.copy_from_slice(&v[0..STORE_PATH_HASH_BYTES]);
        Ok(Self(bytes))
    }

    pub fn hash(&self) -> &[u8; STORE_PATH_HASH_BYTES] {
        &self.0
    }
}

impl fmt::Display for StorePathHash {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut buf = vec![0; STORE_PATH_HASH_CHARS];
        base32::encode_into(&self.0, &mut buf);
        f.write_str(std::str::from_utf8(&buf).unwrap())
    }
}

impl Ord for StorePathHash {
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        // Historically we've sorted store paths by their base32
        // serialization, but our base32 encodes bytes in reverse
        // order. So compare them in reverse order as well.
        self.0.iter().rev().cmp(other.0.iter().rev())
    }
}

impl PartialOrd for StorePathHash {
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        Some(self.cmp(other))
    }
}

#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Debug)]
pub struct StorePathName(String);

impl StorePathName {
    pub fn new(s: &str) -> Result<Self, Error> {
        if s.is_empty() {
            return Err(Error::StorePathNameEmpty);
        }

        if s.len() > 211 {
            return Err(Error::StorePathNameTooLong);
        }

        let is_good_path_name = s.chars().all(|c| {
            c.is_ascii_alphabetic()
                || c.is_ascii_digit()
                || c == '+'
                || c == '-'
                || c == '.'
                || c == '_'
                || c == '?'
                || c == '='
        });
        if s.starts_with('.') || !is_good_path_name {
            return Err(Error::BadStorePathName);
        }

        Ok(Self(s.to_string()))
    }

    pub fn name(&self) -> &str {
        &self.0
    }
}

impl fmt::Display for StorePathName {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(&self.0)
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use assert_matches::assert_matches;

    #[test]
    fn test_parse() {
        let s = "7h7qgvs4kgzsn8a6rb273saxyqh4jxlz-konsole-18.12.3";
        let p = StorePath::new_from_base_name(&s).unwrap();
        assert_eq!(p.name.0, "konsole-18.12.3");
        assert_eq!(
            p.hash.0,
            [
                0x9f, 0x76, 0x49, 0x20, 0xf6, 0x5d, 0xe9, 0x71, 0xc4, 0xca, 0x46, 0x21, 0xab, 0xff,
                0x9b, 0x44, 0xef, 0x87, 0x0f, 0x3c
            ]
        );
    }

    #[test]
    fn test_no_name() {
        let s = "7h7qgvs4kgzsn8a6rb273saxyqh4jxlz-";
        assert_matches!(
            StorePath::new_from_base_name(&s),
            Err(Error::StorePathNameEmpty)
        );
    }

    #[test]
    fn test_no_dash() {
        let s = "7h7qgvs4kgzsn8a6rb273saxyqh4jxlz";
        assert_matches!(
            StorePath::new_from_base_name(&s),
            Err(Error::BadStorePath(_))
        );
    }

    #[test]
    fn test_short_hash() {
        let s = "7h7qgvs4kgzsn8a6rb273saxyqh4jxl-konsole-18.12.3";
        assert_matches!(
            StorePath::new_from_base_name(&s),
            Err(Error::BadStorePath(_))
        );
    }

    #[test]
    fn test_invalid_hash() {
        let s = "7h7qgvs4kgzsn8e6rb273saxyqh4jxlz-konsole-18.12.3";
        assert_matches!(StorePath::new_from_base_name(&s), Err(Error::BadBase32));
    }

    #[test]
    fn test_long_name() {
let s = "7h7qgvs4kgzsn8a6rb273saxyqh4jxlz-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx";
        assert_matches!(StorePath::new_from_base_name(&s), Ok(_));
    }

    #[test]
    fn test_too_long_name() {
let s = "7h7qgvs4kgzsn8a6rb273saxyqh4jxlz-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx";
        assert_matches!(
            StorePath::new_from_base_name(&s),
            Err(Error::StorePathNameTooLong)
        );
    }

    #[test]
    fn test_bad_name() {
        let s = "7h7qgvs4kgzsn8a6rb273saxyqh4jxlz-foo bar";
        assert_matches!(
            StorePath::new_from_base_name(&s),
            Err(Error::BadStorePathName)
        );

        let s = "7h7qgvs4kgzsn8a6rb273saxyqh4jxlz-kónsole";
        assert_matches!(
            StorePath::new_from_base_name(&s),
            Err(Error::BadStorePathName)
        );
    }

    #[test]
    fn test_roundtrip() {
        let s = "7h7qgvs4kgzsn8a6rb273saxyqh4jxlz-konsole-18.12.3";
        assert_eq!(StorePath::new_from_base_name(&s).unwrap().to_string(), s);
    }
}
@ -1,70 +0,0 @@
use crate::store::StorePath;
use crate::Error;
use std::collections::BTreeSet;

#[derive(Clone, Debug)]
pub struct PathInfo {
    pub path: StorePath,
    pub references: BTreeSet<StorePath>,
    pub nar_size: u64,
    pub deriver: Option<StorePath>,

    // Additional binary cache info.
    pub url: Option<String>,
    pub compression: Option<String>,
    pub file_size: Option<u64>,
}

impl PathInfo {
    pub fn parse_nar_info(nar_info: &str, store_dir: &str) -> Result<Self, Error> {
        let mut path = None;
        let mut references = BTreeSet::new();
        let mut nar_size = None;
        let mut deriver = None;
        let mut url = None;
        let mut compression = None;
        let mut file_size = None;

        for line in nar_info.lines() {
            let colon = line.find(':').ok_or(Error::BadNarInfo)?;

            let (name, value) = line.split_at(colon);

            if !value.starts_with(": ") {
                return Err(Error::BadNarInfo);
            }

            let value = &value[2..];

            if name == "StorePath" {
                path = Some(StorePath::new(std::path::Path::new(value), store_dir)?);
            } else if name == "NarSize" {
                nar_size = Some(u64::from_str_radix(value, 10).map_err(|_| Error::BadNarInfo)?);
            } else if name == "References" {
                if !value.is_empty() {
                    for r in value.split(' ') {
                        references.insert(StorePath::new_from_base_name(r)?);
                    }
                }
            } else if name == "Deriver" {
                deriver = Some(StorePath::new_from_base_name(value)?);
            } else if name == "URL" {
                url = Some(value.into());
            } else if name == "Compression" {
                compression = Some(value.into());
            } else if name == "FileSize" {
                file_size = Some(u64::from_str_radix(value, 10).map_err(|_| Error::BadNarInfo)?);
            }
        }

        Ok(PathInfo {
            path: path.ok_or(Error::BadNarInfo)?,
            references,
            nar_size: nar_size.ok_or(Error::BadNarInfo)?,
            deriver,
            url: Some(url.ok_or(Error::BadNarInfo)?),
            compression,
            file_size,
        })
    }
}
@ -1,53 +0,0 @@
use super::{PathInfo, StorePath};
use crate::Error;
use std::collections::{BTreeMap, BTreeSet};
use std::path::Path;

pub trait Store: Send + Sync {
    fn store_dir(&self) -> &str {
        "/nix/store"
    }

    fn query_path_info(
        &self,
        store_path: &StorePath,
    ) -> std::pin::Pin<Box<dyn std::future::Future<Output = Result<PathInfo, Error>> + Send>>;
}

impl dyn Store {
    pub fn parse_store_path(&self, path: &Path) -> Result<StorePath, Error> {
        StorePath::new(path, self.store_dir())
    }

    pub async fn compute_path_closure(
        &self,
        roots: BTreeSet<StorePath>,
    ) -> Result<BTreeMap<StorePath, PathInfo>, Error> {
        let mut done = BTreeSet::new();
        let mut result = BTreeMap::new();
        let mut pending = vec![];

        for root in roots {
            pending.push(self.query_path_info(&root));
            done.insert(root);
        }

        while !pending.is_empty() {
            let (info, _, remaining) = futures::future::select_all(pending).await;
            pending = remaining;

            let info = info?;

            for path in &info.references {
                if !done.contains(path) {
                    pending.push(self.query_path_info(&path));
                    done.insert(path.clone());
                }
            }

            result.insert(info.path.clone(), info);
        }

        Ok(result)
    }
}
@ -1,160 +0,0 @@
use crate::error::Error;
use lazy_static::lazy_static;

pub fn encoded_len(input_len: usize) -> usize {
    if input_len == 0 {
        0
    } else {
        (input_len * 8 - 1) / 5 + 1
    }
}

pub fn decoded_len(input_len: usize) -> usize {
    input_len * 5 / 8
}

static BASE32_CHARS: &[u8; 32] = &b"0123456789abcdfghijklmnpqrsvwxyz";

lazy_static! {
    static ref BASE32_CHARS_REVERSE: Box<[u8; 256]> = {
        let mut xs = [0xffu8; 256];
        for (n, c) in BASE32_CHARS.iter().enumerate() {
            xs[*c as usize] = n as u8;
        }
        Box::new(xs)
    };
}

pub fn encode(input: &[u8]) -> String {
    let mut buf = vec![0; encoded_len(input.len())];
    encode_into(input, &mut buf);
    std::str::from_utf8(&buf).unwrap().to_string()
}

pub fn encode_into(input: &[u8], output: &mut [u8]) {
    let len = encoded_len(input.len());
    assert_eq!(len, output.len());

    let mut nr_bits_left: usize = 0;
    let mut bits_left: u16 = 0;
    let mut pos = len;

    for b in input {
        bits_left |= (*b as u16) << nr_bits_left;
        nr_bits_left += 8;
        while nr_bits_left > 5 {
            output[pos - 1] = BASE32_CHARS[(bits_left & 0x1f) as usize];
            pos -= 1;
            bits_left >>= 5;
            nr_bits_left -= 5;
        }
    }

    if nr_bits_left > 0 {
        output[pos - 1] = BASE32_CHARS[(bits_left & 0x1f) as usize];
        pos -= 1;
    }

    assert_eq!(pos, 0);
}

pub fn decode(input: &str) -> Result<Vec<u8>, crate::Error> {
    let mut res = Vec::with_capacity(decoded_len(input.len()));

    let mut nr_bits_left: usize = 0;
    let mut bits_left: u16 = 0;

    for c in input.chars().rev() {
        let b = BASE32_CHARS_REVERSE[c as usize];
        if b == 0xff {
            return Err(Error::BadBase32);
        }
        bits_left |= (b as u16) << nr_bits_left;
        nr_bits_left += 5;
        if nr_bits_left >= 8 {
            res.push((bits_left & 0xff) as u8);
            bits_left >>= 8;
            nr_bits_left -= 8;
        }
    }

    if nr_bits_left > 0 && bits_left != 0 {
        return Err(Error::BadBase32);
    }

    Ok(res)
}

#[cfg(test)]
mod tests {
    use super::*;
    use assert_matches::assert_matches;
    use hex;
    use proptest::proptest;

    #[test]
    fn test_encode() {
        assert_eq!(encode(&[]), "");

        assert_eq!(
            encode(&hex::decode("0839703786356bca59b0f4a32987eb2e6de43ae8").unwrap()),
            "x0xf8v9fxf3jk8zln1cwlsrmhqvp0f88"
        );

        assert_eq!(
            encode(
                &hex::decode("ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad")
                    .unwrap()
            ),
            "1b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s"
        );

        assert_eq!(
            encode(
&hex::decode("ddaf35a193617abacc417349ae20413112e6fa4e89a97ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd454d4423643ce80e2a9ac94fa54ca49f")
                    .unwrap()
            ),
"2gs8k559z4rlahfx0y688s49m2vvszylcikrfinm30ly9rak69236nkam5ydvly1ai7xac99vxfc4ii84hawjbk876blyk1jfhkbbyx"
        );
    }

    #[test]
    fn test_decode() {
        assert_eq!(hex::encode(decode("").unwrap()), "");

        assert_eq!(
            hex::encode(decode("x0xf8v9fxf3jk8zln1cwlsrmhqvp0f88").unwrap()),
            "0839703786356bca59b0f4a32987eb2e6de43ae8"
        );

        assert_eq!(
            hex::encode(decode("1b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s").unwrap()),
            "ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad"
        );

        assert_eq!(
hex::encode(decode("2gs8k559z4rlahfx0y688s49m2vvszylcikrfinm30ly9rak69236nkam5ydvly1ai7xac99vxfc4ii84hawjbk876blyk1jfhkbbyx").unwrap()),
"ddaf35a193617abacc417349ae20413112e6fa4e89a97ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd454d4423643ce80e2a9ac94fa54ca49f"
        );

        assert_matches!(
            decode("xoxf8v9fxf3jk8zln1cwlsrmhqvp0f88"),
            Err(Error::BadBase32)
        );
        assert_matches!(
            decode("2b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s"),
            Err(Error::BadBase32)
        );
        assert_matches!(decode("2"), Err(Error::BadBase32));
        assert_matches!(decode("2gs"), Err(Error::BadBase32));
        assert_matches!(decode("2gs8"), Err(Error::BadBase32));
    }

    proptest! {

        #[test]
        fn roundtrip(s: Vec<u8>) {
            assert_eq!(s, decode(&encode(&s)).unwrap());
        }
    }
}
@ -1 +0,0 @@
pub mod base32;
@ -41,7 +41,7 @@ perlarchname=$($perl -e 'use Config; print $Config{archname};')
 AC_SUBST(perllibdir, [${libdir}/perl5/site_perl/$perlversion/$perlarchname])
 AC_MSG_RESULT($perllibdir)

-# Look for libsodium, an optional dependency.
+# Look for libsodium.
 PKG_CHECK_MODULES([SODIUM], [libsodium], [CXXFLAGS="$SODIUM_CFLAGS $CXXFLAGS"])

 # Check for the required Perl dependencies (DBI and DBD::SQLite).
@ -1,6 +1,7 @@
 package Nix::Config;

 use MIME::Base64;
+use Nix::Store;

 $version = "@PACKAGE_VERSION@";
@ -240,7 +240,7 @@ SV * convertHash(char * algo, char * s, int toBase32)
|
||||||
PPCODE:
|
PPCODE:
|
||||||
try {
|
try {
|
||||||
auto h = Hash::parseAny(s, parseHashType(algo));
|
auto h = Hash::parseAny(s, parseHashType(algo));
|
||||||
string s = h.to_string(toBase32 ? Base32 : Base16, false);
|
auto s = h.to_string(toBase32 ? Base32 : Base16, false);
|
||||||
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
|
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
|
||||||
} catch (Error & e) {
|
} catch (Error & e) {
|
||||||
croak("%s", e.what());
|
croak("%s", e.what());
|
||||||
|
|
33
scripts/check-hydra-status.sh
Normal file
|
@ -0,0 +1,33 @@
|
||||||
|
#!/usr/bin/env bash
|
||||||
|
|
||||||
|
set -euo pipefail
|
||||||
|
# set -x
|
||||||
|
|
||||||
|
|
||||||
|
# mapfile BUILDS_FOR_LATEST_EVAL < <(
|
||||||
|
# curl -H 'Accept: application/json' https://hydra.nixos.org/jobset/nix/master/evals | \
|
||||||
|
# jq -r '.evals[0].builds[] | @sh')
|
||||||
|
BUILDS_FOR_LATEST_EVAL=$(
|
||||||
|
curl -sS -H 'Accept: application/json' https://hydra.nixos.org/jobset/nix/master/evals | \
|
||||||
|
jq -r '.evals[0].builds[]')
|
||||||
|
|
||||||
|
someBuildFailed=0
|
||||||
|
|
||||||
|
for buildId in $BUILDS_FOR_LATEST_EVAL; do
|
||||||
|
buildInfo=$(curl -sS -H 'Accept: application/json' "https://hydra.nixos.org/build/$buildId")
|
||||||
|
|
||||||
|
finished=$(echo "$buildInfo" | jq -r '.finished')
|
||||||
|
|
||||||
|
if [[ $finished = 0 ]]; then
|
||||||
|
continue
|
||||||
|
fi
|
||||||
|
|
||||||
|
buildStatus=$(echo "$buildInfo" | jq -r '.buildstatus')
|
||||||
|
|
||||||
|
if [[ $buildStatus != 0 ]]; then
|
||||||
|
someBuildFailed=1
|
||||||
|
echo "Job “$(echo "$buildInfo" | jq -r '.job')” failed on hydra: $buildInfo"
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
exit "$someBuildFailed"
|
|
@ -246,7 +246,8 @@ get_volume_pass() {
|
||||||
verify_volume_pass() {
|
verify_volume_pass() {
|
||||||
local volume_special="$1" # (i.e., disk1s7)
|
local volume_special="$1" # (i.e., disk1s7)
|
||||||
local volume_uuid="$2"
|
local volume_uuid="$2"
|
||||||
/usr/sbin/diskutil apfs unlockVolume "$volume_special" -verify -stdinpassphrase -user "$volume_uuid"
|
_sudo "to confirm the password actually unlocks the volume" \
|
||||||
|
/usr/sbin/diskutil apfs unlockVolume "$volume_special" -verify -stdinpassphrase -user "$volume_uuid"
|
||||||
}
|
}
|
||||||
|
|
||||||
volume_pass_works() {
|
volume_pass_works() {
|
||||||
|
@ -440,7 +441,22 @@ add_nix_vol_fstab_line() {
|
||||||
# shellcheck disable=SC1003,SC2026
|
# shellcheck disable=SC1003,SC2026
|
||||||
local escaped_mountpoint="${NIX_ROOT/ /'\\\'040}"
|
local escaped_mountpoint="${NIX_ROOT/ /'\\\'040}"
|
||||||
shift
|
shift
|
||||||
EDITOR="/usr/bin/ex" _sudo "to add nix to fstab" "$@" <<EOF
|
|
||||||
|
# wrap `ex` to work around a problem with vim plugins breaking exit codes;
|
||||||
|
# (see https://github.com/NixOS/nix/issues/5468)
|
||||||
|
# we'd prefer EDITOR="/usr/bin/ex --noplugin" but vifs doesn't word-split
|
||||||
|
# the EDITOR env.
|
||||||
|
#
|
||||||
|
# TODO: at some point we should switch to `--clean`, but it wasn't added
|
||||||
|
# until https://github.com/vim/vim/releases/tag/v8.0.1554 while the macOS
|
||||||
|
# minver 10.12.6 seems to have released with vim 7.4
|
||||||
|
cat > "$SCRATCH/ex_cleanroom_wrapper" <<EOF
|
||||||
|
#!/bin/sh
|
||||||
|
/usr/bin/ex --noplugin "\$@"
|
||||||
|
EOF
|
||||||
|
chmod 755 "$SCRATCH/ex_cleanroom_wrapper"
|
||||||
|
|
||||||
|
EDITOR="$SCRATCH/ex_cleanroom_wrapper" _sudo "to add nix to fstab" "$@" <<EOF
|
||||||
:a
|
:a
|
||||||
UUID=$uuid $escaped_mountpoint apfs rw,noauto,nobrowse,suid,owners
|
UUID=$uuid $escaped_mountpoint apfs rw,noauto,nobrowse,suid,owners
|
||||||
.
|
.
|
||||||
|
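The same wrapper trick works anywhere a tool only accepts a bare EDITOR value and will not word-split "ex --noplugin"; a standalone sketch using an illustrative scratch path:

    printf '#!/bin/sh\nexec /usr/bin/ex --noplugin "$@"\n' > /tmp/ex_cleanroom_wrapper
    chmod 755 /tmp/ex_cleanroom_wrapper
    EDITOR=/tmp/ex_cleanroom_wrapper vifs   # vifs honours $EDITOR when editing fstab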
@ -631,7 +647,7 @@ EOF
|
||||||
# technically /etc/synthetic.d/nix is supported in Big Sur+
|
# technically /etc/synthetic.d/nix is supported in Big Sur+
|
||||||
# but handling both takes even more code...
|
# but handling both takes even more code...
|
||||||
_sudo "to add Nix to /etc/synthetic.conf" \
|
_sudo "to add Nix to /etc/synthetic.conf" \
|
||||||
/usr/bin/ex /etc/synthetic.conf <<EOF
|
/usr/bin/ex --noplugin /etc/synthetic.conf <<EOF
|
||||||
:a
|
:a
|
||||||
${NIX_ROOT:1}
|
${NIX_ROOT:1}
|
||||||
.
|
.
|
||||||
|
@ -670,22 +686,27 @@ encrypt_volume() {
|
||||||
local volume_uuid="$1"
|
local volume_uuid="$1"
|
||||||
local volume_label="$2"
|
local volume_label="$2"
|
||||||
local password
|
local password
|
||||||
|
|
||||||
|
task "Encrypt the Nix volume" >&2
|
||||||
|
|
||||||
# Note: mount/unmount are late additions to support the right order
|
# Note: mount/unmount are late additions to support the right order
|
||||||
# of operations for creating the volume and then baking its uuid into
|
# of operations for creating the volume and then baking its uuid into
|
||||||
# other artifacts; not as well-trod wrt potential errors, race
|
# other artifacts; not as well-trod wrt potential errors, race
|
||||||
# conditions, etc.
|
# conditions, etc.
|
||||||
|
|
||||||
/usr/sbin/diskutil mount "$volume_label"
|
_sudo "to mount your Nix volume for encrypting" \
|
||||||
|
/usr/sbin/diskutil mount "$volume_label"
|
||||||
|
|
||||||
password="$(/usr/bin/xxd -l 32 -p -c 256 /dev/random)"
|
password="$(/usr/bin/xxd -l 32 -p -c 256 /dev/random)"
|
||||||
_sudo "to add your Nix volume's password to Keychain" \
|
_sudo "to add your Nix volume's password to Keychain" \
|
||||||
/usr/bin/security -i <<EOF
|
/usr/bin/security -i <<EOF
|
||||||
add-generic-password -a "$volume_label" -s "$volume_uuid" -l "$volume_label encryption password" -D "Encrypted volume password" -j "Added automatically by the Nix installer for use by $NIX_VOLUME_MOUNTD_DEST" -w "$password" -T /System/Library/CoreServices/APFSUserAgent -T /System/Library/CoreServices/CSUserAgent -T /usr/bin/security "/Library/Keychains/System.keychain"
|
add-generic-password -a "$volume_label" -s "$volume_uuid" -l "$volume_label encryption password" -D "Encrypted volume password" -j "Added automatically by the Nix installer for use by $NIX_VOLUME_MOUNTD_DEST" -w "$password" -T /System/Library/CoreServices/APFSUserAgent -T /System/Library/CoreServices/CSUserAgent -T /usr/bin/security "/Library/Keychains/System.keychain"
|
||||||
EOF
|
EOF
|
||||||
builtin printf "%s" "$password" | _sudo "to encrypt your Nix volume" \
|
builtin printf "%s" "$password" | _sudo "to actually encrypt your Nix volume" \
|
||||||
/usr/sbin/diskutil apfs encryptVolume "$volume_label" -user disk -stdinpassphrase
|
/usr/sbin/diskutil apfs encryptVolume "$volume_label" -user disk -stdinpassphrase
|
||||||
|
|
||||||
/usr/sbin/diskutil unmount force "$volume_label"
|
_sudo "to unmount the encrypted volume" \
|
||||||
|
/usr/sbin/diskutil unmount force "$volume_label"
|
||||||
}
|
}
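These hunks route each privileged diskutil call through the installer's _sudo helper, so the user is told why a command needs root before it runs. A rough sketch of the helper's shape (the real one in install-multi-user.sh also handles headless mode and nicer formatting):

    _sudo() {
        local reason="$1"; shift
        echo "---- sudo execution ------------------" >&2
        echo "I am executing: sudo $*" >&2
        echo "to $reason" >&2
        sudo "$@"
    }
    _sudo "to mount your Nix volume for encrypting" /usr/sbin/diskutil mount "Nix Store"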
|
||||||
|
|
||||||
create_volume() {
|
create_volume() {
|
||||||
|
@ -742,6 +763,9 @@ setup_volume() {
|
||||||
|
|
||||||
use_special="${NIX_VOLUME_USE_SPECIAL:-$(create_volume)}"
|
use_special="${NIX_VOLUME_USE_SPECIAL:-$(create_volume)}"
|
||||||
|
|
||||||
|
_sudo "to ensure the Nix volume is not mounted" \
|
||||||
|
/usr/sbin/diskutil unmount force "$use_special" || true # might not be mounted
|
||||||
|
|
||||||
use_uuid=${NIX_VOLUME_USE_UUID:-$(volume_uuid_from_special "$use_special")}
|
use_uuid=${NIX_VOLUME_USE_UUID:-$(volume_uuid_from_special "$use_special")}
|
||||||
|
|
||||||
setup_fstab "$use_uuid"
|
setup_fstab "$use_uuid"
|
||||||
|
@ -791,7 +815,7 @@ setup_volume_daemon() {
|
||||||
local volume_uuid="$2"
|
local volume_uuid="$2"
|
||||||
if ! test_voldaemon; then
|
if ! test_voldaemon; then
|
||||||
task "Configuring LaunchDaemon to mount '$NIX_VOLUME_LABEL'" >&2
|
task "Configuring LaunchDaemon to mount '$NIX_VOLUME_LABEL'" >&2
|
||||||
_sudo "to install the Nix volume mounter" /usr/bin/ex "$NIX_VOLUME_MOUNTD_DEST" <<EOF
|
_sudo "to install the Nix volume mounter" /usr/bin/ex --noplugin "$NIX_VOLUME_MOUNTD_DEST" <<EOF
|
||||||
:a
|
:a
|
||||||
$(generate_mount_daemon "$cmd_type" "$volume_uuid")
|
$(generate_mount_daemon "$cmd_type" "$volume_uuid")
|
||||||
.
|
.
|
||||||
|
|
|
@ -13,11 +13,22 @@ NIX_BUILD_USER_NAME_TEMPLATE="_nixbld%d"
|
||||||
read_only_root() {
|
read_only_root() {
|
||||||
# this touch command ~should~ always produce an error
|
# this touch command ~should~ always produce an error
|
||||||
# as of this change I confirmed /usr/bin/touch emits:
|
# as of this change I confirmed /usr/bin/touch emits:
|
||||||
|
# "touch: /: Operation not permitted" Monterey
|
||||||
# "touch: /: Read-only file system" Catalina+ and Big Sur
|
# "touch: /: Read-only file system" Catalina+ and Big Sur
|
||||||
# "touch: /: Permission denied" Mojave
|
# "touch: /: Permission denied" Mojave
|
||||||
# (not matching prefix for compat w/ coreutils touch in case using
|
# (not matching prefix for compat w/ coreutils touch in case using
|
||||||
# an explicit path causes problems; its prefix differs)
|
# an explicit path causes problems; its prefix differs)
|
||||||
[[ "$(/usr/bin/touch / 2>&1)" = *"Read-only file system" ]]
|
case "$(/usr/bin/touch / 2>&1)" in
|
||||||
|
*"Read-only file system") # Catalina, Big Sur
|
||||||
|
return 0
|
||||||
|
;;
|
||||||
|
*"Operation not permitted") # Monterey
|
||||||
|
return 0
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
return 1
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
|
||||||
# Avoiding the slow semantic way to get this information (~330ms vs ~8ms)
|
# Avoiding the slow semantic way to get this information (~330ms vs ~8ms)
|
||||||
# unless using touch causes problems. Just in case, that approach is:
|
# unless using touch causes problems. Just in case, that approach is:
|
||||||
|
@ -67,7 +78,7 @@ poly_service_installed_check() {
|
||||||
poly_service_uninstall_directions() {
|
poly_service_uninstall_directions() {
|
||||||
echo "$1. Remove macOS-specific components:"
|
echo "$1. Remove macOS-specific components:"
|
||||||
if should_create_volume && test_nix_volume_mountd_installed; then
|
if should_create_volume && test_nix_volume_mountd_installed; then
|
||||||
darwin_volume_uninstall_directions
|
nix_volume_mountd_uninstall_directions
|
||||||
fi
|
fi
|
||||||
if test_nix_daemon_installed; then
|
if test_nix_daemon_installed; then
|
||||||
nix_daemon_uninstall_directions
|
nix_daemon_uninstall_directions
|
||||||
|
@ -207,7 +218,7 @@ EOF
|
||||||
setup_darwin_volume
|
setup_darwin_volume
|
||||||
fi
|
fi
|
||||||
|
|
||||||
if [ "$(diskutil info -plist /nix | xmllint --xpath "(/plist/dict/key[text()='GlobalPermissionsEnabled'])/following-sibling::*[1]" -)" = "<false/>" ]; then
|
if [ "$(/usr/sbin/diskutil info -plist /nix | xmllint --xpath "(/plist/dict/key[text()='GlobalPermissionsEnabled'])/following-sibling::*[1]" -)" = "<false/>" ]; then
|
||||||
failure "This script needs a /nix volume with global permissions! This may require running sudo diskutil enableOwnership /nix."
|
failure "This script needs a /nix volume with global permissions! This may require running sudo /usr/sbin/diskutil enableOwnership /nix."
|
||||||
fi
|
fi
|
||||||
}
|
}
|
||||||
|
|
|
@ -377,6 +377,11 @@ cure_artifacts() {
|
||||||
}
|
}
|
||||||
|
|
||||||
validate_starting_assumptions() {
|
validate_starting_assumptions() {
|
||||||
|
task "Checking for artifacts of previous installs"
|
||||||
|
cat <<EOF
|
||||||
|
Before I try to install, I'll check for signs Nix already is or has
|
||||||
|
been installed on this system.
|
||||||
|
EOF
|
||||||
if type nix-env 2> /dev/null >&2; then
|
if type nix-env 2> /dev/null >&2; then
|
||||||
warning <<EOF
|
warning <<EOF
|
||||||
Nix already appears to be installed. This installer may run into issues.
|
Nix already appears to be installed. This installer may run into issues.
|
||||||
|
@ -386,20 +391,34 @@ $(uninstall_directions)
|
||||||
EOF
|
EOF
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
# TODO: I think it would be good for this step to accumulate more
|
||||||
|
# knowledge of older obsolete artifacts, if there are any.
|
||||||
|
# We could issue a "reminder" here that the user might want
|
||||||
|
# to clean them up?
|
||||||
|
|
||||||
for profile_target in "${PROFILE_TARGETS[@]}"; do
|
for profile_target in "${PROFILE_TARGETS[@]}"; do
|
||||||
|
# TODO: I think it would be good to accumulate a list of all
|
||||||
|
# of the copies so that people don't hit this 2 or 3x in
|
||||||
|
# a row for different files.
|
||||||
if [ -e "$profile_target$PROFILE_BACKUP_SUFFIX" ]; then
|
if [ -e "$profile_target$PROFILE_BACKUP_SUFFIX" ]; then
|
||||||
|
# this backup process first released in Nix 2.1
|
||||||
failure <<EOF
|
failure <<EOF
|
||||||
When this script runs, it backs up the current $profile_target to
|
I back up shell profile/rc scripts before I add Nix to them.
|
||||||
$profile_target$PROFILE_BACKUP_SUFFIX. This backup file already exists, though.
|
I need to back up $profile_target to $profile_target$PROFILE_BACKUP_SUFFIX,
|
||||||
|
but the latter already exists.
|
||||||
|
|
||||||
Please follow these instructions to clean up the old backup file:
|
Here's how to clean up the old backup file:
|
||||||
|
|
||||||
1. Copy $profile_target and $profile_target$PROFILE_BACKUP_SUFFIX to another place, just
|
1. Back up (copy) $profile_target and $profile_target$PROFILE_BACKUP_SUFFIX
|
||||||
in case.
|
to another location, just in case.
|
||||||
|
|
||||||
2. Take care to make sure that $profile_target$PROFILE_BACKUP_SUFFIX doesn't look like
|
2. Ensure $profile_target$PROFILE_BACKUP_SUFFIX does not have anything
|
||||||
it has anything nix-related in it. If it does, something is probably
|
Nix-related in it. If it does, something is probably quite
|
||||||
quite wrong. Please open an issue or get in touch immediately.
|
wrong. Please open an issue or get in touch immediately.
|
||||||
|
|
||||||
|
3. Once you confirm $profile_target is backed up and
|
||||||
|
$profile_target$PROFILE_BACKUP_SUFFIX doesn't mention Nix, run:
|
||||||
|
mv $profile_target$PROFILE_BACKUP_SUFFIX $profile_target
|
||||||
EOF
|
EOF
|
||||||
fi
|
fi
|
||||||
done
|
done
|
||||||
|
@ -557,21 +576,40 @@ create_directories() {
|
||||||
# since this bit is cross-platform:
|
# since this bit is cross-platform:
|
||||||
# - first try with `command -vp` to try and find
|
# - first try with `command -vp` to try and find
|
||||||
# chown in the usual places
|
# chown in the usual places
|
||||||
|
# * to work around some sort of deficiency in
|
||||||
|
# `command -p` in macOS bash 3.2, we also add
|
||||||
|
# PATH="$(getconf PATH 2>/dev/null)". As long as
|
||||||
|
# getconf is found, this should set a sane PATH
|
||||||
|
# which `command -p` in bash 3.2 appears to use.
|
||||||
|
# A bash with a properly-working `command -p`
|
||||||
|
# should ignore this hard-set PATH in favor of
|
||||||
|
# whatever it obtains internally. See
|
||||||
|
# github.com/NixOS/nix/issues/5768
|
||||||
# - fall back on `command -v` which would find
|
# - fall back on `command -v` which would find
|
||||||
# any chown on path
|
# any chown on path
|
||||||
# if we don't find one, the command is already
|
# if we don't find one, the command is already
|
||||||
# hiding behind || true, and the general state
|
# hiding behind || true, and the general state
|
||||||
# should be one the user can repair once they
|
# should be one the user can repair once they
|
||||||
# figure out where chown is...
|
# figure out where chown is...
|
||||||
local get_chr_own="$(command -vp chown)"
|
local get_chr_own="$(PATH="$(getconf PATH 2>/dev/null)" command -vp chown)"
|
||||||
if [[ -z "$get_chr_own" ]]; then
|
if [[ -z "$get_chr_own" ]]; then
|
||||||
get_chr_own="$(command -v chown)"
|
get_chr_own="$(command -v chown)"
|
||||||
fi
|
fi
|
||||||
_sudo "to take root ownership of existing Nix store files" \
|
|
||||||
"$get_chr_own" -R "root:$NIX_BUILD_GROUP_NAME" "$NIX_ROOT" || true
|
if [[ -z "$get_chr_own" ]]; then
|
||||||
|
reminder <<EOF
|
||||||
|
I wanted to take root ownership of existing Nix store files,
|
||||||
|
but I couldn't locate 'chown'. (You may need to fix your PATH.)
|
||||||
|
To manually change file ownership, you can run:
|
||||||
|
sudo chown -R 'root:$NIX_BUILD_GROUP_NAME' '$NIX_ROOT'
|
||||||
|
EOF
|
||||||
|
else
|
||||||
|
_sudo "to take root ownership of existing Nix store files" \
|
||||||
|
"$get_chr_own" -R "root:$NIX_BUILD_GROUP_NAME" "$NIX_ROOT" || true
|
||||||
|
fi
|
||||||
fi
|
fi
|
||||||
_sudo "to make the basic directory structure of Nix (part 1)" \
|
_sudo "to make the basic directory structure of Nix (part 1)" \
|
||||||
install -dv -m 0755 /nix /nix/var /nix/var/log /nix/var/log/nix /nix/var/log/nix/drvs /nix/var/nix{,/db,/gcroots,/profiles,/temproots,/userpool} /nix/var/nix/{gcroots,profiles}/per-user
|
install -dv -m 0755 /nix /nix/var /nix/var/log /nix/var/log/nix /nix/var/log/nix/drvs /nix/var/nix{,/db,/gcroots,/profiles,/temproots,/userpool,/daemon-socket} /nix/var/nix/{gcroots,profiles}/per-user
|
||||||
|
|
||||||
_sudo "to make the basic directory structure of Nix (part 2)" \
|
_sudo "to make the basic directory structure of Nix (part 2)" \
|
||||||
install -dv -g "$NIX_BUILD_GROUP_NAME" -m 1775 /nix/store
|
install -dv -g "$NIX_BUILD_GROUP_NAME" -m 1775 /nix/store
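The chown lookup above can be exercised on its own; getconf supplies a sane PATH so that `command -p` behaves on the bash 3.2 that macOS ships. A sketch, not the installer code itself:

    get_chr_own="$(PATH="$(getconf PATH 2>/dev/null)" command -vp chown)"
    [ -n "$get_chr_own" ] || get_chr_own="$(command -v chown)"
    echo "would use: ${get_chr_own:-<chown not found>}"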
|
||||||
|
@ -599,7 +637,7 @@ manager. This will happen in a few stages:
|
||||||
1. Make sure your computer doesn't already have Nix. If it does, I
|
1. Make sure your computer doesn't already have Nix. If it does, I
|
||||||
will show you instructions on how to clean up your old install.
|
will show you instructions on how to clean up your old install.
|
||||||
|
|
||||||
2. Show you what we are going to install and where. Then we will ask
|
2. Show you what I am going to install and where. Then I will ask
|
||||||
if you are ready to continue.
|
if you are ready to continue.
|
||||||
|
|
||||||
3. Create the system users and groups that the Nix daemon uses to run
|
3. Create the system users and groups that the Nix daemon uses to run
|
||||||
|
@ -614,14 +652,14 @@ manager. This will happen in a few stages:
|
||||||
|
|
||||||
EOF
|
EOF
|
||||||
|
|
||||||
if ui_confirm "Would you like to see a more detailed list of what we will do?"; then
|
if ui_confirm "Would you like to see a more detailed list of what I will do?"; then
|
||||||
cat <<EOF
|
cat <<EOF
|
||||||
|
|
||||||
We will:
|
I will:
|
||||||
|
|
||||||
- make sure your computer doesn't already have Nix files
|
- make sure your computer doesn't already have Nix files
|
||||||
(if it does, I will tell you how to clean them up.)
|
(if it does, I will tell you how to clean them up.)
|
||||||
- create local users (see the list above for the users we'll make)
|
- create local users (see the list above for the users I'll make)
|
||||||
- create a local group ($NIX_BUILD_GROUP_NAME)
|
- create a local group ($NIX_BUILD_GROUP_NAME)
|
||||||
- install Nix in to $NIX_ROOT
|
- install Nix in to $NIX_ROOT
|
||||||
- create a configuration file in /etc/nix
|
- create a configuration file in /etc/nix
|
||||||
|
@ -656,7 +694,7 @@ run in a headless fashion, like this:
|
||||||
|
|
||||||
$ curl -L https://nixos.org/nix/install | sh
|
$ curl -L https://nixos.org/nix/install | sh
|
||||||
|
|
||||||
or maybe in a CI pipeline. Because of that, we're going to skip the
|
or maybe in a CI pipeline. Because of that, I'm going to skip the
|
||||||
verbose output in the interest of brevity.
|
verbose output in the interest of brevity.
|
||||||
|
|
||||||
If you would like to
|
If you would like to
|
||||||
|
@ -670,7 +708,7 @@ EOF
|
||||||
fi
|
fi
|
||||||
|
|
||||||
cat <<EOF
|
cat <<EOF
|
||||||
This script is going to call sudo a lot. Every time we do, it'll
|
This script is going to call sudo a lot. Every time I do, it'll
|
||||||
output exactly what it'll do, and why.
|
output exactly what it'll do, and why.
|
||||||
|
|
||||||
Just like this:
|
Just like this:
|
||||||
|
@ -682,15 +720,15 @@ EOF
|
||||||
cat <<EOF
|
cat <<EOF
|
||||||
|
|
||||||
This might look scary, but everything can be undone by running just a
|
This might look scary, but everything can be undone by running just a
|
||||||
few commands. We used to ask you to confirm each time sudo ran, but it
|
few commands. I used to ask you to confirm each time sudo ran, but it
|
||||||
was too many times. Instead, I'll just ask you this one time:
|
was too many times. Instead, I'll just ask you this one time:
|
||||||
|
|
||||||
EOF
|
EOF
|
||||||
if ui_confirm "Can we use sudo?"; then
|
if ui_confirm "Can I use sudo?"; then
|
||||||
ok "Yay! Thanks! Let's get going!"
|
ok "Yay! Thanks! Let's get going!"
|
||||||
else
|
else
|
||||||
failure <<EOF
|
failure <<EOF
|
||||||
That is okay, but we can't install.
|
That is okay, but I can't install.
|
||||||
EOF
|
EOF
|
||||||
fi
|
fi
|
||||||
}
|
}
|
||||||
|
@ -809,10 +847,10 @@ main() {
|
||||||
# can fail faster in this case. Sourcing install-darwin... now runs
|
# can fail faster in this case. Sourcing install-darwin... now runs
|
||||||
# `touch /` to detect Read-only root, but it could update times on
|
# `touch /` to detect Read-only root, but it could update times on
|
||||||
# pre-Catalina macOS if run as root user.
|
# pre-Catalina macOS if run as root user.
|
||||||
if [ $EUID -eq 0 ]; then
|
if [ "$EUID" -eq 0 ]; then
|
||||||
failure <<EOF
|
failure <<EOF
|
||||||
Please do not run this script with root privileges. We will call sudo
|
Please do not run this script with root privileges. I will call sudo
|
||||||
when we need to.
|
when I need to.
|
||||||
EOF
|
EOF
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
|
|
@ -38,7 +38,7 @@ fi
|
||||||
|
|
||||||
# Determine if we could use the multi-user installer or not
|
# Determine if we could use the multi-user installer or not
|
||||||
if [ "$(uname -s)" = "Linux" ]; then
|
if [ "$(uname -s)" = "Linux" ]; then
|
||||||
echo "Note: a multi-user installation is possible. See https://nixos.org/nix/manual/#sect-multi-user-installation" >&2
|
echo "Note: a multi-user installation is possible. See https://nixos.org/manual/nix/stable/installation/installing-binary.html#multi-user-installation" >&2
|
||||||
fi
|
fi
|
||||||
|
|
||||||
case "$(uname -s)" in
|
case "$(uname -s)" in
|
||||||
|
@ -98,7 +98,7 @@ while [ $# -gt 0 ]; do
|
||||||
echo " providing multi-user support and better isolation for local builds."
|
echo " providing multi-user support and better isolation for local builds."
|
||||||
echo " Both for security and reproducibility, this method is recommended if"
|
echo " Both for security and reproducibility, this method is recommended if"
|
||||||
echo " supported on your platform."
|
echo " supported on your platform."
|
||||||
echo " See https://nixos.org/nix/manual/#sect-multi-user-installation"
|
echo " See https://nixos.org/manual/nix/stable/installation/installing-binary.html#multi-user-installation"
|
||||||
echo ""
|
echo ""
|
||||||
echo " --no-daemon: Simple, single-user installation that does not require root and is"
|
echo " --no-daemon: Simple, single-user installation that does not require root and is"
|
||||||
echo " trivial to uninstall."
|
echo " trivial to uninstall."
|
||||||
|
@ -134,7 +134,7 @@ fi
|
||||||
|
|
||||||
echo "performing a single-user installation of Nix..." >&2
|
echo "performing a single-user installation of Nix..." >&2
|
||||||
|
|
||||||
if ! [ -e $dest ]; then
|
if ! [ -e "$dest" ]; then
|
||||||
cmd="mkdir -m 0755 $dest && chown $USER $dest"
|
cmd="mkdir -m 0755 $dest && chown $USER $dest"
|
||||||
echo "directory $dest does not exist; creating it by running '$cmd' using sudo" >&2
|
echo "directory $dest does not exist; creating it by running '$cmd' using sudo" >&2
|
||||||
if ! sudo sh -c "$cmd"; then
|
if ! sudo sh -c "$cmd"; then
|
||||||
|
@ -143,12 +143,12 @@ if ! [ -e $dest ]; then
|
||||||
fi
|
fi
|
||||||
fi
|
fi
|
||||||
|
|
||||||
if ! [ -w $dest ]; then
|
if ! [ -w "$dest" ]; then
|
||||||
echo "$0: directory $dest exists, but is not writable by you. This could indicate that another user has already performed a single-user installation of Nix on this system. If you wish to enable multi-user support see https://nixos.org/nix/manual/#ssec-multi-user. If you wish to continue with a single-user install for $USER please run 'chown -R $USER $dest' as root." >&2
|
echo "$0: directory $dest exists, but is not writable by you. This could indicate that another user has already performed a single-user installation of Nix on this system. If you wish to enable multi-user support see https://nixos.org/manual/nix/stable/installation/multi-user.html. If you wish to continue with a single-user install for $USER please run 'chown -R $USER $dest' as root." >&2
|
||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
|
|
||||||
mkdir -p $dest/store
|
mkdir -p "$dest/store"
|
||||||
|
|
||||||
printf "copying Nix to %s..." "${dest}/store" >&2
|
printf "copying Nix to %s..." "${dest}/store" >&2
|
||||||
# Insert a newline if no progress is shown.
|
# Insert a newline if no progress is shown.
|
||||||
|
@ -189,17 +189,17 @@ fi
|
||||||
|
|
||||||
# Install an SSL certificate bundle.
|
# Install an SSL certificate bundle.
|
||||||
if [ -z "$NIX_SSL_CERT_FILE" ] || ! [ -f "$NIX_SSL_CERT_FILE" ]; then
|
if [ -z "$NIX_SSL_CERT_FILE" ] || ! [ -f "$NIX_SSL_CERT_FILE" ]; then
|
||||||
$nix/bin/nix-env -i "$cacert"
|
"$nix/bin/nix-env" -i "$cacert"
|
||||||
export NIX_SSL_CERT_FILE="$HOME/.nix-profile/etc/ssl/certs/ca-bundle.crt"
|
export NIX_SSL_CERT_FILE="$HOME/.nix-profile/etc/ssl/certs/ca-bundle.crt"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
# Subscribe the user to the Nixpkgs channel and fetch it.
|
# Subscribe the user to the Nixpkgs channel and fetch it.
|
||||||
if [ -z "$NIX_INSTALLER_NO_CHANNEL_ADD" ]; then
|
if [ -z "$NIX_INSTALLER_NO_CHANNEL_ADD" ]; then
|
||||||
if ! $nix/bin/nix-channel --list | grep -q "^nixpkgs "; then
|
if ! "$nix/bin/nix-channel" --list | grep -q "^nixpkgs "; then
|
||||||
$nix/bin/nix-channel --add https://nixos.org/channels/nixpkgs-unstable
|
"$nix/bin/nix-channel" --add https://nixos.org/channels/nixpkgs-unstable
|
||||||
fi
|
fi
|
||||||
if [ -z "$_NIX_INSTALLER_TEST" ]; then
|
if [ -z "$_NIX_INSTALLER_TEST" ]; then
|
||||||
if ! $nix/bin/nix-channel --update nixpkgs; then
|
if ! "$nix/bin/nix-channel" --update nixpkgs; then
|
||||||
echo "Fetching the nixpkgs channel failed. (Are you offline?)"
|
echo "Fetching the nixpkgs channel failed. (Are you offline?)"
|
||||||
echo "To try again later, run \"nix-channel --update nixpkgs\"."
|
echo "To try again later, run \"nix-channel --update nixpkgs\"."
|
||||||
fi
|
fi
|
||||||
|
@ -215,7 +215,7 @@ if [ -z "$NIX_INSTALLER_NO_MODIFY_PROFILE" ]; then
|
||||||
if [ -w "$fn" ]; then
|
if [ -w "$fn" ]; then
|
||||||
if ! grep -q "$p" "$fn"; then
|
if ! grep -q "$p" "$fn"; then
|
||||||
echo "modifying $fn..." >&2
|
echo "modifying $fn..." >&2
|
||||||
echo -e "\nif [ -e $p ]; then . $p; fi # added by Nix installer" >> "$fn"
|
printf '\nif [ -e %s ]; then . %s; fi # added by Nix installer\n' "$p" "$p" >> "$fn"
|
||||||
fi
|
fi
|
||||||
added=1
|
added=1
|
||||||
break
|
break
|
||||||
|
@ -226,7 +226,7 @@ if [ -z "$NIX_INSTALLER_NO_MODIFY_PROFILE" ]; then
|
||||||
if [ -w "$fn" ]; then
|
if [ -w "$fn" ]; then
|
||||||
if ! grep -q "$p" "$fn"; then
|
if ! grep -q "$p" "$fn"; then
|
||||||
echo "modifying $fn..." >&2
|
echo "modifying $fn..." >&2
|
||||||
echo -e "\nif [ -e $p ]; then . $p; fi # added by Nix installer" >> "$fn"
|
printf '\nif [ -e %s ]; then . %s; fi # added by Nix installer\n' "$p" "$p" >> "$fn"
|
||||||
fi
|
fi
|
||||||
added=1
|
added=1
|
||||||
break
|
break
|
||||||
|
|
|
@ -15,7 +15,7 @@ readonly SERVICE_OVERRIDE=${SERVICE_DEST}.d/override.conf
|
||||||
|
|
||||||
create_systemd_override() {
|
create_systemd_override() {
|
||||||
header "Configuring proxy for the nix-daemon service"
|
header "Configuring proxy for the nix-daemon service"
|
||||||
_sudo "create directory for systemd unit override" mkdir -p "$(dirname $SERVICE_OVERRIDE)"
|
_sudo "create directory for systemd unit override" mkdir -p "$(dirname "$SERVICE_OVERRIDE")"
|
||||||
cat <<EOF | _sudo "create systemd unit override" tee "$SERVICE_OVERRIDE"
|
cat <<EOF | _sudo "create systemd unit override" tee "$SERVICE_OVERRIDE"
|
||||||
[Service]
|
[Service]
|
||||||
$1
|
$1
|
||||||
|
|
|
@ -81,10 +81,10 @@ if [ "$(uname -s)" != "Darwin" ]; then
|
||||||
require_util xz "unpack the binary tarball"
|
require_util xz "unpack the binary tarball"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
if command -v wget > /dev/null 2>&1; then
|
if command -v curl > /dev/null 2>&1; then
|
||||||
fetch() { wget "$1" -O "$2"; }
|
|
||||||
elif command -v curl > /dev/null 2>&1; then
|
|
||||||
fetch() { curl -L "$1" -o "$2"; }
|
fetch() { curl -L "$1" -o "$2"; }
|
||||||
|
elif command -v wget > /dev/null 2>&1; then
|
||||||
|
fetch() { wget "$1" -O "$2"; }
|
||||||
else
|
else
|
||||||
oops "you don't have wget or curl installed, which I need to download the binary tarball"
|
oops "you don't have wget or curl installed, which I need to download the binary tarball"
|
||||||
fi
|
fi
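A standalone check that mirrors the new downloader preference (curl first, wget only as a fallback); purely illustrative:

    if command -v curl > /dev/null 2>&1; then
        echo "installer would download with curl"
    elif command -v wget > /dev/null 2>&1; then
        echo "installer would download with wget"
    else
        echo "neither curl nor wget found" >&2
    fi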
|
||||||
|
|
|
@ -1,7 +1,5 @@
|
||||||
nix_noinst_scripts := \
|
nix_noinst_scripts := \
|
||||||
$(d)/nix-http-export.cgi \
|
$(d)/nix-profile.sh
|
||||||
$(d)/nix-profile.sh \
|
|
||||||
$(d)/nix-reduce-build
|
|
||||||
|
|
||||||
noinst-scripts += $(nix_noinst_scripts)
|
noinst-scripts += $(nix_noinst_scripts)
|
||||||
|
|
||||||
|
|
|
@ -1,51 +0,0 @@
|
||||||
#! /bin/sh
|
|
||||||
|
|
||||||
export HOME=/tmp
|
|
||||||
export NIX_REMOTE=daemon
|
|
||||||
|
|
||||||
TMP_DIR="${TMP_DIR:-/tmp/nix-export}"
|
|
||||||
|
|
||||||
@coreutils@/mkdir -p "$TMP_DIR" || true
|
|
||||||
@coreutils@/chmod a+r "$TMP_DIR"
|
|
||||||
|
|
||||||
needed_path="?$QUERY_STRING"
|
|
||||||
needed_path="${needed_path#*[?&]needed_path=}"
|
|
||||||
needed_path="${needed_path%%&*}"
|
|
||||||
#needed_path="$(echo $needed_path | ./unhttp)"
|
|
||||||
needed_path="${needed_path//%2B/+}"
|
|
||||||
needed_path="${needed_path//%3D/=}"
|
|
||||||
|
|
||||||
echo needed_path: "$needed_path" >&2
|
|
||||||
|
|
||||||
NIX_STORE="${NIX_STORE_DIR:-/nix/store}"
|
|
||||||
|
|
||||||
echo NIX_STORE: "${NIX_STORE}" >&2
|
|
||||||
|
|
||||||
full_path="${NIX_STORE}"/"$needed_path"
|
|
||||||
|
|
||||||
if [ "$needed_path" != "${needed_path%.drv}" ]; then
|
|
||||||
echo "Status: 403 You should create the derivation file yourself"
|
|
||||||
echo "Content-Type: text/plain"
|
|
||||||
echo
|
|
||||||
echo "Refusing to disclose derivation contents"
|
|
||||||
exit
|
|
||||||
fi
|
|
||||||
|
|
||||||
if @bindir@/nix-store --check-validity "$full_path"; then
|
|
||||||
if ! [ -e nix-export/"$needed_path".nar.gz ]; then
|
|
||||||
@bindir@/nix-store --export "$full_path" | @gzip@ > "$TMP_DIR"/"$needed_path".nar.gz
|
|
||||||
@coreutils@/ln -fs "$TMP_DIR"/"$needed_path".nar.gz nix-export/"$needed_path".nar.gz
|
|
||||||
fi;
|
|
||||||
echo "Status: 301 Moved"
|
|
||||||
echo "Location: nix-export/"$needed_path".nar.gz"
|
|
||||||
echo
|
|
||||||
else
|
|
||||||
echo "Status: 404 No such path found"
|
|
||||||
echo "Content-Type: text/plain"
|
|
||||||
echo
|
|
||||||
echo "Path not found:"
|
|
||||||
echo "$needed_path"
|
|
||||||
echo "checked:"
|
|
||||||
echo "$full_path"
|
|
||||||
fi
|
|
||||||
|
|
|
@ -5,7 +5,7 @@ __ETC_PROFILE_NIX_SOURCED=1
|
||||||
export NIX_PROFILES="@localstatedir@/nix/profiles/default $HOME/.nix-profile"
|
export NIX_PROFILES="@localstatedir@/nix/profiles/default $HOME/.nix-profile"
|
||||||
|
|
||||||
# Set $NIX_SSL_CERT_FILE so that Nixpkgs applications like curl work.
|
# Set $NIX_SSL_CERT_FILE so that Nixpkgs applications like curl work.
|
||||||
if [ ! -z "${NIX_SSL_CERT_FILE:-}" ]; then
|
if [ -n "${NIX_SSL_CERT_FILE:-}" ]; then
|
||||||
: # Allow users to override the NIX_SSL_CERT_FILE
|
: # Allow users to override the NIX_SSL_CERT_FILE
|
||||||
elif [ -e /etc/ssl/certs/ca-certificates.crt ]; then # NixOS, Ubuntu, Debian, Gentoo, Arch
|
elif [ -e /etc/ssl/certs/ca-certificates.crt ]; then # NixOS, Ubuntu, Debian, Gentoo, Arch
|
||||||
export NIX_SSL_CERT_FILE=/etc/ssl/certs/ca-certificates.crt
|
export NIX_SSL_CERT_FILE=/etc/ssl/certs/ca-certificates.crt
|
||||||
|
@ -18,14 +18,14 @@ elif [ -e /etc/pki/tls/certs/ca-bundle.crt ]; then # Fedora, CentOS
|
||||||
else
|
else
|
||||||
# Fall back to what is in the nix profiles, favouring whatever is defined last.
|
# Fall back to what is in the nix profiles, favouring whatever is defined last.
|
||||||
check_nix_profiles() {
|
check_nix_profiles() {
|
||||||
if [ "$ZSH_VERSION" ]; then
|
if [ -n "$ZSH_VERSION" ]; then
|
||||||
# Zsh by default doesn't split words in unquoted parameter expansion.
|
# Zsh by default doesn't split words in unquoted parameter expansion.
|
||||||
# Set local_options for these options to be reverted at the end of the function
|
# Set local_options for these options to be reverted at the end of the function
|
||||||
# and shwordsplit to force splitting words in $NIX_PROFILES below.
|
# and shwordsplit to force splitting words in $NIX_PROFILES below.
|
||||||
setopt local_options shwordsplit
|
setopt local_options shwordsplit
|
||||||
fi
|
fi
|
||||||
for i in $NIX_PROFILES; do
|
for i in $NIX_PROFILES; do
|
||||||
if [ -e $i/etc/ssl/certs/ca-bundle.crt ]; then
|
if [ -e "$i/etc/ssl/certs/ca-bundle.crt" ]; then
|
||||||
export NIX_SSL_CERT_FILE=$i/etc/ssl/certs/ca-bundle.crt
|
export NIX_SSL_CERT_FILE=$i/etc/ssl/certs/ca-bundle.crt
|
||||||
fi
|
fi
|
||||||
done
|
done
|
||||||
|
|
|
@ -24,6 +24,9 @@ if [ -n "$HOME" ] && [ -n "$USER" ]; then
|
||||||
export NIX_SSL_CERT_FILE="$NIX_LINK/etc/ca-bundle.crt"
|
export NIX_SSL_CERT_FILE="$NIX_LINK/etc/ca-bundle.crt"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
# Only use MANPATH if it is already set. In general `man` will just simply
|
||||||
|
# pick up `.nix-profile/share/man` because is it close to `.nix-profile/bin`
|
||||||
|
# which is in the $PATH. For more info, run `manpath -d`.
|
||||||
if [ -n "${MANPATH-}" ]; then
|
if [ -n "${MANPATH-}" ]; then
|
||||||
export MANPATH="$NIX_LINK/share/man:$MANPATH"
|
export MANPATH="$NIX_LINK/share/man:$MANPATH"
|
||||||
fi
|
fi
|
||||||
|
|
|
@ -1,171 +0,0 @@
|
||||||
#! @bash@
|
|
||||||
|
|
||||||
WORKING_DIRECTORY=$(mktemp -d "${TMPDIR:-/tmp}"/nix-reduce-build-XXXXXX);
|
|
||||||
cd "$WORKING_DIRECTORY";
|
|
||||||
|
|
||||||
if test -z "$1" || test "a--help" = "a$1" ; then
|
|
||||||
echo 'nix-reduce-build (paths or Nix expressions) -- (package sources)' >&2
|
|
||||||
echo As in: >&2
|
|
||||||
echo nix-reduce-build /etc/nixos/nixos -- ssh://user@somewhere.nowhere.example.org >&2
|
|
||||||
echo nix-reduce-build /etc/nixos/nixos -- \\
|
|
||||||
echo " " \''http://somewhere.nowhere.example.org/nix/nix-http-export.cgi?needed_path='\' >&2
|
|
||||||
echo " store path name will be added into the end of the URL" >&2
|
|
||||||
echo nix-reduce-build /etc/nixos/nixos -- file://home/user/nar/ >&2
|
|
||||||
echo " that should be a directory where gzipped 'nix-store --export' ">&2
|
|
||||||
echo " files are located (they should have .nar.gz extension)" >&2
|
|
||||||
echo " Or all together: " >&2
|
|
||||||
echo -e nix-reduce-build /expr.nix /e2.nix -- \\\\\\\n\
|
|
||||||
" ssh://a@b.example.com http://n.example.com/get-nar?q= file://nar/" >&2
|
|
||||||
echo " Also supports best-effort local builds of failing expression set:" >&2
|
|
||||||
echo "nix-reduce-build /e.nix -- nix-daemon:// nix-self://" >&2
|
|
||||||
echo " nix-daemon:// builds using daemon"
|
|
||||||
echo " nix-self:// builds directly using nix-store from current installation" >&2
|
|
||||||
echo " nix-daemon-fixed:// and nix-self-fixed:// do the same, but only for" >&2;
|
|
||||||
echo "derivations with specified output hash (sha256, sha1 or md5)." >&2
|
|
||||||
echo " nix-daemon-substitute:// and nix-self-substitute:// try to substitute" >&2;
|
|
||||||
echo "maximum amount of paths" >&2;
|
|
||||||
echo " nix-daemon-build:// and nix-self-build:// try to build (not substitute)" >&2;
|
|
||||||
echo "maximum amount of paths" >&2;
|
|
||||||
echo " If no package sources are specified, required paths are listed." >&2;
|
|
||||||
exit;
|
|
||||||
fi;
|
|
||||||
|
|
||||||
while ! test "$1" = "--" || test "$1" = "" ; do
|
|
||||||
echo "$1" >> initial; >&2
|
|
||||||
shift;
|
|
||||||
done
|
|
||||||
shift;
|
|
||||||
echo Will work on $(cat initial | wc -l) targets. >&2
|
|
||||||
|
|
||||||
while read ; do
|
|
||||||
case "$REPLY" in
|
|
||||||
${NIX_STORE_DIR:-/nix/store}/*)
|
|
||||||
echo "$REPLY" >> paths; >&2
|
|
||||||
;;
|
|
||||||
*)
|
|
||||||
(
|
|
||||||
IFS=: ;
|
|
||||||
nix-instantiate $REPLY >> paths;
|
|
||||||
);
|
|
||||||
;;
|
|
||||||
esac;
|
|
||||||
done < initial;
|
|
||||||
echo Proceeding $(cat paths | wc -l) paths. >&2
|
|
||||||
|
|
||||||
while read; do
|
|
||||||
case "$REPLY" in
|
|
||||||
*.drv)
|
|
||||||
echo "$REPLY" >> derivers; >&2
|
|
||||||
;;
|
|
||||||
*)
|
|
||||||
nix-store --query --deriver "$REPLY" >>derivers;
|
|
||||||
;;
|
|
||||||
esac;
|
|
||||||
done < paths;
|
|
||||||
echo Found $(cat derivers | wc -l) derivers. >&2
|
|
||||||
|
|
||||||
cat derivers | xargs nix-store --query -R > derivers-closure;
|
|
||||||
echo Proceeding at most $(cat derivers-closure | wc -l) derivers. >&2
|
|
||||||
|
|
||||||
cat derivers-closure | egrep '[.]drv$' | xargs nix-store --query --outputs > wanted-paths;
|
|
||||||
cat derivers-closure | egrep -v '[.]drv$' >> wanted-paths;
|
|
||||||
echo Prepared $(cat wanted-paths | wc -l) paths to get. >&2
|
|
||||||
|
|
||||||
cat wanted-paths | xargs nix-store --check-validity --print-invalid > needed-paths;
|
|
||||||
echo We need $(cat needed-paths | wc -l) paths. >&2
|
|
||||||
|
|
||||||
egrep '[.]drv$' derivers-closure > critical-derivers;
|
|
||||||
|
|
||||||
if test -z "$1" ; then
|
|
||||||
cat needed-paths;
|
|
||||||
fi;
|
|
||||||
|
|
||||||
refresh_critical_derivers() {
|
|
||||||
echo "Finding needed derivers..." >&2;
|
|
||||||
cat critical-derivers | while read; do
|
|
||||||
if ! (nix-store --query --outputs "$REPLY" | xargs nix-store --check-validity &> /dev/null;); then
|
|
||||||
echo "$REPLY";
|
|
||||||
fi;
|
|
||||||
done > new-critical-derivers;
|
|
||||||
mv new-critical-derivers critical-derivers;
|
|
||||||
echo The needed paths are realized by $(cat critical-derivers | wc -l) derivers. >&2
|
|
||||||
}
|
|
||||||
|
|
||||||
build_here() {
|
|
||||||
cat critical-derivers | while read; do
|
|
||||||
echo "Realising $REPLY using nix-daemon" >&2
|
|
||||||
@bindir@/nix-store -r "${REPLY}"
|
|
||||||
done;
|
|
||||||
}
|
|
||||||
|
|
||||||
try_to_substitute(){
|
|
||||||
cat needed-paths | while read ; do
|
|
||||||
echo "Building $REPLY using nix-daemon" >&2
|
|
||||||
@bindir@/nix-store -r "${NIX_STORE_DIR:-/nix/store}/${REPLY##*/}"
|
|
||||||
done;
|
|
||||||
}
|
|
||||||
|
|
||||||
for i in "$@"; do
|
|
||||||
sshHost="${i#ssh://}";
|
|
||||||
httpHost="${i#http://}";
|
|
||||||
httpsHost="${i#https://}";
|
|
||||||
filePath="${i#file:/}";
|
|
||||||
if [ "$i" != "$sshHost" ]; then
|
|
||||||
cat needed-paths | while read; do
|
|
||||||
echo "Getting $REPLY and its closure over ssh" >&2
|
|
||||||
nix-copy-closure --from "$sshHost" --gzip "$REPLY" </dev/null || true;
|
|
||||||
done;
|
|
||||||
elif [ "$i" != "$httpHost" ] || [ "$i" != "$httpsHost" ]; then
|
|
||||||
cat needed-paths | while read; do
|
|
||||||
echo "Getting $REPLY over http/https" >&2
|
|
||||||
curl ${BAD_CERTIFICATE:+-k} -L "$i${REPLY##*/}" | gunzip | nix-store --import;
|
|
||||||
done;
|
|
||||||
elif [ "$i" != "$filePath" ] ; then
|
|
||||||
cat needed-paths | while read; do
|
|
||||||
echo "Installing $REPLY from file" >&2
|
|
||||||
gunzip < "$filePath/${REPLY##*/}".nar.gz | nix-store --import;
|
|
||||||
done;
|
|
||||||
elif [ "$i" = "nix-daemon://" ] ; then
|
|
||||||
NIX_REMOTE=daemon try_to_substitute;
|
|
||||||
refresh_critical_derivers;
|
|
||||||
NIX_REMOTE=daemon build_here;
|
|
||||||
elif [ "$i" = "nix-self://" ] ; then
|
|
||||||
NIX_REMOTE= try_to_substitute;
|
|
||||||
refresh_critical_derivers;
|
|
||||||
NIX_REMOTE= build_here;
|
|
||||||
elif [ "$i" = "nix-daemon-fixed://" ] ; then
|
|
||||||
refresh_critical_derivers;
|
|
||||||
|
|
||||||
cat critical-derivers | while read; do
|
|
||||||
if egrep '"(md5|sha1|sha256)"' "$REPLY" &>/dev/null; then
|
|
||||||
echo "Realising $REPLY using nix-daemon" >&2
|
|
||||||
NIX_REMOTE=daemon @bindir@/nix-store -r "${REPLY}"
|
|
||||||
fi;
|
|
||||||
done;
|
|
||||||
elif [ "$i" = "nix-self-fixed://" ] ; then
|
|
||||||
refresh_critical_derivers;
|
|
||||||
|
|
||||||
cat critical-derivers | while read; do
|
|
||||||
if egrep '"(md5|sha1|sha256)"' "$REPLY" &>/dev/null; then
|
|
||||||
echo "Realising $REPLY using direct Nix build" >&2
|
|
||||||
NIX_REMOTE= @bindir@/nix-store -r "${REPLY}"
|
|
||||||
fi;
|
|
||||||
done;
|
|
||||||
elif [ "$i" = "nix-daemon-substitute://" ] ; then
|
|
||||||
NIX_REMOTE=daemon try_to_substitute;
|
|
||||||
elif [ "$i" = "nix-self-substitute://" ] ; then
|
|
||||||
NIX_REMOTE= try_to_substitute;
|
|
||||||
elif [ "$i" = "nix-daemon-build://" ] ; then
|
|
||||||
refresh_critical_derivers;
|
|
||||||
NIX_REMOTE=daemon build_here;
|
|
||||||
elif [ "$i" = "nix-self-build://" ] ; then
|
|
||||||
refresh_critical_derivers;
|
|
||||||
NIX_REMOTE= build_here;
|
|
||||||
fi;
|
|
||||||
mv needed-paths wanted-paths;
|
|
||||||
cat wanted-paths | xargs nix-store --check-validity --print-invalid > needed-paths;
|
|
||||||
echo We still need $(cat needed-paths | wc -l) paths. >&2
|
|
||||||
done;
|
|
||||||
|
|
||||||
cd /
|
|
||||||
rm -r "$WORKING_DIRECTORY"
|
|
|
@ -3,7 +3,7 @@
|
||||||
set -e
|
set -e
|
||||||
|
|
||||||
script=$(nix-build -A outputs.hydraJobs.installerScriptForGHA --no-out-link)
|
script=$(nix-build -A outputs.hydraJobs.installerScriptForGHA --no-out-link)
|
||||||
installerHash=$(echo $script | cut -b12-43 -)
|
installerHash=$(echo "$script" | cut -b12-43 -)
|
||||||
|
|
||||||
installerURL=https://$CACHIX_NAME.cachix.org/serve/$installerHash/install
|
installerURL=https://$CACHIX_NAME.cachix.org/serve/$installerHash/install
|
||||||
|
|
||||||
|
|
|
@ -1,3 +1,3 @@
|
||||||
(import (fetchTarball https://github.com/edolstra/flake-compat/archive/master.tar.gz) {
|
(import (fetchTarball "https://github.com/edolstra/flake-compat/archive/master.tar.gz") {
|
||||||
src = ./.;
|
src = ./.;
|
||||||
}).shellNix
|
}).shellNix
|
||||||
|
|
|
@ -18,6 +18,7 @@
|
||||||
#include "derivations.hh"
|
#include "derivations.hh"
|
||||||
#include "local-store.hh"
|
#include "local-store.hh"
|
||||||
#include "legacy.hh"
|
#include "legacy.hh"
|
||||||
|
#include "experimental-features.hh"
|
||||||
|
|
||||||
using namespace nix;
|
using namespace nix;
|
||||||
using std::cin;
|
using std::cin;
|
||||||
|
@ -31,7 +32,7 @@ std::string escapeUri(std::string uri)
|
||||||
return uri;
|
return uri;
|
||||||
}
|
}
|
||||||
|
|
||||||
static string currentLoad;
|
static std::string currentLoad;
|
||||||
|
|
||||||
static AutoCloseFD openSlotLock(const Machine & m, uint64_t slot)
|
static AutoCloseFD openSlotLock(const Machine & m, uint64_t slot)
|
||||||
{
|
{
|
||||||
|
@ -96,7 +97,7 @@ static int main_build_remote(int argc, char * * argv)
|
||||||
}
|
}
|
||||||
|
|
||||||
std::optional<StorePath> drvPath;
|
std::optional<StorePath> drvPath;
|
||||||
string storeUri;
|
std::string storeUri;
|
||||||
|
|
||||||
while (true) {
|
while (true) {
|
||||||
|
|
||||||
|
@ -130,11 +131,14 @@ static int main_build_remote(int argc, char * * argv)
|
||||||
for (auto & m : machines) {
|
for (auto & m : machines) {
|
||||||
debug("considering building on remote machine '%s'", m.storeUri);
|
debug("considering building on remote machine '%s'", m.storeUri);
|
||||||
|
|
||||||
if (m.enabled && std::find(m.systemTypes.begin(),
|
if (m.enabled
|
||||||
m.systemTypes.end(),
|
&& (neededSystem == "builtin"
|
||||||
neededSystem) != m.systemTypes.end() &&
|
|| std::find(m.systemTypes.begin(),
|
||||||
|
m.systemTypes.end(),
|
||||||
|
neededSystem) != m.systemTypes.end()) &&
|
||||||
m.allSupported(requiredFeatures) &&
|
m.allSupported(requiredFeatures) &&
|
||||||
m.mandatoryMet(requiredFeatures)) {
|
m.mandatoryMet(requiredFeatures))
|
||||||
|
{
|
||||||
rightType = true;
|
rightType = true;
|
||||||
AutoCloseFD free;
|
AutoCloseFD free;
|
||||||
uint64_t load = 0;
|
uint64_t load = 0;
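With this change a derivation whose system is "builtin" (e.g. the built-in fetchurl derivations) can be scheduled on any configured machine instead of having to appear in its system-type list. An illustrative way to register a remote builder; the host, key path and features are placeholders:

    # fields: URI, system types, SSH key, max jobs, speed factor,
    #         supported features, mandatory features
    echo 'ssh://builder@example.org x86_64-linux /root/.ssh/id_ed25519 4 1 kvm,big-parallel -' \
        | sudo tee -a /etc/nix/machines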
|
||||||
|
@ -179,7 +183,7 @@ static int main_build_remote(int argc, char * * argv)
|
||||||
else
|
else
|
||||||
{
|
{
|
||||||
// build the hint template.
|
// build the hint template.
|
||||||
string errorText =
|
std::string errorText =
|
||||||
"Failed to find a machine for remote build!\n"
|
"Failed to find a machine for remote build!\n"
|
||||||
"derivation: %s\nrequired (system, features): (%s, %s)";
|
"derivation: %s\nrequired (system, features): (%s, %s)";
|
||||||
errorText += "\n%s available machines:";
|
errorText += "\n%s available machines:";
|
||||||
|
@ -189,7 +193,7 @@ static int main_build_remote(int argc, char * * argv)
|
||||||
errorText += "\n(%s, %s, %s, %s)";
|
errorText += "\n(%s, %s, %s, %s)";
|
||||||
|
|
||||||
// add the template values.
|
// add the template values.
|
||||||
string drvstr;
|
std::string drvstr;
|
||||||
if (drvPath.has_value())
|
if (drvPath.has_value())
|
||||||
drvstr = drvPath->to_string();
|
drvstr = drvPath->to_string();
|
||||||
else
|
else
|
||||||
|
@ -204,7 +208,7 @@ static int main_build_remote(int argc, char * * argv)
|
||||||
|
|
||||||
for (auto & m : machines)
|
for (auto & m : machines)
|
||||||
error
|
error
|
||||||
% concatStringsSep<vector<string>>(", ", m.systemTypes)
|
% concatStringsSep<std::vector<std::string>>(", ", m.systemTypes)
|
||||||
% m.maxJobs
|
% m.maxJobs
|
||||||
% concatStringsSep<StringSet>(", ", m.supportedFeatures)
|
% concatStringsSep<StringSet>(", ", m.supportedFeatures)
|
||||||
% concatStringsSep<StringSet>(", ", m.mandatoryFeatures);
|
% concatStringsSep<StringSet>(", ", m.mandatoryFeatures);
|
||||||
|
@ -295,7 +299,7 @@ connected:
|
||||||
|
|
||||||
std::set<Realisation> missingRealisations;
|
std::set<Realisation> missingRealisations;
|
||||||
StorePathSet missingPaths;
|
StorePathSet missingPaths;
|
||||||
if (settings.isExperimentalFeatureEnabled("ca-derivations") && !derivationHasKnownOutputPaths(drv.type())) {
|
if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations) && !derivationHasKnownOutputPaths(drv.type())) {
|
||||||
for (auto & outputName : wantedOutputs) {
|
for (auto & outputName : wantedOutputs) {
|
||||||
auto thisOutputHash = outputHashes.at(outputName);
|
auto thisOutputHash = outputHashes.at(outputName);
|
||||||
auto thisOutputId = DrvOutput{ thisOutputHash, outputName };
|
auto thisOutputId = DrvOutput{ thisOutputHash, outputName };
|
||||||
|
@ -327,7 +331,7 @@ connected:
|
||||||
for (auto & realisation : missingRealisations) {
|
for (auto & realisation : missingRealisations) {
|
||||||
// Should hold, because if the feature isn't enabled the set
|
// Should hold, because if the feature isn't enabled the set
|
||||||
// of missing realisations should be empty
|
// of missing realisations should be empty
|
||||||
settings.requireExperimentalFeature("ca-derivations");
|
settings.requireExperimentalFeature(Xp::CaDerivations);
|
||||||
store->registerDrvOutput(realisation);
|
store->registerDrvOutput(realisation);
|
||||||
}
|
}
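Xp::CaDerivations is the typed counterpart of the user-facing feature name; on the configuration side it is still enabled by name, for example:

    # sketch of a nix.conf change enabling the feature checked above
    echo 'experimental-features = nix-command flakes ca-derivations' \
        | sudo tee -a /etc/nix/nix.conf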
|
||||||
|
|
||||||
|
|
|
@ -1,18 +0,0 @@
|
||||||
Copyright (c) 2014 Chase Geigle
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
|
||||||
this software and associated documentation files (the "Software"), to deal in
|
|
||||||
the Software without restriction, including without limitation the rights to
|
|
||||||
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
|
|
||||||
the Software, and to permit persons to whom the Software is furnished to do so,
|
|
||||||
subject to the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in all
|
|
||||||
copies or substantial portions of the Software.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
||||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
|
|
||||||
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
|
|
||||||
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
|
|
||||||
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
|
||||||
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
|
File diff suppressed because it is too large
|
@ -54,6 +54,36 @@ void StoreCommand::run()
|
||||||
run(getStore());
|
run(getStore());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
CopyCommand::CopyCommand()
|
||||||
|
{
|
||||||
|
addFlag({
|
||||||
|
.longName = "from",
|
||||||
|
.description = "URL of the source Nix store.",
|
||||||
|
.labels = {"store-uri"},
|
||||||
|
.handler = {&srcUri},
|
||||||
|
});
|
||||||
|
|
||||||
|
addFlag({
|
||||||
|
.longName = "to",
|
||||||
|
.description = "URL of the destination Nix store.",
|
||||||
|
.labels = {"store-uri"},
|
||||||
|
.handler = {&dstUri},
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
ref<Store> CopyCommand::createStore()
|
||||||
|
{
|
||||||
|
return srcUri.empty() ? StoreCommand::createStore() : openStore(srcUri);
|
||||||
|
}
|
||||||
|
|
||||||
|
ref<Store> CopyCommand::getDstStore()
|
||||||
|
{
|
||||||
|
if (srcUri.empty() && dstUri.empty())
|
||||||
|
throw UsageError("you must pass '--from' and/or '--to'");
|
||||||
|
|
||||||
|
return dstUri.empty() ? openStore() : openStore(dstUri);
|
||||||
|
}
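On the command line these flags surface in the copy-style subcommands, and at least one of --from/--to must be given, matching the UsageError above. An illustrative invocation with example store URLs:

    nix copy --from https://cache.nixos.org --to file:///tmp/local-cache nixpkgs#hello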
|
||||||
|
|
||||||
EvalCommand::EvalCommand()
|
EvalCommand::EvalCommand()
|
||||||
{
|
{
|
||||||
}
|
}
|
||||||
|
@ -74,7 +104,15 @@ ref<Store> EvalCommand::getEvalStore()
|
||||||
ref<EvalState> EvalCommand::getEvalState()
|
ref<EvalState> EvalCommand::getEvalState()
|
||||||
{
|
{
|
||||||
if (!evalState)
|
if (!evalState)
|
||||||
evalState = std::make_shared<EvalState>(searchPath, getEvalStore(), getStore());
|
evalState =
|
||||||
|
#if HAVE_BOEHMGC
|
||||||
|
std::allocate_shared<EvalState>(traceable_allocator<EvalState>(),
|
||||||
|
searchPath, getEvalStore(), getStore())
|
||||||
|
#else
|
||||||
|
std::make_shared<EvalState>(
|
||||||
|
searchPath, getEvalStore(), getStore())
|
||||||
|
#endif
|
||||||
|
;
|
||||||
return ref<EvalState>(evalState);
|
return ref<EvalState>(evalState);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -120,7 +158,7 @@ void BuiltPathsCommand::run(ref<Store> store)
|
||||||
// XXX: This only computes the store path closure, ignoring
|
// XXX: This only computes the store path closure, ignoring
|
||||||
// intermediate realisations
|
// intermediate realisations
|
||||||
StorePathSet pathsRoots, pathsClosure;
|
StorePathSet pathsRoots, pathsClosure;
|
||||||
for (auto & root: paths) {
|
for (auto & root : paths) {
|
||||||
auto rootFromThis = root.outPaths();
|
auto rootFromThis = root.outPaths();
|
||||||
pathsRoots.insert(rootFromThis.begin(), rootFromThis.end());
|
pathsRoots.insert(rootFromThis.begin(), rootFromThis.end());
|
||||||
}
|
}
|
||||||
|
@ -138,17 +176,20 @@ StorePathsCommand::StorePathsCommand(bool recursive)
|
||||||
{
|
{
|
||||||
}
|
}
|
||||||
|
|
||||||
void StorePathsCommand::run(ref<Store> store, BuiltPaths paths)
|
void StorePathsCommand::run(ref<Store> store, BuiltPaths && paths)
|
||||||
{
|
{
|
||||||
StorePaths storePaths;
|
StorePathSet storePaths;
|
||||||
for (auto& builtPath : paths)
|
for (auto & builtPath : paths)
|
||||||
for (auto& p : builtPath.outPaths())
|
for (auto & p : builtPath.outPaths())
|
||||||
storePaths.push_back(p);
|
storePaths.insert(p);
|
||||||
|
|
||||||
run(store, std::move(storePaths));
|
auto sorted = store->topoSortPaths(storePaths);
|
||||||
|
std::reverse(sorted.begin(), sorted.end());
|
||||||
|
|
||||||
|
run(store, std::move(sorted));
|
||||||
}
|
}
|
||||||
|
|
||||||
void StorePathCommand::run(ref<Store> store, std::vector<StorePath> storePaths)
|
void StorePathCommand::run(ref<Store> store, std::vector<StorePath> && storePaths)
|
||||||
{
|
{
|
||||||
if (storePaths.size() != 1)
|
if (storePaths.size() != 1)
|
||||||
throw UsageError("this command requires exactly one store path");
|
throw UsageError("this command requires exactly one store path");
|
||||||
|
@ -200,10 +241,10 @@ void MixProfile::updateProfile(const BuiltPaths & buildables)
|
||||||
|
|
||||||
for (auto & buildable : buildables) {
|
for (auto & buildable : buildables) {
|
||||||
std::visit(overloaded {
|
std::visit(overloaded {
|
||||||
[&](BuiltPath::Opaque bo) {
|
[&](const BuiltPath::Opaque & bo) {
|
||||||
result.push_back(bo.path);
|
result.push_back(bo.path);
|
||||||
},
|
},
|
||||||
[&](BuiltPath::Built bfd) {
|
[&](const BuiltPath::Built & bfd) {
|
||||||
for (auto & output : bfd.outputs) {
|
for (auto & output : bfd.outputs) {
|
||||||
result.push_back(output.second);
|
result.push_back(output.second);
|
||||||
}
|
}
|
||||||
|
|
|
@ -43,6 +43,19 @@ private:
|
||||||
std::shared_ptr<Store> _store;
|
std::shared_ptr<Store> _store;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
/* A command that copies something between `--from` and `--to`
|
||||||
|
stores. */
|
||||||
|
struct CopyCommand : virtual StoreCommand
|
||||||
|
{
|
||||||
|
std::string srcUri, dstUri;
|
||||||
|
|
||||||
|
CopyCommand();
|
||||||
|
|
||||||
|
ref<Store> createStore() override;
|
||||||
|
|
||||||
|
ref<Store> getDstStore();
|
||||||
|
};
|
||||||
|
|
||||||
struct EvalCommand : virtual StoreCommand, MixEvalArgs
|
struct EvalCommand : virtual StoreCommand, MixEvalArgs
|
||||||
{
|
{
|
||||||
EvalCommand();
|
EvalCommand();
|
||||||
|
@ -169,7 +182,7 @@ public:
|
||||||
|
|
||||||
using StoreCommand::run;
|
using StoreCommand::run;
|
||||||
|
|
||||||
virtual void run(ref<Store> store, BuiltPaths paths) = 0;
|
virtual void run(ref<Store> store, BuiltPaths && paths) = 0;
|
||||||
|
|
||||||
void run(ref<Store> store) override;
|
void run(ref<Store> store) override;
|
||||||
|
|
||||||
|
@ -182,9 +195,9 @@ struct StorePathsCommand : public BuiltPathsCommand
|
||||||
|
|
||||||
using BuiltPathsCommand::run;
|
using BuiltPathsCommand::run;
|
||||||
|
|
||||||
virtual void run(ref<Store> store, std::vector<StorePath> storePaths) = 0;
|
virtual void run(ref<Store> store, std::vector<StorePath> && storePaths) = 0;
|
||||||
|
|
||||||
void run(ref<Store> store, BuiltPaths paths) override;
|
void run(ref<Store> store, BuiltPaths && paths) override;
|
||||||
};
|
};
|
||||||
|
|
||||||
/* A command that operates on exactly one store path. */
|
/* A command that operates on exactly one store path. */
|
||||||
|
@ -194,7 +207,7 @@ struct StorePathCommand : public StorePathsCommand
|
||||||
|
|
||||||
virtual void run(ref<Store> store, const StorePath & storePath) = 0;
|
virtual void run(ref<Store> store, const StorePath & storePath) = 0;
|
||||||
|
|
||||||
void run(ref<Store> store, std::vector<StorePath> storePaths) override;
|
void run(ref<Store> store, std::vector<StorePath> && storePaths) override;
|
||||||
};
|
};
|
||||||
|
|
||||||
/* A helper class for registering commands globally. */
|
/* A helper class for registering commands globally. */
|
||||||
|
|
|
@ -97,7 +97,7 @@ MixFlakeOptions::MixFlakeOptions()
|
||||||
lockFlags.writeLockFile = false;
|
lockFlags.writeLockFile = false;
|
||||||
lockFlags.inputOverrides.insert_or_assign(
|
lockFlags.inputOverrides.insert_or_assign(
|
||||||
flake::parseInputPath(inputPath),
|
flake::parseInputPath(inputPath),
|
||||||
parseFlakeRef(flakeRef, absPath(".")));
|
parseFlakeRef(flakeRef, absPath("."), true));
|
||||||
}}
|
}}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
@ -158,7 +158,10 @@ SourceExprCommand::SourceExprCommand()
|
||||||
|
|
||||||
Strings SourceExprCommand::getDefaultFlakeAttrPaths()
|
Strings SourceExprCommand::getDefaultFlakeAttrPaths()
|
||||||
{
|
{
|
||||||
return {"defaultPackage." + settings.thisSystem.get()};
|
return {
|
||||||
|
"packages." + settings.thisSystem.get() + ".default",
|
||||||
|
"defaultPackage." + settings.thisSystem.get()
|
||||||
|
};
|
||||||
}
|
}
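With this change a bare installable such as `nix build .` first looks for packages.<system>.default and only then falls back to the legacy defaultPackage.<system>. A sketch of how the new-style attribute is consumed, assuming flakes are enabled:

    # flake.nix outputs (sketch):
    #   packages.x86_64-linux.default = nixpkgs.legacyPackages.x86_64-linux.hello;
    nix build .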
|
||||||
|
|
||||||
Strings SourceExprCommand::getDefaultFlakeAttrPathPrefixes()
|
Strings SourceExprCommand::getDefaultFlakeAttrPathPrefixes()
|
||||||
|
@@ -191,18 +194,21 @@ void SourceExprCommand::completeInstallable(std::string_view prefix)
        auto sep = prefix_.rfind('.');
        std::string searchWord;
        if (sep != std::string::npos) {
-            searchWord = prefix_.substr(sep, std::string::npos);
+            searchWord = prefix_.substr(sep + 1, std::string::npos);
            prefix_ = prefix_.substr(0, sep);
        } else {
            searchWord = prefix_;
            prefix_ = "";
        }

-        Value &v1(*findAlongAttrPath(*state, prefix_, *autoArgs, root).first);
-        state->forceValue(v1);
+        auto [v, pos] = findAlongAttrPath(*state, prefix_, *autoArgs, root);
+        Value &v1(*v);
+        state->forceValue(v1, pos);
        Value v2;
        state->autoCallFunction(*autoArgs, v1, v2);

+        completionType = ctAttrs;
+
        if (v2.type() == nAttrs) {
            for (auto & i : *v2.attrs) {
                std::string name = i.name;
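The `sep + 1` change fixes an off-by-one: `substr(sep)` kept the leading dot in the search word used for attribute completion. A tiny stand-alone illustration (plain C++, not Nix code):

    #include <cassert>
    #include <string>

    int main()
    {
        std::string prefix = "packages.x86_64-linux.hel";
        auto sep = prefix.rfind('.');
        assert(prefix.substr(sep) == ".hel");     // old: dot included
        assert(prefix.substr(sep + 1) == "hel");  // fixed: attribute name only
    }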
@@ -232,7 +238,9 @@ void completeFlakeRefWithFragment(
       prefix. */
    try {
        auto hash = prefix.find('#');
-        if (hash != std::string::npos) {
+        if (hash == std::string::npos) {
+            completeFlakeRef(evalState->store, prefix);
+        } else {
            auto fragment = prefix.substr(hash + 1);
            auto flakeRefS = std::string(prefix.substr(0, hash));
            // FIXME: do tilde expansion.
@@ -248,6 +256,8 @@ void completeFlakeRefWithFragment(
               flake. */
            attrPathPrefixes.push_back("");

+            completionType = ctAttrs;
+
            for (auto & attrPathPrefixS : attrPathPrefixes) {
                auto attrPathPrefix = parseAttrPath(*evalState, attrPathPrefixS);
                auto attrPathS = attrPathPrefixS + std::string(fragment);
@@ -285,12 +295,13 @@ void completeFlakeRefWithFragment(
    } catch (Error & e) {
        warn(e.msg());
    }
-
-    completeFlakeRef(evalState->store, prefix);
 }

 void completeFlakeRef(ref<Store> store, std::string_view prefix)
 {
+    if (!settings.isExperimentalFeatureEnabled(Xp::Flakes))
+        return;
+
    if (prefix == "")
        completions->add(".");
@@ -338,6 +349,18 @@ Installable::getCursor(EvalState & state)
    return cursors[0];
 }

+static StorePath getDeriver(
+    ref<Store> store,
+    const Installable & i,
+    const StorePath & drvPath)
+{
+    auto derivers = store->queryValidDerivers(drvPath);
+    if (derivers.empty())
+        throw Error("'%s' does not have a known deriver", i.what());
+    // FIXME: use all derivers?
+    return *derivers.begin();
+}
+
 struct InstallableStorePath : Installable
 {
    ref<Store> store;
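The new `getDeriver` helper factors out the output-path-to-deriver lookup: it asks the store for the valid derivers of `drvPath` and, as its FIXME notes, picks the first one. It is reused further down by `InstallableStorePath::toDrvPaths` and by `toDerivations`, which previously inlined the same logic.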
@@ -346,7 +369,7 @@ struct InstallableStorePath : Installable
    InstallableStorePath(ref<Store> store, StorePath && storePath)
        : store(store), storePath(std::move(storePath)) { }

-    std::string what() override { return store->printStorePath(storePath); }
+    std::string what() const override { return store->printStorePath(storePath); }

    DerivedPaths toDerivedPaths() override
    {
@@ -367,6 +390,15 @@ struct InstallableStorePath : Installable
        }
    }

+    StorePathSet toDrvPaths(ref<Store> store) override
+    {
+        if (storePath.isDerivation()) {
+            return {storePath};
+        } else {
+            return {getDeriver(store, *this, storePath)};
+        }
+    }
+
    std::optional<StorePath> getStorePath() override
    {
        return storePath;
@@ -395,6 +427,14 @@ DerivedPaths InstallableValue::toDerivedPaths()
    return res;
 }

+StorePathSet InstallableValue::toDrvPaths(ref<Store> store)
+{
+    StorePathSet res;
+    for (auto & drv : toDerivations())
+        res.insert(drv.drvPath);
+    return res;
+}
+
 struct InstallableAttrPath : InstallableValue
 {
    SourceExprCommand & cmd;
@@ -405,12 +445,12 @@ struct InstallableAttrPath : InstallableValue
        : InstallableValue(state), cmd(cmd), v(allocRootValue(v)), attrPath(attrPath)
    { }

-    std::string what() override { return attrPath; }
+    std::string what() const override { return attrPath; }

    std::pair<Value *, Pos> toValue(EvalState & state) override
    {
        auto [vRes, pos] = findAlongAttrPath(state, attrPath, *cmd.getAutoArgs(state), **v);
-        state.forceValue(*vRes);
+        state.forceValue(*vRes, pos);
        return {vRes, pos};
    }
@@ -460,7 +500,7 @@ Value * InstallableFlake::getFlakeOutputs(EvalState & state, const flake::Locked
    auto aOutputs = vFlake->attrs->get(state.symbols.create("outputs"));
    assert(aOutputs);

-    state.forceValue(*aOutputs->value);
+    state.forceValue(*aOutputs->value, [&]() { return aOutputs->value->determinePos(noPos); });

    return aOutputs->value;
 }
@@ -485,7 +525,7 @@ ref<eval_cache::EvalCache> openEvalCache(
            auto vFlake = state.allocValue();
            flake::callFlake(state, *lockedFlake, *vFlake);

-            state.forceAttrs(*vFlake);
+            state.forceAttrs(*vFlake, noPos);

            auto aOutputs = vFlake->attrs->get(state.symbols.create("outputs"));
            assert(aOutputs);
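Several hunks in this file give `forceValue` and `forceAttrs` a position argument (`pos`, `noPos`, or a callback that computes one), presumably so that evaluation errors raised while forcing a value can point at the expression being evaluated instead of at no location.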
@@ -508,13 +548,14 @@ InstallableFlake::InstallableFlake(
    SourceExprCommand * cmd,
    ref<EvalState> state,
    FlakeRef && flakeRef,
-    Strings && attrPaths,
-    Strings && prefixes,
+    std::string_view fragment,
+    Strings attrPaths,
+    Strings prefixes,
    const flake::LockFlags & lockFlags)
    : InstallableValue(state),
      flakeRef(flakeRef),
-      attrPaths(attrPaths),
-      prefixes(prefixes),
+      attrPaths(fragment == "" ? attrPaths : Strings{(std::string) fragment}),
+      prefixes(fragment == "" ? Strings{} : prefixes),
      lockFlags(lockFlags)
 {
    if (cmd && cmd->getAutoArgs(*state)->size())
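The fragment handling moves from the call site into the constructor: when the user supplied a fragment (e.g. `nixpkgs#hello`), it alone becomes the attribute path and the caller's prefixes are kept for expansion via `getActualAttrPaths()`; with no fragment (plain `nixpkgs`), the caller's default attribute paths (see `getDefaultFlakeAttrPaths()` above) are used as-is and the prefixes are dropped.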
@@ -529,6 +570,8 @@ std::tuple<std::string, FlakeRef, InstallableValue::DerivationInfo> InstallableF
    auto root = cache->getRoot();

    for (auto & attrPath : getActualAttrPaths()) {
+        debug("trying flake output attribute '%s'", attrPath);
+
        auto attr = root->findAlongAttrPath(
            parseAttrPath(*state, attrPath),
            true
@@ -572,7 +615,7 @@ std::pair<Value *, Pos> InstallableFlake::toValue(EvalState & state)
    for (auto & attrPath : getActualAttrPaths()) {
        try {
            auto [v, pos] = findAlongAttrPath(state, attrPath, *emptyArgs, *vOutputs);
-            state.forceValue(*v);
+            state.forceValue(*v, pos);
            return {v, pos};
        } catch (AttrPathNotFound & e) {
        }
@@ -671,7 +714,8 @@ std::vector<std::shared_ptr<Installable>> SourceExprCommand::parseInstallables(
                this,
                getEvalState(),
                std::move(flakeRef),
-                fragment == "" ? getDefaultFlakeAttrPaths() : Strings{fragment},
+                fragment,
+                getDefaultFlakeAttrPaths(),
                getDefaultFlakeAttrPathPrefixes(),
                lockFlags));
            continue;
@@ -697,13 +741,13 @@ std::shared_ptr<Installable> SourceExprCommand::parseInstallable(
 BuiltPaths getBuiltPaths(ref<Store> evalStore, ref<Store> store, const DerivedPaths & hopefullyBuiltPaths)
 {
    BuiltPaths res;
-    for (auto & b : hopefullyBuiltPaths)
+    for (const auto & b : hopefullyBuiltPaths)
        std::visit(
            overloaded{
-                [&](DerivedPath::Opaque bo) {
+                [&](const DerivedPath::Opaque & bo) {
                    res.push_back(BuiltPath::Opaque{bo.path});
                },
-                [&](DerivedPath::Built bfd) {
+                [&](const DerivedPath::Built & bfd) {
                    OutputPathMap outputs;
                    auto drv = evalStore->readDerivation(bfd.drvPath);
                    auto outputHashes = staticOutputHashes(*evalStore, drv); // FIXME: expensive
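The visitor lambdas now take the variant alternatives by const reference instead of by value, avoiding a copy per visited path. A self-contained sketch of the `overloaded` + `std::visit` idiom used here (plain C++17; the code above relies on an `overloaded` helper defined elsewhere in the tree, and the two-line definition below is the usual stand-in):

    #include <iostream>
    #include <string>
    #include <variant>
    #include <vector>

    // Merge several lambdas into one visitor type.
    template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
    template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;

    struct Opaque { std::string path; };
    struct Built  { std::string drvPath; };

    int main()
    {
        std::vector<std::variant<Opaque, Built>> paths{
            Opaque{"/nix/store/aaa-hello"},
            Built{"/nix/store/bbb-hello.drv"},
        };

        for (const auto & p : paths)
            std::visit(overloaded{
                // const references: the alternatives are not copied per visit
                [](const Opaque & o) { std::cout << "opaque: " << o.path << "\n"; },
                [](const Built & b)  { std::cout << "built:  " << b.drvPath << "\n"; },
            }, p);
    }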
@@ -714,7 +758,7 @@ BuiltPaths getBuiltPaths(ref<Store> evalStore, ref<Store> store, const DerivedPa
                            "the derivation '%s' doesn't have an output named '%s'",
                            store->printStorePath(bfd.drvPath), output);
                        if (settings.isExperimentalFeatureEnabled(
-                                "ca-derivations")) {
+                                Xp::CaDerivations)) {
                            auto outputId =
                                DrvOutput{outputHashes.at(output), output};
                            auto realisation =
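Here, as with the `Xp::Flakes` check earlier, the experimental-feature test switches from a free-form string to the `Xp` enumeration, presumably so that misspelled feature names are caught at compile time rather than silently treated as disabled.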
@@ -823,19 +867,15 @@ StorePathSet toDerivations(
 {
    StorePathSet drvPaths;

-    for (auto & i : installables)
-        for (auto & b : i->toDerivedPaths())
+    for (const auto & i : installables)
+        for (const auto & b : i->toDerivedPaths())
            std::visit(overloaded {
-                [&](DerivedPath::Opaque bo) {
+                [&](const DerivedPath::Opaque & bo) {
                    if (!useDeriver)
                        throw Error("argument '%s' did not evaluate to a derivation", i->what());
-                    auto derivers = store->queryValidDerivers(bo.path);
-                    if (derivers.empty())
-                        throw Error("'%s' does not have a known deriver", i->what());
-                    // FIXME: use all derivers?
-                    drvPaths.insert(*derivers.begin());
+                    drvPaths.insert(getDeriver(store, *i, bo.path));
                },
-                [&](DerivedPath::Built bfd) {
+                [&](const DerivedPath::Built & bfd) {
                    drvPaths.insert(bfd.drvPath);
                },
            }, b.raw());
@@ -33,10 +33,15 @@ struct Installable
 {
    virtual ~Installable() { }

-    virtual std::string what() = 0;
+    virtual std::string what() const = 0;

    virtual DerivedPaths toDerivedPaths() = 0;

+    virtual StorePathSet toDrvPaths(ref<Store> store)
+    {
+        throw Error("'%s' cannot be converted to a derivation path", what());
+    }
+
    DerivedPath toDerivedPath();

    UnresolvedApp toApp(EvalState & state);
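`toDrvPaths()` is the new interface point: the base `Installable` throws by default, `InstallableValue` returns the `.drv` paths of its derivations, and `InstallableStorePath` returns the path itself when it already is a derivation or falls back to `getDeriver` otherwise (all shown earlier in this diff).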
@@ -81,6 +86,8 @@ struct InstallableValue : Installable
    virtual std::vector<DerivationInfo> toDerivations() = 0;

    DerivedPaths toDerivedPaths() override;

+    StorePathSet toDrvPaths(ref<Store> store) override;
+
 };

 struct InstallableFlake : InstallableValue
@@ -95,11 +102,12 @@ struct InstallableFlake : InstallableValue
        SourceExprCommand * cmd,
        ref<EvalState> state,
        FlakeRef && flakeRef,
-        Strings && attrPaths,
-        Strings && prefixes,
+        std::string_view fragment,
+        Strings attrPaths,
+        Strings prefixes,
        const flake::LockFlags & lockFlags);

-    std::string what() override { return flakeRef.to_string() + "#" + *attrPaths.begin(); }
+    std::string what() const override { return flakeRef.to_string() + "#" + *attrPaths.begin(); }

    std::vector<std::string> getActualAttrPaths();
Some files were not shown because too many files have changed in this diff.