forked from lix-project/lix

Compare commits: `main...remove-mak` (31 commits)

| Author | SHA1 |
|---|---|
| Qyriad | acb164b2c9 |
| Qyriad | 76b45b4861 |
| puck | 9229e87347 |
| Ilya K | 9462c01c3e |
| Qyriad | 78ce710722 |
| Maximilian Bosch | 8773439a85 |
| Artemis Tosini | 789aa39576 |
| Maximilian Bosch | 104448e75d |
| eldritch horrors | a1ad4e52a6 |
| eldritch horrors | fb0996aaa8 |
| eldritch horrors | dfe3baea12 |
| Maximilian Bosch | ce76d3eab2 |
| Artemis Tosini | c03de0df62 |
| Maximilian Bosch | ecad3632cc |
| eldritch horrors | 5420b3afd6 |
| eldritch horrors | 5e69f8aa3d |
| eldritch horrors | 38442e3123 |
| Artemis Tosini | 7114b0465a |
| Qyriad | f24223931d |
| Artemis Tosini | b247ef72dc |
| Qyriad | be4a3168c9 |
| Qyriad | b913a939b0 |
| Qyriad | 05e3b1d39e |
| eldritch horrors | 86bfede948 |
| eldritch horrors | 257d7ffa7b |
| Qyriad | 7063170d5f |
| eldritch horrors | ff9a4fc336 |
| eldritch horrors | e5903aab65 |
| puck | c8c838381d |
| puck | 272c2ff15f |
| Alyssa Ross | c1319831fb |
.gitignore (vendored, 8 lines changed)

```diff
@@ -1,16 +1,8 @@
-Makefile.config
-perl/Makefile.config
-
 # /
-/aclocal.m4
-/autom4te.cache
 /precompiled-headers.h.gch
-/config.*
-/configure
 /stamp-h1
 /svn-revision
 /libtool
-/config
 
 # /doc/manual/
 /doc/manual/*.1
```
Makefile (71 lines, deleted — @@ -1,71 +0,0 @@)

```makefile
include mk/build-dir.mk

-include $(buildprefix)Makefile.config
clean-files += $(buildprefix)Makefile.config

ifeq ($(ENABLE_BUILD), yes)
makefiles = \
  mk/precompiled-headers.mk \
  local.mk \
  src/libutil/local.mk \
  src/libstore/local.mk \
  src/libfetchers/local.mk \
  src/libmain/local.mk \
  src/libexpr/local.mk \
  src/libcmd/local.mk \
  src/nix/local.mk \
  src/resolve-system-dependencies/local.mk \
  scripts/local.mk \
  misc/bash/local.mk \
  misc/fish/local.mk \
  misc/zsh/local.mk \
  misc/systemd/local.mk \
  misc/launchd/local.mk
endif

ifeq ($(ENABLE_BUILD)_$(ENABLE_TESTS), yes_yes)
UNIT_TEST_ENV = _NIX_TEST_UNIT_DATA=unit-test-data
makefiles += \
  tests/unit/libutil/local.mk \
  tests/unit/libutil-support/local.mk \
  tests/unit/libstore/local.mk
endif

ifeq ($(ENABLE_TESTS), yes)
makefiles += \
  tests/unit/libstore-support/local.mk \
  tests/unit/libexpr/local.mk \
  tests/unit/libexpr-support/local.mk \
  tests/functional/local.mk \
  tests/functional/ca/local.mk \
  tests/functional/dyn-drv/local.mk \
  tests/functional/test-libstoreconsumer/local.mk \
  tests/functional/repl_characterization/local.mk \
  tests/functional/plugins/local.mk
else
makefiles += \
  mk/disable-tests.mk
endif

# Some makefiles require access to built programs and must be included late.
makefiles-late =

ifeq ($(ENABLE_BUILD), yes)
makefiles-late += doc/manual/local.mk
makefiles-late += doc/internal-api/local.mk
endif

# Miscellaneous global Flags

OPTIMIZE = 1

ifeq ($(OPTIMIZE), 1)
  GLOBAL_CXXFLAGS += -O2 $(CXXLTO)
  GLOBAL_LDFLAGS += $(CXXLTO)
else
  GLOBAL_CXXFLAGS += -O0 -U_FORTIFY_SOURCE
endif

include mk/lib.mk

GLOBAL_CXXFLAGS += -g -Wall -Wimplicit-fallthrough -include $(buildprefix)config.h -std=c++2a -I src
```
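For reference, a hedged sketch of driving this top-level Makefile from a configured tree; the variable and target names are the ones defined above or in the old hacking guide further down, and the job count is illustrative:

```bash
$ make -j $NIX_BUILD_CORES                 # default build: GLOBAL_CXXFLAGS gets -O2 $(CXXLTO)
$ make -j $NIX_BUILD_CORES OPTIMIZE=0      # takes the -O0 -U_FORTIFY_SOURCE branch instead
$ make installcheck -j $NIX_BUILD_CORES    # functional tests; only meaningful with ENABLE_TESTS=yes
```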
@@ -1,52 +0,0 @@

```makefile
AR = @AR@
BDW_GC_LIBS = @BDW_GC_LIBS@
BOOST_LDFLAGS = @BOOST_LDFLAGS@
BUILD_SHARED_LIBS = @BUILD_SHARED_LIBS@
CC = @CC@
CFLAGS = @CFLAGS@
CXX = @CXX@
CXXFLAGS = @CXXFLAGS@
CXXLTO = @CXXLTO@
EDITLINE_LIBS = @EDITLINE_LIBS@
ENABLE_S3 = @ENABLE_S3@
GTEST_LIBS = @GTEST_LIBS@
HAVE_LIBCPUID = @HAVE_LIBCPUID@
HAVE_SECCOMP = @HAVE_SECCOMP@
HOST_OS = @host_os@
LDFLAGS = @LDFLAGS@
LIBARCHIVE_LIBS = @LIBARCHIVE_LIBS@
LIBBROTLI_LIBS = @LIBBROTLI_LIBS@
LIBCURL_LIBS = @LIBCURL_LIBS@
LIBSECCOMP_LIBS = @LIBSECCOMP_LIBS@
LOWDOWN_LIBS = @LOWDOWN_LIBS@
NIXDOC_LIBS = -llix_doc
OPENSSL_LIBS = @OPENSSL_LIBS@
PACKAGE_NAME = @PACKAGE_NAME@
PACKAGE_VERSION = @PACKAGE_VERSION@
RAPIDCHECK_HEADERS = @RAPIDCHECK_HEADERS@
SHELL = @bash@
SODIUM_LIBS = @SODIUM_LIBS@
SQLITE3_LIBS = @SQLITE3_LIBS@
bash = @bash@
bindir = @bindir@
datadir = @datadir@
datarootdir = @datarootdir@
doc_generate = @doc_generate@
docdir = @docdir@
embedded_sandbox_shell = @embedded_sandbox_shell@
exec_prefix = @exec_prefix@
includedir = @includedir@
libdir = @libdir@
libexecdir = @libexecdir@
localstatedir = @localstatedir@
lsof = @lsof@
mandir = @mandir@
pkglibdir = $(libdir)/$(PACKAGE_NAME)
prefix = @prefix@
sandbox_shell = @sandbox_shell@
storedir = @storedir@
sysconfdir = @sysconfdir@
system = @system@
ENABLE_BUILD = @ENABLE_BUILD@
ENABLE_TESTS = @ENABLE_TESTS@
internal_api_docs = @internal_api_docs@
```
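These `@…@` placeholders are substituted by the configure script removed below; a hedged way to inspect what a configured tree ended up with, assuming a generated `Makefile.config` is present in the build root:

```bash
$ grep -E '^(CXX|ENABLE_TESTS|storedir|system) =' Makefile.config
```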
configure.ac (396 lines, deleted — @@ -1,396 +0,0 @@)

```
AC_INIT([nix],[m4_esyscmd(bash -c "echo -n $(cat ./.version)$VERSION_SUFFIX")])
AC_CONFIG_MACRO_DIRS([m4])
AC_CONFIG_SRCDIR(README.md)
AC_CONFIG_AUX_DIR(config)

AC_PROG_SED

# Construct a Nix system name (like "i686-linux"):
# https://www.gnu.org/software/autoconf/manual/html_node/Canonicalizing.html#index-AC_005fCANONICAL_005fHOST-1
# The inital value is produced by the `config/config.guess` script:
# upstream: https://git.savannah.gnu.org/cgit/config.git/tree/config.guess
# It has the following form, which is not documented anywhere:
# <cpu>-<vendor>-<os>[<version>][-<abi>]
# If `./configure` is passed any of the `--host`, `--build`, `--target` options, the value comes from `config/config.sub` instead:
# upstream: https://git.savannah.gnu.org/cgit/config.git/tree/config.sub
AC_CANONICAL_HOST
AC_MSG_CHECKING([for the canonical Nix system name])

AC_ARG_WITH(system, AS_HELP_STRING([--with-system=SYSTEM],[Platform identifier (e.g., `i686-linux').]),
  [system=$withval],
  [case "$host_cpu" in
    i*86)
      machine_name="i686";;
    amd64)
      machine_name="x86_64";;
    armv6|armv7)
      machine_name="${host_cpu}l";;
    *)
      machine_name="$host_cpu";;
  esac

  case "$host_os" in
    linux-gnu*|linux-musl*)
      # For backward compatibility, strip the `-gnu' part.
      system="$machine_name-linux";;
    *)
      # Strip the version number from names such as `gnu0.3',
      # `darwin10.2.0', etc.
      system="$machine_name-`echo $host_os | "$SED" -e's/@<:@0-9.@:>@*$//g'`";;
  esac])

AC_MSG_RESULT($system)
AC_SUBST(system)
AC_DEFINE_UNQUOTED(SYSTEM, ["$system"], [platform identifier ('cpu-os')])


# State should be stored in /nix/var, unless the user overrides it explicitly.
test "$localstatedir" = '${prefix}/var' && localstatedir=/nix/var

# Assign a default value to C{,XX}FLAGS as the default configure script sets them
# to -O2 otherwise, which we don't want to have hardcoded
CFLAGS=${CFLAGS-""}
CXXFLAGS=${CXXFLAGS-""}

AC_PROG_CC
AC_PROG_CXX
AC_PROG_CPP

AC_CHECK_TOOL([AR], [ar])

# Use 64-bit file system calls so that we can support files > 2 GiB.
AC_SYS_LARGEFILE


# Solaris-specific stuff.
AC_STRUCT_DIRENT_D_TYPE
case "$host_os" in
  solaris*)
    # Solaris requires -lsocket -lnsl for network functions
    LDFLAGS="-lsocket -lnsl $LDFLAGS"
    ;;
esac


ENSURE_NO_GCC_BUG_80431


# Check for pubsetbuf.
AC_MSG_CHECKING([for pubsetbuf])
AC_LANG_PUSH(C++)
AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <iostream>
using namespace std;
static char buf[1024];]],
[[cerr.rdbuf()->pubsetbuf(buf, sizeof(buf));]])],
[AC_MSG_RESULT(yes) AC_DEFINE(HAVE_PUBSETBUF, 1, [Whether pubsetbuf is available.])],
AC_MSG_RESULT(no))
AC_LANG_POP(C++)


AC_CHECK_FUNCS([statvfs pipe2])


# Check for lutimes, optionally used for changing the mtime of
# symlinks.
AC_CHECK_FUNCS([lutimes])


# Check whether the store optimiser can optimise symlinks.
AC_MSG_CHECKING([whether it is possible to create a link to a symlink])
ln -s bla tmp_link
if ln tmp_link tmp_link2 2> /dev/null; then
  AC_MSG_RESULT(yes)
  AC_DEFINE(CAN_LINK_SYMLINK, 1, [Whether link() works on symlinks.])
else
  AC_MSG_RESULT(no)
fi
rm -f tmp_link tmp_link2


# Check for <locale>.
AC_LANG_PUSH(C++)
AC_CHECK_HEADERS([locale])
AC_LANG_POP(C++)


AC_DEFUN([NEED_PROG],
[
AC_PATH_PROG($1, $2)
if test -z "$$1"; then
  AC_MSG_ERROR([$2 is required])
fi
])

NEED_PROG(bash, bash)
AC_PATH_PROG(flex, flex, false)
AC_PATH_PROG(bison, bison, false)
AC_PATH_PROG(dot, dot)
AC_PATH_PROG(lsof, lsof, lsof)
NEED_PROG(jq, jq)


AC_SUBST(coreutils, [$(dirname $(type -p cat))])


AC_ARG_WITH(store-dir, AS_HELP_STRING([--with-store-dir=PATH],[path of the Nix store (defaults to /nix/store)]),
  storedir=$withval, storedir='/nix/store')
AC_SUBST(storedir)


# Look for boost, a required dependency.
# Note that AX_BOOST_BASE only exports *CPP* BOOST_CPPFLAGS, no CXX flags,
# and CPPFLAGS are not passed to the C++ compiler automatically.
# Thus we append the returned CPPFLAGS to the CXXFLAGS here.
AX_BOOST_BASE([1.66], [CXXFLAGS="$BOOST_CPPFLAGS $CXXFLAGS"], [AC_MSG_ERROR([Nix requires boost.])])
# For unknown reasons, setting this directly in the ACTION-IF-FOUND above
# ends up with LDFLAGS being empty, so we set it afterwards.
LDFLAGS="$BOOST_LDFLAGS $LDFLAGS"

# On some platforms, new-style atomics need a helper library
AC_MSG_CHECKING(whether -latomic is needed)
AC_LINK_IFELSE([AC_LANG_SOURCE([[
#include <stdint.h>
uint64_t v;
int main() {
  return (int)__atomic_load_n(&v, __ATOMIC_ACQUIRE);
}]])], GCC_ATOMIC_BUILTINS_NEED_LIBATOMIC=no, GCC_ATOMIC_BUILTINS_NEED_LIBATOMIC=yes)
AC_MSG_RESULT($GCC_ATOMIC_BUILTINS_NEED_LIBATOMIC)
if test "x$GCC_ATOMIC_BUILTINS_NEED_LIBATOMIC" = xyes; then
  LDFLAGS="-latomic $LDFLAGS"
fi

# Running the functional tests without building Nix is useful for testing
# different pre-built versions of Nix against each other.
AC_ARG_ENABLE(build, AS_HELP_STRING([--disable-build],[Do not build nix]),
  ENABLE_BUILD=$enableval, ENABLE_BUILD=yes)
AC_SUBST(ENABLE_BUILD)
# Building without tests is useful for bootstrapping with a smaller footprint
# or running the tests in a separate derivation. Otherwise, we do compile and
# run them.
AC_ARG_ENABLE(tests, AS_HELP_STRING([--disable-tests],[Do not build the tests]),
  ENABLE_TESTS=$enableval, ENABLE_TESTS=yes)
AC_SUBST(ENABLE_TESTS)

# Building without API docs is the default as Nix' C++ interfaces are internal and unstable.
AC_ARG_ENABLE(internal_api_docs, AS_HELP_STRING([--enable-internal-api-docs],[Build API docs for Nix's internal unstable C++ interfaces]),
  internal_api_docs=$enableval, internal_api_docs=no)
AC_SUBST(internal_api_docs)

# LTO is currently broken with clang for unknown reasons; ld segfaults in the llvm plugin
AC_ARG_ENABLE(lto, AS_HELP_STRING([--enable-lto],[Enable LTO (only supported with GCC) [default=no]]),
  lto=$enableval, lto=no)
if test "$lto" = yes; then
  if $CXX --version | grep -q GCC; then
    AC_SUBST(CXXLTO, [-flto=jobserver])
  else
    echo "error: LTO is only supported with GCC at the moment" >&2
    exit 1
  fi
else
  AC_SUBST(CXXLTO, [""])
fi

PKG_PROG_PKG_CONFIG

AC_ARG_ENABLE(shared, AS_HELP_STRING([--enable-shared],[Build shared libraries for Nix [default=yes]]),
  shared=$enableval, shared=yes)
if test "$shared" = yes; then
  AC_SUBST(BUILD_SHARED_LIBS, 1, [Whether to build shared libraries.])
else
  AC_SUBST(BUILD_SHARED_LIBS, 0, [Whether to build shared libraries.])
  PKG_CONFIG="$PKG_CONFIG --static"
fi

# Look for OpenSSL, a required dependency. FIXME: this is only (maybe)
# used by S3BinaryCacheStore.
PKG_CHECK_MODULES([OPENSSL], [libcrypto >= 1.1.1], [CXXFLAGS="$OPENSSL_CFLAGS $CXXFLAGS"])


# Look for libarchive.
PKG_CHECK_MODULES([LIBARCHIVE], [libarchive >= 3.1.2], [CXXFLAGS="$LIBARCHIVE_CFLAGS $CXXFLAGS"])
# Workaround until https://github.com/libarchive/libarchive/issues/1446 is fixed
if test "$shared" != yes; then
  LIBARCHIVE_LIBS+=' -lz'
fi

# Look for SQLite, a required dependency.
PKG_CHECK_MODULES([SQLITE3], [sqlite3 >= 3.6.19], [CXXFLAGS="$SQLITE3_CFLAGS $CXXFLAGS"])

# Look for libcurl, a required dependency.
PKG_CHECK_MODULES([LIBCURL], [libcurl], [CXXFLAGS="$LIBCURL_CFLAGS $CXXFLAGS"])

# Look for editline, a required dependency.
# The the libeditline.pc file was added only in libeditline >= 1.15.2,
# see https://github.com/troglobit/editline/commit/0a8f2ef4203c3a4a4726b9dd1336869cd0da8607,
# but e.g. Ubuntu 16.04 has an older version, so we fall back to searching for
# editline.h when the pkg-config approach fails.
PKG_CHECK_MODULES([EDITLINE], [libeditline], [CXXFLAGS="$EDITLINE_CFLAGS $CXXFLAGS"], [
  AC_CHECK_HEADERS([editline.h], [true],
    [AC_MSG_ERROR([Nix requires libeditline; it was found neither via pkg-config nor its normal header.])])
  AC_SEARCH_LIBS([readline read_history], [editline], [],
    [AC_MSG_ERROR([Nix requires libeditline; it was not found via pkg-config, but via its header, but required functions do not work. Maybe it is too old? >= 1.14 is required.])])
])

# Look for libsodium.
PKG_CHECK_MODULES([SODIUM], [libsodium], [CXXFLAGS="$SODIUM_CFLAGS $CXXFLAGS"])

# Look for libbrotli{enc,dec}.
PKG_CHECK_MODULES([LIBBROTLI], [libbrotlienc libbrotlidec], [CXXFLAGS="$LIBBROTLI_CFLAGS $CXXFLAGS"])

# Look for libcpuid.
have_libcpuid=
if test "$machine_name" = "x86_64"; then
  AC_ARG_ENABLE([cpuid],
    AS_HELP_STRING([--disable-cpuid], [Do not determine microarchitecture levels with libcpuid (relevant to x86_64 only)]))
  if test "x$enable_cpuid" != "xno"; then
    PKG_CHECK_MODULES([LIBCPUID], [libcpuid],
      [CXXFLAGS="$LIBCPUID_CFLAGS $CXXFLAGS"
       have_libcpuid=1
       AC_DEFINE([HAVE_LIBCPUID], [1], [Use libcpuid])]
    )
  fi
fi
AC_SUBST(HAVE_LIBCPUID, [$have_libcpuid])


# Look for libseccomp, required for Linux sandboxing.
case "$host_os" in
  linux*)
    AC_ARG_ENABLE([seccomp-sandboxing],
      AS_HELP_STRING([--disable-seccomp-sandboxing],[Don't build support for seccomp sandboxing (only recommended if your arch doesn't support libseccomp yet!)
      ]))
    if test "x$enable_seccomp_sandboxing" != "xno"; then
      PKG_CHECK_MODULES([LIBSECCOMP], [libseccomp],
        [CXXFLAGS="$LIBSECCOMP_CFLAGS $CXXFLAGS"])
      have_seccomp=1
      AC_DEFINE([HAVE_SECCOMP], [1], [Whether seccomp is available and should be used for sandboxing.])
    else
      have_seccomp=
    fi
    ;;
  *)
    have_seccomp=
    ;;
esac
AC_SUBST(HAVE_SECCOMP, [$have_seccomp])


# Look for aws-cpp-sdk-s3.
AC_LANG_PUSH(C++)
AC_CHECK_HEADERS([aws/s3/S3Client.h],
  [AC_DEFINE([ENABLE_S3], [1], [Whether to enable S3 support via aws-sdk-cpp.]) enable_s3=1],
  [AC_DEFINE([ENABLE_S3], [0], [Whether to enable S3 support via aws-sdk-cpp.]) enable_s3=])
AC_SUBST(ENABLE_S3, [$enable_s3])
AC_LANG_POP(C++)

if test -n "$enable_s3"; then
  declare -a aws_version_tokens=($(printf '#include <aws/core/VersionConfig.h>\nAWS_SDK_VERSION_STRING' | $CPP $CPPFLAGS - | grep -v '^#.*' | sed 's/"//g' | tr '.' ' '))
  AC_DEFINE_UNQUOTED([AWS_VERSION_MAJOR], ${aws_version_tokens@<:@0@:>@}, [Major version of aws-sdk-cpp.])
  AC_DEFINE_UNQUOTED([AWS_VERSION_MINOR], ${aws_version_tokens@<:@1@:>@}, [Minor version of aws-sdk-cpp.])
  AC_DEFINE_UNQUOTED([AWS_VERSION_PATCH], ${aws_version_tokens@<:@2@:>@}, [Patch version of aws-sdk-cpp.])
fi


# Whether to use the Boehm garbage collector.
AC_ARG_ENABLE(gc, AS_HELP_STRING([--enable-gc],[enable garbage collection in the Nix expression evaluator (requires Boehm GC) [default=yes]]),
  gc=$enableval, gc=yes)
if test "$gc" = yes; then
  PKG_CHECK_MODULES([BDW_GC], [bdw-gc])
  CXXFLAGS="$BDW_GC_CFLAGS $CXXFLAGS"
  AC_DEFINE(HAVE_BOEHMGC, 1, [Whether to use the Boehm garbage collector.])
fi


if test "$ENABLE_TESTS" = yes; then

# Look for gtest.
PKG_CHECK_MODULES([GTEST], [gtest_main gmock_main])


# Look for rapidcheck.
AC_ARG_VAR([RAPIDCHECK_HEADERS], [include path of gtest headers shipped by RAPIDCHECK])
# No pkg-config yet, https://github.com/emil-e/rapidcheck/issues/302
AC_LANG_PUSH(C++)
AC_SUBST(RAPIDCHECK_HEADERS)
[CXXFLAGS="-I $RAPIDCHECK_HEADERS $CXXFLAGS"]
[LIBS="-lrapidcheck -lgtest $LIBS"]
AC_CHECK_HEADERS([rapidcheck/gtest.h], [], [], [#include <gtest/gtest.h>])
dnl AC_CHECK_LIB doesn't work for C++ libs with mangled symbols
AC_LINK_IFELSE([
  AC_LANG_PROGRAM([[
    #include <gtest/gtest.h>
    #include <rapidcheck/gtest.h>
  ]], [[
    return RUN_ALL_TESTS();
  ]])
  ],
  [],
  [AC_MSG_ERROR([librapidcheck is not found.])])
AC_LANG_POP(C++)

fi

# Look for nlohmann/json.
PKG_CHECK_MODULES([NLOHMANN_JSON], [nlohmann_json >= 3.9])


# documentation generation switch
AC_ARG_ENABLE(doc-gen, AS_HELP_STRING([--disable-doc-gen],[disable documentation generation]),
  doc_generate=$enableval, doc_generate=yes)
AC_SUBST(doc_generate)

# Look for lowdown library.
PKG_CHECK_MODULES([LOWDOWN], [lowdown >= 0.9.0], [CXXFLAGS="$LOWDOWN_CFLAGS $CXXFLAGS"])

# Look for toml11, a required dependency.
AC_ARG_VAR([TOML11_HEADERS], [include path of toml11 headers])
AC_LANG_PUSH(C++)
[CXXFLAGS="-I $TOML11_HEADERS $CXXFLAGS"]
AC_CHECK_HEADER([toml.hpp], [], [AC_MSG_ERROR([toml11 is not found.])])
AC_LANG_POP(C++)

# Setuid installations.
AC_CHECK_FUNCS([setresuid setreuid lchown])


# Nice to have, but not essential.
AC_CHECK_FUNCS([strsignal posix_fallocate sysconf])


AC_ARG_WITH(sandbox-shell, AS_HELP_STRING([--with-sandbox-shell=PATH],[path of a statically-linked shell to use as /bin/sh in sandboxes]),
  sandbox_shell=$withval)
AC_SUBST(sandbox_shell)
if test ${cross_compiling:-no} = no && ! test -z ${sandbox_shell+x}; then
  AC_MSG_CHECKING([whether sandbox-shell has the standalone feature])
  # busybox shell sometimes allows executing other busybox applets,
  # even if they are not in the path, breaking our sandbox
  if PATH= $sandbox_shell -c "busybox" 2>&1 | grep -qv "not found"; then
    AC_MSG_RESULT(enabled)
    AC_MSG_ERROR([Please disable busybox FEATURE_SH_STANDALONE])
  else
    AC_MSG_RESULT(disabled)
  fi
fi

AC_ARG_ENABLE(embedded-sandbox-shell, AS_HELP_STRING([--enable-embedded-sandbox-shell],[include the sandbox shell in the Nix binary [default=no]]),
  embedded_sandbox_shell=$enableval, embedded_sandbox_shell=no)
AC_SUBST(embedded_sandbox_shell)
if test "$embedded_sandbox_shell" = yes; then
  AC_DEFINE(HAVE_EMBEDDED_SANDBOX_SHELL, 1, [Include the sandbox shell in the Nix binary.])
fi


# Expand all variables in config.status.
test "$prefix" = NONE && prefix=$ac_default_prefix
test "$exec_prefix" = NONE && exec_prefix='${prefix}'
for name in $ac_subst_vars; do
  declare $name="$(eval echo "${!name}")"
  declare $name="$(eval echo "${!name}")"
  declare $name="$(eval echo "${!name}")"
done

rm -f Makefile.config

AC_CONFIG_HEADERS([config.h])
AC_CONFIG_FILES([])
AC_OUTPUT
```
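A hedged sketch of how this script was typically driven before the Meson migration; the flag names come from the `AC_ARG_*` definitions above, and the prefix value is illustrative:

```bash
$ autoreconf -vfi
$ ./configure --prefix="$PWD/outputs/out" --with-store-dir=/nix/store --enable-gc --disable-seccomp-sandboxing
```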
@@ -1,19 +0,0 @@

```makefile
.PHONY: internal-api-html

ifeq ($(internal_api_docs), yes)

$(docdir)/internal-api/html/index.html $(docdir)/internal-api/latex: $(d)/doxygen.cfg
	mkdir -p $(docdir)/internal-api
	{ cat $< ; echo "OUTPUT_DIRECTORY=$(docdir)/internal-api" ; } | doxygen -

# Generate the HTML API docs for Nix's unstable internal interfaces.
internal-api-html: $(docdir)/internal-api/html/index.html

else

# Make a nicer error message
internal-api-html:
	@echo "Internal API docs are disabled. Configure with '--enable-internal-api-docs', or avoid calling 'make internal-api-html'."
	@exit 1

endif
```
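These targets were reached through the `internal_api_docs` switch defined in the removed configure.ac; a hedged sketch, with the viewing path taken from the old hacking guide further down:

```bash
$ ./configure --enable-internal-api-docs
$ make internal-api-html
$ xdg-open ./outputs/doc/share/doc/nix/internal-api/html/index.html
```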
@@ -1,178 +0,0 @@

```makefile
ifeq ($(doc_generate),yes)

# The version of Nix used to generate the doc. Can also be
# `$(nix_INSTALL_PATH)` or just `nix` (to grap ambient from the `PATH`),
# if one prefers.
doc_nix = $(nix_PATH)

MANUAL_SRCS := \
	$(call rwildcard, $(d)/src, *.md) \
	$(call rwildcard, $(d)/src, */*.md)

man-pages := $(foreach n, \
	nix-env.1 nix-store.1 \
	nix-build.1 nix-shell.1 nix-instantiate.1 \
	nix-collect-garbage.1 \
	nix-prefetch-url.1 nix-channel.1 \
	nix-hash.1 nix-copy-closure.1 \
	nix.conf.5 nix-daemon.8 \
	nix-profiles.5 \
, doc/manual/generated/in/$(n))

# man pages for subcommands
# convert from `$(d)/src/command-ref/nix-{1}/{2}.md` to `$(d)/nix-{1}-{2}.1`
# FIXME: unify with how nix3-cli man pages are generated
man-pages += $(foreach subcommand, \
	$(filter-out %opt-common.md %env-common.md, $(wildcard $(d)/src/command-ref/nix-*/*.md)), \
	doc/manual/generated/in/$(subst /,-,$(subst $(d)/src/command-ref/,,$(subst .md,.1,$(subcommand)))))

clean-files += $(d)/*.1 $(d)/*.5 $(d)/*.8

# Provide a dummy environment for nix, so that it will not access files outside the macOS sandbox.
# Set cores to 0 because otherwise nix show-config resolves the cores based on the current machine
dummy-env = env -i \
	HOME=/dummy \
	NIX_CONF_DIR=/dummy \
	NIX_SSL_CERT_FILE=/dummy/no-ca-bundle.crt \
	NIX_STATE_DIR=/dummy \
	NIX_CONFIG='cores = 0'

nix-eval = $(dummy-env) $(doc_nix) eval --experimental-features nix-command -I nix/corepkgs=corepkgs --store dummy:// --impure --raw

doc/manual/generated/in/nix-env-%.1: doc/manual/generated/out
	$(trace-gen) doc/manual/render-manpage.sh \
		--out-no-smarty "$(subst nix-env-,nix-env --,$$(basename "$@" .1))" 1 \
		doc/manual/generated/out/markdown/command-ref/nix-env/$*.md \
		$@

doc/manual/generated/in/nix-store-%.1: doc/manual/generated/out
	$(trace-gen) doc/manual/render-manpage.sh \
		--out-no-smarty "$(subst nix-store-,nix-store --,$$(basename "$@" .1))" 1 \
		doc/manual/generated/out/markdown/command-ref/nix-store/$*.md \
		$@


doc/manual/generated/in/%.1: doc/manual/generated/out
	$(trace-gen) doc/manual/render-manpage.sh "$$(basename $@ .1)" 1 \
		doc/manual/generated/out/markdown/command-ref/$*.md \
		$@

doc/manual/generated/in/%.8: doc/manual/generated/out
	$(trace-gen) doc/manual/render-manpage.sh "$$(basename $@ .8)" 8 \
		doc/manual/generated/out/markdown/command-ref/$*.md \
		$@

doc/manual/generated/in/nix.conf.5: doc/manual/generated/out
	$(trace-gen) doc/manual/render-manpage.sh "$$(basename $@ .5)" 5 \
		doc/manual/generated/out/markdown/command-ref/conf-file.md \
		$@

doc/manual/generated/in/nix-profiles.5: doc/manual/generated/out
	$(trace-gen) doc/manual/render-manpage.sh "$$(basename $@ .5)" 5 \
		doc/manual/generated/out/markdown/command-ref/files/profiles.md \
		$@

doc/manual/generated/in/command-ref/new-cli: doc/manual/generated/in/nix.json $(d)/utils.nix $(d)/generate-manpage.nix $(doc_nix)
	@mkdir -p doc/manual/generated/in/command-ref
	@rm -rf $@ $@.tmp
	$(trace-gen) $(nix-eval) --write-to $@.tmp --expr 'import doc/manual/generate-manpage.nix true (builtins.readFile $<)'
	@mv $@.tmp $@

doc/manual/generated/in/command-ref/conf-file.md: doc/manual/generated/in/conf-file.json $(d)/utils.nix doc/manual/generated/in/command-ref/experimental-features-shortlist.md $(doc_nix)
	@mkdir -p doc/manual/generated/in/command-ref
	$(trace-gen) $(nix-eval) --expr '(import doc/manual/utils.nix).showSettings { inlineHTML = true; } (builtins.fromJSON (builtins.readFile $<))' >> $@

doc/manual/generated/in/nix.json: $(doc_nix)
	@mkdir -p doc/manual/generated/in
	$(trace-gen) $(dummy-env) $(doc_nix) __dump-cli > $@.tmp
	@mv $@.tmp $@

doc/manual/generated/in/conf-file.json: $(doc_nix)
	@mkdir -p doc/manual/generated/in
	$(trace-gen) $(dummy-env) $(doc_nix) show-config --json --experimental-features nix-command > $@.tmp
	@mv $@.tmp $@

doc/manual/generated/in/contributing/experimental-feature-descriptions.md: doc/manual/generated/in/xp-features.json $(d)/utils.nix $(d)/generate-xp-features.nix $(doc_nix)
	@mkdir -p doc/manual/generated/in/contributing
	@rm -rf $@ $@.tmp
	$(trace-gen) $(nix-eval) --write-to $@.tmp --expr 'import doc/manual/generate-xp-features.nix (builtins.fromJSON (builtins.readFile $<))'
	@mv $@.tmp $@

doc/manual/generated/in/command-ref/experimental-features-shortlist.md: doc/manual/generated/in/xp-features.json $(d)/utils.nix $(d)/generate-xp-features-shortlist.nix $(doc_nix)
	@mkdir -p doc/manual/generated/in/command-ref
	@rm -rf $@ $@.tmp
	$(trace-gen) $(nix-eval) --write-to $@.tmp --expr 'import doc/manual/generate-xp-features-shortlist.nix (builtins.fromJSON (builtins.readFile $<))'
	@mv $@.tmp $@

doc/manual/generated/in/xp-features.json: $(doc_nix)
	$(trace-gen) $(dummy-env) NIX_PATH=nix/corepkgs=corepkgs $(doc_nix) __dump-xp-features > $@.tmp
	@mv $@.tmp $@

doc/manual/generated/in/language/builtins.md: doc/manual/generated/in/language.json $(d)/generate-builtins.nix $(doc_nix)
	@mkdir -p doc/manual/generated/in/language
	$(trace-gen) $(nix-eval) --expr 'import doc/manual/generate-builtins.nix (builtins.fromJSON (builtins.readFile $<)).builtins' >> $@

doc/manual/generated/in/language/builtin-constants.md: doc/manual/generated/in/language.json $(d)/generate-builtin-constants.nix $(doc_nix)
	@mkdir -p doc/manual/generated/in/language
	$(trace-gen) $(nix-eval) --expr 'import doc/manual/generate-builtin-constants.nix (builtins.fromJSON (builtins.readFile $<)).constants' >> $@

doc/manual/generated/in/language.json: $(doc_nix)
	@mkdir -p doc/manual/generated/in
	$(trace-gen) $(dummy-env) NIX_PATH=nix/corepkgs=corepkgs $(doc_nix) __dump-language > $@.tmp
	@mv $@.tmp $@

# Generate "Upcoming release" notes (or clear it and remove from menu)
doc/manual/generated/in/release-notes/rl-next-generated.md: $(d)/rl-next $(d)/rl-next/*
	@mkdir -p doc/manual/generated/in/release-notes
	@if type -p build-release-notes > /dev/null; then \
		echo "  GEN   " $@; \
		build-release-notes doc/manual/rl-next > $@; \
	else \
		echo "  NULL  " $@; \
		true > $@; \
	fi

# Generate the HTML manual.
.PHONY: manual-html
manual-html: $(docdir)/manual/index.html
install: $(docdir)/manual/index.html

# Generate 'nix' manpages.
install: $(mandir)/man1/nix3-manpages
man: doc/manual/generated/man1/nix3-manpages
all: doc/manual/generated/man1/nix3-manpages

# FIXME: unify with how the other man pages are generated.
# this one works differently and does not use any of the amenities provided by `/mk/lib.mk`.
$(mandir)/man1/nix3-manpages: doc/manual/generated/man1/nix3-manpages
	@mkdir -p $(DESTDIR)$$(dirname $@)
	$(trace-install) install -m 0644 $$(dirname $<)/* $(DESTDIR)$$(dirname $@)

doc/manual/generated/man1/nix3-manpages: doc/manual/generated/out
	@mkdir -p $(DESTDIR)$$(dirname $@)
	$(trace-gen) for i in doc/manual/generated/out/markdown/command-ref/new-cli/*.md; do \
		name=$$(basename $$i .md); \
		tmpFile=$$(mktemp); \
		if [[ $$name = SUMMARY ]]; then continue; fi; \
		printf "Title: %s\n\n" "$$name" > $$tmpFile; \
		cat $$i >> $$tmpFile; \
		lowdown -sT man --nroff-nolinks -M section=1 $$tmpFile -o $(DESTDIR)$$(dirname $@)/$$name.1; \
		rm $$tmpFile; \
	done
	@touch $@

doc/manual/generated/out: $(MANUAL_SRCS) $(d)/book.toml $(d)/anchors.jq $(d)/custom.css $(d)/src/SUMMARY.md doc/manual/generated/in/command-ref/new-cli doc/manual/generated/in/command-ref/experimental-features-shortlist.md doc/manual/generated/in/contributing/experimental-feature-descriptions.md doc/manual/generated/in/command-ref/conf-file.md doc/manual/generated/in/language/builtins.md doc/manual/generated/in/language/builtin-constants.md doc/manual/generated/in/release-notes/rl-next-generated.md $(d)/substitute.py
	@rm -rf $@
	$(trace-gen) \
		MDBOOK_SUBSTITUTE_SEARCH=doc/manual/generated/in \
		RUST_LOG=warn \
		mdbook build doc/manual -d generated/out 2>&1 \
			| { grep -Fv "because fragment resolution isn't implemented" || :; }
	@find $@ -iname meson.build -delete

$(docdir)/manual/index.html: doc/manual/generated/out
	@mkdir -p $(DESTDIR)$(docdir)
	@rm -rf $(DESTDIR)$(docdir)/manual
	@cp -r $</html $(DESTDIR)$(docdir)/manual

endif
```
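The phony target defined above was driven directly with make; a hedged usage sketch (the job count is illustrative, as in the old hacking guide further down):

```bash
$ make manual-html -j $NIX_BUILD_CORES
```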
```diff
@@ -25,17 +25,17 @@ individual users can switch between different environments.
 `nix-env` takes exactly one *operation* flag which indicates the
 subcommand to be performed. The following operations are available:
 
-- [`--install`](./nix-env/install.md)
-- [`--upgrade`](./nix-env/upgrade.md)
-- [`--uninstall`](./nix-env/uninstall.md)
-- [`--set`](./nix-env/set.md)
-- [`--set-flag`](./nix-env/set-flag.md)
-- [`--query`](./nix-env/query.md)
-- [`--switch-profile`](./nix-env/switch-profile.md)
-- [`--list-generations`](./nix-env/list-generations.md)
-- [`--delete-generations`](./nix-env/delete-generations.md)
-- [`--switch-generation`](./nix-env/switch-generation.md)
-- [`--rollback`](./nix-env/rollback.md)
+- [`--install`](./nix-env/install.md) - add packages to user environment
+- [`--upgrade`](./nix-env/upgrade.md) - upgrade packages in user environment
+- [`--uninstall`](./nix-env/uninstall.md) - remove packages from user environment
+- [`--set`](./nix-env/set.md) - set profile to contain a specified derivation
+- [`--set-flag`](./nix-env/set-flag.md) - modify meta attributes of installed packages
+- [`--query`](./nix-env/query.md) - display information about packages
+- [`--switch-profile`](./nix-env/switch-profile.md) - set user environment to a given profile
+- [`--list-generations`](./nix-env/list-generations.md) - list profile generations
+- [`--delete-generations`](./nix-env/delete-generations.md) - delete profile generations
+- [`--switch-generation`](./nix-env/switch-generation.md) - set user environment to a given profile generation
+- [`--rollback`](./nix-env/rollback.md) - set user environment to previous generation
 
 These pages can be viewed offline:
 
```
````diff
@@ -1,61 +1,125 @@
 # Hacking
 
-This section provides some notes on how to hack on Nix. To get the
-latest version of Nix from GitHub:
+This section provides some notes on how to hack on Nix. To get the latest version of Lix from Forgejo:
 
 ```console
-$ git clone https://github.com/NixOS/nix.git
-$ cd nix
+$ git clone https://git.lix.systems/lix-project/lix
+$ cd lix
 ```
 
-The following instructions assume you already have some version of Nix installed locally, so that you can use it to set up the development environment. If you don't have it installed, follow the [installation instructions].
+The following instructions assume you already have some version of Nix or Lix installed locally, so that you can use it to set up the development environment. If you don't have it installed, follow the [installation instructions].
 
 [installation instructions]: ../installation/installation.md
 
-## Building Nix with flakes
+## Building Lix in a development shell
 
-This section assumes you are using Nix with the [`flakes`] and [`nix-command`] experimental features enabled.
-See the [Building Nix](#building-nix) section for equivalent instructions using stable Nix interfaces.
+### Setting up the development shell
+
+If you are using Lix or Nix with the [`flakes`] and [`nix-command`] experimental features enabled, the following command will build all dependencies and start a shell in which all environment variables are setup for those dependencies to be found:
+
+```bash
+$ nix develop
+```
+
+That will use the default stdenv for your system. To get a shell with one of the other [supported compilation environments](#compilation-environments), specify its attribute name after a hash (which you may need to quote, depending on your shell):
+
+```bash
+$ nix develop ".#native-clangStdenvPackages"
+```
+
+For classic Nix, use:
+
+```bash
+$ nix-shell -A native-clangStdenvPackages
+```
 
 [`flakes`]: @docroot@/contributing/experimental-features.md#xp-feature-flakes
 [`nix-command`]: @docroot@/contributing/experimental-features.md#xp-nix-command
 
-To build all dependencies and start a shell in which all environment variables are set up so that those dependencies can be found:
-
-```console
-$ nix develop
-```
-
-This shell also adds `./outputs/bin/nix` to your `$PATH` so you can run `nix` immediately after building it.
-
-To get a shell with one of the other [supported compilation environments](#compilation-environments):
-
-```console
-$ nix develop .#native-clangStdenvPackages
-```
-
-> **Note**
->
-> Use `ccacheStdenv` to drastically improve rebuild time.
-> By default, [ccache](https://ccache.dev) keeps artifacts in `~/.cache/ccache/`.
-
-To build Nix itself in this shell:
-
-```console
-[nix-shell]$ autoreconfPhase
-[nix-shell]$ configurePhase
-[nix-shell]$ make -j $NIX_BUILD_CORES
-```
-
-To install it in `$(pwd)/outputs` and test it:
-
-```console
-[nix-shell]$ make install
-[nix-shell]$ make installcheck -j $NIX_BUILD_CORES
-[nix-shell]$ nix --version
-nix (Nix) 2.12
-```
-
+### Building from the development shell
+
+As always you may run [stdenv's phases by name](https://nixos.org/manual/nixpkgs/unstable/#sec-building-stdenv-package-in-nix-shell), e.g.:
+
+```bash
+$ configurePhase
+$ buildPhase
+$ checkPhase
+$ installPhase
+$ installCheckPhase
+```
+
+To build manually, however, use the following:
+
+```bash
+$ meson setup ./build "--prefix=$out" $mesonFlags
+```
+
+(A simple `meson setup ./build` will also build, but will do a different thing, not having the settings from package.nix applied).
+
+```bash
+$ meson compile -C build
+$ meson test -C build --suite=check
+$ meson install -C build
+$ meson test -C build --suite=installcheck
+```
+
+(Check and installcheck may both be done after install, allowing you to omit the --suite argument entirely, but this is the order package.nix runs them in.)
+
+This will install Lix to `$PWD/outputs`, the `/bin` of which is prepended to PATH in the development shells.
+
+If the tests fail and Meson helpfully has no output for why, use the `--print-error-logs` option to `meson test`.
+
+If you change a setting in the buildsystem (i.e., any of the `meson.build` files), most cases will automatically regenerate the Meson configuration just before compiling.
+Some cases, however, like trying to build a specific target whose name is new to the buildsystem (e.g. `meson compile -C build src/libmelt/libmelt.dylib`, when `libmelt.dylib` did not exist as a target the last time the buildsystem was generated), then you can reconfigure using new settings but existing options, and only recompiling stuff affected by the changes:
+
+```bash
+$ meson setup --reconfigure build
+```
+
+Note that changes to the default values in `meson.options` or in the `default_options :` argument to `project()` are **not** propagated with `--reconfigure`.
+
+If you want a totally clean build, you can use:
+
+```bash
+$ meson setup --wipe build
+```
+
+That will work regardless of if `./build` exists or not.
+
+Specific, named targets may be addressed in `meson build -C build <target>`, with the "target ID", if there is one, which is the first string argument passed to target functions that have one, and unrelated to the variable name, e.g.:
+
+```meson
+libexpr_dylib = library('nixexpr', …)
+```
+
+can be addressed with:
+
+```bash
+$ meson compile -C build nixexpr
+```
+
+All targets may be addressed as their output, relative to the build directory, e.g.:
+
+```bash
+$ meson compile -C build src/libexpr/libnixexpr.so
+```
+
+But Meson does not consider intermediate files like object files targets.
+To build a specific object file, use Ninja directly and specify the output file relative to the build directory:
+
+```bash
+$ ninja -C build src/libexpr/libnixexpr.so.p/nixexpr.cc.o
+```
+
+To inspect the canonical source of truth on what the state of the buildsystem configuration is, use:
+
+```bash
+$ meson introspect
+```
+
+## Building Lix outside of development shells
+
 To build a release version of Nix for the current operating system and CPU architecture:
 
 ```console
````
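A hedged combination of the two `meson test` hints in the hunk above, with the suite name and flag exactly as mentioned there:

```bash
$ meson test -C build --suite=check --print-error-logs
```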
````diff
@@ -64,50 +128,11 @@ $ nix build
 
 You can also build Nix for one of the [supported platforms](#platforms).
 
-## Building Nix
-
-To build all dependencies and start a shell in which all environment variables are set up so that those dependencies can be found:
-
-```console
-$ nix-shell
-```
-
-To get a shell with one of the other [supported compilation environments](#compilation-environments):
-
-```console
-$ nix-shell --attr devShells.x86_64-linux.native-clangStdenvPackages
-```
-
 > **Note**
 >
 > You can use `native-ccacheStdenvPackages` to drastically improve rebuild time.
 > By default, [ccache](https://ccache.dev) keeps artifacts in `~/.cache/ccache/`.
 
-To build Nix itself in this shell:
-
-```console
-[nix-shell]$ autoreconfPhase
-[nix-shell]$ ./configure $configureFlags --prefix=$(pwd)/outputs/out
-[nix-shell]$ make -j $NIX_BUILD_CORES
-```
-
-To install it in `$(pwd)/outputs` and test it:
-
-```console
-[nix-shell]$ make install
-[nix-shell]$ make installcheck -j $NIX_BUILD_CORES
-[nix-shell]$ ./outputs/out/bin/nix --version
-nix (Nix) 2.12
-```
-
-To build a release version of Nix for the current operating system and CPU architecture:
-
-```console
-$ nix-build
-```
-
-You can also build Nix for one of the [supported platforms](#platforms).
-
 ## Platforms
 
 Nix can be built for various platforms, as specified in [`flake.nix`]:
````
````diff
@@ -148,55 +173,38 @@ Add more [system types](#system-type) to `crossSystems` in `flake.nix` to bootst
 
 ### Building for multiple platforms at once
 
-It is useful to perform multiple cross and native builds on the same source tree,
-for example to ensure that better support for one platform doesn't break the build for another.
-In order to facilitate this, Nix has some support for being built out of tree – that is, placing build artefacts in a different directory than the source code:
-
-1. Create a directory for the build, e.g.
-
-   ```bash
-   mkdir build
-   ```
-
-2. Run the configure script from that directory, e.g.
-
-   ```bash
-   cd build
-   ../configure <configure flags>
-   ```
-
-3. Run make from the source directory, but with the build directory specified, e.g.
-
-   ```bash
-   make builddir=build <make flags>
-   ```
+It is useful to perform multiple cross and native builds on the same source tree, for example to ensure that better support for one platform doesn't break the build for another.
+As Lix now uses Meson, out-of-tree builds are supported first class. In the invocation
+
+```bash
+$ meson setup build
+```
+
+the argument after `setup` specifies the directory for this build, conventionally simply called "build", but it may be called anything, and you may run `meson setup <somedir>` for as many different directories as you want.
+To compile the configuration for a given build directory, pass that build directory to the `-C` argument of `meson compile`:
+
+```bash
+$ meson setup some-custom-build
+$ meson compile -C some-custom-build
+```
 
 ## System type
 
-Nix uses a string with he following format to identify the *system type* or *platform* it runs on:
+Lix uses a string with the following format to identify the *system type* or *platform* it runs on:
 
 ```
 <cpu>-<os>[-<abi>]
 ```
 
-It is set when Nix is compiled for the given system, and based on the output of [`config.guess`](https://github.com/nixos/nix/blob/master/config/config.guess) ([upstream](https://git.savannah.gnu.org/cgit/config.git/tree/config.guess)):
-
-```
-<cpu>-<vendor>-<os>[<version>][-<abi>]
-```
-
-When Nix is built such that `./configure` is passed any of the `--host`, `--build`, `--target` options, the value is based on the output of [`config.sub`](https://github.com/nixos/nix/blob/master/config/config.sub) ([upstream](https://git.savannah.gnu.org/cgit/config.git/tree/config.sub)):
-
-```
-<cpu>-<vendor>[-<kernel>]-<os>
-```
-
-For historic reasons and backward-compatibility, some CPU and OS identifiers are translated from the GNU Autotools naming convention in [`configure.ac`](https://github.com/nixos/nix/blob/master/configure.ac) as follows:
-
-| `config.guess` | Nix |
+It is set when Nix is compiled for the given system, and determined by [Meson's `host_machine.cpu_family()` and `host_machine.system()` values](https://mesonbuild.com/Reference-manual_builtin_host_machine.html).
+
+For historic reasons and backward-compatibility, some CPU and OS identifiers are translated from the GNU Autotools naming convention in [`meson.build`](https://git.lix.systems/lix-project/lix/blob/main/meson.build) as follows:
+
+| `host_machine.cpu_family()` | Nix |
 |----------------------------|---------------------|
-| `amd64` | `x86_64` |
-| `i*86` | `i686` |
+| `x86` | `i686` |
+| `i686` | `i686` |
 | `arm6` | `arm6l` |
 | `arm7` | `arm7l` |
 | `linux-gnu*` | `linux` |
````
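To check which system type a built Nix or Lix actually reports at runtime, a hedged one-liner (requires the `nix-command` experimental feature; the printed value depends on the machine):

```bash
$ nix eval --impure --raw --expr builtins.currentSystem
```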
````diff
@@ -229,13 +237,14 @@ You can use any of the other supported environments in place of `nix-ccacheStden
 
 ## Editor integration
 
-The `clangd` LSP server is installed by default on the `clang`-based `devShell`s.
+The `clangd` LSP server is installed by default in each development shell.
 See [supported compilation environments](#compilation-environments) and instructions how to set up a shell [with flakes](#nix-with-flakes) or in [classic Nix](#classic-nix).
 
-To use the LSP with your editor, you first need to [set up `clangd`](https://clangd.llvm.org/installation#project-setup) by running:
+Clangd requires a compilation database, which Meson generates by default. After running `meson setup`, there will already be a `compile_commands.json` file in the build directory.
+Some editor configurations may prefer that file to be in the root directory, which you can accomplish with a simple:
 
-```console
-make clean && bear -- make -j$NIX_BUILD_CORES install
+```bash
+$ ln -sf ./build/compile_commands.json ./compile_commands.json
 ```
 
 Configure your editor to use the `clangd` from the shell, either by running it inside the development shell, or by using [nix-direnv](https://github.com/nix-community/nix-direnv) and [the appropriate editor plugin](https://github.com/direnv/direnv/wiki#editor-integration).
````
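For the nix-direnv route mentioned in the hunk above, a hedged minimal setup (assumes direnv and nix-direnv are already installed and the checkout is used as a flake):

```bash
$ echo "use flake" > .envrc
$ direnv allow
```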
````diff
@@ -253,15 +262,7 @@ This happens late in the process, so `nix build` is not suitable for iterating.
 To build the manual incrementally, run:
 
 ```console
-make html -j $NIX_BUILD_CORES
-```
-
-In order to reflect changes to the [Makefile], clear all generated files before re-building:
-
-[Makefile]: https://github.com/NixOS/nix/blob/master/doc/manual/local.mk
-
-```console
-rm $(git ls-files doc/manual/ -o | grep -F '.md') && rmdir doc/manual/src/command-ref/new-cli && make html -j $NIX_BUILD_CORES
+meson compile -C build manual
 ```
 
 [`mdbook-linkcheck`] does not implement checking [URI fragments] yet.
````
````diff
@@ -292,9 +293,9 @@ can also build and view it yourself:
 
 or inside a `nix develop` shell by running:
 
-```
-# make internal-api-html
-# xdg-open ./outputs/doc/share/doc/nix/internal-api/html/index.html
+```bash
+$ meson compile -C build internal-api-docs
+$ xdg-open ./outputs/doc/share/doc/nix/internal-api/html/index.html
 ```
 
 ## Coverage analysis
````
flake.nix (33 lines changed)

```diff
@@ -196,24 +196,6 @@
       }
     );
 
-    # FIXME(Qyriad): remove this when the migration to Meson has been completed.
-    # NOTE: mesonBuildClang depends on mesonBuild depends on build to avoid OOMs
-    # on aarch64 builders caused by too many parallel compiler/linker processes.
-    mesonBuild = forAllSystems (
-      system:
-      (self.packages.${system}.nix.override { buildWithMeson = true; }).overrideAttrs (prev: {
-        buildInputs = prev.buildInputs ++ [ self.packages.${system}.nix ];
-      })
-    );
-    mesonBuildClang = forAllSystems (
-      system:
-      (nixpkgsFor.${system}.stdenvs.clangStdenvPackages.nix.override { buildWithMeson = true; })
-      .overrideAttrs
-        (prev: {
-          buildInputs = prev.buildInputs ++ [ self.hydraJobs.mesonBuild.${system} ];
-        })
-    );
-
     # Perl bindings for various platforms.
     perlBindings = forAllSystems (system: nixpkgsFor.${system}.native.nix.perl-bindings);
 
@@ -237,7 +219,6 @@
           inherit (pkgs) build-release-notes;
           internalApiDocs = true;
           busybox-sandbox-shell = pkgs.busybox-sandbox-shell;
-          buildWithMeson = true;
         };
       in
       nix.overrideAttrs (prev: {
@@ -367,9 +348,6 @@
     checks = forAllSystems (
       system:
       {
-        # FIXME(Qyriad): remove this when the migration to Meson has been completed.
-        mesonBuild = self.hydraJobs.mesonBuild.${system};
-        mesonBuildClang = self.hydraJobs.mesonBuildClang.${system};
         binaryTarball = self.hydraJobs.binaryTarball.${system};
         perlBindings = self.hydraJobs.perlBindings.${system};
         nixpkgsLibTests = self.hydraJobs.tests.nixpkgsLibTests.${system};
@@ -468,19 +446,10 @@
             # for some reason that seems accidental and was changed in
             # NixOS 24.05-pre, clang-tools is pinned to LLVM 14 when
             # default LLVM is newer.
-            (pkgs.buildPackages.clang-tools.override { inherit (pkgs.buildPackages) llvmPackages; })
-            ++ [
-              # FIXME(Qyriad): remove once the migration to Meson is complete.
-              pkgs.buildPackages.meson
-              pkgs.buildPackages.ninja
-              pkgs.buildPackages.cmake
-
-              pkgs.buildPackages.clangbuildanalyzer
-            ];
+            (pkgs.buildPackages.clang-tools.override { inherit (pkgs.buildPackages) llvmPackages; });
 
           src = null;
 
-          installFlags = "sysconfdir=$(out)/etc";
           strictDeps = false;
 
           shellHook = ''
```
6
justfile
6
justfile
|
@ -6,13 +6,13 @@ clean:
|
||||||
setup:
|
setup:
|
||||||
meson setup build --prefix="$PWD/outputs/out"
|
meson setup build --prefix="$PWD/outputs/out"
|
||||||
|
|
||||||
build:
|
build *OPTIONS:
|
||||||
meson compile -C build
|
meson compile -C build {{ OPTIONS }}
|
||||||
|
|
||||||
compile:
|
compile:
|
||||||
just build
|
just build
|
||||||
|
|
||||||
install:
|
install *OPTIONS: (build OPTIONS)
|
||||||
meson install -C build
|
meson install -C build
|
||||||
|
|
||||||
test *OPTIONS:
|
test *OPTIONS:
|
||||||
|
|
17
local.mk
17
local.mk
|
@ -1,17 +0,0 @@
|
||||||
# 2024-03-24: jade benchmarked the default sanitize reporting in clang and got
|
|
||||||
# a regression of about 10% on hackage-packages.nix with clang. So we are trapping instead.
|
|
||||||
#
|
|
||||||
# This has an overhead of 0-4% on gcc and unmeasurably little on clang, in
|
|
||||||
# Nix evaluation benchmarks.
|
|
||||||
DEFAULT_SANITIZE_FLAGS = -fsanitize=signed-integer-overflow -fsanitize-undefined-trap-on-error
|
|
||||||
GLOBAL_CXXFLAGS += -Wno-deprecated-declarations -Werror=switch $(DEFAULT_SANITIZE_FLAGS) -D_GLIBCXX_ASSERTIONS=1
|
|
||||||
GLOBAL_LDFLAGS += $(DEFAULT_SANITIZE_FLAGS)
|
|
||||||
# Allow switch-enum to be overridden for files that do not support it, usually because of dependency headers.
|
|
||||||
ERROR_SWITCH_ENUM = -Werror=switch-enum
|
|
||||||
|
|
||||||
$(foreach i, config.h $(wildcard src/lib*/*.hh), \
|
|
||||||
$(eval $(call install-file-in, $(i), $(includedir)/nix, 0644)))
|
|
||||||
|
|
||||||
$(GCH): src/libutil/util.hh config.h
|
|
||||||
|
|
||||||
GCH_CXXFLAGS = -I src/libutil
|
|
|
@ -1,951 +0,0 @@
|
||||||
# ===========================================================================
|
|
||||||
# https://www.gnu.org/software/autoconf-archive/ax_cxx_compile_stdcxx.html
|
|
||||||
# ===========================================================================
|
|
||||||
#
|
|
||||||
# SYNOPSIS
|
|
||||||
#
|
|
||||||
# AX_CXX_COMPILE_STDCXX(VERSION, [ext|noext], [mandatory|optional])
|
|
||||||
#
|
|
||||||
# DESCRIPTION
|
|
||||||
#
|
|
||||||
# Check for baseline language coverage in the compiler for the specified
|
|
||||||
# version of the C++ standard. If necessary, add switches to CXX and
|
|
||||||
# CXXCPP to enable support. VERSION may be '11' (for the C++11 standard)
|
|
||||||
# or '14' (for the C++14 standard).
|
|
||||||
#
|
|
||||||
# The second argument, if specified, indicates whether you insist on an
|
|
||||||
# extended mode (e.g. -std=gnu++11) or a strict conformance mode (e.g.
|
|
||||||
# -std=c++11). If neither is specified, you get whatever works, with
|
|
||||||
# preference for an extended mode.
|
|
||||||
#
|
|
||||||
# The third argument, if specified 'mandatory' or if left unspecified,
|
|
||||||
# indicates that baseline support for the specified C++ standard is
|
|
||||||
# required and that the macro should error out if no mode with that
|
|
||||||
# support is found. If specified 'optional', then configuration proceeds
|
|
||||||
# regardless, after defining HAVE_CXX${VERSION} if and only if a
|
|
||||||
# supporting mode is found.
|
|
||||||
#
|
|
||||||
# LICENSE
|
|
||||||
#
|
|
||||||
# Copyright (c) 2008 Benjamin Kosnik <bkoz@redhat.com>
|
|
||||||
# Copyright (c) 2012 Zack Weinberg <zackw@panix.com>
|
|
||||||
# Copyright (c) 2013 Roy Stogner <roystgnr@ices.utexas.edu>
|
|
||||||
# Copyright (c) 2014, 2015 Google Inc.; contributed by Alexey Sokolov <sokolov@google.com>
|
|
||||||
# Copyright (c) 2015 Paul Norman <penorman@mac.com>
|
|
||||||
# Copyright (c) 2015 Moritz Klammler <moritz@klammler.eu>
|
|
||||||
# Copyright (c) 2016, 2018 Krzesimir Nowak <qdlacz@gmail.com>
|
|
||||||
# Copyright (c) 2019 Enji Cooper <yaneurabeya@gmail.com>
|
|
||||||
#
|
|
||||||
# Copying and distribution of this file, with or without modification, are
|
|
||||||
# permitted in any medium without royalty provided the copyright notice
|
|
||||||
# and this notice are preserved. This file is offered as-is, without any
|
|
||||||
# warranty.
|
|
||||||
|
|
||||||
#serial 11
|
|
||||||
|
|
||||||
dnl This macro is based on the code from the AX_CXX_COMPILE_STDCXX_11 macro
|
|
||||||
dnl (serial version number 13).
|
|
||||||
|
|
||||||
AC_DEFUN([AX_CXX_COMPILE_STDCXX], [dnl
|
|
||||||
m4_if([$1], [11], [ax_cxx_compile_alternatives="11 0x"],
|
|
||||||
[$1], [14], [ax_cxx_compile_alternatives="14 1y"],
|
|
||||||
[$1], [17], [ax_cxx_compile_alternatives="17 1z"],
|
|
||||||
[m4_fatal([invalid first argument `$1' to AX_CXX_COMPILE_STDCXX])])dnl
|
|
||||||
m4_if([$2], [], [],
|
|
||||||
[$2], [ext], [],
|
|
||||||
[$2], [noext], [],
|
|
||||||
[m4_fatal([invalid second argument `$2' to AX_CXX_COMPILE_STDCXX])])dnl
|
|
||||||
m4_if([$3], [], [ax_cxx_compile_cxx$1_required=true],
|
|
||||||
[$3], [mandatory], [ax_cxx_compile_cxx$1_required=true],
|
|
||||||
[$3], [optional], [ax_cxx_compile_cxx$1_required=false],
|
|
||||||
[m4_fatal([invalid third argument `$3' to AX_CXX_COMPILE_STDCXX])])
|
|
||||||
AC_LANG_PUSH([C++])dnl
|
|
||||||
ac_success=no
|
|
||||||
|
|
||||||
m4_if([$2], [noext], [], [dnl
|
|
||||||
if test x$ac_success = xno; then
|
|
||||||
for alternative in ${ax_cxx_compile_alternatives}; do
|
|
||||||
switch="-std=gnu++${alternative}"
|
|
||||||
cachevar=AS_TR_SH([ax_cv_cxx_compile_cxx$1_$switch])
|
|
||||||
AC_CACHE_CHECK(whether $CXX supports C++$1 features with $switch,
|
|
||||||
$cachevar,
|
|
||||||
[ac_save_CXX="$CXX"
|
|
||||||
CXX="$CXX $switch"
|
|
||||||
AC_COMPILE_IFELSE([AC_LANG_SOURCE([_AX_CXX_COMPILE_STDCXX_testbody_$1])],
|
|
||||||
[eval $cachevar=yes],
|
|
||||||
[eval $cachevar=no])
|
|
||||||
CXX="$ac_save_CXX"])
|
|
||||||
if eval test x\$$cachevar = xyes; then
|
|
||||||
CXX="$CXX $switch"
|
|
||||||
if test -n "$CXXCPP" ; then
|
|
||||||
CXXCPP="$CXXCPP $switch"
|
|
||||||
fi
|
|
||||||
ac_success=yes
|
|
||||||
break
|
|
||||||
fi
|
|
||||||
done
|
|
||||||
fi])
|
|
||||||
|
|
||||||
m4_if([$2], [ext], [], [dnl
|
|
||||||
if test x$ac_success = xno; then
|
|
||||||
dnl HP's aCC needs +std=c++11 according to:
|
|
||||||
dnl http://h21007.www2.hp.com/portal/download/files/unprot/aCxx/PDF_Release_Notes/769149-001.pdf
|
|
||||||
dnl Cray's crayCC needs "-h std=c++11"
|
|
||||||
for alternative in ${ax_cxx_compile_alternatives}; do
|
|
||||||
for switch in -std=c++${alternative} +std=c++${alternative} "-h std=c++${alternative}"; do
|
|
||||||
cachevar=AS_TR_SH([ax_cv_cxx_compile_cxx$1_$switch])
|
|
||||||
AC_CACHE_CHECK(whether $CXX supports C++$1 features with $switch,
|
|
||||||
$cachevar,
|
|
||||||
[ac_save_CXX="$CXX"
|
|
||||||
CXX="$CXX $switch"
|
|
||||||
AC_COMPILE_IFELSE([AC_LANG_SOURCE([_AX_CXX_COMPILE_STDCXX_testbody_$1])],
|
|
||||||
[eval $cachevar=yes],
|
|
||||||
[eval $cachevar=no])
|
|
||||||
CXX="$ac_save_CXX"])
|
|
||||||
if eval test x\$$cachevar = xyes; then
|
|
||||||
CXX="$CXX $switch"
|
|
||||||
if test -n "$CXXCPP" ; then
|
|
||||||
CXXCPP="$CXXCPP $switch"
|
|
||||||
fi
|
|
||||||
ac_success=yes
|
|
||||||
break
|
|
||||||
fi
|
|
||||||
done
|
|
||||||
if test x$ac_success = xyes; then
|
|
||||||
break
|
|
||||||
fi
|
|
||||||
done
|
|
||||||
fi])
|
|
||||||
AC_LANG_POP([C++])
|
|
||||||
if test x$ax_cxx_compile_cxx$1_required = xtrue; then
|
|
||||||
if test x$ac_success = xno; then
|
|
||||||
AC_MSG_ERROR([*** A compiler with support for C++$1 language features is required.])
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
if test x$ac_success = xno; then
|
|
||||||
HAVE_CXX$1=0
|
|
||||||
AC_MSG_NOTICE([No compiler with C++$1 support was found])
|
|
||||||
else
|
|
||||||
HAVE_CXX$1=1
|
|
||||||
AC_DEFINE(HAVE_CXX$1,1,
|
|
||||||
[define if the compiler supports basic C++$1 syntax])
|
|
||||||
fi
|
|
||||||
AC_SUBST(HAVE_CXX$1)
|
|
||||||
])
|
|
||||||
|
|
||||||
|
|
||||||
dnl Test body for checking C++11 support
|
|
||||||
|
|
||||||
m4_define([_AX_CXX_COMPILE_STDCXX_testbody_11],
|
|
||||||
_AX_CXX_COMPILE_STDCXX_testbody_new_in_11
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
dnl Test body for checking C++14 support
|
|
||||||
|
|
||||||
m4_define([_AX_CXX_COMPILE_STDCXX_testbody_14],
|
|
||||||
_AX_CXX_COMPILE_STDCXX_testbody_new_in_11
|
|
||||||
_AX_CXX_COMPILE_STDCXX_testbody_new_in_14
|
|
||||||
)
|
|
||||||
|
|
||||||
m4_define([_AX_CXX_COMPILE_STDCXX_testbody_17],
|
|
||||||
_AX_CXX_COMPILE_STDCXX_testbody_new_in_11
|
|
||||||
_AX_CXX_COMPILE_STDCXX_testbody_new_in_14
|
|
||||||
_AX_CXX_COMPILE_STDCXX_testbody_new_in_17
|
|
||||||
)
|
|
||||||
|
|
||||||
dnl Tests for new features in C++11
|
|
||||||
|
|
||||||
m4_define([_AX_CXX_COMPILE_STDCXX_testbody_new_in_11], [[
|
|
||||||
|
|
||||||
// If the compiler admits that it is not ready for C++11, why torture it?
|
|
||||||
// Hopefully, this will speed up the test.
|
|
||||||
|
|
||||||
#ifndef __cplusplus
|
|
||||||
|
|
||||||
#error "This is not a C++ compiler"
|
|
||||||
|
|
||||||
#elif __cplusplus < 201103L
|
|
||||||
|
|
||||||
#error "This is not a C++11 compiler"
|
|
||||||
|
|
||||||
#else
|
|
||||||
|
|
||||||
namespace cxx11
|
|
||||||
{
|
|
||||||
|
|
||||||
namespace test_static_assert
|
|
||||||
{
|
|
||||||
|
|
||||||
template <typename T>
|
|
||||||
struct check
|
|
||||||
{
|
|
||||||
static_assert(sizeof(int) <= sizeof(T), "not big enough");
|
|
||||||
};
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
namespace test_final_override
|
|
||||||
{
|
|
||||||
|
|
||||||
struct Base
|
|
||||||
{
|
|
||||||
virtual ~Base() {}
|
|
||||||
virtual void f() {}
|
|
||||||
};
|
|
||||||
|
|
||||||
struct Derived : public Base
|
|
||||||
{
|
|
||||||
virtual ~Derived() override {}
|
|
||||||
virtual void f() override {}
|
|
||||||
};
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
namespace test_double_right_angle_brackets
|
|
||||||
{
|
|
||||||
|
|
||||||
template < typename T >
|
|
||||||
struct check {};
|
|
||||||
|
|
||||||
typedef check<void> single_type;
|
|
||||||
typedef check<check<void>> double_type;
|
|
||||||
typedef check<check<check<void>>> triple_type;
|
|
||||||
typedef check<check<check<check<void>>>> quadruple_type;
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
namespace test_decltype
|
|
||||||
{
|
|
||||||
|
|
||||||
int
|
|
||||||
f()
|
|
||||||
{
|
|
||||||
int a = 1;
|
|
||||||
decltype(a) b = 2;
|
|
||||||
return a + b;
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
namespace test_type_deduction
|
|
||||||
{
|
|
||||||
|
|
||||||
template < typename T1, typename T2 >
|
|
||||||
struct is_same
|
|
||||||
{
|
|
||||||
static const bool value = false;
|
|
||||||
};
|
|
||||||
|
|
||||||
template < typename T >
|
|
||||||
struct is_same<T, T>
|
|
||||||
{
|
|
||||||
static const bool value = true;
|
|
||||||
};
|
|
||||||
|
|
||||||
template < typename T1, typename T2 >
|
|
||||||
auto
|
|
||||||
add(T1 a1, T2 a2) -> decltype(a1 + a2)
|
|
||||||
{
|
|
||||||
return a1 + a2;
|
|
||||||
}
|
|
||||||
|
|
||||||
int
|
|
||||||
test(const int c, volatile int v)
|
|
||||||
{
|
|
||||||
static_assert(is_same<int, decltype(0)>::value == true, "");
|
|
||||||
static_assert(is_same<int, decltype(c)>::value == false, "");
|
|
||||||
static_assert(is_same<int, decltype(v)>::value == false, "");
|
|
||||||
auto ac = c;
|
|
||||||
auto av = v;
|
|
||||||
auto sumi = ac + av + 'x';
|
|
||||||
auto sumf = ac + av + 1.0;
|
|
||||||
static_assert(is_same<int, decltype(ac)>::value == true, "");
|
|
||||||
static_assert(is_same<int, decltype(av)>::value == true, "");
|
|
||||||
static_assert(is_same<int, decltype(sumi)>::value == true, "");
|
|
||||||
static_assert(is_same<int, decltype(sumf)>::value == false, "");
|
|
||||||
static_assert(is_same<int, decltype(add(c, v))>::value == true, "");
|
|
||||||
return (sumf > 0.0) ? sumi : add(c, v);
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
namespace test_noexcept
|
|
||||||
{
|
|
||||||
|
|
||||||
int f() { return 0; }
|
|
||||||
int g() noexcept { return 0; }
|
|
||||||
|
|
||||||
static_assert(noexcept(f()) == false, "");
|
|
||||||
static_assert(noexcept(g()) == true, "");
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
namespace test_constexpr
|
|
||||||
{
|
|
||||||
|
|
||||||
template < typename CharT >
|
|
||||||
unsigned long constexpr
|
|
||||||
strlen_c_r(const CharT *const s, const unsigned long acc) noexcept
|
|
||||||
{
|
|
||||||
return *s ? strlen_c_r(s + 1, acc + 1) : acc;
|
|
||||||
}
|
|
||||||
|
|
||||||
template < typename CharT >
|
|
||||||
unsigned long constexpr
|
|
||||||
strlen_c(const CharT *const s) noexcept
|
|
||||||
{
|
|
||||||
return strlen_c_r(s, 0UL);
|
|
||||||
}
|
|
||||||
|
|
||||||
static_assert(strlen_c("") == 0UL, "");
|
|
||||||
static_assert(strlen_c("1") == 1UL, "");
|
|
||||||
static_assert(strlen_c("example") == 7UL, "");
|
|
||||||
static_assert(strlen_c("another\0example") == 7UL, "");
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
namespace test_rvalue_references
|
|
||||||
{
|
|
||||||
|
|
||||||
template < int N >
|
|
||||||
struct answer
|
|
||||||
{
|
|
||||||
static constexpr int value = N;
|
|
||||||
};
|
|
||||||
|
|
||||||
answer<1> f(int&) { return answer<1>(); }
|
|
||||||
answer<2> f(const int&) { return answer<2>(); }
|
|
||||||
answer<3> f(int&&) { return answer<3>(); }
|
|
||||||
|
|
||||||
void
|
|
||||||
test()
|
|
||||||
{
|
|
||||||
int i = 0;
|
|
||||||
const int c = 0;
|
|
||||||
static_assert(decltype(f(i))::value == 1, "");
|
|
||||||
static_assert(decltype(f(c))::value == 2, "");
|
|
||||||
static_assert(decltype(f(0))::value == 3, "");
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
namespace test_uniform_initialization
|
|
||||||
{
|
|
||||||
|
|
||||||
struct test
|
|
||||||
{
|
|
||||||
static const int zero {};
|
|
||||||
static const int one {1};
|
|
||||||
};
|
|
||||||
|
|
||||||
static_assert(test::zero == 0, "");
|
|
||||||
static_assert(test::one == 1, "");
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
namespace test_lambdas
|
|
||||||
{
|
|
||||||
|
|
||||||
void
|
|
||||||
test1()
|
|
||||||
{
|
|
||||||
auto lambda1 = [](){};
|
|
||||||
auto lambda2 = lambda1;
|
|
||||||
lambda1();
|
|
||||||
lambda2();
|
|
||||||
}
|
|
||||||
|
|
||||||
int
|
|
||||||
test2()
|
|
||||||
{
|
|
||||||
auto a = [](int i, int j){ return i + j; }(1, 2);
|
|
||||||
auto b = []() -> int { return '0'; }();
|
|
||||||
auto c = [=](){ return a + b; }();
|
|
||||||
auto d = [&](){ return c; }();
|
|
||||||
auto e = [a, &b](int x) mutable {
|
|
||||||
const auto identity = [](int y){ return y; };
|
|
||||||
for (auto i = 0; i < a; ++i)
|
|
||||||
a += b--;
|
|
||||||
return x + identity(a + b);
|
|
||||||
}(0);
|
|
||||||
return a + b + c + d + e;
|
|
||||||
}
|
|
||||||
|
|
||||||
int
|
|
||||||
test3()
|
|
||||||
{
|
|
||||||
const auto nullary = [](){ return 0; };
|
|
||||||
const auto unary = [](int x){ return x; };
|
|
||||||
using nullary_t = decltype(nullary);
|
|
||||||
using unary_t = decltype(unary);
|
|
||||||
const auto higher1st = [](nullary_t f){ return f(); };
|
|
||||||
const auto higher2nd = [unary](nullary_t f1){
|
|
||||||
return [unary, f1](unary_t f2){ return f2(unary(f1())); };
|
|
||||||
};
|
|
||||||
return higher1st(nullary) + higher2nd(nullary)(unary);
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
namespace test_variadic_templates
|
|
||||||
{
|
|
||||||
|
|
||||||
template <int...>
|
|
||||||
struct sum;
|
|
||||||
|
|
||||||
template <int N0, int... N1toN>
|
|
||||||
struct sum<N0, N1toN...>
|
|
||||||
{
|
|
||||||
static constexpr auto value = N0 + sum<N1toN...>::value;
|
|
||||||
};
|
|
||||||
|
|
||||||
template <>
|
|
||||||
struct sum<>
|
|
||||||
{
|
|
||||||
static constexpr auto value = 0;
|
|
||||||
};
|
|
||||||
|
|
||||||
static_assert(sum<>::value == 0, "");
|
|
||||||
static_assert(sum<1>::value == 1, "");
|
|
||||||
static_assert(sum<23>::value == 23, "");
|
|
||||||
static_assert(sum<1, 2>::value == 3, "");
|
|
||||||
static_assert(sum<5, 5, 11>::value == 21, "");
|
|
||||||
static_assert(sum<2, 3, 5, 7, 11, 13>::value == 41, "");
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
// http://stackoverflow.com/questions/13728184/template-aliases-and-sfinae
|
|
||||||
// Clang 3.1 fails with headers of libstd++ 4.8.3 when using std::function
|
|
||||||
// because of this.
|
|
||||||
namespace test_template_alias_sfinae
|
|
||||||
{
|
|
||||||
|
|
||||||
struct foo {};
|
|
||||||
|
|
||||||
template<typename T>
|
|
||||||
using member = typename T::member_type;
|
|
||||||
|
|
||||||
template<typename T>
|
|
||||||
void func(...) {}
|
|
||||||
|
|
||||||
template<typename T>
|
|
||||||
void func(member<T>*) {}
|
|
||||||
|
|
||||||
void test();
|
|
||||||
|
|
||||||
void test() { func<foo>(0); }
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
} // namespace cxx11
|
|
||||||
|
|
||||||
#endif // __cplusplus >= 201103L
|
|
||||||
|
|
||||||
]])
|
|
||||||
|
|
||||||
|
|
||||||
dnl Tests for new features in C++14
|
|
||||||
|
|
||||||
m4_define([_AX_CXX_COMPILE_STDCXX_testbody_new_in_14], [[
|
|
||||||
|
|
||||||
// If the compiler admits that it is not ready for C++14, why torture it?
|
|
||||||
// Hopefully, this will speed up the test.
|
|
||||||
|
|
||||||
#ifndef __cplusplus
|
|
||||||
|
|
||||||
#error "This is not a C++ compiler"
|
|
||||||
|
|
||||||
#elif __cplusplus < 201402L
|
|
||||||
|
|
||||||
#error "This is not a C++14 compiler"
|
|
||||||
|
|
||||||
#else
|
|
||||||
|
|
||||||
namespace cxx14
|
|
||||||
{
|
|
||||||
|
|
||||||
namespace test_polymorphic_lambdas
|
|
||||||
{
|
|
||||||
|
|
||||||
int
|
|
||||||
test()
|
|
||||||
{
|
|
||||||
const auto lambda = [](auto&&... args){
|
|
||||||
const auto istiny = [](auto x){
|
|
||||||
return (sizeof(x) == 1UL) ? 1 : 0;
|
|
||||||
};
|
|
||||||
const int aretiny[] = { istiny(args)... };
|
|
||||||
return aretiny[0];
|
|
||||||
};
|
|
||||||
return lambda(1, 1L, 1.0f, '1');
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
namespace test_binary_literals
|
|
||||||
{
|
|
||||||
|
|
||||||
constexpr auto ivii = 0b0000000000101010;
|
|
||||||
static_assert(ivii == 42, "wrong value");
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
namespace test_generalized_constexpr
|
|
||||||
{
|
|
||||||
|
|
||||||
template < typename CharT >
|
|
||||||
constexpr unsigned long
|
|
||||||
strlen_c(const CharT *const s) noexcept
|
|
||||||
{
|
|
||||||
auto length = 0UL;
|
|
||||||
for (auto p = s; *p; ++p)
|
|
||||||
++length;
|
|
||||||
return length;
|
|
||||||
}
|
|
||||||
|
|
||||||
static_assert(strlen_c("") == 0UL, "");
|
|
||||||
static_assert(strlen_c("x") == 1UL, "");
|
|
||||||
static_assert(strlen_c("test") == 4UL, "");
|
|
||||||
static_assert(strlen_c("another\0test") == 7UL, "");
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
namespace test_lambda_init_capture
|
|
||||||
{
|
|
||||||
|
|
||||||
int
|
|
||||||
test()
|
|
||||||
{
|
|
||||||
auto x = 0;
|
|
||||||
const auto lambda1 = [a = x](int b){ return a + b; };
|
|
||||||
const auto lambda2 = [a = lambda1(x)](){ return a; };
|
|
||||||
return lambda2();
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
namespace test_digit_separators
|
|
||||||
{
|
|
||||||
|
|
||||||
constexpr auto ten_million = 100'000'000;
|
|
||||||
static_assert(ten_million == 100000000, "");
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
namespace test_return_type_deduction
|
|
||||||
{
|
|
||||||
|
|
||||||
auto f(int& x) { return x; }
|
|
||||||
decltype(auto) g(int& x) { return x; }
|
|
||||||
|
|
||||||
template < typename T1, typename T2 >
|
|
||||||
struct is_same
|
|
||||||
{
|
|
||||||
static constexpr auto value = false;
|
|
||||||
};
|
|
||||||
|
|
||||||
template < typename T >
|
|
||||||
struct is_same<T, T>
|
|
||||||
{
|
|
||||||
static constexpr auto value = true;
|
|
||||||
};
|
|
||||||
|
|
||||||
int
|
|
||||||
test()
|
|
||||||
{
|
|
||||||
auto x = 0;
|
|
||||||
static_assert(is_same<int, decltype(f(x))>::value, "");
|
|
||||||
static_assert(is_same<int&, decltype(g(x))>::value, "");
|
|
||||||
return x;
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
} // namespace cxx14
|
|
||||||
|
|
||||||
#endif // __cplusplus >= 201402L
|
|
||||||
|
|
||||||
]])
|
|
||||||
|
|
||||||
|
|
||||||
dnl Tests for new features in C++17
|
|
||||||
|
|
||||||
m4_define([_AX_CXX_COMPILE_STDCXX_testbody_new_in_17], [[
|
|
||||||
|
|
||||||
// If the compiler admits that it is not ready for C++17, why torture it?
|
|
||||||
// Hopefully, this will speed up the test.
|
|
||||||
|
|
||||||
#ifndef __cplusplus
|
|
||||||
|
|
||||||
#error "This is not a C++ compiler"
|
|
||||||
|
|
||||||
#elif __cplusplus < 201703L
|
|
||||||
|
|
||||||
#error "This is not a C++17 compiler"
|
|
||||||
|
|
||||||
#else
|
|
||||||
|
|
||||||
#include <initializer_list>
|
|
||||||
#include <utility>
|
|
||||||
#include <type_traits>
|
|
||||||
|
|
||||||
namespace cxx17
|
|
||||||
{
|
|
||||||
|
|
||||||
namespace test_constexpr_lambdas
|
|
||||||
{
|
|
||||||
|
|
||||||
constexpr int foo = [](){return 42;}();
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
namespace test::nested_namespace::definitions
|
|
||||||
{
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
namespace test_fold_expression
|
|
||||||
{
|
|
||||||
|
|
||||||
template<typename... Args>
|
|
||||||
int multiply(Args... args)
|
|
||||||
{
|
|
||||||
return (args * ... * 1);
|
|
||||||
}
|
|
||||||
|
|
||||||
template<typename... Args>
|
|
||||||
bool all(Args... args)
|
|
||||||
{
|
|
||||||
return (args && ...);
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
namespace test_extended_static_assert
|
|
||||||
{
|
|
||||||
|
|
||||||
static_assert (true);
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
namespace test_auto_brace_init_list
|
|
||||||
{
|
|
||||||
|
|
||||||
auto foo = {5};
|
|
||||||
auto bar {5};
|
|
||||||
|
|
||||||
static_assert(std::is_same<std::initializer_list<int>, decltype(foo)>::value);
|
|
||||||
static_assert(std::is_same<int, decltype(bar)>::value);
|
|
||||||
}
|
|
||||||
|
|
||||||
namespace test_typename_in_template_template_parameter
|
|
||||||
{
|
|
||||||
|
|
||||||
template<template<typename> typename X> struct D;
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
namespace test_fallthrough_nodiscard_maybe_unused_attributes
|
|
||||||
{
|
|
||||||
|
|
||||||
int f1()
|
|
||||||
{
|
|
||||||
return 42;
|
|
||||||
}
|
|
||||||
|
|
||||||
[[nodiscard]] int f2()
|
|
||||||
{
|
|
||||||
[[maybe_unused]] auto unused = f1();
|
|
||||||
|
|
||||||
switch (f1())
|
|
||||||
{
|
|
||||||
case 17:
|
|
||||||
f1();
|
|
||||||
[[fallthrough]];
|
|
||||||
case 42:
|
|
||||||
f1();
|
|
||||||
}
|
|
||||||
return f1();
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
namespace test_extended_aggregate_initialization
|
|
||||||
{
|
|
||||||
|
|
||||||
struct base1
|
|
||||||
{
|
|
||||||
int b1, b2 = 42;
|
|
||||||
};
|
|
||||||
|
|
||||||
struct base2
|
|
||||||
{
|
|
||||||
base2() {
|
|
||||||
b3 = 42;
|
|
||||||
}
|
|
||||||
int b3;
|
|
||||||
};
|
|
||||||
|
|
||||||
struct derived : base1, base2
|
|
||||||
{
|
|
||||||
int d;
|
|
||||||
};
|
|
||||||
|
|
||||||
derived d1 {{1, 2}, {}, 4}; // full initialization
|
|
||||||
derived d2 {{}, {}, 4}; // value-initialized bases
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
namespace test_general_range_based_for_loop
|
|
||||||
{
|
|
||||||
|
|
||||||
struct iter
|
|
||||||
{
|
|
||||||
int i;
|
|
||||||
|
|
||||||
int& operator* ()
|
|
||||||
{
|
|
||||||
return i;
|
|
||||||
}
|
|
||||||
|
|
||||||
const int& operator* () const
|
|
||||||
{
|
|
||||||
return i;
|
|
||||||
}
|
|
||||||
|
|
||||||
iter& operator++()
|
|
||||||
{
|
|
||||||
++i;
|
|
||||||
return *this;
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
struct sentinel
|
|
||||||
{
|
|
||||||
int i;
|
|
||||||
};
|
|
||||||
|
|
||||||
bool operator== (const iter& i, const sentinel& s)
|
|
||||||
{
|
|
||||||
return i.i == s.i;
|
|
||||||
}
|
|
||||||
|
|
||||||
bool operator!= (const iter& i, const sentinel& s)
|
|
||||||
{
|
|
||||||
return !(i == s);
|
|
||||||
}
|
|
||||||
|
|
||||||
struct range
|
|
||||||
{
|
|
||||||
iter begin() const
|
|
||||||
{
|
|
||||||
return {0};
|
|
||||||
}
|
|
||||||
|
|
||||||
sentinel end() const
|
|
||||||
{
|
|
||||||
return {5};
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
void f()
|
|
||||||
{
|
|
||||||
range r {};
|
|
||||||
|
|
||||||
for (auto i : r)
|
|
||||||
{
|
|
||||||
[[maybe_unused]] auto v = i;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
namespace test_lambda_capture_asterisk_this_by_value
|
|
||||||
{
|
|
||||||
|
|
||||||
struct t
|
|
||||||
{
|
|
||||||
int i;
|
|
||||||
int foo()
|
|
||||||
{
|
|
||||||
return [*this]()
|
|
||||||
{
|
|
||||||
return i;
|
|
||||||
}();
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
namespace test_enum_class_construction
|
|
||||||
{
|
|
||||||
|
|
||||||
enum class byte : unsigned char
|
|
||||||
{};
|
|
||||||
|
|
||||||
byte foo {42};
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
namespace test_constexpr_if
|
|
||||||
{
|
|
||||||
|
|
||||||
template <bool cond>
|
|
||||||
int f ()
|
|
||||||
{
|
|
||||||
if constexpr(cond)
|
|
||||||
{
|
|
||||||
return 13;
|
|
||||||
}
|
|
||||||
else
|
|
||||||
{
|
|
||||||
return 42;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
namespace test_selection_statement_with_initializer
|
|
||||||
{
|
|
||||||
|
|
||||||
int f()
|
|
||||||
{
|
|
||||||
return 13;
|
|
||||||
}
|
|
||||||
|
|
||||||
int f2()
|
|
||||||
{
|
|
||||||
if (auto i = f(); i > 0)
|
|
||||||
{
|
|
||||||
return 3;
|
|
||||||
}
|
|
||||||
|
|
||||||
switch (auto i = f(); i + 4)
|
|
||||||
{
|
|
||||||
case 17:
|
|
||||||
return 2;
|
|
||||||
|
|
||||||
default:
|
|
||||||
return 1;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
namespace test_template_argument_deduction_for_class_templates
|
|
||||||
{
|
|
||||||
|
|
||||||
template <typename T1, typename T2>
|
|
||||||
struct pair
|
|
||||||
{
|
|
||||||
pair (T1 p1, T2 p2)
|
|
||||||
: m1 {p1},
|
|
||||||
m2 {p2}
|
|
||||||
{}
|
|
||||||
|
|
||||||
T1 m1;
|
|
||||||
T2 m2;
|
|
||||||
};
|
|
||||||
|
|
||||||
void f()
|
|
||||||
{
|
|
||||||
[[maybe_unused]] auto p = pair{13, 42u};
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
namespace test_non_type_auto_template_parameters
|
|
||||||
{
|
|
||||||
|
|
||||||
template <auto n>
|
|
||||||
struct B
|
|
||||||
{};
|
|
||||||
|
|
||||||
B<5> b1;
|
|
||||||
B<'a'> b2;
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
namespace test_structured_bindings
|
|
||||||
{
|
|
||||||
|
|
||||||
int arr[2] = { 1, 2 };
|
|
||||||
std::pair<int, int> pr = { 1, 2 };
|
|
||||||
|
|
||||||
auto f1() -> int(&)[2]
|
|
||||||
{
|
|
||||||
return arr;
|
|
||||||
}
|
|
||||||
|
|
||||||
auto f2() -> std::pair<int, int>&
|
|
||||||
{
|
|
||||||
return pr;
|
|
||||||
}
|
|
||||||
|
|
||||||
struct S
|
|
||||||
{
|
|
||||||
int x1 : 2;
|
|
||||||
volatile double y1;
|
|
||||||
};
|
|
||||||
|
|
||||||
S f3()
|
|
||||||
{
|
|
||||||
return {};
|
|
||||||
}
|
|
||||||
|
|
||||||
auto [ x1, y1 ] = f1();
|
|
||||||
auto& [ xr1, yr1 ] = f1();
|
|
||||||
auto [ x2, y2 ] = f2();
|
|
||||||
auto& [ xr2, yr2 ] = f2();
|
|
||||||
const auto [ x3, y3 ] = f3();
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
namespace test_exception_spec_type_system
|
|
||||||
{
|
|
||||||
|
|
||||||
struct Good {};
|
|
||||||
struct Bad {};
|
|
||||||
|
|
||||||
void g1() noexcept;
|
|
||||||
void g2();
|
|
||||||
|
|
||||||
template<typename T>
|
|
||||||
Bad
|
|
||||||
f(T*, T*);
|
|
||||||
|
|
||||||
template<typename T1, typename T2>
|
|
||||||
Good
|
|
||||||
f(T1*, T2*);
|
|
||||||
|
|
||||||
static_assert (std::is_same_v<Good, decltype(f(g1, g2))>);
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
namespace test_inline_variables
|
|
||||||
{
|
|
||||||
|
|
||||||
template<class T> void f(T)
|
|
||||||
{}
|
|
||||||
|
|
||||||
template<class T> inline T g(T)
|
|
||||||
{
|
|
||||||
return T{};
|
|
||||||
}
|
|
||||||
|
|
||||||
template<> inline void f<>(int)
|
|
||||||
{}
|
|
||||||
|
|
||||||
template<> int g<>(int)
|
|
||||||
{
|
|
||||||
return 5;
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
} // namespace cxx17
|
|
||||||
|
|
||||||
#endif // __cplusplus < 201703L
|
|
||||||
|
|
||||||
]])
|
|
|
@ -1,35 +0,0 @@
|
||||||
# =============================================================================
|
|
||||||
# https://www.gnu.org/software/autoconf-archive/ax_cxx_compile_stdcxx_17.html
|
|
||||||
# =============================================================================
|
|
||||||
#
|
|
||||||
# SYNOPSIS
|
|
||||||
#
|
|
||||||
# AX_CXX_COMPILE_STDCXX_17([ext|noext], [mandatory|optional])
|
|
||||||
#
|
|
||||||
# DESCRIPTION
|
|
||||||
#
|
|
||||||
# Check for baseline language coverage in the compiler for the C++17
|
|
||||||
# standard; if necessary, add switches to CXX and CXXCPP to enable
|
|
||||||
# support.
|
|
||||||
#
|
|
||||||
# This macro is a convenience alias for calling the AX_CXX_COMPILE_STDCXX
|
|
||||||
# macro with the version set to C++17. The two optional arguments are
|
|
||||||
# forwarded literally as the second and third argument respectively.
|
|
||||||
# Please see the documentation for the AX_CXX_COMPILE_STDCXX macro for
|
|
||||||
# more information. If you want to use this macro, you also need to
|
|
||||||
# download the ax_cxx_compile_stdcxx.m4 file.
|
|
||||||
#
|
|
||||||
# LICENSE
|
|
||||||
#
|
|
||||||
# Copyright (c) 2015 Moritz Klammler <moritz@klammler.eu>
|
|
||||||
# Copyright (c) 2016 Krzesimir Nowak <qdlacz@gmail.com>
|
|
||||||
#
|
|
||||||
# Copying and distribution of this file, with or without modification, are
|
|
||||||
# permitted in any medium without royalty provided the copyright notice
|
|
||||||
# and this notice are preserved. This file is offered as-is, without any
|
|
||||||
# warranty.
|
|
||||||
|
|
||||||
#serial 2
|
|
||||||
|
|
||||||
AX_REQUIRE_DEFINED([AX_CXX_COMPILE_STDCXX])
|
|
||||||
AC_DEFUN([AX_CXX_COMPILE_STDCXX_17], [AX_CXX_COMPILE_STDCXX([17], [$1], [$2])])
|
|
|
@ -1,64 +0,0 @@
|
||||||
# Ensure that this bug is not present in the C++ toolchain we are using.
|
|
||||||
#
|
|
||||||
# URL for bug: https://gcc.gnu.org/bugzilla/show_bug.cgi?id=80431
|
|
||||||
#
|
|
||||||
# The test program is from that issue, with only a slight modification
|
|
||||||
# to set an exit status instead of printing strings.
|
|
||||||
AC_DEFUN([ENSURE_NO_GCC_BUG_80431],
|
|
||||||
[
|
|
||||||
AC_MSG_CHECKING([that GCC bug 80431 is fixed])
|
|
||||||
AC_LANG_PUSH(C++)
|
|
||||||
AC_RUN_IFELSE(
|
|
||||||
[AC_LANG_PROGRAM(
|
|
||||||
[[
|
|
||||||
#include <cstdio>
|
|
||||||
|
|
||||||
static bool a = true;
|
|
||||||
static bool b = true;
|
|
||||||
|
|
||||||
struct Options { };
|
|
||||||
|
|
||||||
struct Option
|
|
||||||
{
|
|
||||||
Option(Options * options)
|
|
||||||
{
|
|
||||||
a = false;
|
|
||||||
}
|
|
||||||
|
|
||||||
~Option()
|
|
||||||
{
|
|
||||||
b = false;
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
struct MyOptions : Options { };
|
|
||||||
|
|
||||||
struct MyOptions2 : virtual MyOptions
|
|
||||||
{
|
|
||||||
Option foo{this};
|
|
||||||
};
|
|
||||||
]],
|
|
||||||
[[
|
|
||||||
{
|
|
||||||
MyOptions2 opts;
|
|
||||||
}
|
|
||||||
return (a << 1) | b;
|
|
||||||
]])],
|
|
||||||
[status_80431=0],
|
|
||||||
[status_80431=$?],
|
|
||||||
[
|
|
||||||
# Assume we're bug-free when cross-compiling
|
|
||||||
])
|
|
||||||
AC_LANG_POP(C++)
|
|
||||||
AS_CASE([$status_80431],
|
|
||||||
[0],[
|
|
||||||
AC_MSG_RESULT(yes)
|
|
||||||
],
|
|
||||||
[2],[
|
|
||||||
AC_MSG_RESULT(no)
|
|
||||||
AC_MSG_ERROR(Cannot build Nix with C++ compiler with this bug)
|
|
||||||
],
|
|
||||||
[
|
|
||||||
AC_MSG_RESULT(unexpected result $status_80431: not expected failure with bug, ignoring)
|
|
||||||
])
|
|
||||||
])
|
|
42
meson.build
42
meson.build
|
@ -85,8 +85,8 @@ endif
|
||||||
enable_docs = get_option('enable-docs')
|
enable_docs = get_option('enable-docs')
|
||||||
enable_internal_api_docs = get_option('internal-api-docs')
|
enable_internal_api_docs = get_option('internal-api-docs')
|
||||||
|
|
||||||
doxygen = find_program('doxygen', required : enable_internal_api_docs)
|
doxygen = find_program('doxygen', required : enable_internal_api_docs, native : true)
|
||||||
bash = find_program('bash')
|
bash = find_program('bash', native : true)
|
||||||
|
|
||||||
rapidcheck_meson = dependency('rapidcheck', required : enable_internal_api_docs)
|
rapidcheck_meson = dependency('rapidcheck', required : enable_internal_api_docs)
|
||||||
|
|
||||||
|
@ -114,6 +114,25 @@ endif
|
||||||
|
|
||||||
cxx = meson.get_compiler('cpp')
|
cxx = meson.get_compiler('cpp')
|
||||||
|
|
||||||
|
# Translate some historical and Mesony CPU names to Lixy CPU names.
|
||||||
|
# FIXME(Qyriad): the 32-bit x86 code is not tested right now, because cross compilation for Lix
|
||||||
|
# to those architectures is currently broken for other reasons, namely:
|
||||||
|
# - nixos-23.11's x86_64-linux -> i686-linux glibc does not build (also applies to cppnix)
|
||||||
|
# - nixpkgs-unstable (as of 2024/04)'s boehmgc is not compatible with our patches
|
||||||
|
# It's also broken in cppnix, though.
|
||||||
|
host_cpu = host_machine.cpu_family()
|
||||||
|
if host_cpu in ['x86', 'i686', 'i386']
|
||||||
|
# Meson considers 32-bit x86 CPUs to be "x86", and does not consider 64-bit
|
||||||
|
# x86 CPUs to be "x86" (instead using "x86_64", which needs no translation).
|
||||||
|
host_cpu = 'i686'
|
||||||
|
elif host_cpu == 'amd64'
|
||||||
|
# This should not be needed under normal circumstances, but someone could pass a --cross-file
|
||||||
|
# that sets the cpu_family to this.
|
||||||
|
host_cpu = 'x86_64'
|
||||||
|
elif host_cpu in ['armv6', 'armv7']
|
||||||
|
host_cpu += 'l'
|
||||||
|
endif
|
||||||
|
|
||||||
host_system = host_machine.cpu_family() + '-' + host_machine.system()
|
host_system = host_machine.cpu_family() + '-' + host_machine.system()
|
||||||
message('canonical Nix system name:', host_system)
|
message('canonical Nix system name:', host_system)
|
||||||
|
|
||||||
|
@ -181,6 +200,7 @@ openssl = dependency('libcrypto', 'openssl', required : true)
|
||||||
deps += openssl
|
deps += openssl
|
||||||
|
|
||||||
aws_sdk = dependency('aws-cpp-sdk-core', required : false)
|
aws_sdk = dependency('aws-cpp-sdk-core', required : false)
|
||||||
|
aws_sdk_transfer = dependency('aws-cpp-sdk-transfer', required : aws_sdk.found())
|
||||||
if aws_sdk.found()
|
if aws_sdk.found()
|
||||||
# The AWS pkg-config adds -std=c++11.
|
# The AWS pkg-config adds -std=c++11.
|
||||||
# https://github.com/aws/aws-sdk-cpp/issues/2673
|
# https://github.com/aws/aws-sdk-cpp/issues/2673
|
||||||
|
@ -198,7 +218,7 @@ if aws_sdk.found()
|
||||||
'AWS_VERSION_MINOR': s[1].to_int(),
|
'AWS_VERSION_MINOR': s[1].to_int(),
|
||||||
'AWS_VERSION_PATCH': s[2].to_int(),
|
'AWS_VERSION_PATCH': s[2].to_int(),
|
||||||
}
|
}
|
||||||
aws_sdk_transfer = dependency('aws-cpp-sdk-transfer', required : true).partial_dependency(
|
aws_sdk_transfer = aws_sdk_transfer.partial_dependency(
|
||||||
compile_args : false,
|
compile_args : false,
|
||||||
includes : true,
|
includes : true,
|
||||||
link_args : true,
|
link_args : true,
|
||||||
|
@ -255,7 +275,7 @@ gtest = [
|
||||||
]
|
]
|
||||||
deps += gtest
|
deps += gtest
|
||||||
|
|
||||||
toml11 = dependency('toml11', version : '>=3.7.0', required : true)
|
toml11 = dependency('toml11', version : '>=3.7.0', required : true, method : 'cmake')
|
||||||
deps += toml11
|
deps += toml11
|
||||||
|
|
||||||
nlohmann_json = dependency('nlohmann_json', required : true)
|
nlohmann_json = dependency('nlohmann_json', required : true)
|
||||||
|
@ -272,17 +292,17 @@ deps += lix_doc
|
||||||
#
|
#
|
||||||
# Build-time tools
|
# Build-time tools
|
||||||
#
|
#
|
||||||
coreutils = find_program('coreutils')
|
coreutils = find_program('coreutils', native : true)
|
||||||
dot = find_program('dot', required : false)
|
dot = find_program('dot', required : false, native : true)
|
||||||
pymod = import('python')
|
pymod = import('python')
|
||||||
python = pymod.find_installation('python3')
|
python = pymod.find_installation('python3')
|
||||||
|
|
||||||
if enable_docs
|
if enable_docs
|
||||||
mdbook = find_program('mdbook')
|
mdbook = find_program('mdbook', native : true)
|
||||||
endif
|
endif
|
||||||
|
|
||||||
# Used to workaround https://github.com/mesonbuild/meson/issues/2320 in src/nix/meson.build.
|
# Used to workaround https://github.com/mesonbuild/meson/issues/2320 in src/nix/meson.build.
|
||||||
installcmd = find_program('install')
|
installcmd = find_program('install', native : true)
|
||||||
|
|
||||||
enable_embedded_sandbox_shell = get_option('enable-embedded-sandbox-shell')
|
enable_embedded_sandbox_shell = get_option('enable-embedded-sandbox-shell')
|
||||||
if enable_embedded_sandbox_shell
|
if enable_embedded_sandbox_shell
|
||||||
|
@ -307,9 +327,9 @@ endif
|
||||||
# FIXME(Qyriad): the autoconf system checks that busybox has the "standalone" feature, indicating
|
# FIXME(Qyriad): the autoconf system checks that busybox has the "standalone" feature, indicating
|
||||||
# that busybox sh won't run busybox applets as builtins (which would break our sandbox).
|
# that busybox sh won't run busybox applets as builtins (which would break our sandbox).
|
||||||
|
|
||||||
lsof = find_program('lsof')
|
lsof = find_program('lsof', native : true)
|
||||||
bison = find_program('bison')
|
bison = find_program('bison', native : true)
|
||||||
flex = find_program('flex')
|
flex = find_program('flex', native : true)
|
||||||
|
|
||||||
# This is how Nix does generated headers...
|
# This is how Nix does generated headers...
|
||||||
# other instances of header generation use a very similar command.
|
# other instances of header generation use a very similar command.
|
||||||
|
|
|
@ -1 +0,0 @@
|
||||||
$(eval $(call install-file-as, $(d)/completion.sh, $(datarootdir)/bash-completion/completions/nix, 0644))
|
|
|
@ -1 +0,0 @@
|
||||||
$(eval $(call install-file-as, $(d)/completion.fish, $(datarootdir)/fish/vendor_completions.d/nix.fish, 0644))
|
|
|
@ -1,5 +0,0 @@
|
||||||
ifdef HOST_DARWIN
|
|
||||||
|
|
||||||
$(eval $(call install-data-in, $(d)/org.nixos.nix-daemon.plist, $(prefix)/Library/LaunchDaemons))
|
|
||||||
|
|
||||||
endif
|
|
|
@ -1,8 +0,0 @@
|
||||||
ifdef HOST_LINUX
|
|
||||||
|
|
||||||
$(foreach n, nix-daemon.socket nix-daemon.service, $(eval $(call install-file-in, $(d)/$(n), $(prefix)/lib/systemd/system, 0644)))
|
|
||||||
$(foreach n, nix-daemon.conf, $(eval $(call install-file-in, $(d)/$(n), $(prefix)/lib/tmpfiles.d, 0644)))
|
|
||||||
|
|
||||||
clean-files += $(d)/nix-daemon.socket $(d)/nix-daemon.service $(d)/nix-daemon.conf
|
|
||||||
|
|
||||||
endif
|
|
|
@ -1,2 +0,0 @@
|
||||||
$(eval $(call install-file-as, $(d)/completion.zsh, $(datarootdir)/zsh/site-functions/_nix, 0644))
|
|
||||||
$(eval $(call install-file-as, $(d)/run-help-nix, $(datarootdir)/zsh/site-functions/run-help-nix, 0644))
|
|
|
@ -1,10 +0,0 @@
|
||||||
# Initialise support for build directories.
|
|
||||||
builddir ?=
|
|
||||||
|
|
||||||
ifdef builddir
|
|
||||||
buildprefix = $(builddir)/
|
|
||||||
buildprefixrel = $(builddir)
|
|
||||||
else
|
|
||||||
buildprefix =
|
|
||||||
buildprefixrel = .
|
|
||||||
endif
|
|
11
mk/clean.mk
11
mk/clean.mk
|
@ -1,11 +0,0 @@
|
||||||
clean-files :=
|
|
||||||
|
|
||||||
clean:
|
|
||||||
$(suppress) rm -fv -- $(clean-files)
|
|
||||||
|
|
||||||
dryclean:
|
|
||||||
@for i in $(clean-files); do if [ -e $$i ]; then echo $$i; fi; done | sort
|
|
||||||
|
|
||||||
print-top-help += \
|
|
||||||
echo " clean: Delete generated files"; \
|
|
||||||
echo " dryclean: Show what files would be deleted by 'make clean'";
|
|
|
@ -1,15 +0,0 @@
|
||||||
test_dir=tests/functional
|
|
||||||
|
|
||||||
test=$(echo -n "$test" | sed -e "s|^$test_dir/||")
|
|
||||||
|
|
||||||
TESTS_ENVIRONMENT=("TEST_NAME=${test%.*}" 'NIX_REMOTE=')
|
|
||||||
|
|
||||||
: ${BASH:=/usr/bin/env bash}
|
|
||||||
|
|
||||||
init_test () {
|
|
||||||
cd "$test_dir" && env "${TESTS_ENVIRONMENT[@]}" $BASH -e init.sh 2>/dev/null > /dev/null
|
|
||||||
}
|
|
||||||
|
|
||||||
run_test_proper () {
|
|
||||||
cd "$test_dir/$(dirname $test)" && env "${TESTS_ENVIRONMENT[@]}" $BASH -e $(basename $test)
|
|
||||||
}
|
|
|
@ -1,5 +0,0 @@
|
||||||
%.gen.hh: %
|
|
||||||
@echo 'R"__NIX_STR(' >> $@.tmp
|
|
||||||
$(trace-gen) cat $< >> $@.tmp
|
|
||||||
@echo ')__NIX_STR"' >> $@.tmp
|
|
||||||
@mv $@.tmp $@
|
|
|
@ -1,11 +0,0 @@
|
||||||
#!/usr/bin/env bash
|
|
||||||
|
|
||||||
set -eu -o pipefail
|
|
||||||
|
|
||||||
test=$1
|
|
||||||
|
|
||||||
dir="$(dirname "${BASH_SOURCE[0]}")"
|
|
||||||
source "$dir/common-test.sh"
|
|
||||||
|
|
||||||
(init_test)
|
|
||||||
run_test_proper
|
|
|
@ -1,12 +0,0 @@
|
||||||
# This file is only active for `./configure --disable-tests`.
|
|
||||||
# Running `make check` or `make installcheck` would indicate a mistake in the
|
|
||||||
# caller.
|
|
||||||
|
|
||||||
installcheck:
|
|
||||||
@echo "Tests are disabled. Configure without '--disable-tests', or avoid calling 'make installcheck'."
|
|
||||||
@exit 1
|
|
||||||
|
|
||||||
# This currently has little effect.
|
|
||||||
check:
|
|
||||||
@echo "Tests are disabled. Configure without '--disable-tests', or avoid calling 'make check'."
|
|
||||||
@exit 1
|
|
|
@ -1,14 +0,0 @@
|
||||||
# Utility function for recursively finding files, e.g.
|
|
||||||
# ‘$(call rwildcard, path/to/dir, *.c *.h)’.
|
|
||||||
rwildcard=$(foreach d,$(wildcard $1*),$(call rwildcard,$d/,$2) $(filter $(subst *,%,$2),$d))
|
|
||||||
|
|
||||||
# Given a file name, produce the corresponding dependency file
|
|
||||||
# (e.g. ‘foo/bar.o’ becomes ‘foo/.bar.o.dep’).
|
|
||||||
filename-to-dep = $(dir $1).$(notdir $1).dep
|
|
||||||
|
|
||||||
# Return the full path to a program by looking it up in $PATH, or the
|
|
||||||
# empty string if not found.
|
|
||||||
find-program = $(shell for i in $$(IFS=: ; echo $$PATH); do p=$$i/$(strip $1); if [ -e $$p ]; then echo $$p; break; fi; done)
|
|
||||||
|
|
||||||
# Ensure that the given string ends in a single slash.
|
|
||||||
add-trailing-slash = $(patsubst %/,%,$(1))/
|
|
|
@ -1,11 +0,0 @@
|
||||||
# Default installation paths.
|
|
||||||
prefix ?= /usr/local
|
|
||||||
libdir ?= $(prefix)/lib
|
|
||||||
bindir ?= $(prefix)/bin
|
|
||||||
libexecdir ?= $(prefix)/libexec
|
|
||||||
datadir ?= $(prefix)/share
|
|
||||||
localstatedir ?= $(prefix)/var
|
|
||||||
sysconfdir ?= $(prefix)/etc
|
|
||||||
mandir ?= $(prefix)/share/man
|
|
||||||
|
|
||||||
DESTDIR ?=
|
|
|
@ -1,62 +0,0 @@
|
||||||
# Add a rule for creating $(1) as a directory. This template may be
|
|
||||||
# called multiple times for the same directory.
|
|
||||||
define create-dir
|
|
||||||
_i := $$(call add-trailing-slash, $(DESTDIR)$$(strip $(1)))
|
|
||||||
ifndef $$(_i)_SEEN
|
|
||||||
$$(_i)_SEEN = 1
|
|
||||||
$$(_i):
|
|
||||||
$$(trace-mkdir) install -d "$$@"
|
|
||||||
endif
|
|
||||||
endef
|
|
||||||
|
|
||||||
|
|
||||||
# Add a rule for installing file $(1) as file $(2) with mode $(3).
|
|
||||||
# The directory containing $(2) will be created automatically.
|
|
||||||
define install-file-as
|
|
||||||
|
|
||||||
_i := $(DESTDIR)$$(strip $(2))
|
|
||||||
|
|
||||||
install: $$(_i)
|
|
||||||
|
|
||||||
$$(_i): $(1) | $$(dir $$(_i))
|
|
||||||
$$(trace-install) install -m $(3) $(1) "$$@"
|
|
||||||
|
|
||||||
$$(eval $$(call create-dir, $$(dir $(2))))
|
|
||||||
|
|
||||||
endef
|
|
||||||
|
|
||||||
|
|
||||||
# Add a rule for installing file $(1) in directory $(2) with mode
|
|
||||||
# $(3). The directory will be created automatically.
|
|
||||||
define install-file-in
|
|
||||||
$$(eval $$(call install-file-as,$(1),$(2)/$$(notdir $(1)),$(3)))
|
|
||||||
endef
|
|
||||||
|
|
||||||
|
|
||||||
define install-program-in
|
|
||||||
$$(eval $$(call install-file-in,$(1),$(2),0755))
|
|
||||||
endef
|
|
||||||
|
|
||||||
|
|
||||||
define install-data-in
|
|
||||||
$$(eval $$(call install-file-in,$(1),$(2),0644))
|
|
||||||
endef
|
|
||||||
|
|
||||||
|
|
||||||
# Install a symlink from $(2) to $(1). Note that $(1) need not exist.
|
|
||||||
define install-symlink
|
|
||||||
|
|
||||||
_i := $(DESTDIR)$$(strip $(2))
|
|
||||||
|
|
||||||
install: $$(_i)
|
|
||||||
|
|
||||||
$$(_i): | $$(dir $$(_i))
|
|
||||||
$$(trace-install) ln -sfn $(1) "$$@"
|
|
||||||
|
|
||||||
$$(eval $$(call create-dir, $$(dir $(2))))
|
|
||||||
|
|
||||||
endef
|
|
||||||
|
|
||||||
|
|
||||||
print-top-help += \
|
|
||||||
echo " install: Install into \$$(prefix) (currently set to '$(prefix)')";
|
|
168
mk/lib.mk
168
mk/lib.mk
|
@ -1,168 +0,0 @@
|
||||||
default: all
|
|
||||||
|
|
||||||
|
|
||||||
# Get rid of default suffixes. FIXME: is this a good idea?
|
|
||||||
.SUFFIXES:
|
|
||||||
|
|
||||||
|
|
||||||
# Initialise some variables.
|
|
||||||
bin-scripts :=
|
|
||||||
noinst-scripts :=
|
|
||||||
man-pages :=
|
|
||||||
install-tests :=
|
|
||||||
install-tests-groups :=
|
|
||||||
|
|
||||||
ifdef HOST_OS
|
|
||||||
HOST_KERNEL = $(firstword $(subst -, ,$(HOST_OS)))
|
|
||||||
ifeq ($(HOST_KERNEL), cygwin)
|
|
||||||
HOST_CYGWIN = 1
|
|
||||||
endif
|
|
||||||
ifeq ($(patsubst darwin%,,$(HOST_KERNEL)),)
|
|
||||||
HOST_DARWIN = 1
|
|
||||||
endif
|
|
||||||
ifeq ($(patsubst freebsd%,,$(HOST_KERNEL)),)
|
|
||||||
HOST_FREEBSD = 1
|
|
||||||
endif
|
|
||||||
ifeq ($(HOST_KERNEL), linux)
|
|
||||||
HOST_LINUX = 1
|
|
||||||
endif
|
|
||||||
ifeq ($(patsubst solaris%,,$(HOST_KERNEL)),)
|
|
||||||
HOST_SOLARIS = 1
|
|
||||||
endif
|
|
||||||
endif
|
|
||||||
|
|
||||||
# Hack to define a literal space.
|
|
||||||
space :=
|
|
||||||
space +=
|
|
||||||
|
|
||||||
|
|
||||||
# Hack to define a literal newline.
|
|
||||||
define newline
|
|
||||||
|
|
||||||
|
|
||||||
endef
|
|
||||||
|
|
||||||
|
|
||||||
# Pass -fPIC if we're building dynamic libraries.
|
|
||||||
BUILD_SHARED_LIBS ?= 1
|
|
||||||
|
|
||||||
ifeq ($(BUILD_SHARED_LIBS), 1)
|
|
||||||
ifdef HOST_CYGWIN
|
|
||||||
GLOBAL_CFLAGS += -U__STRICT_ANSI__ -D_GNU_SOURCE
|
|
||||||
GLOBAL_CXXFLAGS += -U__STRICT_ANSI__ -D_GNU_SOURCE
|
|
||||||
else
|
|
||||||
GLOBAL_CFLAGS += -fPIC
|
|
||||||
GLOBAL_CXXFLAGS += -fPIC
|
|
||||||
endif
|
|
||||||
ifndef HOST_DARWIN
|
|
||||||
ifndef HOST_SOLARIS
|
|
||||||
ifndef HOST_FREEBSD
|
|
||||||
GLOBAL_LDFLAGS += -Wl,--no-copy-dt-needed-entries
|
|
||||||
endif
|
|
||||||
endif
|
|
||||||
endif
|
|
||||||
SET_RPATH_TO_LIBS ?= 1
|
|
||||||
endif
|
|
||||||
|
|
||||||
# Pass -g if we want debug info.
|
|
||||||
BUILD_DEBUG ?= 1
|
|
||||||
|
|
||||||
ifeq ($(BUILD_DEBUG), 1)
|
|
||||||
GLOBAL_CFLAGS += -g
|
|
||||||
GLOBAL_CXXFLAGS += -g
|
|
||||||
endif
|
|
||||||
|
|
||||||
|
|
||||||
include mk/build-dir.mk
|
|
||||||
include mk/install-dirs.mk
|
|
||||||
include mk/functions.mk
|
|
||||||
include mk/tracing.mk
|
|
||||||
include mk/clean.mk
|
|
||||||
include mk/install.mk
|
|
||||||
include mk/libraries.mk
|
|
||||||
include mk/programs.mk
|
|
||||||
include mk/patterns.mk
|
|
||||||
include mk/templates.mk
|
|
||||||
include mk/cxx-big-literal.mk
|
|
||||||
include mk/tests.mk
|
|
||||||
|
|
||||||
|
|
||||||
# Include all sub-Makefiles.
|
|
||||||
define include-sub-makefile
|
|
||||||
d := $$(patsubst %/,%,$$(dir $(1)))
|
|
||||||
include $(1)
|
|
||||||
endef
|
|
||||||
|
|
||||||
$(foreach mf, $(makefiles), $(eval $(call include-sub-makefile,$(mf))))
|
|
||||||
|
|
||||||
|
|
||||||
# Instantiate stuff.
|
|
||||||
$(foreach lib, $(libraries), $(eval $(call build-library,$(lib))))
|
|
||||||
$(foreach prog, $(programs), $(eval $(call build-program,$(prog))))
|
|
||||||
$(foreach script, $(bin-scripts), $(eval $(call install-program-in,$(script),$(bindir))))
|
|
||||||
$(foreach script, $(bin-scripts), $(eval programs-list += $(script)))
|
|
||||||
$(foreach script, $(noinst-scripts), $(eval programs-list += $(script)))
|
|
||||||
$(foreach template, $(template-files), $(eval $(call instantiate-template,$(template))))
|
|
||||||
$(foreach test, $(install-tests), \
|
|
||||||
$(eval $(call run-install-test,$(test))) \
|
|
||||||
$(eval installcheck: $(test).test))
|
|
||||||
$(foreach test-group, $(install-tests-groups), \
|
|
||||||
$(eval $(call run-install-test-group,$(test-group))) \
|
|
||||||
$(eval installcheck: $(test-group).test-group) \
|
|
||||||
$(foreach test, $($(test-group)-tests), \
|
|
||||||
$(eval $(call run-install-test,$(test))) \
|
|
||||||
$(eval $(test-group).test-group: $(test).test)))
|
|
||||||
|
|
||||||
# Include makefiles requiring built programs.
|
|
||||||
$(foreach mf, $(makefiles-late), $(eval $(call include-sub-makefile,$(mf))))
|
|
||||||
|
|
||||||
|
|
||||||
$(foreach file, $(man-pages), $(eval $(call install-data-in, $(file), $(mandir)/man$(patsubst .%,%,$(suffix $(file))))))
|
|
||||||
|
|
||||||
|
|
||||||
.PHONY: default all man help
|
|
||||||
|
|
||||||
all: $(programs-list) $(libs-list) $(man-pages)
|
|
||||||
|
|
||||||
man: $(man-pages)
|
|
||||||
|
|
||||||
|
|
||||||
help:
|
|
||||||
@echo "The following targets are available:"
|
|
||||||
@echo ""
|
|
||||||
@echo " default: Build default targets"
|
|
||||||
ifdef man-pages
|
|
||||||
@echo " man: Generate manual pages"
|
|
||||||
endif
|
|
||||||
@$(print-top-help)
|
|
||||||
ifdef programs-list
|
|
||||||
@echo ""
|
|
||||||
@echo "The following programs can be built:"
|
|
||||||
@echo ""
|
|
||||||
@for i in $(programs-list); do echo " $$i"; done
|
|
||||||
endif
|
|
||||||
ifdef libs-list
|
|
||||||
@echo ""
|
|
||||||
@echo "The following libraries can be built:"
|
|
||||||
@echo ""
|
|
||||||
@for i in $(libs-list); do echo " $$i"; done
|
|
||||||
endif
|
|
||||||
ifdef install-tests-groups
|
|
||||||
@echo ""
|
|
||||||
@echo "The following groups of functional tests can be run:"
|
|
||||||
@echo ""
|
|
||||||
@for i in $(install-tests-groups); do echo " $$i.test-group"; done
|
|
||||||
@echo ""
|
|
||||||
@echo "(installcheck includes tests in test groups too.)"
|
|
||||||
endif
|
|
||||||
@echo ""
|
|
||||||
@echo "The following variables control the build:"
|
|
||||||
@echo ""
|
|
||||||
@echo " BUILD_SHARED_LIBS ($(BUILD_SHARED_LIBS)): Whether to build shared libraries"
|
|
||||||
@echo " BUILD_DEBUG ($(BUILD_DEBUG)): Whether to include debug symbols"
|
|
||||||
@echo " CC ($(CC)): C compiler to be used"
|
|
||||||
@echo " CFLAGS: Flags for the C compiler"
|
|
||||||
@echo " CXX ($(CXX)): C++ compiler to be used"
|
|
||||||
@echo " CXXFLAGS: Flags for the C++ compiler"
|
|
||||||
@echo " CPPFLAGS: C preprocessor flags, used for both CC and CXX"
|
|
||||||
@$(print-var-help)
|
|
161
mk/libraries.mk
161
mk/libraries.mk
|
@ -1,161 +0,0 @@
|
||||||
libs-list :=
|
|
||||||
|
|
||||||
ifdef HOST_DARWIN
|
|
||||||
SO_EXT = dylib
|
|
||||||
else
|
|
||||||
ifdef HOST_CYGWIN
|
|
||||||
SO_EXT = dll
|
|
||||||
else
|
|
||||||
SO_EXT = so
|
|
||||||
endif
|
|
||||||
endif
|
|
||||||
|
|
||||||
# Build a library with symbolic name $(1). The library is defined by
|
|
||||||
# various variables prefixed by ‘$(1)_’:
|
|
||||||
#
|
|
||||||
# - $(1)_NAME: the name of the library (e.g. ‘libfoo’); defaults to
|
|
||||||
# $(1).
|
|
||||||
#
|
|
||||||
# - $(1)_DIR: the directory where the (non-installed) library will be
|
|
||||||
# placed.
|
|
||||||
#
|
|
||||||
# - $(1)_SOURCES: the source files of the library.
|
|
||||||
#
|
|
||||||
# - $(1)_CFLAGS: additional C compiler flags.
|
|
||||||
#
|
|
||||||
# - $(1)_CXXFLAGS: additional C++ compiler flags.
|
|
||||||
#
|
|
||||||
# - $(1)_ORDER_AFTER: a set of targets on which the object files of
|
|
||||||
# this libraries will have an order-only dependency.
|
|
||||||
#
|
|
||||||
# - $(1)_LIBS: the symbolic names of other libraries on which this
|
|
||||||
# library depends.
|
|
||||||
#
|
|
||||||
# - $(1)_ALLOW_UNDEFINED: if set, the library is allowed to have
|
|
||||||
# undefined symbols. Has no effect for static libraries.
|
|
||||||
#
|
|
||||||
# - $(1)_LDFLAGS: additional linker flags.
|
|
||||||
#
|
|
||||||
# - $(1)_LDFLAGS_PROPAGATED: additional linker flags, also propagated
|
|
||||||
# to the linking of programs/libraries that use this library.
|
|
||||||
#
|
|
||||||
# - $(1)_FORCE_INSTALL: if defined, the library will be installed even
|
|
||||||
# if it's not needed (i.e. dynamically linked) by a program.
|
|
||||||
#
|
|
||||||
# - $(1)_INSTALL_DIR: the directory where the library will be
|
|
||||||
# installed. Defaults to $(libdir).
|
|
||||||
#
|
|
||||||
# - $(1)_EXCLUDE_FROM_LIBRARY_LIST: if defined, the library will not
|
|
||||||
# be automatically marked as a dependency of the top-level all
|
|
||||||
# target andwill not be listed in the make help output. This is
|
|
||||||
# useful for libraries built solely for testing, for example.
|
|
||||||
#
|
|
||||||
# - BUILD_SHARED_LIBS: if equal to ‘1’, a dynamic library will be
|
|
# built, otherwise a static library.
define build-library
$(1)_NAME ?= $(1)
_d := $(buildprefix)$$(strip $$($(1)_DIR))
_srcs := $$(sort $$(foreach src, $$($(1)_SOURCES), $$(src)))
$(1)_OBJS := $$(addprefix $(buildprefix), $$(addsuffix .o, $$(basename $$(_srcs))))
_libs := $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_PATH))

ifdef HOST_CYGWIN
$(1)_INSTALL_DIR ?= $$(bindir)
else
$(1)_INSTALL_DIR ?= $$(libdir)
endif

$(1)_LDFLAGS_USE :=
$(1)_LDFLAGS_USE_INSTALLED :=
$(1)_LIB_CLOSURE := $(1)

$$(eval $$(call create-dir, $$(_d)))

ifeq ($(BUILD_SHARED_LIBS), 1)

ifdef $(1)_ALLOW_UNDEFINED
ifdef HOST_DARWIN
$(1)_LDFLAGS += -undefined suppress -flat_namespace
endif
else
# -Wl,-z,defs is broken with sanitizers on Linux/clang at least.
endif

ifndef HOST_DARWIN
$(1)_LDFLAGS += -Wl,-soname=$$($(1)_NAME).$(SO_EXT)
endif

$(1)_PATH := $$(_d)/$$($(1)_NAME).$(SO_EXT)

$$($(1)_PATH): $$($(1)_OBJS) $$(_libs) | $$(_d)/
	+$$(trace-ld) $(CXX) -o $$(abspath $$@) -shared $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$($(1)_LDFLAGS_PROPAGATED) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE)) $$($(1)_LDFLAGS_UNINSTALLED)

ifndef HOST_DARWIN
$(1)_LDFLAGS_USE += -Wl,-rpath,$$(abspath $$(_d))
endif
$(1)_LDFLAGS_USE += -L$$(_d) -l$$(patsubst lib%,%,$$(strip $$($(1)_NAME)))

$(1)_INSTALL_PATH := $(DESTDIR)$$($(1)_INSTALL_DIR)/$$($(1)_NAME).$(SO_EXT)

_libs_final := $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_INSTALL_PATH))

$$(eval $$(call create-dir, $$($(1)_INSTALL_DIR)))

$$($(1)_INSTALL_PATH): $$($(1)_OBJS) $$(_libs_final) | $(DESTDIR)$$($(1)_INSTALL_DIR)/
	+$$(trace-ld) $(CXX) -o $$@ -shared $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$($(1)_LDFLAGS_PROPAGATED) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE_INSTALLED))

$(1)_LDFLAGS_USE_INSTALLED += -L$$(DESTDIR)$$($(1)_INSTALL_DIR) -l$$(patsubst lib%,%,$$(strip $$($(1)_NAME)))
ifndef HOST_DARWIN
ifeq ($(SET_RPATH_TO_LIBS), 1)
$(1)_LDFLAGS_USE_INSTALLED += -Wl,-rpath,$$($(1)_INSTALL_DIR)
else
$(1)_LDFLAGS_USE_INSTALLED += -Wl,-rpath-link,$$($(1)_INSTALL_DIR)
endif
endif

ifdef $(1)_FORCE_INSTALL
install: $$($(1)_INSTALL_PATH)
endif

else

$(1)_PATH := $$(_d)/$$($(1)_NAME).a

$$($(1)_PATH): $$($(1)_OBJS) | $$(_d)/
	$$(trace-ld) $(LD) $$(ifndef $(HOST_DARWIN),-U) -r -o $$(_d)/$$($(1)_NAME).o $$^
	$$(trace-ar) $(AR) crs $$@ $$(_d)/$$($(1)_NAME).o

$(1)_LDFLAGS_USE += $$($(1)_PATH) $$($(1)_LDFLAGS) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE))

$(1)_INSTALL_PATH := $$(libdir)/$$($(1)_NAME).a

$(1)_LIB_CLOSURE += $$($(1)_LIBS)

endif

$(1)_LDFLAGS_USE += $$($(1)_LDFLAGS_PROPAGATED)
$(1)_LDFLAGS_USE_INSTALLED += $$($(1)_LDFLAGS_PROPAGATED)

# Propagate CFLAGS and CXXFLAGS to the individual object files.
$$(foreach obj, $$($(1)_OBJS), $$(eval $$(obj)_CFLAGS=$$($(1)_CFLAGS)))
$$(foreach obj, $$($(1)_OBJS), $$(eval $$(obj)_CXXFLAGS=$$($(1)_CXXFLAGS)))

# Make each object file depend on the common dependencies.
$$(foreach obj, $$($(1)_OBJS), $$(eval $$(obj): $$($(1)_COMMON_DEPS) $$(GLOBAL_COMMON_DEPS)))

# Make each object file have order-only dependencies on the common
# order-only dependencies. This includes the order-only dependencies
# of libraries we're depending on.
$(1)_ORDER_AFTER_CLOSED = $$($(1)_ORDER_AFTER) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_ORDER_AFTER_CLOSED))

$$(foreach obj, $$($(1)_OBJS), $$(eval $$(obj): | $$($(1)_ORDER_AFTER_CLOSED) $$(GLOBAL_ORDER_AFTER)))

# Include .dep files, if they exist.
$(1)_DEPS := $$(foreach fn, $$($(1)_OBJS), $$(call filename-to-dep, $$(fn)))
-include $$($(1)_DEPS)

ifndef $(1)_EXCLUDE_FROM_LIBRARY_LIST
libs-list += $$($(1)_PATH)
endif
clean-files += $$(_d)/*.a $$(_d)/*.$(SO_EXT) $$(_d)/*.o $$(_d)/.*.dep $$($(1)_DEPS) $$($(1)_OBJS)
endef
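
How a library plugs into build-library is easiest to see from a declaration. A minimal sketch, assuming the registration pattern used by the local.mk fragments removed later in this diff (the libfoo names and paths are hypothetical; the foreach expansion lives in mk/lib.mk, which is not part of this excerpt):

libraries += libfoo

libfoo_NAME = libnixfoo
libfoo_DIR := $(d)
libfoo_SOURCES := $(wildcard $(d)/*.cc)
libfoo_CXXFLAGS += -I src/libutil
libfoo_LIBS = libutil
libfoo_LDFLAGS += -pthread

# Assumed expansion in mk/lib.mk, yielding libnixfoo.so or libnixfoo.a
# depending on BUILD_SHARED_LIBS:
# $(foreach lib, $(libraries), $(eval $(call build-library, $(lib))))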
@ -1,11 +0,0 @@
$(buildprefix)%.o: %.cc
	@mkdir -p "$(dir $@)"
	$(trace-cxx) $(CXX) -o $@ -c $< $(CPPFLAGS) $(GLOBAL_CXXFLAGS_PCH) $(GLOBAL_CXXFLAGS) $(CXXFLAGS) $($@_CXXFLAGS) $(ERROR_SWITCH_ENUM) -MMD -MF $(call filename-to-dep, $@) -MP

$(buildprefix)%.o: %.cpp
	@mkdir -p "$(dir $@)"
	$(trace-cxx) $(CXX) -o $@ -c $< $(CPPFLAGS) $(GLOBAL_CXXFLAGS_PCH) $(GLOBAL_CXXFLAGS) $(CXXFLAGS) $($@_CXXFLAGS) $(ERROR_SWITCH_ENUM) -MMD -MF $(call filename-to-dep, $@) -MP

$(buildprefix)%.o: %.c
	@mkdir -p "$(dir $@)"
	$(trace-cc) $(CC) -o $@ -c $< $(CPPFLAGS) $(GLOBAL_CFLAGS) $(CFLAGS) $($@_CFLAGS) -MMD -MF $(call filename-to-dep, $@) -MP
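
The $($@_CXXFLAGS) and $($@_CFLAGS) references in these rules let a single object file carry extra flags: GNU Make allows variable names containing slashes and dots, so a variable named after the object path is looked up when that object is compiled. A hedged sketch (the path is hypothetical):

# Give just this object an extra flag, picked up via $($@_CXXFLAGS) in the %.o rule
# (assumes an empty $(buildprefix); otherwise the variable name carries that prefix too).
src/libfoo/bar.o_CXXFLAGS = -Wno-error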
@ -1,21 +0,0 @@
PRECOMPILE_HEADERS ?= 0

print-var-help += \
  echo " PRECOMPILE_HEADERS ($(PRECOMPILE_HEADERS)): Whether to use precompiled headers to speed up the build";

GCH = $(buildprefix)precompiled-headers.h.gch

$(GCH): precompiled-headers.h
	@rm -f $@
	@mkdir -p "$(dir $@)"
	$(trace-gen) $(CXX) -x c++-header -o $@ $< $(GLOBAL_CXXFLAGS) $(GCH_CXXFLAGS)

clean-files += $(GCH)

ifeq ($(PRECOMPILE_HEADERS), 1)

GLOBAL_CXXFLAGS_PCH += -include $(buildprefix)precompiled-headers.h -Winvalid-pch

GLOBAL_ORDER_AFTER += $(GCH)

endif
@ -1,95 +0,0 @@
programs-list :=

# Build a program with symbolic name $(1). The program is defined by
# various variables prefixed by ‘$(1)_’:
#
# - $(1)_NAME: the name of the program (e.g. ‘foo’); defaults to
#   $(1).
#
# - $(1)_ENV: environment variables to set when running the program
#   from the Makefile using the $(1)_RUN target.
#
# - $(1)_DIR: the directory where the (non-installed) program will be
#   placed.
#
# - $(1)_SOURCES: the source files of the program.
#
# - $(1)_CFLAGS: additional C compiler flags.
#
# - $(1)_CXXFLAGS: additional C++ compiler flags.
#
# - $(1)_ORDER_AFTER: a set of targets on which the object files of
#   this program will have an order-only dependency.
#
# - $(1)_LIBS: the symbolic names of libraries on which this program
#   depends.
#
# - $(1)_LDFLAGS: additional linker flags.
#
# - $(1)_INSTALL_DIR: the directory where the program will be
#   installed; defaults to $(bindir).
define build-program
$(1)_NAME ?= $(1)
_d := $(buildprefix)$$($(1)_DIR)
_srcs := $$(sort $$(foreach src, $$($(1)_SOURCES), $$(src)))
$(1)_OBJS := $$(addprefix $(buildprefix), $$(addsuffix .o, $$(basename $$(_srcs))))
_libs := $$(foreach lib, $$($(1)_LIBS), $$(foreach lib2, $$($$(lib)_LIB_CLOSURE), $$($$(lib2)_PATH)))
$(1)_PATH := $$(_d)/$$($(1)_NAME)

$$(eval $$(call create-dir, $$(_d)))

$$($(1)_PATH): $$($(1)_OBJS) $$(_libs) | $$(_d)/
	+$$(trace-ld) $(CXX) -o $$@ $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE))

$(1)_INSTALL_DIR ?= $$(bindir)

ifdef $(1)_INSTALL_DIR

$(1)_INSTALL_PATH := $$($(1)_INSTALL_DIR)/$$($(1)_NAME)

$$(eval $$(call create-dir, $$($(1)_INSTALL_DIR)))

install: $(DESTDIR)$$($(1)_INSTALL_PATH)

ifeq ($(BUILD_SHARED_LIBS), 1)

_libs_final := $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_INSTALL_PATH))

$(DESTDIR)$$($(1)_INSTALL_PATH): $$($(1)_OBJS) $$(_libs_final) | $(DESTDIR)$$($(1)_INSTALL_DIR)/
	+$$(trace-ld) $(CXX) -o $$@ $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE_INSTALLED))

else

$(DESTDIR)$$($(1)_INSTALL_PATH): $$($(1)_PATH) | $(DESTDIR)$$($(1)_INSTALL_DIR)/
	+$$(trace-install) install -t $(DESTDIR)$$($(1)_INSTALL_DIR) $$<

endif
endif

# Propagate CFLAGS and CXXFLAGS to the individual object files.
$$(foreach obj, $$($(1)_OBJS), $$(eval $$(obj)_CFLAGS=$$($(1)_CFLAGS)))
$$(foreach obj, $$($(1)_OBJS), $$(eval $$(obj)_CXXFLAGS=$$($(1)_CXXFLAGS)))

# Make each object file depend on the common dependencies.
$$(foreach obj, $$($(1)_OBJS), $$(eval $$(obj): $$($(1)_COMMON_DEPS) $$(GLOBAL_COMMON_DEPS)))

# Make each object file have order-only dependencies on the common
# order-only dependencies. This includes the order-only dependencies
# of libraries we're depending on.
$(1)_ORDER_AFTER_CLOSED = $$($(1)_ORDER_AFTER) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_ORDER_AFTER_CLOSED))

$$(foreach obj, $$($(1)_OBJS), $$(eval $$(obj): | $$($(1)_ORDER_AFTER_CLOSED) $$(GLOBAL_ORDER_AFTER)))

# Include .dep files, if they exist.
$(1)_DEPS := $$(foreach fn, $$($(1)_OBJS), $$(call filename-to-dep, $$(fn)))
-include $$($(1)_DEPS)

programs-list += $$($(1)_PATH)
clean-files += $$($(1)_PATH) $$(_d)/*.o $$(_d)/.*.dep $$($(1)_DEPS) $$($(1)_OBJS)

# Phony target to run this program (typically as a dependency of 'check').
.PHONY: $(1)_RUN
$(1)_RUN: $$($(1)_PATH)
	$(trace-test) $$($(1)_ENV) $$($(1)_PATH)

endef
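
A sketch of a program declaration described by the variables in the comment above (the frobnicate name and paths are hypothetical; registration through a programs list and the foreach in mk/lib.mk are assumptions, since lib.mk is not part of this excerpt):

programs += frobnicate

frobnicate_DIR := $(d)
frobnicate_SOURCES := $(d)/frobnicate.cc
frobnicate_LIBS = libmain libstore libutil
frobnicate_INSTALL_DIR := $(bindir)

# Assumed expansion in mk/lib.mk:
# $(foreach prog, $(programs), $(eval $(call build-program, $(prog))))

The generated frobnicate_RUN phony target then runs the freshly built binary with $(frobnicate_ENV) in its environment.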
@ -1,38 +0,0 @@
#!/usr/bin/env bash

set -eu -o pipefail

red=""
green=""
yellow=""
normal=""

test=$1

dir="$(dirname "${BASH_SOURCE[0]}")"
source "$dir/common-test.sh"

post_run_msg="ran test $test..."
if [ -t 1 ]; then
    red="[31;1m"
    green="[32;1m"
    yellow="[33;1m"
    normal="[m"
fi

run_test () {
    (init_test 2>/dev/null > /dev/null)
    log="$(run_test_proper 2>&1)" && status=0 || status=$?
}

run_test

if [ $status -eq 0 ]; then
    echo "$post_run_msg [${green}PASS$normal]"
elif [ $status -eq 99 ]; then
    echo "$post_run_msg [${yellow}SKIP$normal]"
else
    echo "$post_run_msg [${red}FAIL$normal]"
    echo "$log" | sed 's/^/    /'
    exit "$status"
fi
@ -1,19 +0,0 @@
template-files :=

# Create the file $(1) from $(1).in by running config.status (which
# substitutes all ‘@var@’ variables set by the configure script).
define instantiate-template

clean-files += $(1)

endef

ifneq ($(MAKECMDGOALS), clean)

$(buildprefix)%.h: %.h.in
	$(trace-gen) rm -f $@ && cd $(buildprefixrel) && ./config.status --quiet --header=$(@:$(buildprefix)%=%)

$(buildprefix)%: %.in
	$(trace-gen) rm -f $@ && cd $(buildprefixrel) && ./config.status --quiet --file=$(@:$(buildprefix)%=%)

endif
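
The pattern rules above regenerate any file backed by a %.in template through config.status, so instantiate-template itself only has to record the file for cleanup. A hedged sketch of how a template would be registered against this machinery (the file name is hypothetical):

template-files += $(d)/nix-daemon.service
$(eval $(call instantiate-template, $(d)/nix-daemon.service))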
27 mk/tests.mk
@ -1,27 +0,0 @@
# Run program $1 as part of ‘make installcheck’.

test-deps =

define run-install-test

.PHONY: $1.test
$1.test: $1 $(test-deps)
	@env BASH=$(bash) $(bash) mk/run-test.sh $1 < /dev/null

.PHONY: $1.test-debug
$1.test-debug: $1 $(test-deps)
	@env BASH=$(bash) $(bash) mk/debug-test.sh $1 < /dev/null

endef

define run-install-test-group

.PHONY: $1.test-group

endef

.PHONY: check installcheck

print-top-help += \
  echo " check: Run unit tests"; \
  echo " installcheck: Run functional tests";
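
Each call to run-install-test gives one script its own .test and .test-debug phony targets. A minimal sketch (the script path is hypothetical):

$(eval $(call run-install-test, tests/functional/hello.sh))

# then, from the top level:
#   make tests/functional/hello.sh.test        # runs the script via mk/run-test.sh
#   make tests/functional/hello.sh.test-debug  # runs it via mk/debug-test.sh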
@ -1,16 +0,0 @@
V ?= 0

ifeq ($(V), 0)

trace-gen = @echo " GEN " $@;
trace-cc = @echo " CC " $@;
trace-cxx = @echo " CXX " $@;
trace-ld = @echo " LD " $@;
trace-ar = @echo " AR " $@;
trace-install = @echo " INST " $@;
trace-mkdir = @echo " MKDIR " $@;
trace-test = @echo " TEST " $@;

suppress = @

endif
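
With V at its default of 0, these variables turn each recipe into a terse one-line progress message; V=1 leaves the full command visible. For example, the bison rule from src/libexpr/local.mk further down this diff prints only a GEN line because it is prefixed with $(trace-gen):

$(d)/parser-tab.cc $(d)/parser-tab.hh: $(d)/parser.y
	$(trace-gen) bison -v -o $(libexpr_DIR)/parser-tab.cc $< -d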
116 package.nix
@ -2,8 +2,6 @@
  pkgs,
  lib,
  stdenv,
- autoconf-archive,
- autoreconfHook,
  aws-sdk-cpp,
  # If the patched version of Boehm isn't passed, then patch it based off of
  # pkgs.boehmgc. This allows `callPackage`ing this file without needing to
@ -62,7 +60,7 @@

  # FIXME(Qyriad): build Lix using Meson instead of autoconf and make.
  # This flag will be removed when the migration to Meson is complete.
- buildWithMeson ? false,
+ buildWithMeson ? true,

  # Not a real argument, just the only way to approximate let-binding some
  # stuff for argument defaults.
@ -98,7 +96,33 @@ let
    customMemoryManagement = false;
  };

- testConfigureFlags = [ "RAPIDCHECK_HEADERS=${lib.getDev rapidcheck}/extras/gtest/include" ];
+ # Reimplementation of Nixpkgs' Meson cross file, with some additions to make
+ # it actually work.
+ mesonCrossFile =
+   let
+     cpuFamily =
+       platform:
+       with platform;
+       if isAarch32 then
+         "arm"
+       else if isx86_32 then
+         "x86"
+       else
+         platform.uname.processor;
+   in
+   builtins.toFile "lix-cross-file.conf" ''
+     [properties]
+     # Meson is convinced that if !buildPlatform.canExecute hostPlatform then we cannot
+     # build anything at all, which is not at all correct. If we can't execute the host
+     # platform, we'll just disable tests and doc gen.
+     needs_exe_wrapper = false
+
+     [binaries]
+     # Meson refuses to consider any CMake binary during cross compilation if it's
+     # not explicitly specified here, in the cross file.
+     # https://github.com/mesonbuild/meson/blob/0ed78cf6fa6d87c0738f67ae43525e661b50a8a2/mesonbuild/cmake/executor.py#L72
+     cmake = 'cmake'
+   '';

  # The internal API docs need these for the build, but if we're not building
  # Nix itself, then these don't need to be propagated.
@ -112,28 +136,14 @@ let
  # that would interfere with repo semantics.
  baseFiles = fileset.fileFilter (f: f.name != ".gitignore") ./.;

- configureFiles = fileset.unions [
+ configureFiles = fileset.unions [ ./.version ];
-   ./.version
-   ./configure.ac
-   ./m4
-   # TODO: do we really need README.md? It doesn't seem used in the build.
-   ./README.md
- ];

- topLevelBuildFiles = fileset.unions (
+ topLevelBuildFiles = fileset.unions ([
-   [
-     ./local.mk
-     ./Makefile
-     ./Makefile.config.in
-     ./mk
-   ]
-   ++ lib.optionals buildWithMeson [
      ./meson.build
      ./meson.options
      ./meson
      ./scripts/meson.build
-   ]
+ ]);
- );

  functionalTestFiles = fileset.unions [
    ./tests/functional
@ -160,7 +170,6 @@ stdenv.mkDerivation (finalAttrs: {
        ./precompiled-headers.h
        ./src
        ./COPYING
-       ./scripts/local.mk
      ]
    )
  );
@ -177,17 +186,21 @@ stdenv.mkDerivation (finalAttrs: {

  dontBuild = false;

- # FIXME(Qyriad): see if this is still needed once the migration to Meson is completed.
  mesonFlags =
-   lib.optionals (buildWithMeson && stdenv.hostPlatform.isLinux) [
+   lib.optionals stdenv.hostPlatform.isLinux [
      "-Dsandbox-shell=${lib.getBin busybox-sandbox-shell}/bin/busybox"
    ]
    ++ lib.optional stdenv.hostPlatform.isStatic "-Denable-embedded-sandbox-shell=true"
    ++ lib.optional (finalAttrs.dontBuild) "-Denable-build=false"
+   ++ [
      # mesonConfigurePhase automatically passes -Dauto_features=enabled,
      # so we must explicitly enable or disable features that we are not passing
      # dependencies for.
-   ++ lib.singleton (lib.mesonEnable "internal-api-docs" internalApiDocs);
+     (lib.mesonEnable "internal-api-docs" internalApiDocs)
+     (lib.mesonBool "enable-tests" finalAttrs.finalPackage.doCheck)
+     (lib.mesonBool "enable-docs" canRunInstalled)
+   ]
+   ++ lib.optional (stdenv.hostPlatform != stdenv.buildPlatform) "--cross-file=${mesonCrossFile}";

  # We only include CMake so that Meson can locate toml11, which only ships CMake dependency metadata.
  dontUseCmakeConfigure = true;
@ -197,14 +210,15 @@ stdenv.mkDerivation (finalAttrs: {
      bison
      flex
      python3
+     meson
+     ninja
+     cmake
    ]
    ++ [
      (lib.getBin lowdown)
      mdbook
      mdbook-linkcheck
-     autoconf-archive
    ]
-   ++ lib.optional (!buildWithMeson) autoreconfHook
    ++ [
      pkg-config
@ -216,12 +230,7 @@ stdenv.mkDerivation (finalAttrs: {
    ]
    ++ lib.optional stdenv.hostPlatform.isLinux util-linuxMinimal
    ++ lib.optional (!officialRelease && buildUnreleasedNotes) build-release-notes
-   ++ lib.optional (internalApiDocs || forDevShell) doxygen
+   ++ lib.optional (internalApiDocs || forDevShell) doxygen;
-   ++ lib.optionals buildWithMeson [
-     meson
-     ninja
-     cmake
-   ];

  buildInputs =
    [
@ -247,7 +256,7 @@ stdenv.mkDerivation (finalAttrs: {
    ++ lib.optional stdenv.hostPlatform.isx86_64 libcpuid
    # There have been issues building these dependencies
    ++ lib.optional (stdenv.hostPlatform == stdenv.buildPlatform) aws-sdk-cpp-nix
-   ++ lib.optionals (finalAttrs.dontBuild) maybePropagatedInputs;
+   ++ lib.optionals finalAttrs.dontBuild maybePropagatedInputs;

  checkInputs = [
    gtest
@ -260,7 +269,7 @@ stdenv.mkDerivation (finalAttrs: {

  # Needed for Meson to find Boost.
  # https://github.com/NixOS/nixpkgs/issues/86131.
- env = lib.optionalAttrs (buildWithMeson || forDevShell) {
+ env = {
    BOOST_INCLUDEDIR = "${lib.getDev boost}/include";
    BOOST_LIBRARYDIR = "${lib.getLib boost}/lib";
  };
@ -284,47 +293,22 @@ stdenv.mkDerivation (finalAttrs: {
      install_name_tool -delete_rpath ${boost}/lib/ $LIB || true
    done
    install_name_tool -change ${boost}/lib/libboost_system.dylib $out/lib/libboost_system.dylib $out/lib/libboost_thread.dylib
- ''
- + ''
-   # Workaround https://github.com/NixOS/nixpkgs/issues/294890.
-   if [[ -n "''${doCheck:-}" ]]; then
-     appendToVar configureFlags "--enable-tests"
-   else
-     appendToVar configureFlags "--disable-tests"
-   fi
  '';

- configureFlags =
+ mesonBuildType = "debugoptimized";
-   [ "--with-boost=${boost}/lib" ]
-   ++ lib.optionals stdenv.isLinux [ "--with-sandbox-shell=${busybox-sandbox-shell}/bin/busybox" ]
-   ++ lib.optionals (
-     stdenv.isLinux && !(stdenv.hostPlatform.isStatic && stdenv.system == "aarch64-linux")
-   ) [ "LDFLAGS=-fuse-ld=gold" ]
-   ++ lib.optional stdenv.hostPlatform.isStatic "--enable-embedded-sandbox-shell"
-   ++ lib.optionals (finalAttrs.doCheck || internalApiDocs) testConfigureFlags
-   ++ lib.optional (!canRunInstalled) "--disable-doc-gen"
-   ++ [ (lib.enableFeature internalApiDocs "internal-api-docs") ]
-   ++ lib.optional (!forDevShell) "--sysconfdir=/etc"
-   ++ [ "TOML11_HEADERS=${lib.getDev toml11}/include" ];

- mesonBuildType = lib.optional (buildWithMeson || forDevShell) "debugoptimized";

  installTargets = lib.optional internalApiDocs "internal-api-html";

  enableParallelBuilding = true;

- makeFlags = "profiledir=$(out)/etc/profile.d PRECOMPILE_HEADERS=1";
+ doCheck = canRunInstalled;

- doCheck = true;
+ mesonCheckFlags = [ "--suite=check" ];

- mesonCheckFlags = lib.optionals (buildWithMeson || forDevShell) [ "--suite=check" ];

- installFlags = "sysconfdir=$(out)/etc";

  # Make sure the internal API docs are already built, because mesonInstallPhase
  # won't let us build them there. They would normally be built in buildPhase,
  # but the internal API docs are conventionally built with doBuild = false.
- preInstall = lib.optional (buildWithMeson && internalApiDocs) ''
+ preInstall = lib.optional internalApiDocs ''
    meson ''${mesonBuildFlags:-} compile "$installTargets"
  '';

@ -351,8 +335,6 @@ stdenv.mkDerivation (finalAttrs: {
  '';

  doInstallCheck = finalAttrs.doCheck;
- installCheckFlags = "sysconfdir=$(out)/etc";
- installCheckTarget = "installcheck"; # work around buggy detection in stdenv

  mesonInstallCheckFlags = [ "--suite=installcheck" ];

@ -360,7 +342,7 @@ stdenv.mkDerivation (finalAttrs: {
    export OBJC_DISABLE_INITIALIZE_FORK_SAFETY=YES
  '';

- installCheckPhase = lib.optionalString buildWithMeson ''
+ installCheckPhase = ''
    runHook preInstallCheck
    flagsArray=($mesonInstallCheckFlags "''${mesonInstallCheckFlagsArray[@]}")
    meson test --no-rebuild "''${flagsArray[@]}"
@ -1,21 +0,0 @@
makefiles = local.mk

GLOBAL_CXXFLAGS += -g -Wall -std=c++2a

# A convenience for concurrent development of Nix and its Perl bindings.
# Not needed in a standalone build of the Perl bindings.
ifneq ("$(wildcard ../src)", "")
GLOBAL_CXXFLAGS += -I ../src
endif

-include Makefile.config

OPTIMIZE = 1

ifeq ($(OPTIMIZE), 1)
GLOBAL_CXXFLAGS += -O3
else
GLOBAL_CXXFLAGS += -O0
endif

include mk/lib.mk
@ -1,18 +0,0 @@
HOST_OS = @host_os@
CC = @CC@
CFLAGS = @CFLAGS@
CXX = @CXX@
CXXFLAGS = @CXXFLAGS@
PACKAGE_NAME = @PACKAGE_NAME@
PACKAGE_VERSION = @PACKAGE_VERSION@
SODIUM_LIBS = @SODIUM_LIBS@
NIX_CFLAGS = @NIX_CFLAGS@
NIX_LIBS = @NIX_LIBS@
nixbindir = @nixbindir@
curl = @curl@
nixlibexecdir = @nixlibexecdir@
nixlocalstatedir = @nixlocalstatedir@
perl = @perl@
perllibdir = @perllibdir@
nixstoredir = @nixstoredir@
nixsysconfdir = @nixsysconfdir@
@ -1,84 +0,0 @@
AC_INIT(nix-perl, m4_esyscmd([bash -c "echo -n $(cat ../.version)$VERSION_SUFFIX"]))
AC_CONFIG_SRCDIR(MANIFEST)
AC_CONFIG_AUX_DIR(../config)

CFLAGS=
CXXFLAGS=
AC_PROG_CC
AC_PROG_CXX

AC_CANONICAL_HOST

# Use 64-bit file system calls so that we can support files > 2 GiB.
AC_SYS_LARGEFILE

AC_DEFUN([NEED_PROG],
[
AC_PATH_PROG($1, $2)
if test -z "$$1"; then
  AC_MSG_ERROR([$2 is required])
fi
])

NEED_PROG(perl, perl)
NEED_PROG(curl, curl)
NEED_PROG(bzip2, bzip2)
NEED_PROG(xz, xz)

# Test that Perl has the open/fork feature (Perl 5.8.0 and beyond).
AC_MSG_CHECKING([whether Perl is recent enough])
if ! $perl -e 'open(FOO, "-|", "true"); while (<FOO>) { print; }; close FOO or die;'; then
  AC_MSG_RESULT(no)
  AC_MSG_ERROR([Your Perl version is too old. Nix requires Perl 5.8.0 or newer.])
fi
AC_MSG_RESULT(yes)


# Figure out where to install Perl modules.
AC_MSG_CHECKING([for the Perl installation prefix])
perlversion=$($perl -e 'use Config; print $Config{version};')
perlarchname=$($perl -e 'use Config; print $Config{archname};')
AC_SUBST(perllibdir, [${libdir}/perl5/site_perl/$perlversion/$perlarchname])
AC_MSG_RESULT($perllibdir)

# Look for libsodium.
PKG_CHECK_MODULES([SODIUM], [libsodium], [CXXFLAGS="$SODIUM_CFLAGS $CXXFLAGS"])

# Check for the required Perl dependencies (DBI and DBD::SQLite).
perlFlags="-I$perllibdir"

AC_ARG_WITH(dbi, AC_HELP_STRING([--with-dbi=PATH],
  [prefix of the Perl DBI library]),
  perlFlags="$perlFlags -I$withval")

AC_ARG_WITH(dbd-sqlite, AC_HELP_STRING([--with-dbd-sqlite=PATH],
  [prefix of the Perl DBD::SQLite library]),
  perlFlags="$perlFlags -I$withval")

AC_MSG_CHECKING([whether DBD::SQLite works])
if ! $perl $perlFlags -e 'use DBI; use DBD::SQLite;' 2>&5; then
  AC_MSG_RESULT(no)
  AC_MSG_FAILURE([The Perl modules DBI and/or DBD::SQLite are missing.])
fi
AC_MSG_RESULT(yes)

AC_SUBST(perlFlags)

PKG_CHECK_MODULES([NIX], [nix-store])

NEED_PROG([NIX], [nix])

# Expand all variables in config.status.
test "$prefix" = NONE && prefix=$ac_default_prefix
test "$exec_prefix" = NONE && exec_prefix='${prefix}'
for name in $ac_subst_vars; do
  declare $name="$(eval echo "${!name}")"
  declare $name="$(eval echo "${!name}")"
  declare $name="$(eval echo "${!name}")"
done

rm -f Makefile.config
ln -sfn ../mk mk

AC_CONFIG_FILES([])
AC_OUTPUT
@ -1,43 +0,0 @@
nix_perl_sources := \
  lib/Nix/Store.pm \
  lib/Nix/Manifest.pm \
  lib/Nix/SSH.pm \
  lib/Nix/CopyClosure.pm \
  lib/Nix/Config.pm.in \
  lib/Nix/Utils.pm

nix_perl_modules := $(nix_perl_sources:.in=)

$(foreach x, $(nix_perl_modules), $(eval $(call install-data-in, $(x), $(perllibdir)/Nix)))

lib/Nix/Store.cc: lib/Nix/Store.xs
	$(trace-gen) xsubpp $^ -output $@

libraries += Store

Store_DIR := lib/Nix

Store_SOURCES := $(Store_DIR)/Store.cc

Store_CXXFLAGS = \
  $(NIX_CFLAGS) \
  -I$(shell perl -e 'use Config; print $$Config{archlibexp};')/CORE \
  -D_FILE_OFFSET_BITS=64 \
  -Wno-unknown-warning-option -Wno-unused-variable -Wno-literal-suffix \
  -Wno-reserved-user-defined-literal -Wno-duplicate-decl-specifier -Wno-pointer-bool-conversion

Store_LDFLAGS := $(SODIUM_LIBS) $(NIX_LIBS)

ifdef HOST_CYGWIN
archlib = $(shell perl -E 'use Config; print $$Config{archlib};')
libperl = $(shell perl -E 'use Config; print $$Config{libperl};')
Store_LDFLAGS += $(shell find ${archlib} -name ${libperl})
endif

Store_ALLOW_UNDEFINED = 1

Store_FORCE_INSTALL = 1

Store_INSTALL_DIR = $(perllibdir)/auto/Nix/Store

clean-files += lib/Nix/Config.pm lib/Nix/Store.cc Makefile.config
@ -1,13 +0,0 @@
nix_noinst_scripts := \
  $(d)/nix-profile.sh

noinst-scripts += $(nix_noinst_scripts)

profiledir = $(sysconfdir)/profile.d

$(eval $(call install-file-as, $(d)/nix-profile.sh, $(profiledir)/nix.sh, 0644))
$(eval $(call install-file-as, $(d)/nix-profile.fish, $(profiledir)/nix.fish, 0644))
$(eval $(call install-file-as, $(d)/nix-profile-daemon.sh, $(profiledir)/nix-daemon.sh, 0644))
$(eval $(call install-file-as, $(d)/nix-profile-daemon.fish, $(profiledir)/nix-daemon.fish, 0644))

clean-files += $(nix_noinst_scripts)
@ -1,17 +0,0 @@
libraries += libcmd

libcmd_NAME = libnixcmd

libcmd_DIR := $(d)

libcmd_SOURCES := $(wildcard $(d)/*.cc)

libcmd_CXXFLAGS += -I src/libutil -I src/libstore -I src/libexpr -I src/libmain -I src/libfetchers

libcmd_LDFLAGS = $(EDITLINE_LIBS) $(LOWDOWN_LIBS) $(NIXDOC_LIBS) -pthread

libcmd_LIBS = libstore libutil libexpr libmain libfetchers

$(eval $(call install-file-in, $(buildprefix)$(d)/nix-cmd.pc, $(libdir)/pkgconfig, 0644))

$(d)/repl.cc: $(d)/repl-overlays.nix.gen.hh
@ -1,50 +0,0 @@
libraries += libexpr

libexpr_NAME = libnixexpr

libexpr_DIR := $(d)

libexpr_SOURCES := \
  $(wildcard $(d)/*.cc) \
  $(wildcard $(d)/value/*.cc) \
  $(wildcard $(d)/primops/*.cc) \
  $(wildcard $(d)/flake/*.cc) \
  $(d)/lexer-tab.cc \
  $(d)/parser-tab.cc

libexpr_CXXFLAGS += -I src/libutil -I src/libstore -I src/libfetchers -I src/libmain -I src/libexpr

libexpr_LIBS = libutil libstore libfetchers

libexpr_LDFLAGS += -lboost_context -pthread
ifdef HOST_LINUX
libexpr_LDFLAGS += -ldl
endif

# The dependency on libgc must be propagated (i.e. meaning that
# programs/libraries that use libexpr must explicitly pass -lgc),
# because inline functions in libexpr's header files call libgc.
libexpr_LDFLAGS_PROPAGATED = $(BDW_GC_LIBS)

libexpr_ORDER_AFTER := $(d)/parser-tab.cc $(d)/parser-tab.hh $(d)/lexer-tab.cc $(d)/lexer-tab.hh

$(d)/parser-tab.cc $(d)/parser-tab.hh: $(d)/parser.y
	$(trace-gen) bison -v -o $(libexpr_DIR)/parser-tab.cc $< -d

$(d)/lexer-tab.cc $(d)/lexer-tab.hh: $(d)/lexer.l
	$(trace-gen) flex --outfile $(libexpr_DIR)/lexer-tab.cc --header-file=$(libexpr_DIR)/lexer-tab.hh $<

clean-files += $(d)/parser-tab.cc $(d)/parser-tab.hh $(d)/lexer-tab.cc $(d)/lexer-tab.hh

$(eval $(call install-file-in, $(buildprefix)$(d)/nix-expr.pc, $(libdir)/pkgconfig, 0644))

$(foreach i, $(wildcard src/libexpr/value/*.hh), \
  $(eval $(call install-file-in, $(i), $(includedir)/nix/value, 0644)))
$(foreach i, $(wildcard src/libexpr/flake/*.hh), \
  $(eval $(call install-file-in, $(i), $(includedir)/nix/flake, 0644)))

$(d)/primops.cc: $(d)/imported-drv-to-derivation.nix.gen.hh

$(d)/eval.cc: $(d)/primops/derivation.nix.gen.hh $(d)/fetchurl.nix.gen.hh $(d)/flake/call-flake.nix.gen.hh

$(buildprefix)src/libexpr/primops/fromTOML.o: ERROR_SWITCH_ENUM =
@ -195,32 +195,12 @@ struct GitArchiveInputScheme : InputScheme
  input.attrs.erase("ref");
  input.attrs.insert_or_assign("rev", rev->gitRev());

- Attrs lockedAttrs({
-   {"type", "git-tarball"},
-   {"rev", rev->gitRev()},
- });

- if (auto res = getCache()->lookup(store, lockedAttrs)) {
-   input.attrs.insert_or_assign("lastModified", getIntAttr(res->first, "lastModified"));
-   return {std::move(res->second), input};
- }

  auto url = getDownloadUrl(input);

  auto result = downloadTarball(store, url.url, input.getName(), true, url.headers);

  input.attrs.insert_or_assign("lastModified", uint64_t(result.lastModified));

- getCache()->add(
-   store,
-   lockedAttrs,
-   {
-     {"rev", rev->gitRev()},
-     {"lastModified", uint64_t(result.lastModified)}
-   },
-   result.tree.storePath,
-   true);

  return {result.tree.storePath, input};
}
};

@ -1,13 +0,0 @@
libraries += libfetchers

libfetchers_NAME = libnixfetchers

libfetchers_DIR := $(d)

libfetchers_SOURCES := $(wildcard $(d)/*.cc)

libfetchers_CXXFLAGS += -I src/libutil -I src/libstore

libfetchers_LDFLAGS += -pthread

libfetchers_LIBS = libutil libstore
@ -1,17 +0,0 @@
libraries += libmain

libmain_NAME = libnixmain

libmain_DIR := $(d)

libmain_SOURCES := $(wildcard $(d)/*.cc)

libmain_CXXFLAGS += -I src/libutil -I src/libstore

libmain_LDFLAGS += $(OPENSSL_LIBS)

libmain_LIBS = libstore libutil

libmain_ALLOW_UNDEFINED = 1

$(eval $(call install-file-in, $(buildprefix)$(d)/nix-main.pc, $(libdir)/pkgconfig, 0644))
@ -38,7 +38,6 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData)
  the result anyway. */
  FileTransferRequest request(url);
  request.verifyTLS = false;
- request.decompress = false;

  auto decompressor = makeDecompressionSink(
    unpack && mainUrl.ends_with(".xz") ? "xz" : "none", sink);

@ -49,8 +49,10 @@ struct curlFileTransfer : public FileTransfer
|
||||||
Activity act;
|
Activity act;
|
||||||
bool done = false; // whether either the success or failure function has been called
|
bool done = false; // whether either the success or failure function has been called
|
||||||
Callback<FileTransferResult> callback;
|
Callback<FileTransferResult> callback;
|
||||||
|
std::function<void(TransferItem &, std::string_view data)> dataCallback;
|
||||||
CURL * req = 0;
|
CURL * req = 0;
|
||||||
bool active = false; // whether the handle has been added to the multi object
|
bool active = false; // whether the handle has been added to the multi object
|
||||||
|
bool headersProcessed = false;
|
||||||
std::string statusMsg;
|
std::string statusMsg;
|
||||||
|
|
||||||
unsigned int attempt = 0;
|
unsigned int attempt = 0;
|
||||||
|
@ -81,30 +83,15 @@ struct curlFileTransfer : public FileTransfer
|
||||||
|
|
||||||
TransferItem(curlFileTransfer & fileTransfer,
|
TransferItem(curlFileTransfer & fileTransfer,
|
||||||
const FileTransferRequest & request,
|
const FileTransferRequest & request,
|
||||||
Callback<FileTransferResult> && callback)
|
Callback<FileTransferResult> && callback,
|
||||||
|
std::function<void(TransferItem &, std::string_view data)> dataCallback)
|
||||||
: fileTransfer(fileTransfer)
|
: fileTransfer(fileTransfer)
|
||||||
, request(request)
|
, request(request)
|
||||||
, act(*logger, lvlTalkative, actFileTransfer,
|
, act(*logger, lvlTalkative, actFileTransfer,
|
||||||
fmt(request.data ? "uploading '%s'" : "downloading '%s'", request.uri),
|
fmt(request.data ? "uploading '%s'" : "downloading '%s'", request.uri),
|
||||||
{request.uri}, request.parentAct)
|
{request.uri}, request.parentAct)
|
||||||
, callback(std::move(callback))
|
, callback(std::move(callback))
|
||||||
, finalSink([this](std::string_view data) {
|
, dataCallback(std::move(dataCallback))
|
||||||
if (errorSink) {
|
|
||||||
(*errorSink)(data);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (this->request.dataCallback) {
|
|
||||||
auto httpStatus = getHTTPStatus();
|
|
||||||
|
|
||||||
/* Only write data to the sink if this is a
|
|
||||||
successful response. */
|
|
||||||
if (successfulStatuses.count(httpStatus)) {
|
|
||||||
writtenToSink += data.size();
|
|
||||||
this->request.dataCallback(data);
|
|
||||||
}
|
|
||||||
} else
|
|
||||||
this->result.data.append(data);
|
|
||||||
})
|
|
||||||
{
|
{
|
||||||
requestHeaders = curl_slist_append(requestHeaders, "Accept-Encoding: zstd, br, gzip, deflate, bzip2, xz");
|
requestHeaders = curl_slist_append(requestHeaders, "Accept-Encoding: zstd, br, gzip, deflate, bzip2, xz");
|
||||||
if (!request.expectedETag.empty())
|
if (!request.expectedETag.empty())
|
||||||
|
@ -145,30 +132,44 @@ struct curlFileTransfer : public FileTransfer
|
||||||
failEx(std::make_exception_ptr(std::forward<T>(e)));
|
failEx(std::make_exception_ptr(std::forward<T>(e)));
|
||||||
}
|
}
|
||||||
|
|
||||||
LambdaSink finalSink;
|
|
||||||
std::shared_ptr<FinishSink> decompressionSink;
|
|
||||||
std::optional<StringSink> errorSink;
|
|
||||||
|
|
||||||
std::exception_ptr writeException;
|
std::exception_ptr writeException;
|
||||||
|
|
||||||
|
std::optional<std::string> getHeader(const char * name)
|
||||||
|
{
|
||||||
|
curl_header * result;
|
||||||
|
auto e = curl_easy_header(req, name, 0, CURLH_HEADER, -1, &result);
|
||||||
|
if (e == CURLHE_OK) {
|
||||||
|
return result->value;
|
||||||
|
} else if (e == CURLHE_MISSING || e == CURLHE_NOHEADERS) {
|
||||||
|
return std::nullopt;
|
||||||
|
} else {
|
||||||
|
throw nix::Error("unexpected error from curl_easy_header(): %i", e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
size_t writeCallback(void * contents, size_t size, size_t nmemb)
|
size_t writeCallback(void * contents, size_t size, size_t nmemb)
|
||||||
{
|
{
|
||||||
try {
|
try {
|
||||||
|
if (!headersProcessed) {
|
||||||
|
if (auto h = getHeader("content-encoding")) {
|
||||||
|
encoding = std::move(*h);
|
||||||
|
}
|
||||||
|
if (auto h = getHeader("accept-ranges"); h && *h == "bytes") {
|
||||||
|
acceptRanges = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
headersProcessed = true;
|
||||||
|
}
|
||||||
|
|
||||||
size_t realSize = size * nmemb;
|
size_t realSize = size * nmemb;
|
||||||
result.bodySize += realSize;
|
result.bodySize += realSize;
|
||||||
|
|
||||||
if (!decompressionSink) {
|
if (successfulStatuses.count(getHTTPStatus()) && this->dataCallback) {
|
||||||
decompressionSink = makeDecompressionSink(encoding, finalSink);
|
writtenToSink += realSize;
|
||||||
if (! successfulStatuses.count(getHTTPStatus())) {
|
dataCallback(*this, {(const char *) contents, realSize});
|
||||||
// In this case we want to construct a TeeSink, to keep
|
} else {
|
||||||
// the response around (which we figure won't be big
|
this->result.data.append((const char *) contents, realSize);
|
||||||
// like an actual download should be) to improve error
|
|
||||||
// messages.
|
|
||||||
errorSink = StringSink { };
|
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
(*decompressionSink)({(char *) contents, realSize});
|
|
||||||
|
|
||||||
return realSize;
|
return realSize;
|
||||||
} catch (...) {
|
} catch (...) {
|
||||||
|
@ -196,42 +197,7 @@ struct curlFileTransfer : public FileTransfer
|
||||||
statusMsg = trim(match.str(1));
|
statusMsg = trim(match.str(1));
|
||||||
acceptRanges = false;
|
acceptRanges = false;
|
||||||
encoding = "";
|
encoding = "";
|
||||||
} else {
|
headersProcessed = false;
|
||||||
|
|
||||||
auto i = line.find(':');
|
|
||||||
if (i != std::string::npos) {
|
|
||||||
std::string name = toLower(trim(line.substr(0, i)));
|
|
||||||
|
|
||||||
if (name == "etag") {
|
|
||||||
result.etag = trim(line.substr(i + 1));
|
|
||||||
/* Hack to work around a GitHub bug: it sends
|
|
||||||
ETags, but ignores If-None-Match. So if we get
|
|
||||||
the expected ETag on a 200 response, then shut
|
|
||||||
down the connection because we already have the
|
|
||||||
data. */
|
|
||||||
long httpStatus = 0;
|
|
||||||
curl_easy_getinfo(req, CURLINFO_RESPONSE_CODE, &httpStatus);
|
|
||||||
if (result.etag == request.expectedETag && httpStatus == 200) {
|
|
||||||
debug("shutting down on 200 HTTP response with expected ETag");
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
else if (name == "content-encoding")
|
|
||||||
encoding = trim(line.substr(i + 1));
|
|
||||||
|
|
||||||
else if (name == "accept-ranges" && toLower(trim(line.substr(i + 1))) == "bytes")
|
|
||||||
acceptRanges = true;
|
|
||||||
|
|
||||||
else if (name == "link" || name == "x-amz-meta-link") {
|
|
||||||
auto value = trim(line.substr(i + 1));
|
|
||||||
static std::regex linkRegex("<([^>]*)>; rel=\"immutable\"", std::regex::extended | std::regex::icase);
|
|
||||||
if (std::smatch match; std::regex_match(value, match, linkRegex))
|
|
||||||
result.immutableUrl = match.str(1);
|
|
||||||
else
|
|
||||||
debug("got invalid link header '%s'", value);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
return realSize;
|
return realSize;
|
||||||
}
|
}
|
||||||
|
@ -301,15 +267,11 @@ struct curlFileTransfer : public FileTransfer
|
||||||
curl_easy_setopt(req, CURLOPT_USERAGENT,
|
curl_easy_setopt(req, CURLOPT_USERAGENT,
|
||||||
("curl/" LIBCURL_VERSION " Lix/" + nixVersion +
|
("curl/" LIBCURL_VERSION " Lix/" + nixVersion +
|
||||||
(fileTransferSettings.userAgentSuffix != "" ? " " + fileTransferSettings.userAgentSuffix.get() : "")).c_str());
|
(fileTransferSettings.userAgentSuffix != "" ? " " + fileTransferSettings.userAgentSuffix.get() : "")).c_str());
|
||||||
#if LIBCURL_VERSION_NUM >= 0x072b00
|
|
||||||
curl_easy_setopt(req, CURLOPT_PIPEWAIT, 1);
|
curl_easy_setopt(req, CURLOPT_PIPEWAIT, 1);
|
||||||
#endif
|
|
||||||
#if LIBCURL_VERSION_NUM >= 0x072f00
|
|
||||||
if (fileTransferSettings.enableHttp2)
|
if (fileTransferSettings.enableHttp2)
|
||||||
curl_easy_setopt(req, CURLOPT_HTTP_VERSION, CURL_HTTP_VERSION_2TLS);
|
curl_easy_setopt(req, CURLOPT_HTTP_VERSION, CURL_HTTP_VERSION_2TLS);
|
||||||
else
|
else
|
||||||
curl_easy_setopt(req, CURLOPT_HTTP_VERSION, CURL_HTTP_VERSION_1_1);
|
curl_easy_setopt(req, CURLOPT_HTTP_VERSION, CURL_HTTP_VERSION_1_1);
|
||||||
#endif
|
|
||||||
curl_easy_setopt(req, CURLOPT_WRITEFUNCTION, TransferItem::writeCallbackWrapper);
|
curl_easy_setopt(req, CURLOPT_WRITEFUNCTION, TransferItem::writeCallbackWrapper);
|
||||||
curl_easy_setopt(req, CURLOPT_WRITEDATA, this);
|
curl_easy_setopt(req, CURLOPT_WRITEDATA, this);
|
||||||
curl_easy_setopt(req, CURLOPT_HEADERFUNCTION, TransferItem::headerCallbackWrapper);
|
curl_easy_setopt(req, CURLOPT_HEADERFUNCTION, TransferItem::headerCallbackWrapper);
|
||||||
|
@ -371,17 +333,31 @@ struct curlFileTransfer : public FileTransfer
|
||||||
debug("finished %s of '%s'; curl status = %d, HTTP status = %d, body = %d bytes",
|
debug("finished %s of '%s'; curl status = %d, HTTP status = %d, body = %d bytes",
|
||||||
request.verb(), request.uri, code, httpStatus, result.bodySize);
|
request.verb(), request.uri, code, httpStatus, result.bodySize);
|
||||||
|
|
||||||
if (decompressionSink) {
|
auto link = getHeader("link");
|
||||||
try {
|
if (!link) {
|
||||||
decompressionSink->finish();
|
link = getHeader("x-amz-meta-link");
|
||||||
} catch (...) {
|
}
|
||||||
writeException = std::current_exception();
|
if (link) {
|
||||||
|
static std::regex linkRegex(
|
||||||
|
"<([^>]*)>; rel=\"immutable\"", std::regex::extended | std::regex::icase
|
||||||
|
);
|
||||||
|
if (std::smatch match; std::regex_match(*link, match, linkRegex)) {
|
||||||
|
result.immutableUrl = match.str(1);
|
||||||
|
} else {
|
||||||
|
debug("got invalid link header '%s'", *link);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (code == CURLE_WRITE_ERROR && result.etag == request.expectedETag) {
|
if (auto etag = getHeader("etag")) {
|
||||||
code = CURLE_OK;
|
result.etag = std::move(*etag);
|
||||||
httpStatus = 304;
|
}
|
||||||
|
|
||||||
|
// this has to happen here until we can return an actual future.
|
||||||
|
// wrapping user `callback`s instead is not possible because the
|
||||||
|
// Callback api expects std::functions, and copying Callbacks is
|
||||||
|
// not possible due the promises they hold.
|
||||||
|
if (code == CURLE_OK && !dataCallback) {
|
||||||
|
result.data = decompress(encoding, result.data);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (writeException)
|
if (writeException)
|
||||||
|
@ -390,13 +366,6 @@ struct curlFileTransfer : public FileTransfer
|
||||||
else if (code == CURLE_OK && successfulStatuses.count(httpStatus))
|
else if (code == CURLE_OK && successfulStatuses.count(httpStatus))
|
||||||
{
|
{
|
||||||
result.cached = httpStatus == 304;
|
result.cached = httpStatus == 304;
|
||||||
|
|
||||||
// In 2021, GitHub responds to If-None-Match with 304,
|
|
||||||
// but omits ETag. We just use the If-None-Match etag
|
|
||||||
// since 304 implies they are the same.
|
|
||||||
if (httpStatus == 304 && result.etag == "")
|
|
||||||
result.etag = request.expectedETag;
|
|
||||||
|
|
||||||
act.progress(result.bodySize, result.bodySize);
|
act.progress(result.bodySize, result.bodySize);
|
||||||
done = true;
|
done = true;
|
||||||
callback(std::move(result));
|
callback(std::move(result));
|
||||||
|
@ -455,16 +424,16 @@ struct curlFileTransfer : public FileTransfer
|
||||||
attempt++;
|
attempt++;
|
||||||
|
|
||||||
std::optional<std::string> response;
|
std::optional<std::string> response;
|
||||||
if (errorSink)
|
if (!successfulStatuses.count(httpStatus))
|
||||||
response = std::move(errorSink->s);
|
response = std::move(result.data);
|
||||||
auto exc =
|
auto exc =
|
||||||
code == CURLE_ABORTED_BY_CALLBACK && _isInterrupted
|
code == CURLE_ABORTED_BY_CALLBACK && _isInterrupted
|
||||||
? FileTransferError(Interrupted, std::move(response), "%s of '%s' was interrupted", request.verb(), request.uri)
|
? FileTransferError(Interrupted, std::move(response), "%s of '%s' was interrupted", request.verb(), request.uri)
|
||||||
: httpStatus != 0
|
: httpStatus != 0
|
||||||
? FileTransferError(err,
|
? FileTransferError(err,
|
||||||
std::move(response),
|
std::move(response),
|
||||||
"unable to %s '%s': HTTP error %d%s",
|
"unable to %s '%s': HTTP error %d (%s)%s",
|
||||||
request.verb(), request.uri, httpStatus,
|
request.verb(), request.uri, httpStatus, statusMsg,
|
||||||
code == CURLE_OK ? "" : fmt(" (curl error: %s)", curl_easy_strerror(code)))
|
code == CURLE_OK ? "" : fmt(" (curl error: %s)", curl_easy_strerror(code)))
|
||||||
: FileTransferError(err,
|
: FileTransferError(err,
|
||||||
std::move(response),
|
std::move(response),
|
||||||
|
@ -477,7 +446,7 @@ struct curlFileTransfer : public FileTransfer
|
||||||
ranged requests. */
|
ranged requests. */
|
||||||
if (err == Transient
|
if (err == Transient
|
||||||
&& attempt < request.tries
|
&& attempt < request.tries
|
||||||
&& (!this->request.dataCallback
|
&& (!this->dataCallback
|
||||||
|| writtenToSink == 0
|
|| writtenToSink == 0
|
||||||
|| (acceptRanges && encoding.empty())))
|
|| (acceptRanges && encoding.empty())))
|
||||||
{
|
{
|
||||||
|
@ -508,11 +477,6 @@ struct curlFileTransfer : public FileTransfer
|
||||||
|
|
||||||
Sync<State> state_;
|
Sync<State> state_;
|
||||||
|
|
||||||
/* We can't use a std::condition_variable to wake up the curl
|
|
||||||
thread, because it only monitors file descriptors. So use a
|
|
||||||
pipe instead. */
|
|
||||||
Pipe wakeupPipe;
|
|
||||||
|
|
||||||
std::thread workerThread;
|
std::thread workerThread;
|
||||||
|
|
||||||
curlFileTransfer()
|
curlFileTransfer()
|
||||||
|
@ -523,16 +487,9 @@ struct curlFileTransfer : public FileTransfer
|
||||||
|
|
||||||
curlm = curl_multi_init();
|
curlm = curl_multi_init();
|
||||||
|
|
||||||
#if LIBCURL_VERSION_NUM >= 0x072b00 // Multiplex requires >= 7.43.0
|
|
||||||
curl_multi_setopt(curlm, CURLMOPT_PIPELINING, CURLPIPE_MULTIPLEX);
|
curl_multi_setopt(curlm, CURLMOPT_PIPELINING, CURLPIPE_MULTIPLEX);
|
||||||
#endif
|
|
||||||
#if LIBCURL_VERSION_NUM >= 0x071e00 // Max connections requires >= 7.30.0
|
|
||||||
curl_multi_setopt(curlm, CURLMOPT_MAX_TOTAL_CONNECTIONS,
|
curl_multi_setopt(curlm, CURLMOPT_MAX_TOTAL_CONNECTIONS,
|
||||||
fileTransferSettings.httpConnections.get());
|
fileTransferSettings.httpConnections.get());
|
||||||
#endif
|
|
||||||
|
|
||||||
wakeupPipe.create();
|
|
||||||
fcntl(wakeupPipe.readSide.get(), F_SETFL, O_NONBLOCK);
|
|
||||||
|
|
||||||
workerThread = std::thread([&]() { workerThreadEntry(); });
|
workerThread = std::thread([&]() { workerThreadEntry(); });
|
||||||
}
|
}
|
||||||
|
@ -546,6 +503,12 @@ struct curlFileTransfer : public FileTransfer
|
||||||
if (curlm) curl_multi_cleanup(curlm);
|
if (curlm) curl_multi_cleanup(curlm);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
void wakeup()
|
||||||
|
{
|
||||||
|
if (auto mc = curl_multi_wakeup(curlm))
|
||||||
|
throw nix::Error("unexpected error from curl_multi_wakeup(): %s", curl_multi_strerror(mc));
|
||||||
|
}
|
||||||
|
|
||||||
void stopWorkerThread()
|
void stopWorkerThread()
|
||||||
{
|
{
|
||||||
/* Signal the worker thread to exit. */
|
/* Signal the worker thread to exit. */
|
||||||
|
@ -553,7 +516,7 @@ struct curlFileTransfer : public FileTransfer
|
||||||
auto state(state_.lock());
|
auto state(state_.lock());
|
||||||
state->quit = true;
|
state->quit = true;
|
||||||
}
|
}
|
||||||
writeFull(wakeupPipe.writeSide.get(), " ", false);
|
wakeup();
|
||||||
}
|
}
|
||||||
|
|
||||||
void workerThreadMain()
|
void workerThreadMain()
|
||||||
|
@ -595,32 +558,21 @@ struct curlFileTransfer : public FileTransfer
|
||||||
}
|
}
|
||||||
|
|
||||||
/* Wait for activity, including wakeup events. */
|
/* Wait for activity, including wakeup events. */
|
||||||
int numfds = 0;
|
|
||||||
struct curl_waitfd extraFDs[1];
|
|
||||||
extraFDs[0].fd = wakeupPipe.readSide.get();
|
|
||||||
extraFDs[0].events = CURL_WAIT_POLLIN;
|
|
||||||
extraFDs[0].revents = 0;
|
|
||||||
long maxSleepTimeMs = items.empty() ? 10000 : 100;
|
long maxSleepTimeMs = items.empty() ? 10000 : 100;
|
||||||
auto sleepTimeMs =
|
auto sleepTimeMs =
|
||||||
nextWakeup != std::chrono::steady_clock::time_point()
|
nextWakeup != std::chrono::steady_clock::time_point()
|
||||||
? std::max(0, (int) std::chrono::duration_cast<std::chrono::milliseconds>(nextWakeup - std::chrono::steady_clock::now()).count())
|
? std::max(0, (int) std::chrono::duration_cast<std::chrono::milliseconds>(nextWakeup - std::chrono::steady_clock::now()).count())
|
||||||
: maxSleepTimeMs;
|
: maxSleepTimeMs;
|
||||||
vomit("download thread waiting for %d ms", sleepTimeMs);
|
vomit("download thread waiting for %d ms", sleepTimeMs);
|
||||||
mc = curl_multi_wait(curlm, extraFDs, 1, sleepTimeMs, &numfds);
|
mc = curl_multi_poll(curlm, nullptr, 0, sleepTimeMs, nullptr);
|
||||||
if (mc != CURLM_OK)
|
if (mc != CURLM_OK)
|
||||||
throw nix::Error("unexpected error from curl_multi_wait(): %s", curl_multi_strerror(mc));
|
throw nix::Error("unexpected error from curl_multi_poll(): %s", curl_multi_strerror(mc));
|
||||||
|
|
||||||
nextWakeup = std::chrono::steady_clock::time_point();
|
nextWakeup = std::chrono::steady_clock::time_point();
|
||||||
|
|
||||||
/* Add new curl requests from the incoming requests queue,
|
/* Add new curl requests from the incoming requests queue,
|
||||||
except for requests that are embargoed (waiting for a
|
except for requests that are embargoed (waiting for a
|
||||||
retry timeout to expire). */
|
retry timeout to expire). */
|
||||||
if (extraFDs[0].revents & CURL_WAIT_POLLIN) {
|
|
||||||
char buf[1024];
|
|
||||||
auto res = read(extraFDs[0].fd, buf, sizeof(buf));
|
|
||||||
if (res == -1 && errno != EINTR)
|
|
||||||
throw SysError("reading curl wakeup socket");
|
|
||||||
}
|
|
||||||
|
|
||||||
std::vector<std::shared_ptr<TransferItem>> incoming;
|
std::vector<std::shared_ptr<TransferItem>> incoming;
|
||||||
auto now = std::chrono::steady_clock::now();
|
auto now = std::chrono::steady_clock::now();
|
||||||
|
@ -683,7 +635,7 @@ struct curlFileTransfer : public FileTransfer
|
||||||
throw nix::Error("cannot enqueue download request because the download thread is shutting down");
|
throw nix::Error("cannot enqueue download request because the download thread is shutting down");
|
||||||
state->incoming.push(item);
|
state->incoming.push(item);
|
||||||
}
|
}
|
||||||
writeFull(wakeupPipe.writeSide.get(), " ");
|
wakeup();
|
||||||
}
|
}
|
||||||
|
|
||||||
#if ENABLE_S3
|
#if ENABLE_S3
|
@@ -704,6 +656,13 @@ struct curlFileTransfer : public FileTransfer

     void enqueueFileTransfer(const FileTransferRequest & request,
         Callback<FileTransferResult> callback) override
+    {
+        enqueueFileTransfer(request, std::move(callback), {});
+    }
+
+    void enqueueFileTransfer(const FileTransferRequest & request,
+        Callback<FileTransferResult> callback,
+        std::function<void(TransferItem &, std::string_view data)> dataCallback)
     {
         /* Ugly hack to support s3:// URIs. */
         if (request.uri.starts_with("s3://")) {

@@ -733,7 +692,116 @@ struct curlFileTransfer : public FileTransfer
             return;
         }

-        enqueueItem(std::make_shared<TransferItem>(*this, request, std::move(callback)));
+        enqueueItem(std::make_shared<TransferItem>(
+            *this, request, std::move(callback), std::move(dataCallback)
+        ));
+    }
+
+    void download(FileTransferRequest && request, Sink & sink) override
+    {
+        /* Note: we can't call 'sink' via request.dataCallback, because
+           that would cause the sink to execute on the fileTransfer
+           thread. If 'sink' is a coroutine, this will fail. Also, if the
+           sink is expensive (e.g. one that does decompression and writing
+           to the Nix store), it would stall the download thread too much.
+           Therefore we use a buffer to communicate data between the
+           download thread and the calling thread. */
+
+        struct State {
+            bool quit = false;
+            std::exception_ptr exc;
+            std::string data;
+            std::condition_variable avail, request;
+            std::unique_ptr<FinishSink> decompressor;
+        };
+
+        auto _state = std::make_shared<Sync<State>>();
+
+        /* In case of an exception, wake up the download thread. FIXME:
+           abort the download request. */
+        Finally finally([&]() {
+            auto state(_state->lock());
+            state->quit = true;
+            state->request.notify_one();
+        });
+
+        enqueueFileTransfer(request,
+            {[_state](std::future<FileTransferResult> fut) {
+                auto state(_state->lock());
+                state->quit = true;
+                try {
+                    fut.get();
+                } catch (...) {
+                    state->exc = std::current_exception();
+                }
+                state->avail.notify_one();
+                state->request.notify_one();
+            }},
+            [_state, &sink](TransferItem & transfer, std::string_view data) {
+                auto state(_state->lock());
+
+                if (state->quit) return;
+
+                if (!state->decompressor) {
+                    state->decompressor = makeDecompressionSink(transfer.encoding, sink);
+                }
+
+                /* If the buffer is full, then go to sleep until the calling
+                   thread wakes us up (i.e. when it has removed data from the
+                   buffer). We don't wait forever to prevent stalling the
+                   download thread. (Hopefully sleeping will throttle the
+                   sender.) */
+                if (state->data.size() > 1024 * 1024) {
+                    debug("download buffer is full; going to sleep");
+                    state.wait_for(state->request, std::chrono::seconds(10));
+                }
+
+                /* Append data to the buffer and wake up the calling
+                   thread. */
+                state->data.append(data);
+                state->avail.notify_one();
+            });
+
+        while (true) {
+            checkInterrupt();
+
+            std::string chunk;
+            FinishSink * sink = nullptr;
+
+            /* Grab data if available, otherwise wait for the download
+               thread to wake us up. */
+            {
+                auto state(_state->lock());
+
+                if (state->data.empty()) {
+
+                    if (state->quit) {
+                        if (state->exc) std::rethrow_exception(state->exc);
+                        if (state->decompressor) {
+                            state->decompressor->finish();
+                        }
+                        return;
+                    }
+
+                    state.wait(state->avail);
+
+                    if (state->data.empty()) continue;
+                }
+
+                chunk = std::move(state->data);
+                sink = state->decompressor.get();
+                /* Reset state->data after the move, since we check data.empty() */
+                state->data = "";
+
+                state->request.notify_one();
+            }
+
+            /* Flush the data to the sink and wake up the download thread
+               if it's blocked on a full buffer. We don't hold the state
+               lock while doing this to prevent blocking the download
+               thread if sink() takes a long time. */
+            (*sink)(chunk);
+        }
     }
 };
@@ -782,105 +850,6 @@ FileTransferResult FileTransfer::upload(const FileTransferRequest & request)
     return enqueueFileTransfer(request).get();
 }

-void FileTransfer::download(FileTransferRequest && request, Sink & sink)
-{
-    /* Note: we can't call 'sink' via request.dataCallback, because
-       that would cause the sink to execute on the fileTransfer
-       thread. If 'sink' is a coroutine, this will fail. Also, if the
-       sink is expensive (e.g. one that does decompression and writing
-       to the Nix store), it would stall the download thread too much.
-       Therefore we use a buffer to communicate data between the
-       download thread and the calling thread. */
-
-    struct State {
-        bool quit = false;
-        std::exception_ptr exc;
-        std::string data;
-        std::condition_variable avail, request;
-    };
-
-    auto _state = std::make_shared<Sync<State>>();
-
-    /* In case of an exception, wake up the download thread. FIXME:
-       abort the download request. */
-    Finally finally([&]() {
-        auto state(_state->lock());
-        state->quit = true;
-        state->request.notify_one();
-    });
-
-    request.dataCallback = [_state](std::string_view data) {
-
-        auto state(_state->lock());
-
-        if (state->quit) return;
-
-        /* If the buffer is full, then go to sleep until the calling
-           thread wakes us up (i.e. when it has removed data from the
-           buffer). We don't wait forever to prevent stalling the
-           download thread. (Hopefully sleeping will throttle the
-           sender.) */
-        if (state->data.size() > 1024 * 1024) {
-            debug("download buffer is full; going to sleep");
-            state.wait_for(state->request, std::chrono::seconds(10));
-        }
-
-        /* Append data to the buffer and wake up the calling
-           thread. */
-        state->data.append(data);
-        state->avail.notify_one();
-    };
-
-    enqueueFileTransfer(request,
-        {[_state](std::future<FileTransferResult> fut) {
-            auto state(_state->lock());
-            state->quit = true;
-            try {
-                fut.get();
-            } catch (...) {
-                state->exc = std::current_exception();
-            }
-            state->avail.notify_one();
-            state->request.notify_one();
-        }});
-
-    while (true) {
-        checkInterrupt();
-
-        std::string chunk;
-
-        /* Grab data if available, otherwise wait for the download
-           thread to wake us up. */
-        {
-            auto state(_state->lock());
-
-            if (state->data.empty()) {
-
-                if (state->quit) {
-                    if (state->exc) std::rethrow_exception(state->exc);
-                    return;
-                }
-
-                state.wait(state->avail);
-
-                if (state->data.empty()) continue;
-            }
-
-            chunk = std::move(state->data);
-            /* Reset state->data after the move, since we check data.empty() */
-            state->data = "";
-
-            state->request.notify_one();
-        }
-
-        /* Flush the data to the sink and wake up the download thread
-           if it's blocked on a full buffer. We don't hold the state
-           lock while doing this to prevent blocking the download
-           thread if sink() takes a long time. */
-        sink(chunk);
-    }
-}
-
 template<typename... Args>
 FileTransferError::FileTransferError(FileTransfer::Error error, std::optional<std::string> response, const Args & ... args)
     : Error(args...), error(error), response(response)
@@ -59,10 +59,8 @@ struct FileTransferRequest
     size_t tries = fileTransferSettings.tries;
     unsigned int baseRetryTimeMs = 250;
     ActivityId parentAct;
-    bool decompress = true;
     std::optional<std::string> data;
     std::string mimeType;
-    std::function<void(std::string_view data)> dataCallback;

     FileTransferRequest(std::string_view uri)
         : uri(uri), parentAct(getCurActivity()) { }

@@ -116,7 +114,7 @@ struct FileTransfer
      * Download a file, writing its data to a sink. The sink will be
      * invoked on the thread of the caller.
      */
-    void download(FileTransferRequest && request, Sink & sink);
+    virtual void download(FileTransferRequest && request, Sink & sink) = 0;

     enum Error { NotFound, Forbidden, Misc, Transient, Interrupted };
 };
@@ -7,7 +7,14 @@
 namespace nix {

+/**
+ * Garbage-collector roots, referring to a store path
+ */
 typedef std::unordered_map<StorePath, std::unordered_set<std::string>> Roots;

+/**
+ * Possible garbage collector roots, referring to any path
+ */
+typedef std::unordered_map<Path, std::unordered_set<std::string>> UncheckedRoots;
+
 struct GCOptions
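Aside (not part of the diff): the two typedefs above differ only in their key type. UncheckedRoots is keyed by an arbitrary filesystem Path collected before any validation, while Roots is keyed by a parsed StorePath. A minimal sketch of promoting entries from one to the other, assuming the isInStore()/toStorePath() store API that is used elsewhere in this diff; the helper name promoteRoots is hypothetical:

    // Hypothetical helper: keep only entries that really point into the store.
    void promoteRoots(const nix::Store & store, const nix::UncheckedRoots & unchecked, nix::Roots & roots)
    {
        for (auto & [target, links] : unchecked) {
            if (!store.isInStore(target)) continue;          // drop non-store paths
            auto path = store.toStorePath(target).first;     // normalise to the store path itself
            roots[path].insert(links.begin(), links.end());  // merge the recorded referrers
        }
    }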
@@ -321,105 +321,8 @@ Roots LocalStore::findRoots(bool censor)
     return roots;
 }

-typedef std::unordered_map<Path, std::unordered_set<std::string>> UncheckedRoots;
-
-static void readProcLink(const std::string & file, UncheckedRoots & roots)
-{
-    constexpr auto bufsiz = PATH_MAX;
-    char buf[bufsiz];
-    auto res = readlink(file.c_str(), buf, bufsiz);
-    if (res == -1) {
-        if (errno == ENOENT || errno == EACCES || errno == ESRCH)
-            return;
-        throw SysError("reading symlink");
-    }
-    if (res == bufsiz) {
-        throw Error("overly long symlink starting with '%1%'", std::string_view(buf, bufsiz));
-    }
-    if (res > 0 && buf[0] == '/')
-        roots[std::string(static_cast<char *>(buf), res)]
-            .emplace(file);
-}
-
-static std::string quoteRegexChars(const std::string & raw)
-{
-    static auto specialRegex = std::regex(R"([.^$\\*+?()\[\]{}|])");
-    return std::regex_replace(raw, specialRegex, R"(\$&)");
-}
-
-#if __linux__
-static void readFileRoots(const char * path, UncheckedRoots & roots)
-{
-    try {
-        roots[readFile(path)].emplace(path);
-    } catch (SysError & e) {
-        if (e.errNo != ENOENT && e.errNo != EACCES)
-            throw;
-    }
-}
-#endif
-
-void LocalStore::findRuntimeRoots(Roots & roots, bool censor)
-{
-    UncheckedRoots unchecked;
-
-    auto procDir = AutoCloseDir{opendir("/proc")};
-    if (procDir) {
-        struct dirent * ent;
-        auto digitsRegex = std::regex(R"(^\d+$)");
-        auto mapRegex = std::regex(R"(^\s*\S+\s+\S+\s+\S+\s+\S+\s+\S+\s+(/\S+)\s*$)");
-        auto storePathRegex = std::regex(quoteRegexChars(storeDir) + R"(/[0-9a-z]+[0-9a-zA-Z\+\-\._\?=]*)");
-        while (errno = 0, ent = readdir(procDir.get())) {
-            checkInterrupt();
-            if (std::regex_match(ent->d_name, digitsRegex)) {
-                try {
-                    readProcLink(fmt("/proc/%s/exe" ,ent->d_name), unchecked);
-                    readProcLink(fmt("/proc/%s/cwd", ent->d_name), unchecked);
-
-                    auto fdStr = fmt("/proc/%s/fd", ent->d_name);
-                    auto fdDir = AutoCloseDir(opendir(fdStr.c_str()));
-                    if (!fdDir) {
-                        if (errno == ENOENT || errno == EACCES)
-                            continue;
-                        throw SysError("opening %1%", fdStr);
-                    }
-                    struct dirent * fd_ent;
-                    while (errno = 0, fd_ent = readdir(fdDir.get())) {
-                        if (fd_ent->d_name[0] != '.')
-                            readProcLink(fmt("%s/%s", fdStr, fd_ent->d_name), unchecked);
-                    }
-                    if (errno) {
-                        if (errno == ESRCH)
-                            continue;
-                        throw SysError("iterating /proc/%1%/fd", ent->d_name);
-                    }
-                    fdDir.reset();
-
-                    auto mapFile = fmt("/proc/%s/maps", ent->d_name);
-                    auto mapLines = tokenizeString<std::vector<std::string>>(readFile(mapFile), "\n");
-                    for (const auto & line : mapLines) {
-                        auto match = std::smatch{};
-                        if (std::regex_match(line, match, mapRegex))
-                            unchecked[match[1]].emplace(mapFile);
-                    }
-
-                    auto envFile = fmt("/proc/%s/environ", ent->d_name);
-                    auto envString = readFile(envFile);
-                    auto env_end = std::sregex_iterator{};
-                    for (auto i = std::sregex_iterator{envString.begin(), envString.end(), storePathRegex}; i != env_end; ++i)
-                        unchecked[i->str()].emplace(envFile);
-                } catch (SysError & e) {
-                    if (errno == ENOENT || errno == EACCES || errno == ESRCH)
-                        continue;
-                    throw;
-                }
-            }
-        }
-        if (errno)
-            throw SysError("iterating /proc");
-    }
-
-#if !defined(__linux__)
+void LocalStore::findPlatformRoots(UncheckedRoots & unchecked)
+{
     // lsof is really slow on OS X. This actually causes the gc-concurrent.sh test to fail.
     // See: https://github.com/NixOS/nix/issues/3011
     // Because of this we disable lsof when running the tests.

@@ -437,13 +340,13 @@ void LocalStore::findRuntimeRoots(Roots & roots, bool censor)
             /* lsof not installed, lsof failed */
         }
     }
-#endif
+}

-#if __linux__
-    readFileRoots("/proc/sys/kernel/modprobe", unchecked);
-    readFileRoots("/proc/sys/kernel/fbsplash", unchecked);
-    readFileRoots("/proc/sys/kernel/poweroff_cmd", unchecked);
-#endif
+void LocalStore::findRuntimeRoots(Roots & roots, bool censor)
+{
+    UncheckedRoots unchecked;
+
+    findPlatformRoots(unchecked);

     for (auto & [target, links] : unchecked) {
         if (!isInStore(target)) continue;

@@ -1940,6 +1940,4 @@ std::optional<std::string> LocalStore::getVersion()
     return nixVersion;
 }

-static RegisterStoreImplementation<LocalStore, LocalStoreConfig> regLocalStore;
-
 } // namespace nix
@@ -127,6 +127,17 @@ private:

     const PublicKeys & getPublicKeys();

+protected:
+
+    /**
+     * Initialise the local store, upgrading the schema if
+     * necessary.
+     * Protected so that users don't accidentally create a LocalStore
+     * instead of a platform's subclass.
+     */
+    LocalStore(const Params & params);
+    LocalStore(std::string scheme, std::string path, const Params & params);
+
 public:

     /**

@@ -134,18 +145,16 @@ public:
      */
     PathSet locksHeld;

-    /**
-     * Initialise the local store, upgrading the schema if
-     * necessary.
-     */
-    LocalStore(const Params & params);
-    LocalStore(std::string scheme, std::string path, const Params & params);
-
-    ~LocalStore();
+    virtual ~LocalStore();

     static std::set<std::string> uriSchemes()
     { return {}; }

+    /**
+     * Create a LocalStore, possibly a platform-specific subclass
+     */
+    static std::shared_ptr<LocalStore> makeLocalStore(const Params & params);
+
     /**
      * Implementations of abstract store API methods.
      */

@@ -330,6 +339,12 @@ private:

     void findRootsNoTemp(Roots & roots, bool censor);

+    /**
+     * Find possible garbage collector roots in a platform-specific manner,
+     * e.g. by looking in `/proc` or using `lsof`
+     */
+    virtual void findPlatformRoots(UncheckedRoots & unchecked);
+
     void findRuntimeRoots(Roots & roots, bool censor);

     std::pair<Path, AutoCloseFD> createTempDirInStore();
@@ -1,68 +0,0 @@
-libraries += libstore
-
-libstore_NAME = libnixstore
-
-libstore_DIR := $(d)
-
-libstore_SOURCES := $(wildcard $(d)/*.cc $(d)/builtins/*.cc $(d)/build/*.cc)
-
-libstore_LIBS = libutil
-
-libstore_LDFLAGS += $(SQLITE3_LIBS) $(LIBCURL_LIBS) $(SODIUM_LIBS) -pthread
-ifdef HOST_LINUX
- libstore_LDFLAGS += -ldl
-endif
-
-$(foreach file,$(libstore_FILES),$(eval $(call install-data-in,$(d)/$(file),$(datadir)/nix/sandbox)))
-
-ifeq ($(ENABLE_S3), 1)
-	libstore_LDFLAGS += -laws-cpp-sdk-transfer -laws-cpp-sdk-s3 -laws-cpp-sdk-core -laws-crt-cpp
-endif
-
-ifdef HOST_SOLARIS
-	libstore_LDFLAGS += -lsocket
-endif
-
-ifeq ($(HAVE_SECCOMP), 1)
-	libstore_LDFLAGS += $(LIBSECCOMP_LIBS)
-endif
-
-libstore_CXXFLAGS += \
- -I src/libutil -I src/libstore -I src/libstore/build \
- -DNIX_PREFIX=\"$(prefix)\" \
- -DNIX_STORE_DIR=\"$(storedir)\" \
- -DNIX_DATA_DIR=\"$(datadir)\" \
- -DNIX_STATE_DIR=\"$(localstatedir)/nix\" \
- -DNIX_LOG_DIR=\"$(localstatedir)/log/nix\" \
- -DNIX_CONF_DIR=\"$(sysconfdir)/nix\" \
- -DNIX_BIN_DIR=\"$(bindir)\" \
- -DNIX_MAN_DIR=\"$(mandir)\" \
- -DLSOF=\"$(lsof)\"
-
-ifeq ($(embedded_sandbox_shell),yes)
-libstore_CXXFLAGS += -DSANDBOX_SHELL=\"__embedded_sandbox_shell__\"
-
-$(d)/build/local-derivation-goal.cc: $(d)/embedded-sandbox-shell.gen.hh
-
-$(d)/embedded-sandbox-shell.gen.hh: $(sandbox_shell)
-	$(trace-gen) hexdump -v -e '1/1 "0x%x," "\n"' < $< > $@.tmp
-	@mv $@.tmp $@
-else
-ifneq ($(sandbox_shell),)
-libstore_CXXFLAGS += -DSANDBOX_SHELL="\"$(sandbox_shell)\""
-endif
-endif
-
-$(d)/local-store.cc: $(d)/schema.sql.gen.hh $(d)/ca-specific-schema.sql.gen.hh
-
-$(d)/build.cc:
-
-clean-files += $(d)/schema.sql.gen.hh $(d)/ca-specific-schema.sql.gen.hh
-
-$(eval $(call install-file-in, $(buildprefix)$(d)/nix-store.pc, $(libdir)/pkgconfig, 0644))
-
-$(foreach i, $(wildcard src/libstore/builtins/*.hh), \
-  $(eval $(call install-file-in, $(i), $(includedir)/nix/builtins, 0644)))
-
-$(foreach i, $(wildcard src/libstore/build/*.hh), \
-  $(eval $(call install-file-in, $(i), $(includedir)/nix/build, 0644)))
@@ -69,10 +69,10 @@ ref<Store> Machine::openStore() const
     Store::Params storeParams;
     if (storeUri.starts_with("ssh://")) {
         storeParams["max-connections"] = "1";
-        storeParams["log-fd"] = "4";
     }

     if (storeUri.starts_with("ssh://") || storeUri.starts_with("ssh-ng://")) {
+        storeParams["log-fd"] = "4";
         if (sshKey != "")
             storeParams["ssh-key"] = sshKey;
         if (sshPublicHostKey != "")
@@ -11,7 +11,7 @@ foreach header : [ 'schema.sql', 'ca-specific-schema.sql' ]
 endforeach

 if enable_embedded_sandbox_shell
-  hexdump = find_program('hexdump', required : true)
+  hexdump = find_program('hexdump', required : true, native : true)
   embedded_sandbox_shell_gen = custom_target(
     'embedded-sandbox-shell.gen.hh',
     command : [

@@ -66,6 +66,7 @@ libstore_sources = files(
   'path-with-outputs.cc',
   'path.cc',
   'pathlocks.cc',
+  'platform.cc',
   'profiles.cc',
   'realisation.cc',
   'remote-fs-accessor.cc',

@@ -158,6 +159,17 @@ libstore_headers = files(
   'worker-protocol.hh',
 )

+if host_machine.system() == 'linux'
+  libstore_sources += files('platform/linux.cc')
+  libstore_headers += files('platform/linux.hh')
+elif host_machine.system() == 'darwin'
+  libstore_sources += files('platform/darwin.cc')
+  libstore_headers += files('platform/darwin.hh')
+else
+  libstore_sources += files('platform/fallback.cc')
+  libstore_headers += files('platform/fallback.hh')
+endif
+
 # These variables (aside from LSOF) are created pseudo-dynamically, near the beginning of
 # the top-level meson.build. Aside from prefix itself, each of these was
 # made into an absolute path by joining it with prefix, unless it was already
@@ -151,7 +151,7 @@ std::optional<nlohmann::json> ParsedDerivation::prepareStructuredAttrs(Store & s
     for (auto i = e->begin(); i != e->end(); ++i) {
         StorePathSet storePaths;
         for (auto & p : *i)
-            storePaths.insert(store.parseStorePath(p.get<std::string>()));
+            storePaths.insert(store.toStorePath(p.get<std::string>()).first);
         json[i.key()] = store.pathInfoToJSON(
             store.exportReferences(storePaths, inputPaths), false, true);
     }
src/libstore/platform.cc (new file, 22 lines)
@@ -0,0 +1,22 @@
+#include "local-store.hh"
+
+#if __linux__
+#include "platform/linux.hh"
+#elif __APPLE__
+#include "platform/darwin.hh"
+#else
+#include "platform/fallback.hh"
+#endif
+
+namespace nix {
+std::shared_ptr<LocalStore> LocalStore::makeLocalStore(const Params & params)
+{
+#if __linux__
+    return std::shared_ptr<LocalStore>(new LinuxLocalStore(params));
+#elif __APPLE__
+    return std::shared_ptr<LocalStore>(new DarwinLocalStore(params));
+#else
+    return std::shared_ptr<LocalStore>(new FallbackLocalStore(params));
+#endif
+}
+}
src/libstore/platform/darwin.cc (new file, 223 lines)
@@ -0,0 +1,223 @@
+#include "gc-store.hh"
+#include "signals.hh"
+#include "platform/darwin.hh"
+#include "regex.hh"
+
+#include <sys/proc_info.h>
+#include <sys/sysctl.h>
+#include <libproc.h>
+
+#include <regex>
+
+namespace nix {
+
+void DarwinLocalStore::findPlatformRoots(UncheckedRoots & unchecked)
+{
+    auto storePathRegex = regex::storePathRegex(storeDir);
+
+    std::vector<int> pids;
+    int pidBufSize = 1;
+
+    while (pidBufSize > pids.size() * sizeof(int)) {
+        // Reserve some extra size so we don't fail too much
+        pids.resize((pidBufSize + pidBufSize / 8) / sizeof(int));
+        pidBufSize = proc_listpids(PROC_ALL_PIDS, 0, pids.data(), pids.size() * sizeof(int));
+
+        if (pidBufSize <= 0) {
+            throw SysError("Listing PIDs");
+        }
+    }
+
+    pids.resize(pidBufSize / sizeof(int));
+
+    for (auto pid : pids) {
+        // It doesn't make sense to ask about the kernel
+        if (pid == 0) {
+            continue;
+        }
+
+        try {
+            // Process cwd/root directory
+            struct proc_vnodepathinfo vnodeInfo;
+            if (proc_pidinfo(pid, PROC_PIDVNODEPATHINFO, 0, &vnodeInfo, sizeof(vnodeInfo)) <= 0) {
+                throw SysError("Getting pid %1% working directory", pid);
+            }
+
+            unchecked[std::string(vnodeInfo.pvi_cdir.vip_path)].emplace(fmt("{libproc/%d/cwd}", pid)
+            );
+            unchecked[std::string(vnodeInfo.pvi_rdir.vip_path)].emplace(
+                fmt("{libproc/%d/rootdir}", pid)
+            );
+
+            // File descriptors
+            std::vector<struct proc_fdinfo> fds;
+            int fdBufSize = 1;
+            while (fdBufSize > fds.size() * sizeof(struct proc_fdinfo)) {
+                // Reserve some extra size so we don't fail too much
+                fds.resize((fdBufSize + fdBufSize / 8) / sizeof(struct proc_fdinfo));
+                fdBufSize = proc_pidinfo(
+                    pid, PROC_PIDLISTFDS, 0, fds.data(), fds.size() * sizeof(struct proc_fdinfo)
+                );
+
+                if (fdBufSize <= 0) {
+                    throw SysError("Listing pid %1% file descriptors", pid);
+                }
+            }
+            fds.resize(fdBufSize / sizeof(struct proc_fdinfo));
+
+            for (auto fd : fds) {
+                // By definition, only a vnode is on the filesystem
+                if (fd.proc_fdtype != PROX_FDTYPE_VNODE) {
+                    continue;
+                }
+
+                struct vnode_fdinfowithpath fdInfo;
+                if (proc_pidfdinfo(
+                        pid, fd.proc_fd, PROC_PIDFDVNODEPATHINFO, &fdInfo, sizeof(fdInfo)
+                    )
+                    <= 0)
+                {
+                    // They probably just closed this fd, no need to cancel looking at ranges and
+                    // arguments
+                    if (errno == EBADF) {
+                        continue;
+                    }
+                    throw SysError("Getting pid %1% fd %2% path", pid, fd.proc_fd);
+                }
+
+                unchecked[std::string(fdInfo.pvip.vip_path)].emplace(
+                    fmt("{libproc/%d/fd/%d}", pid, fd.proc_fd)
+                );
+            }
+
+            // Regions (e.g. mmapped files, executables, shared libraries)
+            uint64_t nextAddr = 0;
+            while (true) {
+                // Seriously, what are you doing XNU?
+                // There's 3 flavors of PROC_PIDREGIONPATHINFO:
+                // * PROC_PIDREGIONPATHINFO includes all regions
+                // * PROC_PIDREGIONPATHINFO2 includes regions backed by a vnode
+                // * PROC_PIDREGIONPATHINFO3 includes regions backed by a vnode on a specified
+                // filesystem Only PROC_PIDREGIONPATHINFO is documented. Unfortunately, using it
+                // would make finding gcroots take about 100x as long and tests would fail from
+                // timeout. According to the Frida source code, PROC_PIDREGIONPATHINFO2 has been
+                // available since XNU 2782.1.97 in OS X 10.10
+                //
+                // 22 means PROC_PIDREGIONPATHINFO2
+                struct proc_regionwithpathinfo regionInfo;
+                if (proc_pidinfo(pid, 22, nextAddr, &regionInfo, sizeof(regionInfo)) <= 0) {
+                    // PROC_PIDREGIONPATHINFO signals we're done with an error,
+                    // so we're expected to hit this once per process
+                    if (errno == ESRCH || errno == EINVAL) {
+                        break;
+                    }
+                    throw SysError("Getting pid %1% region path", pid);
+                }
+
+                unchecked[std::string(regionInfo.prp_vip.vip_path)].emplace(
+                    fmt("{libproc/%d/region}", pid)
+                );
+
+                nextAddr = regionInfo.prp_prinfo.pri_address + regionInfo.prp_prinfo.pri_size;
+            }
+
+            // Arguments and environment variables
+            // We can't read environment variables of binaries with entitlements unless
+            // nix has the `com.apple.private.read-environment-variables` entitlement or SIP is off
+            // We can read arguments for all applications though.
+
+            // Yes, it's a sysctl, the proc_info and sysctl APIs are mostly similar,
+            // but both have exclusive capabilities
+            int sysctlName[3] = {CTL_KERN, KERN_PROCARGS2, pid};
+            size_t argsSize = 0;
+            if (sysctl(sysctlName, 3, nullptr, &argsSize, nullptr, 0) < 0) {
+                throw SysError("Reading pid %1% arguments", pid);
+            }
+
+            std::vector<char> args(argsSize);
+            if (sysctl(sysctlName, 3, args.data(), &argsSize, nullptr, 0) < 0) {
+                throw SysError("Reading pid %1% arguments", pid);
+            }
+
+            if (argsSize < args.size()) {
+                args.resize(argsSize);
+            }
+
+            // We have these perfectly nice arguments, but have to ignore them because
+            // otherwise we'd see arguments to nix-store commands and
+            // `nix-store --delete /nix/store/whatever` would always fail
+            // First 4 bytes are an int of argc.
+            if (args.size() < sizeof(int)) {
+                continue;
+            }
+            auto argc = reinterpret_cast<int *>(args.data())[0];
+
+            auto argsIter = args.begin();
+            std::advance(argsIter, sizeof(int));
+            // Executable then argc args, each separated by some number of null bytes
+            for (int i = 0; argsIter != args.end() && i < argc + 1; i++) {
+                argsIter = std::find(argsIter, args.end(), '\0');
+                argsIter = std::find_if(argsIter, args.end(), [](char ch) { return ch != '\0'; });
+            }
+
+            if (argsIter != args.end()) {
+                auto env_end = std::sregex_iterator{};
+                for (auto i = std::sregex_iterator{argsIter, args.end(), storePathRegex};
+                     i != env_end;
+                     ++i)
+                {
+                    unchecked[i->str()].emplace(fmt("{libproc/%d/environ}", pid));
+                }
+            };
+
+            // Per-thread working directories
+            struct proc_taskallinfo taskAllInfo;
+            if (proc_pidinfo(pid, PROC_PIDTASKALLINFO, 0, &taskAllInfo, sizeof(taskAllInfo)) <= 0) {
+                throw SysError("Reading pid %1% tasks", pid);
+            }
+
+            // If the process doesn't have the per-thread cwd flag then we already have the
+            // process-wide cwd from PROC_PIDVNODEPATHINFO
+            if (taskAllInfo.pbsd.pbi_flags & PROC_FLAG_THCWD) {
+                std::vector<uint64_t> tids(taskAllInfo.ptinfo.pti_threadnum);
+                int tidBufSize = proc_pidinfo(
+                    pid, PROC_PIDLISTTHREADS, 0, tids.data(), tids.size() * sizeof(uint64_t)
+                );
+                if (tidBufSize <= 0) {
+                    throw SysError("Listing pid %1% threads", pid);
+                }
+
+                for (auto tid : tids) {
+                    struct proc_threadwithpathinfo threadPathInfo;
+                    if (proc_pidinfo(
+                            pid,
+                            PROC_PIDTHREADPATHINFO,
+                            tid,
+                            &threadPathInfo,
+                            sizeof(threadPathInfo)
+                        )
+                        <= 0)
+                    {
+                        throw SysError("Reading pid %1% thread %2% cwd", pid, tid);
+                    }
+
+                    unchecked[std::string(threadPathInfo.pvip.vip_path)].emplace(
+                        fmt("{libproc/%d/thread/%d/cwd}", pid, tid)
+                    );
+                }
+            }
+        } catch (SysError & e) {
+            // ENOENT/ESRCH: Process no longer exists (proc_info)
+            // EINVAL: Process no longer exists (sysctl)
+            // EACCESS/EPERM: We don't have permission to read this field (proc_info)
+            // EIO: Kernel failed to read from target process memory during KERN_PROCARGS2 (sysctl)
+            if (errno == ENOENT || errno == ESRCH || errno == EINVAL || errno == EACCES
+                || errno == EPERM || errno == EIO)
+            {
+                continue;
+            }
+            throw;
+        }
+    }
+}
+}
src/libstore/platform/darwin.hh (new file, 35 lines)
@@ -0,0 +1,35 @@
+#pragma once
+///@file
+
+#include "gc-store.hh"
+#include "local-store.hh"
+
+namespace nix {
+
+/**
+ * Darwin-specific implementation of LocalStore
+ */
+class DarwinLocalStore : public LocalStore
+{
+public:
+    DarwinLocalStore(const Params & params)
+        : StoreConfig(params)
+        , LocalFSStoreConfig(params)
+        , LocalStoreConfig(params)
+        , Store(params)
+        , LocalFSStore(params)
+        , LocalStore(params)
+    {
+    }
+    DarwinLocalStore(const std::string scheme, std::string path, const Params & params)
+        : DarwinLocalStore(params)
+    {
+        throw UnimplementedError("DarwinLocalStore");
+    }
+
+private:
+
+    void findPlatformRoots(UncheckedRoots & unchecked) override;
+};
+
+}
src/libstore/platform/fallback.cc (new file, 5 lines)
@@ -0,0 +1,5 @@
+#include "platform/fallback.hh"
+
+namespace nix {
+static RegisterStoreImplementation<FallbackLocalStore, LocalStoreConfig> regLocalStore;
+}
src/libstore/platform/fallback.hh (new file, 31 lines)
@@ -0,0 +1,31 @@
+#pragma once
+///@file
+
+#include "local-store.hh"
+
+namespace nix {
+
+/**
+ * Fallback platform implementation of LocalStore
+ * Exists so we can make LocalStore constructor protected
+ */
+class FallbackLocalStore : public LocalStore
+{
+public:
+    FallbackLocalStore(const Params & params)
+        : StoreConfig(params)
+        , LocalFSStoreConfig(params)
+        , LocalStoreConfig(params)
+        , Store(params)
+        , LocalFSStore(params)
+        , LocalStore(params)
+    {
+    }
+    FallbackLocalStore(const std::string scheme, std::string path, const Params & params)
+        : FallbackLocalStore(params)
+    {
+        throw UnimplementedError("FallbackLocalStore");
+    }
+};
+
+}
src/libstore/platform/linux.cc
Normal file
117
src/libstore/platform/linux.cc
Normal file
|
@ -0,0 +1,117 @@
|
||||||
|
#include "gc-store.hh"
|
||||||
|
#include "signals.hh"
|
||||||
|
#include "platform/linux.hh"
|
||||||
|
#include "regex.hh"
|
||||||
|
|
||||||
|
#include <regex>
|
||||||
|
|
||||||
|
namespace nix {
|
||||||
|
static RegisterStoreImplementation<LinuxLocalStore, LocalStoreConfig> regLocalStore;
|
||||||
|
|
||||||
|
static void readProcLink(const std::string & file, UncheckedRoots & roots)
|
||||||
|
{
|
||||||
|
constexpr auto bufsiz = PATH_MAX;
|
||||||
|
char buf[bufsiz];
|
||||||
|
auto res = readlink(file.c_str(), buf, bufsiz);
|
||||||
|
if (res == -1) {
|
||||||
|
if (errno == ENOENT || errno == EACCES || errno == ESRCH) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
throw SysError("reading symlink");
|
||||||
|
}
|
||||||
|
if (res == bufsiz) {
|
||||||
|
throw Error("overly long symlink starting with '%1%'", std::string_view(buf, bufsiz));
|
||||||
|
}
|
||||||
|
if (res > 0 && buf[0] == '/') {
|
||||||
|
roots[std::string(static_cast<char *>(buf), res)].emplace(file);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
static void readFileRoots(const char * path, UncheckedRoots & roots)
|
||||||
|
{
|
||||||
|
try {
|
||||||
|
roots[readFile(path)].emplace(path);
|
||||||
|
} catch (SysError & e) {
|
||||||
|
if (e.errNo != ENOENT && e.errNo != EACCES) {
|
||||||
|
throw;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
void LinuxLocalStore::findPlatformRoots(UncheckedRoots & unchecked)
|
||||||
|
{
|
||||||
|
auto procDir = AutoCloseDir{opendir("/proc")};
|
||||||
|
if (procDir) {
|
||||||
|
struct dirent * ent;
|
||||||
|
auto digitsRegex = std::regex(R"(^\d+$)");
|
||||||
|
auto mapRegex = std::regex(R"(^\s*\S+\s+\S+\s+\S+\s+\S+\s+\S+\s+(/\S+)\s*$)");
|
||||||
|
auto storePathRegex = regex::storePathRegex(storeDir);
|
||||||
|
while (errno = 0, ent = readdir(procDir.get())) {
|
||||||
|
checkInterrupt();
|
||||||
|
if (std::regex_match(ent->d_name, digitsRegex)) {
|
||||||
|
try {
|
||||||
|
readProcLink(fmt("/proc/%s/exe", ent->d_name), unchecked);
|
||||||
|
readProcLink(fmt("/proc/%s/cwd", ent->d_name), unchecked);
|
||||||
|
|
||||||
|
auto fdStr = fmt("/proc/%s/fd", ent->d_name);
|
||||||
|
auto fdDir = AutoCloseDir(opendir(fdStr.c_str()));
|
||||||
|
if (!fdDir) {
|
||||||
|
if (errno == ENOENT || errno == EACCES) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
throw SysError("opening %1%", fdStr);
|
||||||
|
}
|
||||||
|
struct dirent * fd_ent;
|
||||||
|
while (errno = 0, fd_ent = readdir(fdDir.get())) {
|
||||||
|
if (fd_ent->d_name[0] != '.') {
|
||||||
|
readProcLink(fmt("%s/%s", fdStr, fd_ent->d_name), unchecked);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (errno) {
|
||||||
|
if (errno == ESRCH) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
throw SysError("iterating /proc/%1%/fd", ent->d_name);
|
||||||
|
}
|
||||||
|
fdDir.reset();
|
||||||
|
|
||||||
|
auto mapFile = fmt("/proc/%s/maps", ent->d_name);
|
||||||
|
auto mapLines =
|
||||||
|
tokenizeString<std::vector<std::string>>(readFile(mapFile), "\n");
|
||||||
|
for (const auto & line : mapLines) {
|
||||||
|
auto match = std::smatch{};
|
||||||
|
if (std::regex_match(line, match, mapRegex)) {
|
||||||
|
unchecked[match[1]].emplace(mapFile);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
auto envFile = fmt("/proc/%s/environ", ent->d_name);
|
||||||
|
auto envString = readFile(envFile);
|
||||||
|
auto env_end = std::sregex_iterator{};
|
||||||
|
for (auto i =
|
||||||
|
std::sregex_iterator{
|
||||||
|
envString.begin(), envString.end(), storePathRegex
|
||||||
|
};
|
||||||
|
i != env_end;
|
||||||
|
++i)
|
||||||
|
{
|
||||||
|
unchecked[i->str()].emplace(envFile);
|
||||||
|
}
|
||||||
|
} catch (SysError & e) {
|
||||||
|
if (errno == ENOENT || errno == EACCES || errno == ESRCH) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
throw;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (errno) {
|
||||||
|
throw SysError("iterating /proc");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
readFileRoots("/proc/sys/kernel/modprobe", unchecked);
|
||||||
|
readFileRoots("/proc/sys/kernel/fbsplash", unchecked);
|
||||||
|
readFileRoots("/proc/sys/kernel/poweroff_cmd", unchecked);
|
||||||
|
}
|
||||||
|
}
|
src/libstore/platform/linux.hh (new file, 35 lines)
@@ -0,0 +1,35 @@
+#pragma once
+///@file
+
+#include "gc-store.hh"
+#include "local-store.hh"
+
+namespace nix {
+
+/**
+ * Linux-specific implementation of LocalStore
+ */
+class LinuxLocalStore : public LocalStore
+{
+public:
+    LinuxLocalStore(const Params & params)
+        : StoreConfig(params)
+        , LocalFSStoreConfig(params)
+        , LocalStoreConfig(params)
+        , Store(params)
+        , LocalFSStore(params)
+        , LocalStore(params)
+    {
+    }
+    LinuxLocalStore(const std::string scheme, std::string path, const Params & params)
+        : LinuxLocalStore(params)
+    {
+        throw UnimplementedError("LinuxLocalStore");
+    }
+
+private:
+
+    void findPlatformRoots(UncheckedRoots & unchecked) override;
+};
+
+}
@@ -32,6 +32,10 @@ struct SSHStoreConfig : virtual RemoteStoreConfig, virtual CommonSSHStoreConfig
 class SSHStore : public virtual SSHStoreConfig, public virtual RemoteStore
 {
 public:
+    // Hack for getting remote build log output.
+    // Intentionally not in `SSHStoreConfig` so that it doesn't appear in
+    // the documentation
+    const Setting<int> logFD{(StoreConfig*) this, -1, "log-fd", "file descriptor to which SSH's stderr is connected"};

     SSHStore(const std::string & scheme, const std::string & host, const Params & params)
         : StoreConfig(params)

@@ -47,7 +51,8 @@ public:
             sshPublicHostKey,
             // Use SSH master only if using more than 1 connection.
             connections->capacity() > 1,
-            compress)
+            compress,
+            logFD)
     {
     }
@@ -88,8 +88,6 @@ std::unique_ptr<SSHMaster::Connection> SSHMaster::startCommand(const std::string
         addCommonSSHOpts(args);
         if (socketPath != "")
             args.insert(args.end(), {"-S", socketPath});
-        if (verbosity >= lvlChatty)
-            args.push_back("-v");
     }

     args.push_back(command);

@@ -154,8 +152,6 @@ Path SSHMaster::startMaster()
             throw SysError("duping over stdout");

         Strings args = { "ssh", host.c_str(), "-M", "-N", "-S", state->socketPath };
-        if (verbosity >= lvlChatty)
-            args.push_back("-v");
         addCommonSSHOpts(args);
         execvp(args.begin()->c_str(), stringsToCharPtrs(args).data());
@@ -1085,8 +1085,6 @@ void copyStorePath(

     auto info = srcStore.queryPathInfo(storePath);

-    uint64_t total = 0;
-
     // recompute store path on the chance dstStore does it differently
     if (info->ca && info->references.empty()) {
         auto info2 = make_ref<ValidPathInfo>(*info);

@@ -1105,7 +1103,7 @@ void copyStorePath(
     }

     auto source = sinkToSource([&](Sink & sink) {
-        LambdaSink progressSink([&](std::string_view data) {
+        LambdaSink progressSink([&, total = 0ULL](std::string_view data) mutable {
             total += data.size();
             act.progress(total, info->narSize);
         });

@@ -1218,9 +1216,6 @@ std::map<StorePath, StorePath> copyPaths(
         return storePathForDst;
     };

-    // total is accessed by each copy, which are each handled in separate threads
-    std::atomic<uint64_t> total = 0;
-
     for (auto & missingPath : sortedMissing) {
         auto info = srcStore.queryPathInfo(missingPath);

@@ -1241,7 +1236,7 @@ std::map<StorePath, StorePath> copyPaths(
             {storePathS, srcUri, dstUri});
         PushActivity pact(act.id);

-        LambdaSink progressSink([&](std::string_view data) {
+        LambdaSink progressSink([&, total = 0ULL](std::string_view data) mutable {
            total += data.size();
            act.progress(total, info->narSize);
        });

@@ -1426,7 +1421,7 @@ std::shared_ptr<Store> openFromNonUri(const std::string & uri, const Store::Para
     if (uri == "" || uri == "auto") {
        auto stateDir = getOr(params, "state", settings.nixStateDir);
        if (access(stateDir.c_str(), R_OK | W_OK) == 0)
-            return std::make_shared<LocalStore>(params);
+            return LocalStore::makeLocalStore(params);
        else if (pathExists(settings.nixDaemonSocketFile))
            return std::make_shared<UDSRemoteStore>(params);
 #if __linux__

@@ -1444,26 +1439,26 @@ std::shared_ptr<Store> openFromNonUri(const std::string & uri, const Store::Para
            try {
                createDirs(chrootStore);
            } catch (Error & e) {
-                return std::make_shared<LocalStore>(params);
+                return LocalStore::makeLocalStore(params);
            }
            warn("'%s' does not exist, so Nix will use '%s' as a chroot store", stateDir, chrootStore);
        } else
            debug("'%s' does not exist, so Nix will use '%s' as a chroot store", stateDir, chrootStore);
        Store::Params params2;
        params2["root"] = chrootStore;
-        return std::make_shared<LocalStore>(params2);
+        return LocalStore::makeLocalStore(params);
     }
 #endif
     else
-        return std::make_shared<LocalStore>(params);
+        return LocalStore::makeLocalStore(params);
     } else if (uri == "daemon") {
         return std::make_shared<UDSRemoteStore>(params);
     } else if (uri == "local") {
-        return std::make_shared<LocalStore>(params);
+        return LocalStore::makeLocalStore(params);
     } else if (isNonUriPath(uri)) {
         Store::Params params2 = params;
         params2["root"] = absPath(uri);
-        return std::make_shared<LocalStore>(params2);
+        return LocalStore::makeLocalStore(params2);
     } else {
         return nullptr;
     }
@@ -1,18 +0,0 @@
-libraries += libutil
-
-libutil_NAME = libnixutil
-
-libutil_DIR := $(d)
-
-libutil_SOURCES := $(wildcard $(d)/*.cc)
-
-libutil_CXXFLAGS += -I src/libutil
-
-libutil_LDFLAGS += -pthread $(OPENSSL_LIBS) $(LIBBROTLI_LIBS) $(LIBARCHIVE_LIBS) $(BOOST_LDFLAGS) -lboost_context
-
-$(foreach i, $(wildcard $(d)/args/*.hh), \
-  $(eval $(call install-file-in, $(i), $(includedir)/nix/args, 0644)))
-
-ifeq ($(HAVE_LIBCPUID), 1)
-	libutil_LDFLAGS += -lcpuid
-endif
@@ -22,6 +22,7 @@ libutil_sources = files(
   'position.cc',
   'print-elided.cc',
   'references.cc',
+  'regex.cc',
   'serialise.cc',
   'shlex.cc',
   'signals.cc',

@@ -77,6 +78,7 @@ libutil_headers = files(
   'ref.hh',
   'references.hh',
   'regex-combinators.hh',
+  'regex.hh',
   'repair-flag.hh',
   'serialise.hh',
   'shlex.hh',
src/libutil/regex.cc (new file, 16 lines)
@@ -0,0 +1,16 @@
+#include <string>
+#include <regex>
+
+namespace nix::regex {
+std::string quoteRegexChars(const std::string & raw)
+{
+    static auto specialRegex = std::regex(R"([.^$\\*+?()\[\]{}|])");
+    return std::regex_replace(raw, specialRegex, R"(\$&)");
+}
+
+std::regex storePathRegex(const std::string & storeDir)
+{
+    return std::regex(quoteRegexChars(storeDir) + R"(/[0-9a-z]+[0-9a-zA-Z\+\-\._\?=]*)");
+}
+
+}
src/libutil/regex.hh (new file, 11 lines)
@@ -0,0 +1,11 @@
+#pragma once
+///@file
+
+#include <string>
+#include <regex>
+
+namespace nix::regex {
+std::string quoteRegexChars(const std::string & raw);
+
+std::regex storePathRegex(const std::string & storeDir);
+}
|
||||||
programs += nix
|
|
||||||
|
|
||||||
nix_DIR := $(d)
|
|
||||||
|
|
||||||
nix_SOURCES := \
|
|
||||||
$(wildcard $(d)/*.cc) \
|
|
||||||
$(wildcard src/build-remote/*.cc) \
|
|
||||||
$(wildcard src/nix-build/*.cc) \
|
|
||||||
$(wildcard src/nix-channel/*.cc) \
|
|
||||||
$(wildcard src/nix-collect-garbage/*.cc) \
|
|
||||||
$(wildcard src/nix-copy-closure/*.cc) \
|
|
||||||
$(wildcard src/nix-daemon/*.cc) \
|
|
||||||
$(wildcard src/nix-env/*.cc) \
|
|
||||||
$(wildcard src/nix-instantiate/*.cc) \
|
|
||||||
$(wildcard src/nix-store/*.cc) \
|
|
||||||
|
|
||||||
nix_CXXFLAGS += -I src/libutil -I src/libstore -I src/libfetchers -I src/libexpr -I src/libmain -I src/libcmd -I doc/manual
|
|
||||||
|
|
||||||
nix_LIBS = libexpr libmain libfetchers libstore libutil libcmd
|
|
||||||
|
|
||||||
nix_LDFLAGS = -pthread $(SODIUM_LIBS) $(EDITLINE_LIBS) $(BOOST_LDFLAGS) $(LOWDOWN_LIBS)
|
|
||||||
|
|
||||||
$(foreach name, \
|
|
||||||
nix-build nix-channel nix-collect-garbage nix-copy-closure nix-daemon nix-env nix-hash nix-instantiate nix-prefetch-url nix-shell nix-store, \
|
|
||||||
$(eval $(call install-symlink, nix, $(bindir)/$(name))))
|
|
||||||
$(eval $(call install-symlink, $(bindir)/nix, $(libexecdir)/nix/build-remote))
|
|
||||||
|
|
||||||
src/nix-env/user-env.cc: src/nix-env/buildenv.nix.gen.hh
|
|
||||||
|
|
||||||
src/nix/develop.cc: src/nix/get-env.sh.gen.hh
|
|
||||||
|
|
||||||
src/nix-channel/nix-channel.cc: src/nix-channel/unpack-channel.nix.gen.hh
|
|
||||||
|
|
||||||
src/nix/main.cc: doc/manual/generate-manpage.nix.gen.hh doc/manual/utils.nix.gen.hh
|
|
||||||
|
|
||||||
src/nix/doc/files/%.md: doc/manual/src/command-ref/files/%.md
|
|
||||||
@mkdir -p $$(dirname $@)
|
|
||||||
@cp $< $@
|
|
||||||
|
|
||||||
src/nix/profile.cc: src/nix/profile.md src/nix/doc/files/profiles.md.gen.hh
|
|
|
@@ -98,7 +98,6 @@ std::tuple<StorePath, Hash> prefetchFile(
         FdSink sink(fd.get());

         FileTransferRequest req(url);
-        req.decompress = false;
         getFileTransfer()->download(std::move(req), sink);
     }
@@ -1,13 +0,0 @@
-ifdef HOST_DARWIN
-  programs += resolve-system-dependencies
-endif
-
-resolve-system-dependencies_DIR := $(d)
-
-resolve-system-dependencies_INSTALL_DIR := $(libexecdir)/nix
-
-resolve-system-dependencies_CXXFLAGS += -I src/libutil -I src/libstore -I src/libmain
-
-resolve-system-dependencies_LIBS := libstore libmain libutil
-
-resolve-system-dependencies_SOURCES := $(d)/resolve-system-dependencies.cc
@@ -1,29 +0,0 @@
-ca-tests := \
-  $(d)/build-with-garbage-path.sh \
-  $(d)/build.sh \
-  $(d)/build-cache.sh \
-  $(d)/concurrent-builds.sh \
-  $(d)/derivation-json.sh \
-  $(d)/duplicate-realisation-in-closure.sh \
-  $(d)/eval-store.sh \
-  $(d)/gc.sh \
-  $(d)/import-derivation.sh \
-  $(d)/new-build-cmd.sh \
-  $(d)/nix-copy.sh \
-  $(d)/nix-run.sh \
-  $(d)/nix-shell.sh \
-  $(d)/post-hook.sh \
-  $(d)/recursive.sh \
-  $(d)/repl.sh \
-  $(d)/selfref-gc.sh \
-  $(d)/signatures.sh \
-  $(d)/substitute.sh \
-  $(d)/why-depends.sh
-
-install-tests-groups += ca
-
-clean-files += \
-  $(d)/config.nix
-
-test-deps += \
-  tests/functional/ca/config.nix
@@ -24,7 +24,6 @@ if [[ -n $NIX_STORE ]]; then
     export _NIX_TEST_NO_SANDBOX=1
 fi
 export _NIX_IN_TEST=$TEST_ROOT/shared
-export _NIX_TEST_NO_LSOF=1
 export NIX_REMOTE=${NIX_REMOTE_-}
 unset NIX_PATH
 export TEST_HOME=$TEST_ROOT/test-home
@@ -1,15 +0,0 @@
-dyn-drv-tests := \
-  $(d)/text-hashed-output.sh \
-  $(d)/recursive-mod-json.sh \
-  $(d)/build-built-drv.sh \
-  $(d)/eval-outputOf.sh \
-  $(d)/dep-built-drv.sh \
-  $(d)/old-daemon-error-hack.sh
-
-install-tests-groups += dyn-drv
-
-clean-files += \
-  $(d)/config.nix
-
-test-deps += \
-  tests/functional/dyn-drv/config.nix
@@ -1,7 +1,18 @@
 with import ./config.nix;

-mkDerivation {
-  name = "gc-runtime";
+{
+  environ = mkDerivation {
+    name = "gc-runtime-environ";
+    buildCommand = "mkdir $out; echo environ > $out/environ";
+  };
+
+  open = mkDerivation {
+    name = "gc-runtime-open";
+    buildCommand = "mkdir $out; echo open > $out/open";
+  };
+
+  program = mkDerivation {
+    name = "gc-runtime-program";
     builder =
       # Test inline source file definitions.
       builtins.toFile "builder.sh" ''

@@ -9,9 +20,10 @@ mkDerivation {

         cat > $out/program <<EOF
         #! ${shell}
-        sleep 10000
+        sleep 10000 < \$1
         EOF

         chmod +x $out/program
       '';
+  };
 }
@@ -1,38 +1,44 @@
 source common.sh

-case $system in
-    *linux*)
-        ;;
-    *)
-        skipTest "Not running Linux";
-esac
-
 set -m # enable job control, needed for kill

 profiles="$NIX_STATE_DIR"/profiles
 rm -rf $profiles

-nix-env -p $profiles/test -f ./gc-runtime.nix -i gc-runtime
+nix-env -p $profiles/test -f ./gc-runtime.nix -i gc-runtime-{program,environ,open}

-outPath=$(nix-env -p $profiles/test -q --no-name --out-path gc-runtime)
-echo $outPath
+programPath=$(nix-env -p $profiles/test -q --no-name --out-path gc-runtime-program)
+environPath=$(nix-env -p $profiles/test -q --no-name --out-path gc-runtime-environ)
+openPath=$(nix-env -p $profiles/test -q --no-name --out-path gc-runtime-open)
+echo $programPath $environPath $openPath

 echo "backgrounding program..."
-$profiles/test/program &
+export environPath
+$profiles/test/program $openPath/open &
 sleep 2 # hack - wait for the program to get started
 child=$!
 echo PID=$child

-nix-env -p $profiles/test -e gc-runtime
+nix-env -p $profiles/test -e gc-runtime-{program,environ,open}
 nix-env -p $profiles/test --delete-generations old

 nix-store --gc

 kill -- -$child

-if ! test -e $outPath; then
+if ! test -e $programPath; then
     echo "running program was garbage collected!"
     exit 1
 fi

+if ! test -e $environPath; then
+    echo "file in environment variable was garbage collected!"
+    exit 1
+fi
+
+if ! test -e $openPath; then
+    echo "opened file was garbage collected!"
+    exit 1
+fi
+
 exit 0
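The reworked script now checks three kinds of runtime GC roots: the running program itself, a store path exported in an environment variable, and a store path held open on stdin. A hedged sketch of inspecting why these paths survive the collection, assuming the backgrounded program from the test is still running:

    # runtime roots (process binaries, environment, open file descriptors) are
    # listed alongside profile links; anything shown here survives nix-store --gc
    nix-store --gc --print-roots | grep gc-runtime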
@@ -1,156 +0,0 @@
nix_tests = \
  test-infra.sh \
  init.sh \
  flakes/flakes.sh \
  flakes/develop.sh \
  flakes/develop-r8854.sh \
  flakes/run.sh \
  flakes/mercurial.sh \
  flakes/circular.sh \
  flakes/init.sh \
  flakes/inputs.sh \
  flakes/follow-paths.sh \
  flakes/bundle.sh \
  flakes/check.sh \
  flakes/unlocked-override.sh \
  flakes/absolute-paths.sh \
  flakes/build-paths.sh \
  flakes/flake-in-submodule.sh \
  gc.sh \
  nix-collect-garbage-d.sh \
  remote-store.sh \
  legacy-ssh-store.sh \
  lang.sh \
  lang-test-infra.sh \
  experimental-features.sh \
  fetchMercurial.sh \
  gc-auto.sh \
  user-envs.sh \
  user-envs-migration.sh \
  binary-cache.sh \
  multiple-outputs.sh \
  nix-build.sh \
  gc-concurrent.sh \
  repair.sh \
  fixed.sh \
  export-graph.sh \
  timeout.sh \
  fetchGitRefs.sh \
  gc-runtime.sh \
  tarball.sh \
  fetchGit.sh \
  fetchurl.sh \
  fetchPath.sh \
  fetchTree-file.sh \
  simple.sh \
  referrers.sh \
  optimise-store.sh \
  substitute-with-invalid-ca.sh \
  signing.sh \
  hash.sh \
  gc-non-blocking.sh \
  check.sh \
  nix-shell.sh \
  check-refs.sh \
  build-remote-input-addressed.sh \
  secure-drv-outputs.sh \
  restricted.sh \
  fetchGitSubmodules.sh \
  flakes/search-root.sh \
  readfile-context.sh \
  nix-channel.sh \
  recursive.sh \
  dependencies.sh \
  check-reqs.sh \
  build-remote-content-addressed-fixed.sh \
  build-remote-content-addressed-floating.sh \
  build-remote-trustless-should-pass-0.sh \
  build-remote-trustless-should-pass-1.sh \
  build-remote-trustless-should-pass-2.sh \
  build-remote-trustless-should-pass-3.sh \
  build-remote-trustless-should-fail-0.sh \
  nar-access.sh \
  impure-eval.sh \
  pure-eval.sh \
  eval.sh \
  repl.sh \
  binary-cache-build-remote.sh \
  search.sh \
  logging.sh \
  export.sh \
  config.sh \
  add.sh \
  local-store.sh \
  filter-source.sh \
  misc.sh \
  dump-db.sh \
  linux-sandbox.sh \
  supplementary-groups.sh \
  build-dry.sh \
  structured-attrs.sh \
  shell.sh \
  brotli.sh \
  zstd.sh \
  compression-levels.sh \
  nix-copy-ssh.sh \
  nix-copy-ssh-ng.sh \
  post-hook.sh \
  function-trace.sh \
  flakes/config.sh \
  fmt.sh \
  eval-store.sh \
  why-depends.sh \
  derivation-json.sh \
  import-derivation.sh \
  nix_path.sh \
  case-hack.sh \
  placeholders.sh \
  ssh-relay.sh \
  build.sh \
  build-delete.sh \
  output-normalization.sh \
  selfref-gc.sh \
  db-migration.sh \
  bash-profile.sh \
  pass-as-file.sh \
  nix-profile.sh \
  suggestions.sh \
  store-ping.sh \
  fetchClosure.sh \
  completions.sh \
  flakes/show.sh \
  impure-derivations.sh \
  path-from-hash-part.sh \
  toString-path.sh \
  read-only-store.sh \
  nested-sandboxing.sh \
  debugger.sh

ifeq ($(HAVE_LIBCPUID), 1)
  nix_tests += compute-levels.sh
endif

ifeq ($(ENABLE_BUILD), yes)
  nix_tests += test-libstoreconsumer.sh test-repl-characterization.sh

  ifeq ($(BUILD_SHARED_LIBS), 1)
    nix_tests += plugins.sh
  endif
endif

$(d)/test-libstoreconsumer.sh.test $(d)/test-libstoreconsumer.sh.test-debug: \
  $(buildprefix)$(d)/test-libstoreconsumer/test-libstoreconsumer
$(d)/plugins.sh.test $(d)/plugins.sh.test-debug: \
  $(buildprefix)$(d)/plugins/libplugintest.$(SO_EXT) \
  $(buildprefix)$(d)/plugins/libplugintestfail.$(SO_EXT)
$(d)/test-repl-characterization.sh.test $(d)/test-repl-characterization.sh.test-debug: \
  $(buildprefix)$(d)/repl_characterization/test-repl-characterization

install-tests += $(foreach x, $(nix_tests), $(d)/$(x))

test-clean-files := \
  $(d)/common/vars-and-functions.sh \
  $(d)/config.nix

clean-files += $(test-clean-files)
test-deps += $(test-clean-files)
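This removed makefile was the registry for the make-based functional test harness: each listed script became an install-test with a generated .test target. A hypothetical single-test invocation under that harness, with the target name assumed from the $(d)/plugins.sh.test dependency rules above:

    # run one functional test through the old make harness (assumed target name)
    make tests/functional/gc-runtime.sh.test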
@@ -1,27 +0,0 @@
libraries += libplugintest libplugintestfail

libplugintest_DIR := $(d)

libplugintest_SOURCES := $(d)/plugintest.cc

libplugintest_ALLOW_UNDEFINED := 1

libplugintest_EXCLUDE_FROM_LIBRARY_LIST := 1

libplugintest_CXXFLAGS := -I src/libutil -I src/libstore -I src/libexpr -I src/libfetchers

libplugintestfail_DIR := $(d)

libplugintestfail_SOURCES := $(d)/plugintestfail.cc

libplugintestfail_ALLOW_UNDEFINED := 1

libplugintestfail_EXCLUDE_FROM_LIBRARY_LIST := 1

libplugintestfail_CXXFLAGS := -I src/libutil -I src/libstore -I src/libexpr -I src/libfetchers -DMISSING_REFERENCE

# Make sure that the linker strictly evaluates all symbols on .so load on Linux
# so it will definitely fail to load as expected.
ifdef HOST_LINUX
  libplugintestfail_LDFLAGS += -z now
endif
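The -z now flag makes the dynamic linker resolve every symbol at load time, so libplugintestfail (built with -DMISSING_REFERENCE) fails when the plugin is loaded rather than at first call, which is what the plugins test expects. A rough way to confirm the flag took effect on a Linux build, assuming the built shared object is in the current directory:

    # BIND_NOW / FLAGS_1 NOW entries in the dynamic section indicate immediate binding
    readelf -d libplugintestfail.so | grep NOW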
@@ -1,15 +0,0 @@
programs += test-repl-characterization

test-repl-characterization_DIR := $(d)

# do not install
test-repl-characterization_INSTALL_DIR :=

test-repl-characterization_SOURCES := \
  $(wildcard $(d)/*.cc) \

test-repl-characterization_CXXFLAGS += -I src/libutil -I tests/unit/libutil-support -DNIX_BIN_DIR="\"$(bindir)\""

test-repl-characterization_LIBS = libutil libutil-test-support

test-repl-characterization_LDFLAGS = $(THREAD_LDFLAGS) $(SODIUM_LIBS) $(EDITLINE_LIBS) $(BOOST_LDFLAGS) $(LOWDOWN_LIBS) $(GTEST_LIBS)
@@ -1,15 +0,0 @@
programs += test-libstoreconsumer

test-libstoreconsumer_DIR := $(d)

# do not install
test-libstoreconsumer_INSTALL_DIR :=

test-libstoreconsumer_SOURCES := \
  $(wildcard $(d)/*.cc) \

test-libstoreconsumer_CXXFLAGS += -I src/libutil -I src/libstore

test-libstoreconsumer_LIBS = libstore libutil

test-libstoreconsumer_LDFLAGS = -pthread $(SODIUM_LIBS) $(EDITLINE_LIBS) $(BOOST_LDFLAGS) $(LOWDOWN_LIBS)
@@ -119,6 +119,9 @@ in
       [ { urlPath = "/repos/NixOS/nixpkgs";
           dir = nixpkgs-api;
         }
+        { urlPath = "/repos/fork/nixpkgs";
+          dir = nixpkgs-api;
+        }
         { urlPath = "/repos/fancy-enterprise/private-flake";
           dir = private-flake-api;
         }
@@ -190,6 +193,10 @@ in
     client.succeed("nix registry pin nixpkgs")
     client.succeed("nix flake metadata nixpkgs --tarball-ttl 0 >&2")

+    # fetching a fork with the same commit ID should fail, even if the revision is cached
+    client.succeed("nix flake metadata github:NixOS/nixpkgs")
+    client.fail("nix flake metadata github:fork/nixpkgs")
+
     # Shut down the web server. The flake should be cached on the client.
     github.succeed("systemctl stop httpd.service")

@@ -95,6 +95,10 @@ in
       builder.succeed("mkdir -p -m 700 /root/.ssh")
       builder.copy_from_host("key.pub", "/root/.ssh/authorized_keys")
       builder.wait_for_unit("sshd.service")
+
+      out = client.fail("nix-build ${expr nodes.client 1} 2>&1")
+      assert "error: failed to start SSH connection to 'root@builder': Host key verification failed" in out, f"No host verification error in {out}"
+
       client.succeed(f"ssh -o StrictHostKeyChecking=no {builder.name} 'echo hello world' >&2")

       # Perform a build
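The added lines assert that a remote build fails cleanly while the builder's host key is not yet trusted; the subsequent ssh call with StrictHostKeyChecking=no is what seeds known_hosts so the later build can succeed. A hedged sketch of reproducing the failure mode by hand, with the user and host name taken from the test:

    # with no known_hosts entry and strict checking, the connection is refused,
    # the same condition the nix-build above is expected to report
    ssh -o StrictHostKeyChecking=yes -o UserKnownHostsFile=/dev/null root@builder true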
@@ -64,15 +64,18 @@ in
     info = json.loads(out)

     # Check that we got redirected to the immutable URL.
-    assert info["locked"]["url"] == "http://localhost/stable/${nixpkgs.rev}.tar.gz"
+    locked_url = info["locked"]["url"]
+    assert locked_url == "http://localhost/stable/${nixpkgs.rev}.tar.gz", f"{locked_url=} != http://localhost/stable/${nixpkgs.rev}.tar.gz"

     # Check that we got the rev and revCount attributes.
-    assert info["revision"] == "${nixpkgs.rev}"
-    assert info["revCount"] == 1234
+    revision = info["revision"]
+    rev_count = info["revCount"]
+    assert revision == "${nixpkgs.rev}", f"{revision=} != ${nixpkgs.rev}"
+    assert rev_count == 1234, f"{rev_count=} != 1234"

     # Check that fetching with rev/revCount/narHash succeeds.
-    machine.succeed("nix flake metadata --json http://localhost/latest.tar.gz?rev=" + info["revision"])
-    machine.succeed("nix flake metadata --json http://localhost/latest.tar.gz?revCount=" + str(info["revCount"]))
+    machine.succeed("nix flake metadata --json http://localhost/latest.tar.gz?rev=" + revision)
+    machine.succeed("nix flake metadata --json http://localhost/latest.tar.gz?revCount=" + str(rev_count))
     machine.succeed("nix flake metadata --json http://localhost/latest.tar.gz?narHash=" + info["locked"]["narHash"])

     # Check that fetching fails if we provide incorrect attributes.

@@ -1,19 +0,0 @@
libraries += libexpr-test-support

libexpr-test-support_NAME = libnixexpr-test-support

libexpr-test-support_DIR := $(d)

libexpr-test-support_INSTALL_DIR :=

libexpr-test-support_SOURCES := \
  $(wildcard $(d)/tests/*.cc) \
  $(wildcard $(d)/tests/value/*.cc)

libexpr-test-support_CXXFLAGS += $(libexpr-tests_EXTRA_INCLUDES)

libexpr-test-support_LIBS = \
  libstore-test-support libutil-test-support \
  libexpr libstore libutil

libexpr-test-support_LDFLAGS := -pthread -lrapidcheck
@@ -1,32 +0,0 @@
check: libexpr-tests_RUN

programs += libexpr-tests

libexpr-tests_NAME := libnixexpr-tests

libexpr-tests_ENV := _NIX_TEST_UNIT_DATA=$(d)/data

libexpr-tests_DIR := $(d)

libexpr-tests_INSTALL_DIR :=

libexpr-tests_SOURCES := \
  $(wildcard $(d)/*.cc) \
  $(wildcard $(d)/value/*.cc)

libexpr-tests_EXTRA_INCLUDES = \
  -I tests/unit/libexpr-support \
  -I tests/unit/libstore-support \
  -I tests/unit/libutil-support \
  -I src/libexpr \
  -I src/libfetchers \
  -I src/libstore \
  -I src/libutil

libexpr-tests_CXXFLAGS += $(libexpr-tests_EXTRA_INCLUDES)

libexpr-tests_LIBS = \
  libexpr-test-support libstore-test-support libutils-test-support \
  libexpr libfetchers libstore libutil

libexpr-tests_LDFLAGS := -lrapidcheck $(GTEST_LIBS) -lgmock
@@ -1,17 +0,0 @@
libraries += libstore-test-support

libstore-test-support_NAME = libnixstore-test-support

libstore-test-support_DIR := $(d)

libstore-test-support_INSTALL_DIR :=

libstore-test-support_SOURCES := $(wildcard $(d)/tests/*.cc)

libstore-test-support_CXXFLAGS += $(libstore-tests_EXTRA_INCLUDES)

libstore-test-support_LIBS = \
  libutil-test-support \
  libstore libutil

libstore-test-support_LDFLAGS := -pthread -lrapidcheck
@@ -1,27 +0,0 @@
check: libstore-tests_RUN

programs += libstore-tests

libstore-tests_NAME = libnixstore-tests

libstore-tests_ENV := _NIX_TEST_UNIT_DATA=$(d)/data

libstore-tests_DIR := $(d)

libstore-tests_INSTALL_DIR :=

libstore-tests_SOURCES := $(wildcard $(d)/*.cc)

libstore-tests_EXTRA_INCLUDES = \
  -I tests/unit/libstore-support \
  -I tests/unit/libutil-support \
  -I src/libstore \
  -I src/libutil

libstore-tests_CXXFLAGS += $(libstore-tests_EXTRA_INCLUDES)

libstore-tests_LIBS = \
  libstore-test-support libutil-test-support \
  libstore libutil

libstore-tests_LDFLAGS := -lrapidcheck $(GTEST_LIBS)
@@ -1,16 +0,0 @@
libraries += libutil-test-support

libutil-test-support_NAME = libnixutil-test-support

libutil-test-support_DIR := $(d)

libutil-test-support_INSTALL_DIR :=

libutil-test-support_SOURCES := $(wildcard $(d)/tests/*.cc)

libutil-test-support_CXXFLAGS += $(libutil-tests_EXTRA_INCLUDES) -I src/libutil

# libexpr so we can steal their string printer from print.cc
libutil-test-support_LIBS = libutil libexpr

libutil-test-support_LDFLAGS := -pthread -lrapidcheck
@@ -1,23 +0,0 @@
check: libutil-tests_RUN

programs += libutil-tests

libutil-tests_NAME = libnixutil-tests

libutil-tests_ENV := _NIX_TEST_UNIT_DATA=$(d)/data

libutil-tests_DIR := $(d)

libutil-tests_INSTALL_DIR :=

libutil-tests_SOURCES := $(wildcard $(d)/*.cc)

libutil-tests_EXTRA_INCLUDES = \
  -I tests/unit/libutil-support \
  -I src/libutil

libutil-tests_CXXFLAGS += $(libutil-tests_EXTRA_INCLUDES)

libutil-tests_LIBS = libutil-test-support libutil

libutil-tests_LDFLAGS := -lrapidcheck $(GTEST_LIBS)
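These removed unit-test makefiles all hooked their *_RUN targets into the check target, with _NIX_TEST_UNIT_DATA pointing each suite at its data directory. A hypothetical direct run of one suite outside make, with the binary name and paths assumed from libutil-tests_NAME and its _DIR and _ENV settings above:

    # under make this runs as part of "check"; run the libutil suite directly,
    # supplying the data directory the _ENV setting would normally provide
    _NIX_TEST_UNIT_DATA=tests/unit/libutil/data ./tests/unit/libutil/libnixutil-tests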